Merge branch '2411-check-copyright'
[arvados.git] / sdk / cwl / tests / test_job.py
index 7675e3d4bc45bd64b55a690c405597fa6b23a615..5d140ba6b9d56a052f3a75eb0f4697f29b8dad79 100644
@@ -1,3 +1,7 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
 import functools
 import json
 import logging
@@ -10,10 +14,11 @@ import StringIO
 import arvados
 import arvados_cwl
 import cwltool.process
+from arvados.errors import ApiError
 from schema_salad.ref_resolver import Loader
 from schema_salad.sourceline import cmap
 from .mock_discovery import get_rootDesc
-from .matcher import JsonDiffMatcher
+from .matcher import JsonDiffMatcher, StripYAMLComments
 
 if not os.getenv('ARVADOS_DEBUG'):
     logging.getLogger('arvados.cwl-runner').setLevel(logging.WARN)
@@ -33,7 +38,15 @@ class TestJob(unittest.TestCase):
             document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")
 
             list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
-            runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}
+            runner.api.collections().get().execute.return_value = {"portable_data_hash": "99999999999999999999999999999993+99"}
+            # Simulate a reused job from another project so that we can check
+            # that a can_read link is added.
+            runner.api.jobs().create().execute.return_value = {
+                'state': 'Complete' if enable_reuse else 'Queued',
+                'owner_uuid': 'zzzzz-tpzed-yyyyyyyyyyyyyyy' if enable_reuse else 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
+                'uuid': 'zzzzz-819sb-yyyyyyyyyyyyyyy',
+                'output': None,
+            }
 
             tool = cmap({
                 "inputs": [],
@@ -41,7 +54,8 @@ class TestJob(unittest.TestCase):
                 "baseCommand": "ls",
                 "arguments": [{"valueFrom": "$(runtime.outdir)"}]
             })
-            make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
+            make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
+                                         collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
             arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
                                                      basedir="", make_fs_access=make_fs_access, loader=Loader({}))
             arvtool.formatgraph = None
@@ -62,7 +76,7 @@ class TestJob(unittest.TestCase):
                         'repository': 'arvados',
                         'script': 'crunchrunner',
                         'runtime_constraints': {
-                            'docker_image': 'arvados/jobs:'+arvados_cwl.__version__,
+                            'docker_image': 'arvados/jobs',
                             'min_cores_per_node': 1,
                             'min_ram_mb_per_node': 1024,
                             'min_scratch_mb_per_node': 2048 # tmpdirSize + outdirSize
@@ -72,8 +86,26 @@ class TestJob(unittest.TestCase):
                     filters=[['repository', '=', 'arvados'],
                              ['script', '=', 'crunchrunner'],
                              ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
-                             ['docker_image_locator', 'in docker', 'arvados/jobs:'+arvados_cwl.__version__]]
+                             ['docker_image_locator', 'in docker', 'arvados/jobs']]
                 )
+                if enable_reuse:
+                    runner.api.links().create.assert_called_with(
+                        body=JsonDiffMatcher({
+                            'link_class': 'permission',
+                            'name': 'can_read',
+                            "tail_uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz",
+                            "head_uuid": "zzzzz-819sb-yyyyyyyyyyyyyyy",
+                        })
+                    )
+                    # Simulate an API exception when trying to create a
+                    # sharing link on the job
+                    runner.api.links().create.side_effect = ApiError(
+                        mock.MagicMock(return_value={'status': 403}),
+                        'Permission denied')
+                    j.run(enable_reuse=enable_reuse)
+                    j.output_callback.assert_called_with({}, 'success')
+                else:
+                    assert not runner.api.links().create.called
 
     # The test passes some fields in builder.resources
     # For the remaining fields, the defaults will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
@@ -105,15 +137,20 @@ class TestJob(unittest.TestCase):
                 "outputDirType": "keep_output_dir"
             }, {
                 "class": "http://arvados.org/cwl#APIRequirement",
+            },
+            {
+                "class": "http://arvados.org/cwl#ReuseRequirement",
+                "enableReuse": False
             }],
             "baseCommand": "ls"
         }
-        make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
+        make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
+                                         collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
         arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
                                                  make_fs_access=make_fs_access, loader=Loader({}))
         arvtool.formatgraph = None
         for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
-            j.run()
+            j.run(enable_reuse=True)
         runner.api.jobs().create.assert_called_with(
             body=JsonDiffMatcher({
                 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
@@ -130,18 +167,18 @@ class TestJob(unittest.TestCase):
                 'repository': 'arvados',
                 'script': 'crunchrunner',
                 'runtime_constraints': {
-                    'docker_image': 'arvados/jobs:'+arvados_cwl.__version__,
+                    'docker_image': 'arvados/jobs',
                     'min_cores_per_node': 3,
-                    'min_ram_mb_per_node': 3000,
+                    'min_ram_mb_per_node': 3512,     # ramMin + keep_cache
                     'min_scratch_mb_per_node': 5024, # tmpdirSize + outdirSize
                     'keep_cache_mb_per_task': 512
                 }
             }),
-            find_or_create=True,
+            find_or_create=False,
             filters=[['repository', '=', 'arvados'],
                      ['script', '=', 'crunchrunner'],
                      ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
-                     ['docker_image_locator', 'in docker', 'arvados/jobs:'+arvados_cwl.__version__]])
+                     ['docker_image_locator', 'in docker', 'arvados/jobs']])
 
     @mock.patch("arvados.collection.CollectionReader")
     def test_done(self, reader):
@@ -159,7 +196,9 @@ class TestJob(unittest.TestCase):
 2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
         """)
         api.collections().list().execute.side_effect = ({"items": []},
-                                                        {"items": [{"manifest_text": "XYZ"}]})
+                                                        {"items": [{"manifest_text": "XYZ"}]},
+                                                        {"items": []},
+                                                        {"items": [{"manifest_text": "ABC"}]})
 
         arvjob = arvados_cwl.ArvadosJob(runner)
         arvjob.name = "testjob"
@@ -177,20 +216,37 @@ class TestJob(unittest.TestCase):
 
         api.collections().list.assert_has_calls([
             mock.call(),
+            # Output collection check
             mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
                           ['portable_data_hash', '=', '99999999999999999999999999999993+99'],
                           ['name', '=', 'Output 9999999 of testjob']]),
             mock.call().execute(num_retries=0),
             mock.call(limit=1, filters=[['portable_data_hash', '=', '99999999999999999999999999999993+99']],
                  select=['manifest_text']),
+            mock.call().execute(num_retries=0),
+            # Log collection check
+            mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
+                          ['portable_data_hash', '=', '99999999999999999999999999999994+99'],
+                          ['name', '=', 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz']]),
+            mock.call().execute(num_retries=0),
+            mock.call(limit=1, filters=[['portable_data_hash', '=', '99999999999999999999999999999994+99']],
+                 select=['manifest_text']),
             mock.call().execute(num_retries=0)])
 
-        api.collections().create.assert_called_with(
-            ensure_unique_name=True,
-            body={'portable_data_hash': '99999999999999999999999999999993+99',
-                  'manifest_text': 'XYZ',
-                  'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
-                  'name': 'Output 9999999 of testjob'})
+        api.collections().create.assert_has_calls([
+            mock.call(ensure_unique_name=True,
+                      body={'portable_data_hash': '99999999999999999999999999999993+99',
+                            'manifest_text': 'XYZ',
+                            'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
+                            'name': 'Output 9999999 of testjob'}),
+            mock.call().execute(num_retries=0),
+            mock.call(ensure_unique_name=True,
+                      body={'portable_data_hash': '99999999999999999999999999999994+99',
+                            'manifest_text': 'ABC',
+                            'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
+                            'name': 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
+            mock.call().execute(num_retries=0),
+        ])
 
         arvjob.output_callback.assert_called_with({"out": "stuff"}, "success")
 
@@ -209,7 +265,10 @@ class TestJob(unittest.TestCase):
 2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
         """)
 
-        api.collections().list().execute.side_effect = ({"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},)
+        api.collections().list().execute.side_effect = (
+            {"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
+            {"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
+        )
 
         arvjob = arvados_cwl.ArvadosJob(runner)
         arvjob.name = "testjob"
@@ -227,10 +286,17 @@ class TestJob(unittest.TestCase):
 
         api.collections().list.assert_has_calls([
             mock.call(),
+            # Output collection
             mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
                                ['portable_data_hash', '=', '99999999999999999999999999999993+99'],
                                ['name', '=', 'Output 9999999 of testjob']]),
-            mock.call().execute(num_retries=0)])
+            mock.call().execute(num_retries=0),
+            # Log collection
+            mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
+                               ['portable_data_hash', '=', '99999999999999999999999999999994+99'],
+                               ['name', '=', 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz']]),
+            mock.call().execute(num_retries=0)
+        ])
 
         self.assertFalse(api.collections().create.called)
 
@@ -264,7 +330,8 @@ class TestWorkflow(unittest.TestCase):
 
         mockcollection().portable_data_hash.return_value = "99999999999999999999999999999999+118"
 
-        make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
+        make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
+                                         collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
         arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, work_api="jobs", avsc_names=avsc_names,
                                               basedir="", make_fs_access=make_fs_access, loader=document_loader,
                                               makeTool=runner.arv_make_tool, metadata=metadata)
@@ -274,7 +341,7 @@ class TestWorkflow(unittest.TestCase):
         it.next().run()
 
         with open("tests/wf/scatter2_subwf.cwl") as f:
-            subwf = f.read()
+            subwf = StripYAMLComments(f.read())
 
         runner.api.jobs().create.assert_called_with(
             body=JsonDiffMatcher({
@@ -295,18 +362,21 @@ class TestWorkflow(unittest.TestCase):
                 'runtime_constraints': {
                     'min_scratch_mb_per_node': 2048,
                     'min_cores_per_node': 1,
-                    'docker_image': 'arvados/jobs:'+arvados_cwl.__version__,
+                    'docker_image': 'arvados/jobs',
                     'min_ram_mb_per_node': 1024
                 },
                 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
             filters=[['repository', '=', 'arvados'],
                      ['script', '=', 'crunchrunner'],
                      ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
-                     ['docker_image_locator', 'in docker', 'arvados/jobs:'+arvados_cwl.__version__]],
+                     ['docker_image_locator', 'in docker', 'arvados/jobs']],
             find_or_create=True)
 
         mockcollection().open().__enter__().write.assert_has_calls([mock.call(subwf)])
-        mockcollection().open().__enter__().write.assert_has_calls([mock.call('sleeptime: 5')])
+        mockcollection().open().__enter__().write.assert_has_calls([mock.call(
+'''{
+  "sleeptime": 5
+}''')])
 
     def test_default_work_api(self):
         arvados_cwl.add_arv_hints()