1 # Copyright (C) The Arvados Authors. All rights reserved.
3 # SPDX-License-Identifier: Apache-2.0
16 import cwltool.process
17 from arvados.errors import ApiError
18 from schema_salad.ref_resolver import Loader
19 from schema_salad.sourceline import cmap
20 from .mock_discovery import get_rootDesc
21 from .matcher import JsonDiffMatcher, StripYAMLComments
# Quiet the arvados loggers while the tests run, unless the developer has
# asked for verbose output via the ARVADOS_DEBUG environment variable.
23 if not os.getenv('ARVADOS_DEBUG'):
24 logging.getLogger('arvados.cwl-runner').setLevel(logging.WARN)
25 logging.getLogger('arvados.arv-run').setLevel(logging.WARN)
27 class TestJob(unittest.TestCase):
29 # The test passes no builder.resources
30 # Hence the default resources will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
31 @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
32 def test_run(self, list_images_in_arv):
# Exercise ArvadosCommandTool.job()/run() with job reuse both enabled and
# disabled, and verify the jobs().create request body, the reuse filters,
# and the permission-link handling for a reused job.
33 for enable_reuse in (True, False):
34 runner = mock.MagicMock()
35 runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
36 runner.ignore_docker_for_reuse = False
37 runner.num_retries = 0
38 document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")
40 list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
41 runner.api.collections().get().execute.return_value = {"portable_data_hash": "99999999999999999999999999999993+99"}
42 # Simulate a reused job from another project so that we can check that a can_read permission link is created
44 runner.api.jobs().create().execute.return_value = {
45 'state': 'Complete' if enable_reuse else 'Queued',
46 'owner_uuid': 'zzzzz-tpzed-yyyyyyyyyyyyyyy' if enable_reuse else 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
47 'uuid': 'zzzzz-819sb-yyyyyyyyyyyyyyy',
55 "arguments": [{"valueFrom": "$(runtime.outdir)"}],
57 "class": "CommandLineTool"
59 make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
60 collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
61 arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
62 basedir="", make_fs_access=make_fs_access, loader=Loader({}))
63 arvtool.formatgraph = None
64 for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
65 j.run(enable_reuse=enable_reuse)
66 runner.api.jobs().create.assert_called_with(
67 body=JsonDiffMatcher({
68 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
69 'runtime_constraints': {},
70 'script_parameters': {
72 'task.env': {'HOME': '$(task.outdir)', 'TMPDIR': '$(task.tmpdir)'},
73 'command': ['ls', '$(task.outdir)']
76 'script_version': 'master',
77 'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
78 'repository': 'arvados',
79 'script': 'crunchrunner',
80 'runtime_constraints': {
81 'docker_image': 'arvados/jobs',
82 'min_cores_per_node': 1,
83 'min_ram_mb_per_node': 1024,
84 'min_scratch_mb_per_node': 2048 # tmpdirSize + outdirSize
87 find_or_create=enable_reuse,
88 filters=[['repository', '=', 'arvados'],
89 ['script', '=', 'crunchrunner'],
90 ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
91 ['docker_image_locator', 'in docker', 'arvados/jobs']]
94 runner.api.links().create.assert_called_with(
95 body=JsonDiffMatcher({
96 'link_class': 'permission',
98 "tail_uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz",
99 "head_uuid": "zzzzz-819sb-yyyyyyyyyyyyyyy",
102 # Simulate an API exception when trying to create a
103 # sharing link on the job
104 runner.api.links().create.side_effect = ApiError(
105 mock.MagicMock(return_value={'status': 403}),
# A 403 on link creation must be tolerated: run() still succeeds and no
# further link creation is attempted.
107 j.run(enable_reuse=enable_reuse)
109 assert not runner.api.links().create.called
111 # The test passes some fields in builder.resources
112 # For the remaining fields, the defaults will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
113 @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
114 def test_resource_requirements(self, list_images_in_arv):
115 runner = mock.MagicMock()
116 runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
117 runner.ignore_docker_for_reuse = False
118 runner.num_retries = 0
119 arvados_cwl.add_arv_hints()
121 list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
122 runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}
124 document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")
131 "class": "ResourceRequirement",
136 "class": "http://arvados.org/cwl#RuntimeConstraints",
138 "outputDirType": "keep_output_dir"
140 "class": "http://arvados.org/cwl#APIRequirement",
143 "class": "http://arvados.org/cwl#ReuseRequirement",
148 "class": "CommandLineTool"
150 make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
151 collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
152 arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
153 make_fs_access=make_fs_access, loader=Loader({}))
154 arvtool.formatgraph = None
155 for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
156 j.run(enable_reuse=True)
157 runner.api.jobs().create.assert_called_with(
158 body=JsonDiffMatcher({
159 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
160 'runtime_constraints': {},
161 'script_parameters': {
163 'task.env': {'HOME': '$(task.outdir)', 'TMPDIR': '$(task.tmpdir)'},
164 'task.keepTmpOutput': True,
168 'script_version': 'master',
169 'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
170 'repository': 'arvados',
171 'script': 'crunchrunner',
172 'runtime_constraints': {
173 'docker_image': 'arvados/jobs',
174 'min_cores_per_node': 3,
175 'min_ram_mb_per_node': 3512, # ramMin + keep_cache
176 'min_scratch_mb_per_node': 5024, # tmpdirSize + outdirSize
177 'keep_cache_mb_per_task': 512
180 find_or_create=False,
181 filters=[['repository', '=', 'arvados'],
182 ['script', '=', 'crunchrunner'],
183 ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
184 ['docker_image_locator', 'in docker', 'arvados/jobs']])
186 @mock.patch("arvados.collection.CollectionReader")
187 def test_done(self, reader):
# When the job completes and no matching output/log collections exist in
# the project yet (first list() returns no items), done() must create
# fresh collections from the job's output and log manifests and finally
# invoke output_callback with the collected outputs and "success".
188 api = mock.MagicMock()
190 runner = mock.MagicMock()
192 runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
193 runner.num_retries = 0
194 runner.ignore_docker_for_reuse = False
196 reader().open.return_value = StringIO.StringIO(
197 """2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
198 2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.outdir)=/tmp/crunch-job-task-work/compute3.1/outdir
199 2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
# side_effect feeds successive list() calls: "not found in project" then
# "found by portable_data_hash" for output, then the same for the log.
201 api.collections().list().execute.side_effect = ({"items": []},
202 {"items": [{"manifest_text": "XYZ"}]},
204 {"items": [{"manifest_text": "ABC"}]})
206 arvjob = arvados_cwl.ArvadosJob(runner)
207 arvjob.name = "testjob"
208 arvjob.builder = mock.MagicMock()
209 arvjob.output_callback = mock.MagicMock()
210 arvjob.collect_outputs = mock.MagicMock()
211 arvjob.collect_outputs.return_value = {"out": "stuff"}
215 "output": "99999999999999999999999999999993+99",
216 "log": "99999999999999999999999999999994+99",
217 "uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
220 api.collections().list.assert_has_calls([
222 # Output collection check
223 mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
224 ['portable_data_hash', '=', '99999999999999999999999999999993+99'],
225 ['name', '=', 'Output 9999999 of testjob']]),
226 mock.call().execute(num_retries=0),
227 mock.call(limit=1, filters=[['portable_data_hash', '=', '99999999999999999999999999999993+99']],
228 select=['manifest_text']),
229 mock.call().execute(num_retries=0),
230 # Log collection's turn
231 mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
232 ['portable_data_hash', '=', '99999999999999999999999999999994+99'],
233 ['name', '=', 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz']]),
234 mock.call().execute(num_retries=0),
235 mock.call(limit=1, filters=[['portable_data_hash', '=', '99999999999999999999999999999994+99']],
236 select=['manifest_text']),
237 mock.call().execute(num_retries=0)])
# Both the output and log collections should be created in the project
# with the manifest texts looked up above.
239 api.collections().create.assert_has_calls([
240 mock.call(ensure_unique_name=True,
241 body={'portable_data_hash': '99999999999999999999999999999993+99',
242 'manifest_text': 'XYZ',
243 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
244 'name': 'Output 9999999 of testjob'}),
245 mock.call().execute(num_retries=0),
246 mock.call(ensure_unique_name=True,
247 body={'portable_data_hash': '99999999999999999999999999999994+99',
248 'manifest_text': 'ABC',
249 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
250 'name': 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
251 mock.call().execute(num_retries=0),
254 arvjob.output_callback.assert_called_with({"out": "stuff"}, "success")
256 @mock.patch("arvados.collection.CollectionReader")
257 def test_done_use_existing_collection(self, reader):
# When matching output and log collections already exist in the project
# (list() returns an item for each), done() must reuse them: no
# collections().create call is made, and output_callback still fires
# with "success".
258 api = mock.MagicMock()
260 runner = mock.MagicMock()
262 runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
263 runner.num_retries = 0
265 reader().open.return_value = StringIO.StringIO(
266 """2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
267 2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.outdir)=/tmp/crunch-job-task-work/compute3.1/outdir
268 2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
# Both project lookups (output, then log) find an existing collection.
271 api.collections().list().execute.side_effect = (
272 {"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
273 {"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
276 arvjob = arvados_cwl.ArvadosJob(runner)
277 arvjob.name = "testjob"
278 arvjob.builder = mock.MagicMock()
279 arvjob.output_callback = mock.MagicMock()
280 arvjob.collect_outputs = mock.MagicMock()
281 arvjob.collect_outputs.return_value = {"out": "stuff"}
285 "output": "99999999999999999999999999999993+99",
286 "log": "99999999999999999999999999999994+99",
287 "uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
290 api.collections().list.assert_has_calls([
293 mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
294 ['portable_data_hash', '=', '99999999999999999999999999999993+99'],
295 ['name', '=', 'Output 9999999 of testjob']]),
296 mock.call().execute(num_retries=0),
298 mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
299 ['portable_data_hash', '=', '99999999999999999999999999999994+99'],
300 ['name', '=', 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz']]),
301 mock.call().execute(num_retries=0)
# Reuse path: no new collection is created.
304 self.assertFalse(api.collections().create.called)
306 arvjob.output_callback.assert_called_with({"out": "stuff"}, "success")
309 class TestWorkflow(unittest.TestCase):
310 # The test passes no builder.resources
311 # Hence the default resources will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
312 @mock.patch("arvados.collection.CollectionReader")
313 @mock.patch("arvados.collection.Collection")
314 @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
315 def test_run(self, list_images_in_arv, mockcollection, mockcollectionreader):
316 arvados_cwl.add_arv_hints()
318 api = mock.MagicMock()
319 api._rootDesc = get_rootDesc()
321 runner = arvados_cwl.ArvCwlRunner(api)
322 self.assertEqual(runner.work_api, 'jobs')
324 list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
325 runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}
326 runner.api.collections().list().execute.return_vaulue = {"items": [{"portable_data_hash": "99999999999999999999999999999993+99"}]}
328 runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
329 runner.ignore_docker_for_reuse = False
330 runner.num_retries = 0
331 document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")
333 make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
334 collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
335 document_loader.fetcher_constructor = functools.partial(arvados_cwl.CollectionFetcher, api_client=api, fs_access=make_fs_access(""))
336 document_loader.fetcher = document_loader.fetcher_constructor(document_loader.cache, document_loader.session)
337 document_loader.fetch_text = document_loader.fetcher.fetch_text
338 document_loader.check_exists = document_loader.fetcher.check_exists
340 tool, metadata = document_loader.resolve_ref("tests/wf/scatter2.cwl")
341 metadata["cwlVersion"] = tool["cwlVersion"]
343 mockcollection().portable_data_hash.return_value = "99999999999999999999999999999999+118"
345 arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, work_api="jobs", avsc_names=avsc_names,
346 basedir="", make_fs_access=make_fs_access, loader=document_loader,
347 makeTool=runner.arv_make_tool, metadata=metadata)
348 arvtool.formatgraph = None
349 it = arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access)
353 with open("tests/wf/scatter2_subwf.cwl") as f:
354 subwf = StripYAMLComments(f.read())
356 runner.api.jobs().create.assert_called_with(
357 body=JsonDiffMatcher({
358 'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
359 'repository': 'arvados',
360 'script_version': 'master',
361 'script': 'crunchrunner',
362 'script_parameters': {
363 'tasks': [{'task.env': {
364 'HOME': '$(task.outdir)',
365 'TMPDIR': '$(task.tmpdir)'},
367 'workflow.cwl': '$(task.keep)/99999999999999999999999999999999+118/workflow.cwl',
368 'cwl.input.yml': '$(task.keep)/99999999999999999999999999999999+118/cwl.input.yml'
370 'command': [u'cwltool', u'--no-container', u'--move-outputs', u'--preserve-entire-environment', u'workflow.cwl#main', u'cwl.input.yml'],
371 'task.stdout': 'cwl.output.json'}]},
372 'runtime_constraints': {
373 'min_scratch_mb_per_node': 2048,
374 'min_cores_per_node': 1,
375 'docker_image': 'arvados/jobs',
376 'min_ram_mb_per_node': 1024
378 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
379 filters=[['repository', '=', 'arvados'],
380 ['script', '=', 'crunchrunner'],
381 ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
382 ['docker_image_locator', 'in docker', 'arvados/jobs']],
385 mockcollection().open().__enter__().write.assert_has_calls([mock.call(subwf)])
386 mockcollection().open().__enter__().write.assert_has_calls([mock.call(
389 "basename": "token.txt",
391 "location": "/keep/99999999999999999999999999999999+118/token.txt"
396 # The test passes no builder.resources
397 # Hence the default resources will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
398 @mock.patch("arvados.collection.CollectionReader")
399 @mock.patch("arvados.collection.Collection")
400 @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
401 def test_overall_resource_singlecontainer(self, list_images_in_arv, mockcollection, mockcollectionreader):
402 arvados_cwl.add_arv_hints()
404 api = mock.MagicMock()
405 api._rootDesc = get_rootDesc()
407 runner = arvados_cwl.ArvCwlRunner(api)
408 self.assertEqual(runner.work_api, 'jobs')
410 list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
411 runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}
412 runner.api.collections().list().execute.return_vaulue = {"items": [{"portable_data_hash": "99999999999999999999999999999993+99"}]}
414 runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
415 runner.ignore_docker_for_reuse = False
416 runner.num_retries = 0
417 document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")
419 make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
420 collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
421 document_loader.fetcher_constructor = functools.partial(arvados_cwl.CollectionFetcher, api_client=api, fs_access=make_fs_access(""))
422 document_loader.fetcher = document_loader.fetcher_constructor(document_loader.cache, document_loader.session)
423 document_loader.fetch_text = document_loader.fetcher.fetch_text
424 document_loader.check_exists = document_loader.fetcher.check_exists
426 tool, metadata = document_loader.resolve_ref("tests/wf/echo-wf.cwl")
427 metadata["cwlVersion"] = tool["cwlVersion"]
429 mockcollection().portable_data_hash.return_value = "99999999999999999999999999999999+118"
431 arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, work_api="jobs", avsc_names=avsc_names,
432 basedir="", make_fs_access=make_fs_access, loader=document_loader,
433 makeTool=runner.arv_make_tool, metadata=metadata)
434 arvtool.formatgraph = None
435 it = arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access)
439 with open("tests/wf/echo-subwf.cwl") as f:
440 subwf = StripYAMLComments(f.read())
442 runner.api.jobs().create.assert_called_with(
443 body=JsonDiffMatcher({
444 'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
445 'repository': 'arvados',
446 'script_version': 'master',
447 'script': 'crunchrunner',
448 'script_parameters': {
449 'tasks': [{'task.env': {
450 'HOME': '$(task.outdir)',
451 'TMPDIR': '$(task.tmpdir)'},
453 'workflow.cwl': '$(task.keep)/99999999999999999999999999999999+118/workflow.cwl',
454 'cwl.input.yml': '$(task.keep)/99999999999999999999999999999999+118/cwl.input.yml'
456 'command': [u'cwltool', u'--no-container', u'--move-outputs', u'--preserve-entire-environment', u'workflow.cwl#main', u'cwl.input.yml'],
457 'task.stdout': 'cwl.output.json'}]},
458 'runtime_constraints': {
459 'min_scratch_mb_per_node': 4096,
460 'min_cores_per_node': 3,
461 'docker_image': 'arvados/jobs',
462 'min_ram_mb_per_node': 1024
464 'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
465 filters=[['repository', '=', 'arvados'],
466 ['script', '=', 'crunchrunner'],
467 ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
468 ['docker_image_locator', 'in docker', 'arvados/jobs']],
471 def test_default_work_api(self):
# If the API server's discovery document does not offer the
# jobs.create endpoint, the runner should fall back to the
# containers API.
472 arvados_cwl.add_arv_hints()
474 api = mock.MagicMock()
# deepcopy so deleting the method does not mutate the shared rootDesc
# fixture used by the other tests.
475 api._rootDesc = copy.deepcopy(get_rootDesc())
476 del api._rootDesc.get('resources')['jobs']['methods']['create']
477 runner = arvados_cwl.ArvCwlRunner(api)
478 self.assertEqual(runner.work_api, 'containers')