12 import cwltool.process
13 from arvados.errors import ApiError
14 from schema_salad.ref_resolver import Loader
15 from schema_salad.sourceline import cmap
16 from .mock_discovery import get_rootDesc
17 from .matcher import JsonDiffMatcher
# Unless ARVADOS_DEBUG is set in the environment, silence the chatty
# arvados loggers down to warnings for the duration of the test run.
if not os.getenv('ARVADOS_DEBUG'):
    for _quiet_logger in ('arvados.cwl-runner', 'arvados.arv-run'):
        logging.getLogger(_quiet_logger).setLevel(logging.WARN)
class TestJob(unittest.TestCase):
    """Tests for arvados_cwl job submission and completion via the 'jobs' API.

    NOTE(review): this excerpt was captured with embedded line numbers and
    with many source lines elided; elision points are marked with
    "[review: ... elided]" comments below.  Code between markers is
    reproduced verbatim, so the methods are NOT runnable exactly as shown.
    """

    # The test passes no builder.resources
    # Hence the default resources will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
    @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
    def test_run(self, list_images_in_arv):
        # Exercise job submission twice: with reuse enabled (expects
        # find_or_create=True below) and with it disabled.
        for enable_reuse in (True, False):
            runner = mock.MagicMock()
            runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
            runner.ignore_docker_for_reuse = False
            runner.num_retries = 0
            document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")

            list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
            runner.api.collections().get().execute.return_value = {"portable_data_hash": "99999999999999999999999999999993+99"}
            # Simulate reused job from another project so that we can check is a can_read
            # [review: 1 line elided — continuation of the comment above]
            runner.api.jobs().create().execute.return_value = {
                'state': 'Complete' if enable_reuse else 'Queued',
                'owner_uuid': 'zzzzz-tpzed-yyyyyyyyyyyyyyy' if enable_reuse else 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
                'uuid': 'zzzzz-819sb-yyyyyyyyyyyyyyy',
                # [review: lines elided — rest of the stubbed job record and the
                # start of the CommandLineTool literal]
                "arguments": [{"valueFrom": "$(runtime.outdir)"}]
            # [review: 1 line elided]
            make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
            arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
                basedir="", make_fs_access=make_fs_access, loader=Loader({}))
            arvtool.formatgraph = None
            for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
                j.run(enable_reuse=enable_reuse)
                # The submitted job body must match exactly; JsonDiffMatcher
                # reports a readable diff on mismatch.
                runner.api.jobs().create.assert_called_with(
                    body=JsonDiffMatcher({
                        'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
                        'runtime_constraints': {},
                        'script_parameters': {
                            # [review: 1 line elided]
                            'task.env': {'HOME': '$(task.outdir)', 'TMPDIR': '$(task.tmpdir)'},
                            'command': ['ls', '$(task.outdir)']
                            # [review: lines elided]
                        'script_version': 'master',
                        'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
                        'repository': 'arvados',
                        'script': 'crunchrunner',
                        'runtime_constraints': {
                            'docker_image': 'arvados/jobs',
                            'min_cores_per_node': 1,
                            'min_ram_mb_per_node': 1024,
                            'min_scratch_mb_per_node': 2048 # tmpdirSize + outdirSize
                        # [review: lines elided — closing of the body dict]
                    find_or_create=enable_reuse,
                    filters=[['repository', '=', 'arvados'],
                        ['script', '=', 'crunchrunner'],
                        ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
                        ['docker_image_locator', 'in docker', 'arvados/jobs']]
                # [review: lines elided]
                # A can_read permission link is expected (see comment near the
                # stubbed job record above: the reused job is owned elsewhere).
                runner.api.links().create.assert_called_with(
                    body=JsonDiffMatcher({
                        'link_class': 'permission',
                        # [review: 1 line elided]
                        "tail_uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz",
                        "head_uuid": "zzzzz-819sb-yyyyyyyyyyyyyyy",
                # [review: lines elided]
                # Simulate an API exception when trying to create a
                # sharing link on the job
                runner.api.links().create.side_effect = ApiError(
                    mock.MagicMock(return_value={'status': 403}),
                # [review: 1 line elided — second ApiError argument]
                j.run(enable_reuse=enable_reuse)
                # The sharing-link failure must not fail the job itself.
                j.output_callback.assert_called_with({}, 'success')
                # [review: 1 line elided]
                assert not runner.api.links().create.called

    # The test passes some fields in builder.resources
    # For the remaining fields, the defaults will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
    @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
    def test_resource_requirements(self, list_images_in_arv):
        runner = mock.MagicMock()
        runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        runner.ignore_docker_for_reuse = False
        runner.num_retries = 0
        arvados_cwl.add_arv_hints()

        list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
        # NOTE(review): 'return_vaulue' is a typo for 'return_value'.  On a
        # MagicMock it silently creates an unused attribute, so this stub is
        # never actually applied — the spelling should be fixed.
        runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}

        document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")
        # [review: lines elided — tool literal; only the requirement/hint
        # "class" keys survive in this excerpt]
            "class": "ResourceRequirement",
            # [review: lines elided]
            "class": "http://arvados.org/cwl#RuntimeConstraints",
            # [review: 1 line elided]
            "outputDirType": "keep_output_dir"
            # [review: 1 line elided]
            "class": "http://arvados.org/cwl#APIRequirement",
            # [review: lines elided]
            "class": "http://arvados.org/cwl#ReuseRequirement",
        # [review: lines elided — end of tool literal]
        make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
            collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
        arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
            make_fs_access=make_fs_access, loader=Loader({}))
        arvtool.formatgraph = None
        for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
            j.run(enable_reuse=True)
        runner.api.jobs().create.assert_called_with(
            body=JsonDiffMatcher({
                'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
                'runtime_constraints': {},
                'script_parameters': {
                    # [review: 1 line elided]
                    'task.env': {'HOME': '$(task.outdir)', 'TMPDIR': '$(task.tmpdir)'},
                    'task.keepTmpOutput': True,
                    # [review: lines elided]
                'script_version': 'master',
                'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
                'repository': 'arvados',
                'script': 'crunchrunner',
                'runtime_constraints': {
                    'docker_image': 'arvados/jobs',
                    'min_cores_per_node': 3,
                    'min_ram_mb_per_node': 3512, # ramMin + keep_cache
                    'min_scratch_mb_per_node': 5024, # tmpdirSize + outdirSize
                    'keep_cache_mb_per_task': 512
                # [review: lines elided — closing of the body dict]
            # ReuseRequirement in the tool hints forces find_or_create=False.
            find_or_create=False,
            filters=[['repository', '=', 'arvados'],
                ['script', '=', 'crunchrunner'],
                ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
                ['docker_image_locator', 'in docker', 'arvados/jobs']])

    @mock.patch("arvados.collection.CollectionReader")
    def test_done(self, reader):
        # When no matching output/log collections exist yet, done() must
        # create both in the project and then report success.
        api = mock.MagicMock()
        # [review: 1 line elided]
        runner = mock.MagicMock()
        # [review: 1 line elided — presumably wires runner to api; confirm]
        runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        runner.num_retries = 0
        runner.ignore_docker_for_reuse = False

        # Stub the crunchrunner log stream that done() reads.
        reader().open.return_value = StringIO.StringIO(
            """2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.outdir)=/tmp/crunch-job-task-work/compute3.1/outdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
        # [review: 1 line elided — closing of the log string literal]
        # list() stubs: empty result (no existing collection), then the
        # manifest lookup, for output and (after an elided entry) for the log.
        api.collections().list().execute.side_effect = ({"items": []},
            {"items": [{"manifest_text": "XYZ"}]},
            # [review: 1 line elided]
            {"items": [{"manifest_text": "ABC"}]})
        # [review: 1 line elided]
        arvjob = arvados_cwl.ArvadosJob(runner)
        arvjob.name = "testjob"
        arvjob.builder = mock.MagicMock()
        arvjob.output_callback = mock.MagicMock()
        arvjob.collect_outputs = mock.MagicMock()
        arvjob.collect_outputs.return_value = {"out": "stuff"}
        # [review: lines elided — the arvjob.done(...) call with the finished
        # job record; only these record fields survive in this excerpt:]
            "output": "99999999999999999999999999999993+99",
            "log": "99999999999999999999999999999994+99",
            "uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        # [review: lines elided]
        api.collections().list.assert_has_calls([
            # [review: 1 line elided]
            # Output collection check
            mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
                ['portable_data_hash', '=', '99999999999999999999999999999993+99'],
                ['name', '=', 'Output 9999999 of testjob']]),
            mock.call().execute(num_retries=0),
            mock.call(limit=1, filters=[['portable_data_hash', '=', '99999999999999999999999999999993+99']],
                select=['manifest_text']),
            mock.call().execute(num_retries=0),
            # Log collection's turn
            mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
                ['portable_data_hash', '=', '99999999999999999999999999999994+99'],
                ['name', '=', 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz']]),
            mock.call().execute(num_retries=0),
            mock.call(limit=1, filters=[['portable_data_hash', '=', '99999999999999999999999999999994+99']],
                select=['manifest_text']),
            mock.call().execute(num_retries=0)])
        # [review: 1 line elided]
        api.collections().create.assert_has_calls([
            mock.call(ensure_unique_name=True,
                body={'portable_data_hash': '99999999999999999999999999999993+99',
                    'manifest_text': 'XYZ',
                    'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
                    'name': 'Output 9999999 of testjob'}),
            mock.call().execute(num_retries=0),
            mock.call(ensure_unique_name=True,
                body={'portable_data_hash': '99999999999999999999999999999994+99',
                    'manifest_text': 'ABC',
                    'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz',
                    'name': 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
            mock.call().execute(num_retries=0),
        # [review: lines elided — closing of assert_has_calls]
        arvjob.output_callback.assert_called_with({"out": "stuff"}, "success")

    @mock.patch("arvados.collection.CollectionReader")
    def test_done_use_existing_collection(self, reader):
        # When collections matching the output/log already exist in the
        # project, done() must reuse them and create nothing new.
        api = mock.MagicMock()
        # [review: 1 line elided]
        runner = mock.MagicMock()
        # [review: 1 line elided — presumably wires runner to api; confirm]
        runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        runner.num_retries = 0

        reader().open.return_value = StringIO.StringIO(
            """2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.outdir)=/tmp/crunch-job-task-work/compute3.1/outdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
        # [review: lines elided — closing of the log string literal]
        # Both lookups find an existing collection, so no create() calls.
        api.collections().list().execute.side_effect = (
            {"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
            {"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
        # [review: lines elided]
        arvjob = arvados_cwl.ArvadosJob(runner)
        arvjob.name = "testjob"
        arvjob.builder = mock.MagicMock()
        arvjob.output_callback = mock.MagicMock()
        arvjob.collect_outputs = mock.MagicMock()
        arvjob.collect_outputs.return_value = {"out": "stuff"}
        # [review: lines elided — the arvjob.done(...) call; only these record
        # fields survive in this excerpt:]
            "output": "99999999999999999999999999999993+99",
            "log": "99999999999999999999999999999994+99",
            "uuid": "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        # [review: lines elided]
        api.collections().list.assert_has_calls([
            # [review: lines elided]
            mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
                ['portable_data_hash', '=', '99999999999999999999999999999993+99'],
                ['name', '=', 'Output 9999999 of testjob']]),
            mock.call().execute(num_retries=0),
            # [review: 1 line elided]
            mock.call(filters=[['owner_uuid', '=', 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'],
                ['portable_data_hash', '=', '99999999999999999999999999999994+99'],
                ['name', '=', 'Log of zzzzz-8i9sb-zzzzzzzzzzzzzzz']]),
            mock.call().execute(num_retries=0)
        # [review: lines elided — closing of assert_has_calls]
        self.assertFalse(api.collections().create.called)
        # [review: 1 line elided]
        arvjob.output_callback.assert_called_with({"out": "stuff"}, "success")
class TestWorkflow(unittest.TestCase):
    """Tests for running a CWL workflow as a crunchrunner job, plus work-API
    selection from the discovery document.

    NOTE(review): this excerpt was captured with embedded line numbers and
    with source lines elided; elision points are marked with
    "[review: ... elided]" comments.  Code between markers is reproduced
    verbatim, so test_run is NOT runnable exactly as shown.
    """

    # The test passes no builder.resources
    # Hence the default resources will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
    @mock.patch("arvados.collection.Collection")
    @mock.patch('arvados.commands.keepdocker.list_images_in_arv')
    def test_run(self, list_images_in_arv, mockcollection):
        arvados_cwl.add_arv_hints()

        api = mock.MagicMock()
        api._rootDesc = get_rootDesc()

        runner = arvados_cwl.ArvCwlRunner(api)
        # With the full mock discovery doc the runner selects the jobs API.
        self.assertEqual(runner.work_api, 'jobs')

        list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
        # NOTE(review): 'return_vaulue' is a typo for 'return_value'.  On a
        # MagicMock it silently creates an unused attribute, so this stub is
        # never actually applied — the spelling should be fixed.
        runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}

        runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
        runner.ignore_docker_for_reuse = False
        runner.num_retries = 0
        document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.0")

        tool, metadata = document_loader.resolve_ref("tests/wf/scatter2.cwl")
        metadata["cwlVersion"] = tool["cwlVersion"]

        mockcollection().portable_data_hash.return_value = "99999999999999999999999999999999+118"

        make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
            collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
        arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, work_api="jobs", avsc_names=avsc_names,
            basedir="", make_fs_access=make_fs_access, loader=document_loader,
            makeTool=runner.arv_make_tool, metadata=metadata)
        arvtool.formatgraph = None
        it = arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access)
        # [review: lines elided — iterating 'it' and running the job]
        with open("tests/wf/scatter2_subwf.cwl") as f:
            # [review: lines elided — reading the expected packed subworkflow]
        runner.api.jobs().create.assert_called_with(
            body=JsonDiffMatcher({
                'minimum_script_version': 'a3f2cb186e437bfce0031b024b2157b73ed2717d',
                'repository': 'arvados',
                'script_version': 'master',
                'script': 'crunchrunner',
                'script_parameters': {
                    'tasks': [{'task.env': {
                        'HOME': '$(task.outdir)',
                        'TMPDIR': '$(task.tmpdir)'},
                        # [review: 1 line elided]
                        'workflow.cwl': '$(task.keep)/99999999999999999999999999999999+118/workflow.cwl',
                        'cwl.input.yml': '$(task.keep)/99999999999999999999999999999999+118/cwl.input.yml'
                        # [review: 1 line elided]
                        'command': [u'cwltool', u'--no-container', u'--move-outputs', u'--preserve-entire-environment', u'workflow.cwl#main', u'cwl.input.yml'],
                        'task.stdout': 'cwl.output.json'}]},
                'runtime_constraints': {
                    'min_scratch_mb_per_node': 2048,
                    'min_cores_per_node': 1,
                    'docker_image': 'arvados/jobs',
                    'min_ram_mb_per_node': 1024
                # [review: 1 line elided — closing of runtime_constraints]
                'owner_uuid': 'zzzzz-8i9sb-zzzzzzzzzzzzzzz'}),
            filters=[['repository', '=', 'arvados'],
                ['script', '=', 'crunchrunner'],
                ['script_version', 'in git', 'a3f2cb186e437bfce0031b024b2157b73ed2717d'],
                ['docker_image_locator', 'in docker', 'arvados/jobs']],
        # [review: lines elided]
        # The packed subworkflow and its input document are both written into
        # the staging collection.
        mockcollection().open().__enter__().write.assert_has_calls([mock.call(subwf)])
        mockcollection().open().__enter__().write.assert_has_calls([mock.call(
        # [review: lines elided — expected cwl.input.yml content]

    def test_default_work_api(self):
        # Deleting jobs.create from the discovery doc makes the runner fall
        # back to the containers API.
        arvados_cwl.add_arv_hints()

        api = mock.MagicMock()
        api._rootDesc = copy.deepcopy(get_rootDesc())
        del api._rootDesc.get('resources')['jobs']['methods']['create']
        runner = arvados_cwl.ArvCwlRunner(api)
        self.assertEqual(runner.work_api, 'containers')