# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0

import collections
import difflib
import glob
import gzip
import mock
import os
import unittest

import crunchstat_summary.command
import crunchstat_summary.summarizer

TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
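

# Shared helpers: compare a command's generated report against the expected
# ".report" file stored next to the log fixture, and show a context diff
# when they differ.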
class ReportDiff(unittest.TestCase):
    def diff_known_report(self, logfile, cmd):
        expectfile = logfile+'.report'
        expect = open(expectfile).readlines()
        self.diff_report(cmd, expect, expectfile=expectfile)

    def diff_report(self, cmd, expect, expectfile=None):
        got = [x+"\n" for x in cmd.report().strip("\n").split("\n")]
        self.assertEqual(got, expect, "\n"+"".join(difflib.context_diff(
            expect, got, fromfile=expectfile, tofile="(generated)")))
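

# Each *.txt.gz log fixture in this directory must summarize to exactly the
# report stored alongside it (<logfile>.report).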
class SummarizeFile(ReportDiff):
    def test_example_files(self):
        for fnm in glob.glob(os.path.join(TESTS_DIR, '*.txt.gz')):
            logfile = os.path.join(TESTS_DIR, fnm)
            args = crunchstat_summary.command.ArgumentParser().parse_args(
                ['--log-file', logfile])
            cmd = crunchstat_summary.command.Command(args)
            cmd.run()
            self.diff_known_report(logfile, cmd)


class HTMLFromFile(ReportDiff):
    def test_example_files(self):
        # Note we don't test the output content at all yet; we're
        # mainly just verifying the --format=html option isn't ignored
        # and the HTML code path doesn't crash.
        for fnm in glob.glob(os.path.join(TESTS_DIR, '*.txt.gz')):
            logfile = os.path.join(TESTS_DIR, fnm)
            args = crunchstat_summary.command.ArgumentParser().parse_args(
                ['--format=html', '--log-file', logfile])
            cmd = crunchstat_summary.command.Command(args)
            cmd.run()
            self.assertRegexpMatches(cmd.report(), r'(?is)<html>.*</html>\s*$')
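

# The summarizer should handle a log full of error messages without
# crashing (no report content is checked here).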
class SummarizeEdgeCases(unittest.TestCase):
    def test_error_messages(self):
        logfile = open(os.path.join(TESTS_DIR, 'crunchstat_error_messages.txt'))
        s = crunchstat_summary.summarizer.Summarizer(logfile)
        s.run()
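

# Summarize a container by UUID: the container request, the container
# record, and the log collection contents are all supplied by mocks.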
class SummarizeContainer(ReportDiff):
    fake_container = {
        'uuid': '9tee4-dz642-mjfb0i5hzojp16a',
        'created_at': '2017-08-18T14:27:25.371388141',
        'log': '9tee4-4zz18-ihyzym9tcwjwg4r',
    }
    fake_request = {
        'uuid': '9tee4-xvhdp-uper95jktm10d3w',
        'created_at': '2017-08-18T14:27:25.242339223Z',
        'container_uuid': fake_container['uuid'],
    }
    logfile = os.path.join(
        TESTS_DIR, 'container_9tee4-dz642-mjfb0i5hzojp16a-crunchstat.txt.gz')
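
    # arvados.api and arvados.collection.CollectionReader are patched so the
    # test reads the local crunchstat fixture instead of contacting a real
    # cluster.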
    @mock.patch('arvados.collection.CollectionReader')
    @mock.patch('arvados.api')
    def test_container(self, mock_api, mock_cr):
        mock_api().container_requests().index().execute.return_value = {'items': []}
        mock_api().container_requests().get().execute.return_value = self.fake_request
        mock_api().containers().get().execute.return_value = self.fake_container
        mock_cr().__iter__.return_value = [
            'crunch-run.txt', 'stderr.txt', 'node-info.txt',
            'container.json', 'crunchstat.txt']
        mock_cr().open.return_value = gzip.open(self.logfile)
        args = crunchstat_summary.command.ArgumentParser().parse_args(
            ['--job', self.fake_request['uuid']])
        cmd = crunchstat_summary.command.Command(args)
        cmd.run()
        self.diff_known_report(self.logfile, cmd)
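

# Summarize a job by UUID; the Arvados API and the job's log collection
# are mocked.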
class SummarizeJob(ReportDiff):
    fake_job_uuid = '4xphq-8i9sb-jq0ekny1xou3zoh'
    fake_log_id = 'fake-log-collection-id'
    fake_job = {
        'uuid': fake_job_uuid,
        'log': fake_log_id,
    }
    logfile = os.path.join(TESTS_DIR, 'logfile_20151204190335.txt.gz')

    @mock.patch('arvados.collection.CollectionReader')
    @mock.patch('arvados.api')
    def test_job_report(self, mock_api, mock_cr):
        mock_api().jobs().get().execute.return_value = self.fake_job
        mock_cr().__iter__.return_value = ['fake-logfile.txt']
        mock_cr().open.return_value = gzip.open(self.logfile)
        args = crunchstat_summary.command.ArgumentParser().parse_args(
            ['--job', self.fake_job_uuid])
        cmd = crunchstat_summary.command.Command(args)
        cmd.run()
        self.diff_known_report(self.logfile, cmd)
        mock_api().jobs().get.assert_called_with(uuid=self.fake_job_uuid)
        mock_cr.assert_called_with(self.fake_log_id)
        mock_cr().open.assert_called_with('fake-logfile.txt')
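

# Summarize a pipeline instance: the report contains one section per
# component that has a job, in the pipeline's order; a job without a log
# is shown as "(no report generated)".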
class SummarizePipeline(ReportDiff):
    fake_instance = {
        'uuid': 'zzzzz-d1hrv-i3e77t9z5y8j9cc',
        'owner_uuid': 'zzzzz-tpzed-xurymjxw79nv3jz',
        'components': collections.OrderedDict([
            ['foo', {'job': {
                'uuid': 'zzzzz-8i9sb-000000000000000',
                'log': 'fake-log-pdh-0',
                'runtime_constraints': {
                    'min_ram_mb_per_node': 900,
                    'min_cores_per_node': 1,
                },
            }}],
            ['bar', {'job': {
                'uuid': 'zzzzz-8i9sb-000000000000001',
                'log': 'fake-log-pdh-1',
                'runtime_constraints': {
                    'min_ram_mb_per_node': 900,
                    'min_cores_per_node': 1,
                },
            }}],
            ['no-job-assigned', {}],
            ['unfinished-job', {'job': {
                'uuid': 'zzzzz-8i9sb-xxxxxxxxxxxxxxx',
            }}],
            ['baz', {'job': {
                'uuid': 'zzzzz-8i9sb-000000000000002',
                'log': 'fake-log-pdh-2',
                'runtime_constraints': {
                    'min_ram_mb_per_node': 900,
                    'min_cores_per_node': 1,
                },
            }}],
        ]),
    }
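
    # Each component that has a log is served the same fixture logfile, so
    # the expected output repeats that logfile's report under each
    # per-component heading.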
    @mock.patch('arvados.collection.CollectionReader')
    @mock.patch('arvados.api')
    def test_pipeline(self, mock_api, mock_cr):
        logfile = os.path.join(TESTS_DIR, 'logfile_20151204190335.txt.gz')
        mock_api().pipeline_instances().get().execute. \
            return_value = self.fake_instance
        mock_cr().__iter__.return_value = ['fake-logfile.txt']
        mock_cr().open.side_effect = [gzip.open(logfile) for _ in range(3)]
        args = crunchstat_summary.command.ArgumentParser().parse_args(
            ['--pipeline-instance', self.fake_instance['uuid']])
        cmd = crunchstat_summary.command.Command(args)
        cmd.run()
        job_report = [
            line for line in open(logfile+'.report').readlines()
            if not line.startswith('#!! ')]
        expect = (
            ['### Summary for foo (zzzzz-8i9sb-000000000000000)\n'] +
            job_report + ['\n'] +
            ['### Summary for bar (zzzzz-8i9sb-000000000000001)\n'] +
            job_report + ['\n'] +
            ['### Summary for unfinished-job (zzzzz-8i9sb-xxxxxxxxxxxxxxx)\n',
             '(no report generated)\n',
             '\n'] +
            ['### Summary for baz (zzzzz-8i9sb-000000000000002)\n'] +
            job_report)
        self.diff_report(cmd, expect)
        mock_cr.assert_has_calls(
            [
                mock.call('fake-log-pdh-0'),
                mock.call('fake-log-pdh-1'),
                mock.call('fake-log-pdh-2'),
            ], any_order=True)
        mock_cr().open.assert_called_with('fake-logfile.txt')