#!/usr/bin/env python
# -*- coding: utf-8 -*-

import apiclient
import mock
import os
import pwd
import re
import shutil
import subprocess
import sys
import tempfile
import time
import unittest
import yaml

from cStringIO import StringIO

import arvados
import arvados.commands.put as arv_put

from arvados_testutil import ArvadosBaseTestCase
import run_test_server


class ArvadosPutResumeCacheTest(ArvadosBaseTestCase):
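    # Argument lists that should each map to a distinct, stable resume-cache path.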
    CACHE_ARGSET = [
        [],
        ['/dev/null'],
        ['/dev/null', '--filename', 'empty'],
        ['/tmp'],
        ['/tmp', '--max-manifest-depth', '0'],
        ['/tmp', '--max-manifest-depth', '1']
        ]

    def tearDown(self):
        super(ArvadosPutResumeCacheTest, self).tearDown()
        try:
            self.last_cache.destroy()
        except AttributeError:
            pass

    def cache_path_from_arglist(self, arglist):
        return arv_put.ResumeCache.make_path(arv_put.parse_arguments(arglist))

    def test_cache_names_stable(self):
        for argset in self.CACHE_ARGSET:
            self.assertEqual(self.cache_path_from_arglist(argset),
                             self.cache_path_from_arglist(argset),
                             "cache name changed for {}".format(argset))

    def test_cache_names_unique(self):
        results = []
        for argset in self.CACHE_ARGSET:
            path = self.cache_path_from_arglist(argset)
            self.assertNotIn(path, results)
            results.append(path)

    def test_cache_names_simple(self):
        # The goal here is to make sure the filename doesn't use characters
        # reserved by the filesystem.  Feel free to adjust this regexp as
        # long as it still does that.
        bad_chars = re.compile(r'[^-\.\w]')
        for argset in self.CACHE_ARGSET:
            path = self.cache_path_from_arglist(argset)
            self.assertFalse(bad_chars.search(os.path.basename(path)),
                             "path too exotic: {}".format(path))

    def test_cache_names_ignore_argument_order(self):
        self.assertEqual(
            self.cache_path_from_arglist(['a', 'b', 'c']),
            self.cache_path_from_arglist(['c', 'a', 'b']))
        self.assertEqual(
            self.cache_path_from_arglist(['-', '--filename', 'stdin']),
            self.cache_path_from_arglist(['--filename', 'stdin', '-']))

    def test_cache_names_differ_for_similar_paths(self):
        # This test needs names at / that don't exist on the real filesystem.
        self.assertNotEqual(
            self.cache_path_from_arglist(['/_arvputtest1', '/_arvputtest2']),
            self.cache_path_from_arglist(['/_arvputtest1/_arvputtest2']))

    def test_cache_names_ignore_irrelevant_arguments(self):
        # Workaround: parse_arguments bails on --filename with a directory.
        path1 = self.cache_path_from_arglist(['/tmp'])
        args = arv_put.parse_arguments(['/tmp'])
        args.filename = 'tmp'
        path2 = arv_put.ResumeCache.make_path(args)
        self.assertEqual(path1, path2,
                         "cache path considered --filename for directory")
        self.assertEqual(
            self.cache_path_from_arglist(['-']),
            self.cache_path_from_arglist(['-', '--max-manifest-depth', '1']),
            "cache path considered --max-manifest-depth for file")

    def test_cache_names_treat_negative_manifest_depths_identically(self):
        base_args = ['/tmp', '--max-manifest-depth']
        self.assertEqual(
            self.cache_path_from_arglist(base_args + ['-1']),
            self.cache_path_from_arglist(base_args + ['-2']))

    def test_cache_names_treat_stdin_consistently(self):
        self.assertEqual(
            self.cache_path_from_arglist(['-', '--filename', 'test']),
            self.cache_path_from_arglist(['/dev/stdin', '--filename', 'test']))

    def test_cache_names_identical_for_synonymous_names(self):
        self.assertEqual(
            self.cache_path_from_arglist(['.']),
            self.cache_path_from_arglist([os.path.realpath('.')]))
        testdir = self.make_tmpdir()
        looplink = os.path.join(testdir, 'loop')
        os.symlink(testdir, looplink)
        self.assertEqual(
            self.cache_path_from_arglist([testdir]),
            self.cache_path_from_arglist([looplink]))

    def test_cache_names_different_by_api_host(self):
        config = arvados.config.settings()
        orig_host = config.get('ARVADOS_API_HOST')
        try:
            name1 = self.cache_path_from_arglist(['.'])
            config['ARVADOS_API_HOST'] = 'x' + (orig_host or 'localhost')
            self.assertNotEqual(name1, self.cache_path_from_arglist(['.']))
        finally:
            if orig_host is None:
                del config['ARVADOS_API_HOST']
            else:
                config['ARVADOS_API_HOST'] = orig_host

    def test_basic_cache_storage(self):
        thing = ['test', 'list']
        with tempfile.NamedTemporaryFile() as cachefile:
            self.last_cache = arv_put.ResumeCache(cachefile.name)
        self.last_cache.save(thing)
        self.assertEqual(thing, self.last_cache.load())

    def test_empty_cache(self):
        with tempfile.NamedTemporaryFile() as cachefile:
            cache = arv_put.ResumeCache(cachefile.name)
        self.assertRaises(ValueError, cache.load)

    def test_cache_persistent(self):
        thing = ['test', 'list']
        path = os.path.join(self.make_tmpdir(), 'cache')
        cache = arv_put.ResumeCache(path)
        cache.save(thing)
        cache.close()
        self.last_cache = arv_put.ResumeCache(path)
        self.assertEqual(thing, self.last_cache.load())

    def test_multiple_cache_writes(self):
        thing = ['short', 'list']
        with tempfile.NamedTemporaryFile() as cachefile:
            self.last_cache = arv_put.ResumeCache(cachefile.name)
        # Start writing an object longer than the one we test, to make
        # sure the cache file gets truncated.
        self.last_cache.save(['long', 'long', 'list'])
        self.last_cache.save(thing)
        self.assertEqual(thing, self.last_cache.load())

    def test_cache_is_locked(self):
        with tempfile.NamedTemporaryFile() as cachefile:
            cache = arv_put.ResumeCache(cachefile.name)
            self.assertRaises(arv_put.ResumeCacheConflict,
                              arv_put.ResumeCache, cachefile.name)

    def test_cache_stays_locked(self):
        with tempfile.NamedTemporaryFile() as cachefile:
            self.last_cache = arv_put.ResumeCache(cachefile.name)
            path = cachefile.name
        self.last_cache.save('test')
        self.assertRaises(arv_put.ResumeCacheConflict,
                          arv_put.ResumeCache, path)

    def test_destroy_cache(self):
        cachefile = tempfile.NamedTemporaryFile(delete=False)
        try:
            cache = arv_put.ResumeCache(cachefile.name)
            cache.save('test')
            cache.destroy()
            try:
                arv_put.ResumeCache(cachefile.name)
            except arv_put.ResumeCacheConflict:
                self.fail("could not load cache after destroying it")
            self.assertRaises(ValueError, cache.load)
        finally:
            if os.path.exists(cachefile.name):
                os.unlink(cachefile.name)

    def test_restart_cache(self):
        path = os.path.join(self.make_tmpdir(), 'cache')
        cache = arv_put.ResumeCache(path)
        cache.save('test')
        cache.restart()
        self.assertRaises(ValueError, cache.load)
        self.assertRaises(arv_put.ResumeCacheConflict,
                          arv_put.ResumeCache, path)


class ArvadosPutCollectionWriterTest(run_test_server.TestCaseWithServers,
                                     ArvadosBaseTestCase):
    def setUp(self):
        super(ArvadosPutCollectionWriterTest, self).setUp()
        run_test_server.authorize_with('active')
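        # Give each test its own on-disk resume cache backed by a throwaway
        # temporary file.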
        with tempfile.NamedTemporaryFile(delete=False) as cachefile:
            self.cache = arv_put.ResumeCache(cachefile.name)
            self.cache_filename = cachefile.name

    def tearDown(self):
        super(ArvadosPutCollectionWriterTest, self).tearDown()
        if os.path.exists(self.cache_filename):
            self.cache.destroy()
        self.cache.close()

    def test_writer_caches(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        cwriter.write_file('/dev/null')
        cwriter.cache_state()
        self.assertTrue(self.cache.load())
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", cwriter.manifest_text())

    def test_writer_works_without_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter()
        cwriter.write_file('/dev/null')
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", cwriter.manifest_text())

    def test_writer_resumes_from_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        with self.make_test_file() as testfile:
            cwriter.write_file(testfile.name, 'test')
            cwriter.cache_state()
            new_writer = arv_put.ArvPutCollectionWriter.from_cache(
                self.cache)
            self.assertEqual(
                ". 098f6bcd4621d373cade4e832627b4f6+4 0:4:test\n",
                new_writer.manifest_text())

    def test_new_writer_from_stale_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        with self.make_test_file() as testfile:
            cwriter.write_file(testfile.name, 'test')
        new_writer = arv_put.ArvPutCollectionWriter.from_cache(self.cache)
        new_writer.write_file('/dev/null')
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", new_writer.manifest_text())

    def test_new_writer_from_empty_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter.from_cache(self.cache)
        cwriter.write_file('/dev/null')
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", cwriter.manifest_text())

    def test_writer_resumable_after_arbitrary_bytes(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        # These bytes are intentionally not valid UTF-8.
        with self.make_test_file('\x00\x07\xe2') as testfile:
            cwriter.write_file(testfile.name, 'test')
            cwriter.cache_state()
            new_writer = arv_put.ArvPutCollectionWriter.from_cache(
                self.cache)
        self.assertEqual(cwriter.manifest_text(), new_writer.manifest_text())

    def make_progress_tester(self):
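        # Return a shared progression list plus a reporter callback that
        # appends (bytes_written, bytes_expected) tuples to it.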
        progression = []
        def record_func(written, expected):
            progression.append((written, expected))
        return progression, record_func

    def test_progress_reporting(self):
        for expect_count in (None, 8):
            progression, reporter = self.make_progress_tester()
            cwriter = arv_put.ArvPutCollectionWriter(
                reporter=reporter, bytes_expected=expect_count)
            with self.make_test_file() as testfile:
                cwriter.write_file(testfile.name, 'test')
            cwriter.finish_current_stream()
            self.assertIn((4, expect_count), progression)

    def test_resume_progress(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache, bytes_expected=4)
        with self.make_test_file() as testfile:
            # Set up a writer with some flushed bytes.
            cwriter.write_file(testfile.name, 'test')
            cwriter.finish_current_stream()
            cwriter.cache_state()
            new_writer = arv_put.ArvPutCollectionWriter.from_cache(self.cache)
            self.assertEqual(new_writer.bytes_written, 4)


class ArvadosExpectedBytesTest(ArvadosBaseTestCase):
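    # This test script's own size is a convenient known byte count.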
    TEST_SIZE = os.path.getsize(__file__)

    def test_expected_bytes_for_file(self):
        self.assertEqual(self.TEST_SIZE,
                         arv_put.expected_bytes_for([__file__]))

    def test_expected_bytes_for_tree(self):
        tree = self.make_tmpdir()
        shutil.copyfile(__file__, os.path.join(tree, 'one'))
        shutil.copyfile(__file__, os.path.join(tree, 'two'))
        self.assertEqual(self.TEST_SIZE * 2,
                         arv_put.expected_bytes_for([tree]))
        self.assertEqual(self.TEST_SIZE * 3,
                         arv_put.expected_bytes_for([tree, __file__]))

    def test_expected_bytes_for_device(self):
        self.assertIsNone(arv_put.expected_bytes_for(['/dev/null']))
        self.assertIsNone(arv_put.expected_bytes_for([__file__, '/dev/null']))


class ArvadosPutReportTest(ArvadosBaseTestCase):
    def test_machine_progress(self):
        for count, total in [(0, 1), (0, None), (1, None), (235, 9283)]:
            expect = ": {} written {} total\n".format(
                count, -1 if (total is None) else total)
            self.assertTrue(
                arv_put.machine_progress(count, total).endswith(expect))

    def test_known_human_progress(self):
        for count, total in [(0, 1), (2, 4), (45, 60)]:
            expect = '{:.1%}'.format(float(count) / total)
            actual = arv_put.human_progress(count, total)
            self.assertTrue(actual.startswith('\r'))
            self.assertIn(expect, actual)

    def test_unknown_human_progress(self):
        for count in [1, 20, 300, 4000, 50000]:
            self.assertTrue(re.search(r'\b{}\b'.format(count),
                                      arv_put.human_progress(count, None)))


class ArvadosPutTest(run_test_server.TestCaseWithServers, ArvadosBaseTestCase):
    MAIN_SERVER = {}
    Z_UUID = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'

    def call_main_with_args(self, args):
        self.main_stdout = StringIO()
        self.main_stderr = StringIO()
        return arv_put.main(args, self.main_stdout, self.main_stderr)

    def call_main_on_test_file(self, args=[]):
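        # Run arv-put on a small test file.  The block digest asserted below
        # is the md5 of the default test-file contents, which should be
        # present in the local Keep store after the put.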
        with self.make_test_file() as testfile:
            path = testfile.name
            self.call_main_with_args(['--stream', '--no-progress'] + args + [path])
        self.assertTrue(
            os.path.exists(os.path.join(os.environ['KEEP_LOCAL_STORE'],
                                        '098f6bcd4621d373cade4e832627b4f6')),
            "did not find file stream in Keep store")

    def setUp(self):
        super(ArvadosPutTest, self).setUp()
        run_test_server.authorize_with('active')
        arv_put.api_client = None

    def tearDown(self):
        for outbuf in ['main_stdout', 'main_stderr']:
            if hasattr(self, outbuf):
                getattr(self, outbuf).close()
                delattr(self, outbuf)
        super(ArvadosPutTest, self).tearDown()

    def test_simple_file_put(self):
        self.call_main_on_test_file()

    def test_put_with_unwriteable_cache_dir(self):
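        # arv-put should still succeed even though it cannot write a resume
        # cache in this directory.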
        orig_cachedir = arv_put.ResumeCache.CACHE_DIR
        cachedir = self.make_tmpdir()
        os.chmod(cachedir, 0o0)
        arv_put.ResumeCache.CACHE_DIR = cachedir
        try:
            self.call_main_on_test_file()
        finally:
            arv_put.ResumeCache.CACHE_DIR = orig_cachedir
            os.chmod(cachedir, 0o700)

    def test_put_with_unwritable_cache_subdir(self):
        orig_cachedir = arv_put.ResumeCache.CACHE_DIR
        cachedir = self.make_tmpdir()
        os.chmod(cachedir, 0o0)
        arv_put.ResumeCache.CACHE_DIR = os.path.join(cachedir, 'cachedir')
        try:
            self.call_main_on_test_file()
        finally:
            arv_put.ResumeCache.CACHE_DIR = orig_cachedir
            os.chmod(cachedir, 0o700)

    def test_put_block_replication(self):
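        # These tests run with KEEP_LOCAL_STORE, so data writes go through
        # KeepClient.local_store_put.  Mock it to capture the 'copies'
        # keyword arv-put passes for each --replication setting.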
        with mock.patch('arvados.collection.KeepClient.local_store_put') as put_mock:
            put_mock.return_value = 'acbd18db4cc2f85cedef654fccc4a4d8+3'
            orig_cachedir = arv_put.ResumeCache.CACHE_DIR
            cachedir = self.make_tmpdir()
            arv_put.ResumeCache.CACHE_DIR = os.path.join(cachedir, 'cachedir')
            try:
                self.call_main_on_test_file(['--replication', '1'])
                self.call_main_on_test_file(['--replication', '4'])
                self.call_main_on_test_file(['--replication', '5'])
            finally:
                arv_put.ResumeCache.CACHE_DIR = orig_cachedir
            self.assertEqual(
                [kwargs.get('copies') for args, kwargs in put_mock.call_args_list],
                [1, 4, 5])

    def test_normalize(self):
        testfile1 = self.make_test_file()
        testfile2 = self.make_test_file()
        test_paths = [testfile1.name, testfile2.name]
        # Reverse-sort the paths, so normalization must change their order.
        test_paths.sort(reverse=True)
        self.call_main_with_args(['--stream', '--no-progress', '--normalize'] +
                                 test_paths)
        manifest = self.main_stdout.getvalue()
        # Assert the second file we specified appears first in the manifest.
        file_indices = [manifest.find(':' + os.path.basename(path))
                        for path in test_paths]
        self.assertGreater(*file_indices)

    def test_error_name_without_collection(self):
        self.assertRaises(SystemExit, self.call_main_with_args,
                          ['--name', 'test without Collection',
                           '--stream', '/dev/null'])

    def test_error_when_project_not_found(self):
        self.assertRaises(SystemExit,
                          self.call_main_with_args,
                          ['--project-uuid', self.Z_UUID])

    def test_error_bad_project_uuid(self):
        self.assertRaises(SystemExit,
                          self.call_main_with_args,
                          ['--project-uuid', self.Z_UUID, '--stream'])


class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                            ArvadosBaseTestCase):
    def _getKeepServerConfig():
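        # Read the API server's Rails configuration to find the blob signing
        # key, so the Keep server used for these tests can enforce
        # permissions.  Fall back to unsigned, unenforced blobs if no key is
        # configured.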
        for config_file, mandatory in [
                ['application.yml', False], ['application.default.yml', True]]:
            path = os.path.join(run_test_server.SERVICES_SRC_DIR,
                                "api", "config", config_file)
            if not mandatory and not os.path.exists(path):
                continue
            with open(path) as f:
                rails_config = yaml.load(f.read())
                for config_section in ['test', 'common']:
                    try:
                        key = rails_config[config_section]["blob_signing_key"]
                    except (KeyError, TypeError):
                        pass
                    else:
                        return {'blob_signing_key': key,
                                'enforce_permissions': True}
        return {'blob_signing_key': None, 'enforce_permissions': False}

    MAIN_SERVER = {}
    KEEP_SERVER = _getKeepServerConfig()
    PROJECT_UUID = run_test_server.fixture('groups')['aproject']['uuid']

    @classmethod
    def setUpClass(cls):
        super(ArvPutIntegrationTest, cls).setUpClass()
        cls.ENVIRON = os.environ.copy()
        cls.ENVIRON['PYTHONPATH'] = ':'.join(sys.path)

    def setUp(self):
        super(ArvPutIntegrationTest, self).setUp()
        arv_put.api_client = None

    def authorize_with(self, token_name):
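        # Export the token's credentials to the environment used for arv-put
        # subprocesses, and point the module-level API client at them.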
        run_test_server.authorize_with(token_name)
        for v in ["ARVADOS_API_HOST",
                  "ARVADOS_API_HOST_INSECURE",
                  "ARVADOS_API_TOKEN"]:
            self.ENVIRON[v] = arvados.config.settings()[v]
        arv_put.api_client = arvados.api('v1')

    def current_user(self):
        return arv_put.api_client.users().current().execute()

    def test_check_real_project_found(self):
        self.authorize_with('active')
        self.assertTrue(arv_put.desired_project_uuid(arv_put.api_client, self.PROJECT_UUID, 0),
                        "did not correctly find test fixture project")

    def test_check_error_finding_nonexistent_uuid(self):
        BAD_UUID = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
        self.authorize_with('active')
        try:
            result = arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
                                                  0)
        except ValueError as error:
            self.assertIn(BAD_UUID, str(error))
        else:
            self.assertFalse(result, "incorrectly found nonexistent project")

    def test_check_error_finding_nonexistent_project(self):
        BAD_UUID = 'zzzzz-tpzed-zzzzzzzzzzzzzzz'
        self.authorize_with('active')
        with self.assertRaises(apiclient.errors.HttpError):
            result = arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
                                                  0)

    def test_short_put_from_stdin(self):
        # Have to run this as an integration test since arv-put can't
        # read from the tests' stdin.
        # arv-put usually can't stat(os.path.realpath('/dev/stdin')) in this
        # case, because the /proc entry is already gone by the time it tries.
        pipe = subprocess.Popen(
            [sys.executable, arv_put.__file__, '--stream'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, env=self.ENVIRON)
        pipe.stdin.write('stdin test\n')
        pipe.stdin.close()
        deadline = time.time() + 5
        while (pipe.poll() is None) and (time.time() < deadline):
            time.sleep(.1)
        returncode = pipe.poll()
        if returncode is None:
            pipe.terminate()
            self.fail("arv-put did not PUT from stdin within 5 seconds")
        elif returncode != 0:
            sys.stdout.write(pipe.stdout.read())
            self.fail("arv-put returned exit code {}".format(returncode))
        self.assertIn('4a9c8b735dce4b5fa3acf221a0b13628+11', pipe.stdout.read())

    def test_ArvPutSignedManifest(self):
        # ArvPutSignedManifest runs "arv-put foo" and then attempts to get
        # the newly created manifest from the API server, testing to confirm
        # that the block locators in the returned manifest are signed.
        self.authorize_with('active')

        # Before doing anything, demonstrate that the collection
        # we're about to create is not present in our test fixture.
        manifest_uuid = "00b4e9f40ac4dd432ef89749f1c01e74+47"
        with self.assertRaises(apiclient.errors.HttpError):
            notfound = arv_put.api_client.collections().get(
                uuid=manifest_uuid).execute()

        datadir = self.make_tmpdir()
        with open(os.path.join(datadir, "foo"), "w") as f:
            f.write("The quick brown fox jumped over the lazy dog")
        p = subprocess.Popen([sys.executable, arv_put.__file__, datadir],
                             stdout=subprocess.PIPE, env=self.ENVIRON)
        (arvout, arverr) = p.communicate()
        self.assertEqual(arverr, None)
        self.assertEqual(p.returncode, 0)

        # The manifest text stored in the API server under the same
        # manifest UUID must use signed locators.
        c = arv_put.api_client.collections().get(uuid=manifest_uuid).execute()
        self.assertRegexpMatches(
            c['manifest_text'],
            r'^\. 08a008a01d498c404b0c30852b39d3b8\+44\+A[0-9a-f]+@[0-9a-f]+ 0:44:foo\n')

        os.remove(os.path.join(datadir, "foo"))
        os.rmdir(datadir)

    def run_and_find_collection(self, text, extra_args=[]):
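        # Run arv-put in a subprocess with the given stdin text, then look up
        # the resulting collection by the identifier it printed: the portable
        # data hash with --portable-data-hash, otherwise the collection UUID.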
        self.authorize_with('active')
        pipe = subprocess.Popen(
            [sys.executable, arv_put.__file__] + extra_args,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE, env=self.ENVIRON)
        stdout, stderr = pipe.communicate(text)
        search_key = ('portable_data_hash'
                      if '--portable-data-hash' in extra_args else 'uuid')
        collection_list = arvados.api('v1').collections().list(
            filters=[[search_key, '=', stdout.strip()]]).execute().get('items', [])
        self.assertEqual(1, len(collection_list))
        return collection_list[0]

    def test_put_collection_with_high_redundancy(self):
        # Write empty data: we're not testing CollectionWriter, just
        # making sure collections.create tells the API server what our
        # desired replication level is.
        collection = self.run_and_find_collection("", ['--replication', '4'])
        self.assertEqual(4, collection['replication_desired'])

    def test_put_collection_with_default_redundancy(self):
        collection = self.run_and_find_collection("")
        self.assertEqual(None, collection['replication_desired'])

    def test_put_collection_with_unnamed_project_link(self):
        link = self.run_and_find_collection(
            "Test unnamed collection",
            ['--portable-data-hash', '--project-uuid', self.PROJECT_UUID])
        username = pwd.getpwuid(os.getuid()).pw_name
        self.assertRegexpMatches(
            link['name'],
            r'^Saved at .* by {}@'.format(re.escape(username)))

    def test_put_collection_with_name_and_no_project(self):
        link_name = 'Test Collection Link in home project'
        collection = self.run_and_find_collection(
            "Test named collection in home project",
            ['--portable-data-hash', '--name', link_name])
        self.assertEqual(link_name, collection['name'])
        my_user_uuid = self.current_user()['uuid']
        self.assertEqual(my_user_uuid, collection['owner_uuid'])

    def test_put_collection_with_named_project_link(self):
        link_name = 'Test auto Collection Link'
        collection = self.run_and_find_collection("Test named collection",
                                      ['--portable-data-hash',
                                       '--name', link_name,
                                       '--project-uuid', self.PROJECT_UUID])
        self.assertEqual(link_name, collection['name'])


if __name__ == '__main__':
    unittest.main()