#!/usr/bin/env python
# -*- coding: utf-8 -*-

import apiclient
import mock
import os
import pwd
import re
import shutil
import subprocess
import sys
import tempfile
import time
import unittest
import yaml

from cStringIO import StringIO

import arvados
import arvados.commands.put as arv_put

from arvados_testutil import ArvadosBaseTestCase
import run_test_server

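# Unit tests for arv-put's ResumeCache: cache paths must be stable, unique,
# and filesystem-safe, and the cache itself must save, load, lock, and
# destroy state correctly.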
class ArvadosPutResumeCacheTest(ArvadosBaseTestCase):
    CACHE_ARGSET = [
        [],
        ['/dev/null'],
        ['/dev/null', '--filename', 'empty'],
        ['/tmp'],
        ['/tmp', '--max-manifest-depth', '0'],
        ['/tmp', '--max-manifest-depth', '1']
        ]

    def tearDown(self):
        super(ArvadosPutResumeCacheTest, self).tearDown()
        try:
            self.last_cache.destroy()
        except AttributeError:
            pass

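    # Parse arglist the way arv-put would and return the resume cache path
    # it maps to.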
    def cache_path_from_arglist(self, arglist):
        return arv_put.ResumeCache.make_path(arv_put.parse_arguments(arglist))

    def test_cache_names_stable(self):
        for argset in self.CACHE_ARGSET:
            self.assertEqual(self.cache_path_from_arglist(argset),
                             self.cache_path_from_arglist(argset),
                             "cache name changed for {}".format(argset))

    def test_cache_names_unique(self):
        results = []
        for argset in self.CACHE_ARGSET:
            path = self.cache_path_from_arglist(argset)
            self.assertNotIn(path, results)
            results.append(path)

    def test_cache_names_simple(self):
        # The goal here is to make sure the filename doesn't use characters
        # reserved by the filesystem.  Feel free to adjust this regexp as
        # long as it still does that.
        bad_chars = re.compile(r'[^-\.\w]')
        for argset in self.CACHE_ARGSET:
            path = self.cache_path_from_arglist(argset)
            self.assertFalse(bad_chars.search(os.path.basename(path)),
                             "path too exotic: {}".format(path))

    def test_cache_names_ignore_argument_order(self):
        self.assertEqual(
            self.cache_path_from_arglist(['a', 'b', 'c']),
            self.cache_path_from_arglist(['c', 'a', 'b']))
        self.assertEqual(
            self.cache_path_from_arglist(['-', '--filename', 'stdin']),
            self.cache_path_from_arglist(['--filename', 'stdin', '-']))

    def test_cache_names_differ_for_similar_paths(self):
        # This test needs names at / that don't exist on the real filesystem.
        self.assertNotEqual(
            self.cache_path_from_arglist(['/_arvputtest1', '/_arvputtest2']),
            self.cache_path_from_arglist(['/_arvputtest1/_arvputtest2']))

    def test_cache_names_ignore_irrelevant_arguments(self):
        # Workaround: parse_arguments bails on --filename with a directory.
        path1 = self.cache_path_from_arglist(['/tmp'])
        args = arv_put.parse_arguments(['/tmp'])
        args.filename = 'tmp'
        path2 = arv_put.ResumeCache.make_path(args)
        self.assertEqual(path1, path2,
                         "cache path considered --filename for directory")
        self.assertEqual(
            self.cache_path_from_arglist(['-']),
            self.cache_path_from_arglist(['-', '--max-manifest-depth', '1']),
            "cache path considered --max-manifest-depth for file")

    def test_cache_names_treat_negative_manifest_depths_identically(self):
        base_args = ['/tmp', '--max-manifest-depth']
        self.assertEqual(
            self.cache_path_from_arglist(base_args + ['-1']),
            self.cache_path_from_arglist(base_args + ['-2']))

    def test_cache_names_treat_stdin_consistently(self):
        self.assertEqual(
            self.cache_path_from_arglist(['-', '--filename', 'test']),
            self.cache_path_from_arglist(['/dev/stdin', '--filename', 'test']))

    def test_cache_names_identical_for_synonymous_names(self):
        self.assertEqual(
            self.cache_path_from_arglist(['.']),
            self.cache_path_from_arglist([os.path.realpath('.')]))
        testdir = self.make_tmpdir()
        looplink = os.path.join(testdir, 'loop')
        os.symlink(testdir, looplink)
        self.assertEqual(
            self.cache_path_from_arglist([testdir]),
            self.cache_path_from_arglist([looplink]))

    def test_cache_names_different_by_api_host(self):
        config = arvados.config.settings()
        orig_host = config.get('ARVADOS_API_HOST')
        try:
            name1 = self.cache_path_from_arglist(['.'])
            config['ARVADOS_API_HOST'] = 'x' + (orig_host or 'localhost')
            self.assertNotEqual(name1, self.cache_path_from_arglist(['.']))
        finally:
            if orig_host is None:
                del config['ARVADOS_API_HOST']
            else:
                config['ARVADOS_API_HOST'] = orig_host

    def test_basic_cache_storage(self):
        thing = ['test', 'list']
        with tempfile.NamedTemporaryFile() as cachefile:
            self.last_cache = arv_put.ResumeCache(cachefile.name)
        self.last_cache.save(thing)
        self.assertEqual(thing, self.last_cache.load())

    def test_empty_cache(self):
        with tempfile.NamedTemporaryFile() as cachefile:
            cache = arv_put.ResumeCache(cachefile.name)
        self.assertRaises(ValueError, cache.load)

    def test_cache_persistent(self):
        thing = ['test', 'list']
        path = os.path.join(self.make_tmpdir(), 'cache')
        cache = arv_put.ResumeCache(path)
        cache.save(thing)
        cache.close()
        self.last_cache = arv_put.ResumeCache(path)
        self.assertEqual(thing, self.last_cache.load())

    def test_multiple_cache_writes(self):
        thing = ['short', 'list']
        with tempfile.NamedTemporaryFile() as cachefile:
            self.last_cache = arv_put.ResumeCache(cachefile.name)
        # Start writing an object longer than the one we test, to make
        # sure the cache file gets truncated.
        self.last_cache.save(['long', 'long', 'list'])
        self.last_cache.save(thing)
        self.assertEqual(thing, self.last_cache.load())

    def test_cache_is_locked(self):
        with tempfile.NamedTemporaryFile() as cachefile:
            cache = arv_put.ResumeCache(cachefile.name)
            self.assertRaises(arv_put.ResumeCacheConflict,
                              arv_put.ResumeCache, cachefile.name)

    def test_cache_stays_locked(self):
        with tempfile.NamedTemporaryFile() as cachefile:
            self.last_cache = arv_put.ResumeCache(cachefile.name)
            path = cachefile.name
        self.last_cache.save('test')
        self.assertRaises(arv_put.ResumeCacheConflict,
                          arv_put.ResumeCache, path)

    def test_destroy_cache(self):
        cachefile = tempfile.NamedTemporaryFile(delete=False)
        try:
            cache = arv_put.ResumeCache(cachefile.name)
            cache.save('test')
            cache.destroy()
            try:
                arv_put.ResumeCache(cachefile.name)
            except arv_put.ResumeCacheConflict:
                self.fail("could not load cache after destroying it")
            self.assertRaises(ValueError, cache.load)
        finally:
            if os.path.exists(cachefile.name):
                os.unlink(cachefile.name)

    def test_restart_cache(self):
        path = os.path.join(self.make_tmpdir(), 'cache')
        cache = arv_put.ResumeCache(path)
        cache.save('test')
        cache.restart()
        self.assertRaises(ValueError, cache.load)
        self.assertRaises(arv_put.ResumeCacheConflict,
                          arv_put.ResumeCache, path)


class ArvadosPutCollectionWriterTest(run_test_server.TestCaseWithServers,
                                     ArvadosBaseTestCase):
    def setUp(self):
        super(ArvadosPutCollectionWriterTest, self).setUp()
        run_test_server.authorize_with('active')
        with tempfile.NamedTemporaryFile(delete=False) as cachefile:
            self.cache = arv_put.ResumeCache(cachefile.name)
            self.cache_filename = cachefile.name

    def tearDown(self):
        super(ArvadosPutCollectionWriterTest, self).tearDown()
        if os.path.exists(self.cache_filename):
            self.cache.destroy()
        self.cache.close()

    def test_writer_caches(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        cwriter.write_file('/dev/null')
        cwriter.cache_state()
        self.assertTrue(self.cache.load())
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", cwriter.manifest_text())

    def test_writer_works_without_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter()
        cwriter.write_file('/dev/null')
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", cwriter.manifest_text())

    def test_writer_resumes_from_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        with self.make_test_file() as testfile:
            cwriter.write_file(testfile.name, 'test')
            cwriter.cache_state()
            new_writer = arv_put.ArvPutCollectionWriter.from_cache(
                self.cache)
            self.assertEqual(
                ". 098f6bcd4621d373cade4e832627b4f6+4 0:4:test\n",
                new_writer.manifest_text())

    def test_new_writer_from_stale_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        with self.make_test_file() as testfile:
            cwriter.write_file(testfile.name, 'test')
        new_writer = arv_put.ArvPutCollectionWriter.from_cache(self.cache)
        new_writer.write_file('/dev/null')
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", new_writer.manifest_text())

    def test_new_writer_from_empty_cache(self):
        cwriter = arv_put.ArvPutCollectionWriter.from_cache(self.cache)
        cwriter.write_file('/dev/null')
        self.assertEqual(". d41d8cd98f00b204e9800998ecf8427e+0 0:0:null\n", cwriter.manifest_text())

    def test_writer_resumable_after_arbitrary_bytes(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache)
        # These bytes are intentionally not valid UTF-8.
        with self.make_test_file('\x00\x07\xe2') as testfile:
            cwriter.write_file(testfile.name, 'test')
            cwriter.cache_state()
            new_writer = arv_put.ArvPutCollectionWriter.from_cache(
                self.cache)
        self.assertEqual(cwriter.manifest_text(), new_writer.manifest_text())

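    # Return (progression, record_func): record_func appends each
    # (bytes_written, bytes_expected) progress report to the progression list.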
    def make_progress_tester(self):
        progression = []
        def record_func(written, expected):
            progression.append((written, expected))
        return progression, record_func

    def test_progress_reporting(self):
        for expect_count in (None, 8):
            progression, reporter = self.make_progress_tester()
            cwriter = arv_put.ArvPutCollectionWriter(
                reporter=reporter, bytes_expected=expect_count)
            with self.make_test_file() as testfile:
                cwriter.write_file(testfile.name, 'test')
            cwriter.finish_current_stream()
            self.assertIn((4, expect_count), progression)

    def test_resume_progress(self):
        cwriter = arv_put.ArvPutCollectionWriter(self.cache, bytes_expected=4)
        with self.make_test_file() as testfile:
            # Set up a writer with some flushed bytes.
            cwriter.write_file(testfile.name, 'test')
            cwriter.finish_current_stream()
            cwriter.cache_state()
            new_writer = arv_put.ArvPutCollectionWriter.from_cache(self.cache)
            self.assertEqual(new_writer.bytes_written, 4)


class ArvadosExpectedBytesTest(ArvadosBaseTestCase):
    TEST_SIZE = os.path.getsize(__file__)

    def test_expected_bytes_for_file(self):
        self.assertEqual(self.TEST_SIZE,
                         arv_put.expected_bytes_for([__file__]))

    def test_expected_bytes_for_tree(self):
        tree = self.make_tmpdir()
        shutil.copyfile(__file__, os.path.join(tree, 'one'))
        shutil.copyfile(__file__, os.path.join(tree, 'two'))
        self.assertEqual(self.TEST_SIZE * 2,
                         arv_put.expected_bytes_for([tree]))
        self.assertEqual(self.TEST_SIZE * 3,
                         arv_put.expected_bytes_for([tree, __file__]))

    def test_expected_bytes_for_device(self):
        self.assertIsNone(arv_put.expected_bytes_for(['/dev/null']))
        self.assertIsNone(arv_put.expected_bytes_for([__file__, '/dev/null']))


class ArvadosPutReportTest(ArvadosBaseTestCase):
    def test_machine_progress(self):
        for count, total in [(0, 1), (0, None), (1, None), (235, 9283)]:
            expect = ": {} written {} total\n".format(
                count, -1 if (total is None) else total)
            self.assertTrue(
                arv_put.machine_progress(count, total).endswith(expect))

    def test_known_human_progress(self):
        for count, total in [(0, 1), (2, 4), (45, 60)]:
            expect = '{:.1%}'.format(float(count) / total)
            actual = arv_put.human_progress(count, total)
            self.assertTrue(actual.startswith('\r'))
            self.assertIn(expect, actual)

    def test_unknown_human_progress(self):
        for count in [1, 20, 300, 4000, 50000]:
            self.assertTrue(re.search(r'\b{}\b'.format(count),
                                      arv_put.human_progress(count, None)))


class ArvadosPutTest(run_test_server.TestCaseWithServers, ArvadosBaseTestCase):
    MAIN_SERVER = {}
    Z_UUID = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'

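    # Run arv_put.main() in-process with the given arguments, capturing
    # stdout and stderr in StringIO buffers for later inspection.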
    def call_main_with_args(self, args):
        self.main_stdout = StringIO()
        self.main_stderr = StringIO()
        return arv_put.main(args, self.main_stdout, self.main_stderr)

    def call_main_on_test_file(self, args=[]):
        with self.make_test_file() as testfile:
            path = testfile.name
            self.call_main_with_args(['--stream', '--no-progress'] + args + [path])
        self.assertTrue(
            os.path.exists(os.path.join(os.environ['KEEP_LOCAL_STORE'],
                                        '098f6bcd4621d373cade4e832627b4f6')),
            "did not find file stream in Keep store")

    def setUp(self):
        super(ArvadosPutTest, self).setUp()
        run_test_server.authorize_with('active')
        arv_put.api_client = None

    def tearDown(self):
        for outbuf in ['main_stdout', 'main_stderr']:
            if hasattr(self, outbuf):
                getattr(self, outbuf).close()
                delattr(self, outbuf)
        super(ArvadosPutTest, self).tearDown()

    def test_simple_file_put(self):
        self.call_main_on_test_file()

    def test_put_with_unwriteable_cache_dir(self):
        orig_cachedir = arv_put.ResumeCache.CACHE_DIR
        cachedir = self.make_tmpdir()
        os.chmod(cachedir, 0o0)
        arv_put.ResumeCache.CACHE_DIR = cachedir
        try:
            self.call_main_on_test_file()
        finally:
            arv_put.ResumeCache.CACHE_DIR = orig_cachedir
            os.chmod(cachedir, 0o700)

    def test_put_with_unwritable_cache_subdir(self):
        orig_cachedir = arv_put.ResumeCache.CACHE_DIR
        cachedir = self.make_tmpdir()
        os.chmod(cachedir, 0o0)
        arv_put.ResumeCache.CACHE_DIR = os.path.join(cachedir, 'cachedir')
        try:
            self.call_main_on_test_file()
        finally:
            arv_put.ResumeCache.CACHE_DIR = orig_cachedir
            os.chmod(cachedir, 0o700)

    def test_put_block_replication(self):
        with mock.patch('arvados.collection.KeepClient.local_store_put') as put_mock, \
             mock.patch('arvados.commands.put.ResumeCache.load') as cache_mock:
            cache_mock.side_effect = ValueError
            put_mock.return_value = 'acbd18db4cc2f85cedef654fccc4a4d8+3'
            self.call_main_on_test_file(['--replication', '1'])
            self.call_main_on_test_file(['--replication', '4'])
            self.call_main_on_test_file(['--replication', '5'])
            self.assertEqual(
                [x[-1].get('copies') for x in put_mock.call_args_list],
                [1, 4, 5])

    def test_normalize(self):
        testfile1 = self.make_test_file()
        testfile2 = self.make_test_file()
        test_paths = [testfile1.name, testfile2.name]
        # Reverse-sort the paths, so normalization must change their order.
        test_paths.sort(reverse=True)
        self.call_main_with_args(['--stream', '--no-progress', '--normalize'] +
                                 test_paths)
        manifest = self.main_stdout.getvalue()
        # Assert the second file we specified appears first in the manifest.
        file_indices = [manifest.find(':' + os.path.basename(path))
                        for path in test_paths]
        self.assertGreater(*file_indices)

    def test_error_name_without_collection(self):
        self.assertRaises(SystemExit, self.call_main_with_args,
                          ['--name', 'test without Collection',
                           '--stream', '/dev/null'])

    def test_error_when_project_not_found(self):
        self.assertRaises(SystemExit,
                          self.call_main_with_args,
                          ['--project-uuid', self.Z_UUID])

    def test_error_bad_project_uuid(self):
        self.assertRaises(SystemExit,
                          self.call_main_with_args,
                          ['--project-uuid', self.Z_UUID, '--stream'])


class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                            ArvadosBaseTestCase):
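    # Read blob_signing_key from the API server's Rails configuration so the
    # test Keep server can enforce permission signatures; if no key is
    # configured, fall back to running Keep without permission enforcement.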
    def _getKeepServerConfig():
        for config_file, mandatory in [
                ['application.yml', False], ['application.default.yml', True]]:
            path = os.path.join(run_test_server.SERVICES_SRC_DIR,
                                "api", "config", config_file)
            if not mandatory and not os.path.exists(path):
                continue
            with open(path) as f:
                rails_config = yaml.load(f.read())
                for config_section in ['test', 'common']:
                    try:
                        key = rails_config[config_section]["blob_signing_key"]
                    except (KeyError, TypeError):
                        pass
                    else:
                        return {'blob_signing_key': key,
                                'enforce_permissions': True}
        return {'blob_signing_key': None, 'enforce_permissions': False}

    MAIN_SERVER = {}
    KEEP_SERVER = _getKeepServerConfig()
    PROJECT_UUID = run_test_server.fixture('groups')['aproject']['uuid']

    @classmethod
    def setUpClass(cls):
        super(ArvPutIntegrationTest, cls).setUpClass()
        cls.ENVIRON = os.environ.copy()
        cls.ENVIRON['PYTHONPATH'] = ':'.join(sys.path)

    def setUp(self):
        super(ArvPutIntegrationTest, self).setUp()
        arv_put.api_client = None

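    # Authorize with the named test token, then copy the resulting API
    # settings into the environment used for arv-put subprocesses.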
    def authorize_with(self, token_name):
        run_test_server.authorize_with(token_name)
        for v in ["ARVADOS_API_HOST",
                  "ARVADOS_API_HOST_INSECURE",
                  "ARVADOS_API_TOKEN"]:
            self.ENVIRON[v] = arvados.config.settings()[v]
        arv_put.api_client = arvados.api('v1')

    def current_user(self):
        return arv_put.api_client.users().current().execute()

    def test_check_real_project_found(self):
        self.authorize_with('active')
        self.assertTrue(arv_put.desired_project_uuid(arv_put.api_client, self.PROJECT_UUID, 0),
                        "did not correctly find test fixture project")

    def test_check_error_finding_nonexistent_uuid(self):
        BAD_UUID = 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
        self.authorize_with('active')
        try:
            result = arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
                                                  0)
        except ValueError as error:
            self.assertIn(BAD_UUID, error.message)
        else:
            self.assertFalse(result, "incorrectly found nonexistent project")

    def test_check_error_finding_nonexistent_project(self):
        BAD_UUID = 'zzzzz-tpzed-zzzzzzzzzzzzzzz'
        self.authorize_with('active')
        with self.assertRaises(apiclient.errors.HttpError):
            result = arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
                                                  0)

    def test_short_put_from_stdin(self):
        # Have to run this as an integration test since arv-put can't
        # read from the tests' stdin.
        # arv-put usually can't stat(os.path.realpath('/dev/stdin')) in this
        # case, because the /proc entry is already gone by the time it tries.
        pipe = subprocess.Popen(
            [sys.executable, arv_put.__file__, '--stream'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT, env=self.ENVIRON)
        pipe.stdin.write('stdin test\n')
        pipe.stdin.close()
        deadline = time.time() + 5
        while (pipe.poll() is None) and (time.time() < deadline):
            time.sleep(.1)
        returncode = pipe.poll()
        if returncode is None:
            pipe.terminate()
            self.fail("arv-put did not PUT from stdin within 5 seconds")
        elif returncode != 0:
            sys.stdout.write(pipe.stdout.read())
            self.fail("arv-put returned exit code {}".format(returncode))
        self.assertIn('4a9c8b735dce4b5fa3acf221a0b13628+11', pipe.stdout.read())

    def test_ArvPutSignedManifest(self):
        # ArvPutSignedManifest runs "arv-put foo" and then attempts to get
        # the newly created manifest from the API server, testing to confirm
        # that the block locators in the returned manifest are signed.
        self.authorize_with('active')

        # Before doing anything, demonstrate that the collection
        # we're about to create is not present in our test fixture.
        manifest_uuid = "00b4e9f40ac4dd432ef89749f1c01e74+47"
        with self.assertRaises(apiclient.errors.HttpError):
            notfound = arv_put.api_client.collections().get(
                uuid=manifest_uuid).execute()

        datadir = self.make_tmpdir()
        with open(os.path.join(datadir, "foo"), "w") as f:
            f.write("The quick brown fox jumped over the lazy dog")
        p = subprocess.Popen([sys.executable, arv_put.__file__, datadir],
                             stdout=subprocess.PIPE, env=self.ENVIRON)
        (arvout, arverr) = p.communicate()
        self.assertEqual(arverr, None)
        self.assertEqual(p.returncode, 0)

        # The manifest text stored in the API server under the same
        # manifest UUID must use signed locators.
        c = arv_put.api_client.collections().get(uuid=manifest_uuid).execute()
        self.assertRegexpMatches(
            c['manifest_text'],
            r'^\. 08a008a01d498c404b0c30852b39d3b8\+44\+A[0-9a-f]+@[0-9a-f]+ 0:44:foo\n')

        os.remove(os.path.join(datadir, "foo"))
        os.rmdir(datadir)

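    # Run arv-put in a subprocess with the given stdin text and extra
    # arguments, then look up and return the collection it created (by
    # portable data hash when --portable-data-hash is given, else by UUID).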
    def run_and_find_collection(self, text, extra_args=[]):
        self.authorize_with('active')
        pipe = subprocess.Popen(
            [sys.executable, arv_put.__file__] + extra_args,
            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE, env=self.ENVIRON)
        stdout, stderr = pipe.communicate(text)
        search_key = ('portable_data_hash'
                      if '--portable-data-hash' in extra_args else 'uuid')
        collection_list = arvados.api('v1').collections().list(
            filters=[[search_key, '=', stdout.strip()]]).execute().get('items', [])
        self.assertEqual(1, len(collection_list))
        return collection_list[0]

    def test_put_collection_with_high_redundancy(self):
        # Write empty data: we're not testing CollectionWriter, just
        # making sure collections.create tells the API server what our
        # desired replication level is.
        collection = self.run_and_find_collection("", ['--replication', '4'])
        self.assertEqual(4, collection['replication_desired'])

    def test_put_collection_with_default_redundancy(self):
        collection = self.run_and_find_collection("")
        self.assertEqual(None, collection['replication_desired'])

    def test_put_collection_with_unnamed_project_link(self):
        link = self.run_and_find_collection(
            "Test unnamed collection",
            ['--portable-data-hash', '--project-uuid', self.PROJECT_UUID])
        username = pwd.getpwuid(os.getuid()).pw_name
        self.assertRegexpMatches(
            link['name'],
            r'^Saved at .* by {}@'.format(re.escape(username)))

    def test_put_collection_with_name_and_no_project(self):
        link_name = 'Test Collection Link in home project'
        collection = self.run_and_find_collection(
            "Test named collection in home project",
            ['--portable-data-hash', '--name', link_name])
        self.assertEqual(link_name, collection['name'])
        my_user_uuid = self.current_user()['uuid']
        self.assertEqual(my_user_uuid, collection['owner_uuid'])

    def test_put_collection_with_named_project_link(self):
        link_name = 'Test auto Collection Link'
        collection = self.run_and_find_collection("Test named collection",
                                      ['--portable-data-hash',
                                       '--name', link_name,
                                       '--project-uuid', self.PROJECT_UUID])
        self.assertEqual(link_name, collection['name'])


if __name__ == '__main__':
    unittest.main()