-from __future__ import absolute_import
-# usage example:
+# Copyright (C) The Arvados Authors. All rights reserved.
#
-# ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import absolute_import
from builtins import object
import arvados
import copy
import mock
import os
-import pprint
+import random
import re
-import tempfile
+import sys
+import datetime
+import ciso8601
+import time
import unittest
+import parameterized
from . import run_test_server
from arvados._ranges import Range, LocatorAndRange
from arvados.collection import Collection, CollectionReader
from . import arvados_testutil as tutil
+from .arvados_testutil import make_block_cache
class TestResumableWriter(arvados.ResumableCollectionWriter):
KEEP_BLOCK_SIZE = 1024 # PUT to Keep every 1K.
def current_state(self):
return self.dump_state(copy.deepcopy)
-
+@parameterized.parameterized_class([{"disk_cache": True}, {"disk_cache": False}])
class ArvadosCollectionsTest(run_test_server.TestCaseWithServers,
tutil.ArvadosBaseTestCase):
+ disk_cache = False
MAIN_SERVER = {}
@classmethod
def setUpClass(cls):
super(ArvadosCollectionsTest, cls).setUpClass()
- run_test_server.authorize_with('active')
+ # need admin privileges to make collections with unsigned blocks
+ run_test_server.authorize_with('admin')
cls.api_client = arvados.api('v1')
cls.keep_client = arvados.KeepClient(api_client=cls.api_client,
- local_store=cls.local_store)
+ local_store=cls.local_store,
+ block_cache=make_block_cache(cls.disk_cache))
def write_foo_bar_baz(self):
cw = arvados.CollectionWriter(self.api_client)
". 3858f62230ac3c915f300c664312c63f+6 0:3:foo.txt 3:3:bar.txt\n" +
"./baz 73feffa4b7f6bb68e44cf984c85f6e88+3 0:3:baz.txt\n",
"wrong manifest: got {}".format(cw.manifest_text()))
- cw.finish()
+ cw.save_new()
return cw.portable_data_hash()
+    def test_pdh_is_native_str(self):
+        # portable_data_hash() must return the platform-native str type,
+        # i.e. the same type as a '' literal (bytes on py2, unicode on py3).
+        pdh = self.write_foo_bar_baz()
+        self.assertEqual(type(''), type(pdh))
+
def test_keep_local_store(self):
self.assertEqual(self.keep_client.put(b'foo'), 'acbd18db4cc2f85cedef654fccc4a4d8+3', 'wrong md5 hash from Keep.put')
self.assertEqual(self.keep_client.get('acbd18db4cc2f85cedef654fccc4a4d8+3'), b'foo', 'wrong data from Keep.get')
self.assertEqual(stream0.readfrom(2**26, 0),
b'',
'reading zero bytes should have returned empty string')
+ self.assertEqual(3, len(cr))
+ self.assertTrue(cr)
def _test_subset(self, collection, expected):
cr = arvados.CollectionReader(collection, self.api_client)
class MockKeep(object):
def __init__(self, content, num_retries=0):
self.content = content
+ self.num_prefetch_threads = 1
- def get(self, locator, num_retries=0):
+ def get(self, locator, num_retries=0, prefetch=False):
return self.content[locator]
def test_stream_reader(self):
self.assertRaises(arvados.errors.AssertionError,
cwriter.write, "badtext")
- def test_read_arbitrary_data_with_collection_reader(self):
- # arv-get relies on this to do "arv-get {keep-locator} -".
- self.write_foo_bar_baz()
- self.assertEqual(
- 'foobar',
- arvados.CollectionReader(
- '3858f62230ac3c915f300c664312c63f+6'
- ).manifest_text())
-
class CollectionTestMixin(tutil.ApiClientMock):
API_COLLECTIONS = run_test_server.fixture('collections')
@tutil.skip_sleep
class CollectionReaderTestCase(unittest.TestCase, CollectionTestMixin):
- def mock_get_collection(self, api_mock, code, body):
- body = self.API_COLLECTIONS.get(body)
+ def mock_get_collection(self, api_mock, code, fixturename):
+ body = self.API_COLLECTIONS.get(fixturename)
self._mock_api_call(api_mock.collections().get, code, body)
def api_client_mock(self, status=200):
self.mock_get_collection(client, status, 'foo_file')
return client
- def test_init_no_default_retries(self):
+ def test_init_default_retries(self):
client = self.api_client_mock(200)
reader = arvados.CollectionReader(self.DEFAULT_UUID, api_client=client)
reader.manifest_text()
- client.collections().get().execute.assert_called_with(num_retries=0)
+ client.collections().get().execute.assert_called_with(num_retries=10)
def test_uuid_init_success(self):
client = self.api_client_mock(200)
api_client=client)
self.assertEqual(self.DEFAULT_MANIFEST, reader.manifest_text())
- def test_locator_init_fallback_to_keep(self):
- # crunch-job needs this to read manifests that have only ever
- # been written to Keep.
- client = self.api_client_mock(200)
- self.mock_get_collection(client, 404, None)
- with tutil.mock_keep_responses(self.DEFAULT_MANIFEST, 200):
- reader = arvados.CollectionReader(self.DEFAULT_DATA_HASH,
- api_client=client)
- self.assertEqual(self.DEFAULT_MANIFEST, reader.manifest_text())
-
- def test_uuid_init_no_fallback_to_keep(self):
- # Do not look up a collection UUID in Keep.
- client = self.api_client_mock(404)
- with tutil.mock_keep_responses(self.DEFAULT_MANIFEST, 200):
- with self.assertRaises(arvados.errors.ApiError):
- reader = arvados.CollectionReader(self.DEFAULT_UUID,
- api_client=client)
-
- def test_try_keep_first_if_permission_hint(self):
- # To verify that CollectionReader tries Keep first here, we
- # mock API server to return the wrong data.
- client = self.api_client_mock(200)
- with tutil.mock_keep_responses(self.ALT_MANIFEST, 200):
- self.assertEqual(
- self.ALT_MANIFEST,
- arvados.CollectionReader(
- self.ALT_DATA_HASH + '+Affffffffffffffffffffffffffffffffffffffff@fedcba98',
- api_client=client).manifest_text())
+ def test_init_no_fallback_to_keep(self):
+ # Do not look up a collection UUID or PDH in Keep.
+ for key in [self.DEFAULT_UUID, self.DEFAULT_DATA_HASH]:
+ client = self.api_client_mock(404)
+ with tutil.mock_keep_responses(self.DEFAULT_MANIFEST, 200):
+ with self.assertRaises(arvados.errors.ApiError):
+ reader = arvados.CollectionReader(key, api_client=client)
def test_init_num_retries_propagated(self):
# More of an integration test...
reader = arvados.CollectionReader('d41d8cd98f00b204e9800998ecf8427e+0',
api_client=client)
self.assertEqual('', reader.manifest_text())
+ self.assertEqual(0, len(reader))
+ self.assertFalse(reader)
def test_api_response(self):
client = self.api_client_mock()
reader = arvados.CollectionReader(self.DEFAULT_UUID, api_client=client)
self.assertEqual(self.DEFAULT_COLLECTION, reader.api_response())
- def test_api_response_with_collection_from_keep(self):
- client = self.api_client_mock()
- self.mock_get_collection(client, 404, 'foo')
- with tutil.mock_keep_responses(self.DEFAULT_MANIFEST, 200):
- reader = arvados.CollectionReader(self.DEFAULT_DATA_HASH,
- api_client=client)
- api_response = reader.api_response()
- self.assertIsNone(api_response)
-
def check_open_file(self, coll_file, stream_name, file_name, file_size):
self.assertFalse(coll_file.closed, "returned file is not open")
self.assertEqual(stream_name, coll_file.stream_name())
def test_open_collection_file_one_argument(self):
client = self.api_client_mock(200)
reader = arvados.CollectionReader(self.DEFAULT_UUID, api_client=client)
- cfile = reader.open('./foo')
+ cfile = reader.open('./foo', 'rb')
self.check_open_file(cfile, '.', 'foo', 3)
def test_open_deep_file(self):
self.mock_get_collection(client, 200, coll_name)
reader = arvados.CollectionReader(
self.API_COLLECTIONS[coll_name]['uuid'], api_client=client)
- cfile = reader.open('./subdir2/subdir3/file2_in_subdir3.txt')
+ cfile = reader.open('./subdir2/subdir3/file2_in_subdir3.txt', 'rb')
self.check_open_file(cfile, './subdir2/subdir3', 'file2_in_subdir3.txt',
32)
self.assertRaises(arvados.errors.AssertionError, writer.open, 'two')
+class CollectionMethods(run_test_server.TestCaseWithServers):
+    """Tests for dict-style accessors (keys/values/items) and metadata
+    getters (get_properties, get_trash_at) on Collection objects.
+    """
+
+    def test_keys_values_items_support_indexing(self):
+        # values() and items() must support indexing on both Python 2 and 3;
+        # keys() supports indexing only under Python 2 (see branch below).
+        c = Collection()
+        with c.open('foo', 'wb') as f:
+            f.write(b'foo')
+        with c.open('bar', 'wb') as f:
+            f.write(b'bar')
+        self.assertEqual(2, len(c.keys()))
+        if sys.version_info < (3, 0):
+            # keys() supports indexing only for python2 callers.
+            fn0 = c.keys()[0]
+            fn1 = c.keys()[1]
+        else:
+            fn0, fn1 = c.keys()
+        self.assertEqual(2, len(c.values()))
+        f0 = c.values()[0]
+        f1 = c.values()[1]
+        self.assertEqual(2, len(c.items()))
+        self.assertEqual(fn0, c.items()[0][0])
+        self.assertEqual(fn1, c.items()[1][0])
+
+    def test_get_properties(self):
+        # get_properties() is empty for a fresh collection and reflects the
+        # properties passed to save_new().
+        c = Collection()
+        self.assertEqual(c.get_properties(), {})
+        c.save_new(properties={"foo":"bar"})
+        self.assertEqual(c.get_properties(), {"foo":"bar"})
+
+    def test_get_trash_at(self):
+        # get_trash_at() is None until the collection is saved with an
+        # explicit trash_at timestamp; afterwards it returns that timestamp.
+        c = Collection()
+        self.assertEqual(c.get_trash_at(), None)
+        c.save_new(trash_at=datetime.datetime(2111, 1, 1, 11, 11, 11, 111111))
+        self.assertEqual(c.get_trash_at(), ciso8601.parse_datetime('2111-01-01T11:11:11.111111000Z'))
+
+
+class CollectionOpenModes(run_test_server.TestCaseWithServers):
+    """Tests for the file mode strings accepted by Collection.open()."""
+
+    def test_open_binary_modes(self):
+        # All binary write/append variants should accept bytes payloads.
+        c = Collection()
+        for mode in ['wb', 'wb+', 'ab', 'ab+']:
+            with c.open('foo', mode) as f:
+                f.write(b'foo')
+
+    def test_open_invalid_modes(self):
+        # Malformed or unsupported mode strings must raise rather than
+        # silently returning a file handle.
+        c = Collection()
+        for mode in ['+r', 'aa', '++', 'r+b', 'beer', '', None]:
+            with self.assertRaises(Exception):
+                c.open('foo', mode)
+
+    def test_open_text_modes(self):
+        c = Collection()
+        # NOTE(review): writes a str through a 'wb' handle -- assumes the
+        # SDK file object accepts text in binary mode; confirm.
+        with c.open('foo', 'wb') as f:
+            f.write('foo')
+        for mode in ['r', 'rt', 'r+', 'rt+', 'w', 'wt', 'a', 'at']:
+            with c.open('foo', mode) as f:
+                # Read-only modes should see the existing content; writable
+                # text modes are checked via a write + seek + re-read.
+                if mode[0] == 'r' and '+' not in mode:
+                    self.assertEqual('foo', f.read(3))
+                else:
+                    f.write('bar')
+                    f.seek(0, os.SEEK_SET)
+                    self.assertEqual('bar', f.read(3))
+
+
+class TextModes(run_test_server.TestCaseWithServers):
+    """Tests that multi-byte UTF-8 characters split across Keep block
+    boundaries are written and reassembled correctly in text mode.
+    """
+
+    def setUp(self):
+        # Shrink the Keep block size to 4 bytes so a single 3-byte UTF-8
+        # character can straddle a block boundary.
+        arvados.config.KEEP_BLOCK_SIZE = 4
+        if sys.version_info < (3, 0):
+            import unicodedata
+            self.sailboat = unicodedata.lookup('SAILBOAT')
+            self.snowman = unicodedata.lookup('SNOWMAN')
+        else:
+            self.sailboat = '\N{SAILBOAT}'
+            self.snowman = '\N{SNOWMAN}'
+
+    def tearDown(self):
+        # Restore the default 64 MiB block size for subsequent tests.
+        arvados.config.KEEP_BLOCK_SIZE = 2 ** 26
+
+    def test_read_sailboat_across_block_boundary(self):
+        # Write 7 bytes of UTF-8 so the second sailboat character is split
+        # across the 4-byte block boundary, then check that readline()
+        # reassembles both characters.
+        c = Collection()
+        f = c.open('sailboats', 'wb')
+        data = self.sailboat.encode('utf-8')
+        f.write(data)
+        f.write(data[:1])
+        f.write(data[1:])
+        f.write(b'\n')
+        f.close()
+        # Manifest should show two blocks: 4 bytes + 3 bytes.
+        self.assertRegex(c.portable_manifest_text(), r'\+4 .*\+3 ')
+
+        f = c.open('sailboats', 'r')
+        string = f.readline()
+        self.assertEqual(string, self.sailboat+self.sailboat+'\n')
+        f.close()
+
+    def test_write_snowman_across_block_boundary(self):
+        # Text-mode writes must also split cleanly across block boundaries
+        # and round-trip through readline().
+        c = Collection()
+        f = c.open('snowmany', 'w')
+        data = self.snowman
+        f.write(data+data+'\n'+data+'\n')
+        f.close()
+        self.assertRegex(c.portable_manifest_text(), r'\+4 .*\+4 .*\+3 ')
+
+        f = c.open('snowmany', 'r')
+        self.assertEqual(f.readline(), self.snowman+self.snowman+'\n')
+        self.assertEqual(f.readline(), self.snowman+'\n')
+        f.close()
+
+
class NewCollectionTestCase(unittest.TestCase, CollectionTestMixin):
def test_replication_desired_kept_on_load(self):
c1.save_new()
loc = c1.manifest_locator()
c2 = Collection(loc)
- self.assertEqual(c1.manifest_text, c2.manifest_text)
+ self.assertEqual(c1.manifest_text(strip=True), c2.manifest_text(strip=True))
self.assertEqual(c1.replication_desired, c2.replication_desired)
def test_replication_desired_not_loaded_if_provided(self):
c1.save_new()
loc = c1.manifest_locator()
c2 = Collection(loc, replication_desired=2)
- self.assertEqual(c1.manifest_text, c2.manifest_text)
+ self.assertEqual(c1.manifest_text(strip=True), c2.manifest_text(strip=True))
self.assertNotEqual(c1.replication_desired, c2.replication_desired)
+    def test_storage_classes_desired_kept_on_load(self):
+        # storage_classes_desired set at creation should be persisted by
+        # save_new() and recovered when the collection is reloaded by locator.
+        m = '. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt 0:10:count2.txt\n'
+        c1 = Collection(m, storage_classes_desired=['archival'])
+        c1.save_new()
+        loc = c1.manifest_locator()
+        c2 = Collection(loc)
+        self.assertEqual(c1.manifest_text(strip=True), c2.manifest_text(strip=True))
+        self.assertEqual(c1.storage_classes_desired(), c2.storage_classes_desired())
+
+    def test_storage_classes_change_after_save(self):
+        # save(storage_classes=...) should update the desired classes both
+        # on the live object and on the stored record.
+        m = '. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt 0:10:count2.txt\n'
+        c1 = Collection(m, storage_classes_desired=['archival'])
+        c1.save_new()
+        loc = c1.manifest_locator()
+        c2 = Collection(loc)
+        self.assertEqual(['archival'], c2.storage_classes_desired())
+        c2.save(storage_classes=['highIO'])
+        self.assertEqual(['highIO'], c2.storage_classes_desired())
+        c3 = Collection(loc)
+        self.assertEqual(c1.manifest_text(strip=True), c3.manifest_text(strip=True))
+        self.assertEqual(['highIO'], c3.storage_classes_desired())
+
+    def test_storage_classes_desired_not_loaded_if_provided(self):
+        # An explicit storage_classes_desired argument takes precedence over
+        # the value stored in the API record.
+        m = '. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt 0:10:count2.txt\n'
+        c1 = Collection(m, storage_classes_desired=['archival'])
+        c1.save_new()
+        loc = c1.manifest_locator()
+        c2 = Collection(loc, storage_classes_desired=['default'])
+        self.assertEqual(c1.manifest_text(strip=True), c2.manifest_text(strip=True))
+        self.assertNotEqual(c1.storage_classes_desired(), c2.storage_classes_desired())
+
def test_init_manifest(self):
m1 = """. 5348b82a029fd9e971a811ce1f71360b+43 0:43:md5sum.txt
. 085c37f02916da1cad16f93c54d899b7+41 0:41:md5sum.txt
with self.assertRaises(arvados.errors.ArgumentError):
c.remove("")
+    def test_remove_recursive(self):
+        # remove() refuses to delete a non-empty directory unless
+        # recursive=True; the emptied parent dir stays in the manifest as a
+        # zero-length placeholder ('\056') entry.
+        c = Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:a/b/c/d/efg.txt 0:10:xyz.txt\n')
+        self.assertEqual(". 781e5e245d69b566979b86e28d23f2c7+10 0:10:xyz.txt\n./a/b/c/d 781e5e245d69b566979b86e28d23f2c7+10 0:10:efg.txt\n", c.portable_manifest_text())
+        self.assertIn("a", c)
+        self.assertEqual(1, len(c["a"].keys()))
+        # cannot remove non-empty directory with default recursive=False
+        with self.assertRaises(OSError):
+            c.remove("a/b")
+        with self.assertRaises(OSError):
+            c.remove("a/b/c/d")
+        c.remove("a/b", recursive=True)
+        self.assertEqual(0, len(c["a"].keys()))
+        self.assertEqual(". 781e5e245d69b566979b86e28d23f2c7+10 0:10:xyz.txt\n./a d41d8cd98f00b204e9800998ecf8427e+0 0:0:\\056\n", c.portable_manifest_text())
+
def test_find(self):
c = Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt 0:10:count2.txt\n')
self.assertIs(c.find("."), c)
self.assertIs(c.find("./nonexistant.txt"), None)
self.assertIs(c.find("./nonexistantsubdir/nonexistant.txt"), None)
+    def test_escaped_paths_dont_get_unescaped_on_manifest(self):
+        # Dir & file names are literally '\056' (escaped form: \134056)
+        manifest = './\\134056\\040Test d41d8cd98f00b204e9800998ecf8427e+0 0:0:\\134056\n'
+        c = Collection(manifest)
+        # Round-tripping the manifest must preserve the escape sequences.
+        self.assertEqual(c.portable_manifest_text(), manifest)
+
+    def test_other_special_chars_on_file_token(self):
+        # For each (encoded, decoded) pair: the manifest keeps the octal
+        # escape while the directory listing exposes the decoded character.
+        cases = [
+            ('\\000', '\0'),
+            ('\\011', '\t'),
+            ('\\012', '\n'),
+            ('\\072', ':'),
+            ('\\134400', '\\400'),
+        ]
+        for encoded, decoded in cases:
+            manifest = '. d41d8cd98f00b204e9800998ecf8427e+0 0:0:some%sfile.txt\n' % encoded
+            c = Collection(manifest)
+            self.assertEqual(c.portable_manifest_text(), manifest)
+            self.assertIn('some%sfile.txt' % decoded, c.keys())
+
+    def test_escaped_paths_do_get_unescaped_on_listing(self):
+        # Dir & file names are literally '\056' (escaped form: \134056)
+        manifest = './\\134056\\040Test d41d8cd98f00b204e9800998ecf8427e+0 0:0:\\134056\n'
+        c = Collection(manifest)
+        self.assertIn('\\056 Test', c.keys())
+        self.assertIn('\\056', c['\\056 Test'].keys())
+
+    def test_make_empty_dir_with_escaped_chars(self):
+        # mkdirs() should escape special characters when emitting the
+        # manifest ('\' itself becomes \134).
+        c = Collection()
+        c.mkdirs('./Empty\\056Dir')
+        self.assertEqual(c.portable_manifest_text(),
+                         './Empty\\134056Dir d41d8cd98f00b204e9800998ecf8427e+0 0:0:\\056\n')
+
+    def test_make_empty_dir_with_spaces(self):
+        # Spaces in path components are stored as \040 in the manifest.
+        c = Collection()
+        c.mkdirs('./foo bar/baz waz')
+        self.assertEqual(c.portable_manifest_text(),
+                         './foo\\040bar/baz\\040waz d41d8cd98f00b204e9800998ecf8427e+0 0:0:\\056\n')
+
def test_remove_in_subdir(self):
c = Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt\n./foo 781e5e245d69b566979b86e28d23f2c7+10 0:10:count2.txt\n')
c.remove("foo/count2.txt")
- self.assertEqual(". 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt\n", c.portable_manifest_text())
+ self.assertEqual(". 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt\n./foo d41d8cd98f00b204e9800998ecf8427e+0 0:0:\\056\n", c.portable_manifest_text())
def test_remove_empty_subdir(self):
c = Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt\n./foo 781e5e245d69b566979b86e28d23f2c7+10 0:10:count2.txt\n')
('add', './count2.txt', c2["count2.txt"]),
('del', './count1.txt', c1["count1.txt"]),
])
- f = c1.open("count1.txt", "w")
+ f = c1.open("count1.txt", "wb")
f.write(b"zzzzz")
# c1 changed, so it should not be deleted.
c2 = Collection('. 5348b82a029fd9e971a811ce1f71360b+43 0:10:count1.txt')
d = c1.diff(c2)
self.assertEqual(d, [('mod', './count1.txt', c1["count1.txt"], c2["count1.txt"])])
- f = c1.open("count1.txt", "w")
+ f = c1.open("count1.txt", "wb")
f.write(b"zzzzz")
# c1 changed, so c2 mod will go to a conflict file
('add', './count1.txt', c2["count1.txt"]),
('del', './count2.txt', c1["count2.txt"]),
])
- f = c1.open("count1.txt", "w")
+ f = c1.open("count1.txt", "wb")
f.write(b"zzzzz")
# c1 added count1.txt, so c2 add will go to a conflict file
c1 = Collection()
events = []
c1.subscribe(lambda event, collection, name, item: events.append((event, collection, name, item)))
- f = c1.open("foo.txt", "w")
+ f = c1.open("foo.txt", "wb")
self.assertEqual(events[0], (arvados.collection.ADD, c1, "foo.txt", f.arvadosfile))
def test_open_w(self):
c1 = Collection(". 781e5e245d69b566979b86e28d23f2c7+10 0:10:count1.txt\n")
self.assertEqual(c1["count1.txt"].size(), 10)
- c1.open("count1.txt", "w").close()
+ c1.open("count1.txt", "wb").close()
self.assertEqual(c1["count1.txt"].size(), 0)
+class NewCollectionTestCaseWithServersAndTokens(run_test_server.TestCaseWithServers):
+    """Tests needing live API and Keep servers that inspect the permission
+    tokens (+A / +R locator hints) embedded in collection manifests.
+    """
+    MAIN_SERVER = {}
+    KEEP_SERVER = {}
+    # Block locator carrying a local-cluster signature (+A hint).
+    local_locator_re = r"[0-9a-f]{32}\+\d+\+A[a-f0-9]{40}@[a-f0-9]{8}"
+    # Block locator referencing a remote cluster (+R hint).
+    remote_locator_re = r"[0-9a-f]{32}\+\d+\+R[a-z]{5}-[a-f0-9]{40}@[a-f0-9]{8}"
+
+    def setUp(self):
+        # Keep a handle on the real KeepClient.put so mocked tests can
+        # delegate to it.
+        self.keep_put = getattr(arvados.keep.KeepClient, 'put')
+
+    @mock.patch('arvados.keep.KeepClient.put', autospec=True)
+    def test_storage_classes_desired(self, put_mock):
+        # The collection's desired storage classes must be forwarded to
+        # KeepClient.put through the 'classes' keyword argument.
+        put_mock.side_effect = self.keep_put
+        c = Collection(storage_classes_desired=['default'])
+        # NOTE(review): passes a str to a 'wb' handle -- assumes the SDK
+        # file object accepts text here; confirm.
+        with c.open("file.txt", 'wb') as f:
+            f.write('content')
+        c.save_new()
+        _, kwargs = put_mock.call_args
+        self.assertEqual(['default'], kwargs['classes'])
+
+    @mock.patch('arvados.keep.KeepClient.put', autospec=True)
+    def test_repacked_block_submission_get_permission_token(self, mocked_put):
+        '''
+        Make sure that those blocks that are committed after repacking small ones,
+        get their permission tokens assigned on the collection manifest.
+        '''
+        def wrapped_keep_put(*args, **kwargs):
+            # Simulate slow put operations
+            time.sleep(1)
+            return self.keep_put(*args, **kwargs)
+
+        mocked_put.side_effect = wrapped_keep_put
+        c = Collection()
+        # Write 70 files ~1MiB each so we force to produce 1 big block by repacking
+        # small ones before finishing the upload.
+        for i in range(70):
+            f = c.open("file_{}.txt".format(i), 'wb')
+            f.write(random.choice('abcdefghijklmnopqrstuvwxyz') * (2**20+i))
+            f.close(flush=False)
+        # We should get 2 blocks with their tokens
+        self.assertEqual(len(re.findall(self.local_locator_re, c.manifest_text())), 2)
+
+    @mock.patch('arvados.keep.KeepClient.refresh_signature')
+    def test_copy_remote_blocks_on_save_new(self, rs_mock):
+        # save_new() must replace remote (+R) block locators with locally
+        # signed (+A) ones via KeepClient.refresh_signature.
+        remote_block_loc = "acbd18db4cc2f85cedef654fccc4a4d8+3+Remote-" + "a" * 40 + "@abcdef01"
+        local_block_loc = "acbd18db4cc2f85cedef654fccc4a4d8+3+A" + "b" * 40 + "@abcdef01"
+        rs_mock.return_value = local_block_loc
+        c = Collection(". " + remote_block_loc + " 0:3:foofile.txt\n")
+        self.assertEqual(
+            len(re.findall(self.remote_locator_re, c.manifest_text())), 1)
+        self.assertEqual(
+            len(re.findall(self.local_locator_re, c.manifest_text())), 0)
+        c.save_new()
+        rs_mock.assert_called()
+        self.assertEqual(
+            len(re.findall(self.remote_locator_re, c.manifest_text())), 0)
+        self.assertEqual(
+            len(re.findall(self.local_locator_re, c.manifest_text())), 1)
+
+    @mock.patch('arvados.keep.KeepClient.refresh_signature')
+    def test_copy_remote_blocks_on_save(self, rs_mock):
+        # Copying a file from a remote collection leaves a +R locator in the
+        # local manifest until save(), which localizes it to a +A locator.
+        remote_block_loc = "acbd18db4cc2f85cedef654fccc4a4d8+3+Remote-" + "a" * 40 + "@abcdef01"
+        local_block_loc = "acbd18db4cc2f85cedef654fccc4a4d8+3+A" + "b" * 40 + "@abcdef01"
+        rs_mock.return_value = local_block_loc
+        # Remote collection
+        remote_c = Collection(". " + remote_block_loc + " 0:3:foofile.txt\n")
+        self.assertEqual(
+            len(re.findall(self.remote_locator_re, remote_c.manifest_text())), 1)
+        # Local collection
+        local_c = Collection()
+        with local_c.open('barfile.txt', 'wb') as f:
+            f.write('bar')
+        local_c.save_new()
+        self.assertEqual(
+            len(re.findall(self.local_locator_re, local_c.manifest_text())), 1)
+        self.assertEqual(
+            len(re.findall(self.remote_locator_re, local_c.manifest_text())), 0)
+        # Copy remote file to local collection
+        local_c.copy('./foofile.txt', './copied/foofile.txt', remote_c)
+        self.assertEqual(
+            len(re.findall(self.local_locator_re, local_c.manifest_text())), 1)
+        self.assertEqual(
+            len(re.findall(self.remote_locator_re, local_c.manifest_text())), 1)
+        # Save local collection: remote block should be copied
+        local_c.save()
+        rs_mock.assert_called()
+        self.assertEqual(
+            len(re.findall(self.local_locator_re, local_c.manifest_text())), 2)
+        self.assertEqual(
+            len(re.findall(self.remote_locator_re, local_c.manifest_text())), 0)
+
+
class NewCollectionTestCaseWithServers(run_test_server.TestCaseWithServers):
+    def test_preserve_version_on_save(self):
+        # Saving with preserve_version=True should bump the record's version
+        # and keep the preserve_version flag set; a later save with
+        # preserve_version=False bumps the version but clears the flag.
+        c = Collection()
+        c.save_new(preserve_version=True)
+        coll_record = arvados.api().collections().get(uuid=c.manifest_locator()).execute()
+        self.assertEqual(coll_record['version'], 1)
+        self.assertEqual(coll_record['preserve_version'], True)
+        with c.open("foo.txt", "wb") as foo:
+            foo.write(b"foo")
+        c.save(preserve_version=True)
+        coll_record = arvados.api().collections().get(uuid=c.manifest_locator()).execute()
+        self.assertEqual(coll_record['version'], 2)
+        self.assertEqual(coll_record['preserve_version'], True)
+        with c.open("bar.txt", "wb") as foo:
+            foo.write(b"bar")
+        c.save(preserve_version=False)
+        coll_record = arvados.api().collections().get(uuid=c.manifest_locator()).execute()
+        self.assertEqual(coll_record['version'], 3)
+        self.assertEqual(coll_record['preserve_version'], False)
+
def test_get_manifest_text_only_committed(self):
c = Collection()
- with c.open("count.txt", "w") as f:
+ with c.open("count.txt", "wb") as f:
# One file committed
- with c.open("foo.txt", "w") as foo:
+ with c.open("foo.txt", "wb") as foo:
foo.write(b"foo")
foo.flush() # Force block commit
f.write(b"0123456789")
def test_only_small_blocks_are_packed_together(self):
c = Collection()
- # Write a couple of small files,
- f = c.open("count.txt", "w")
+ # Write a couple of small files,
+ f = c.open("count.txt", "wb")
f.write(b"0123456789")
f.close(flush=False)
- foo = c.open("foo.txt", "w")
+ foo = c.open("foo.txt", "wb")
foo.write(b"foo")
foo.close(flush=False)
# Then, write a big file, it shouldn't be packed with the ones above
- big = c.open("bigfile.txt", "w")
+ big = c.open("bigfile.txt", "wb")
big.write(b"x" * 1024 * 1024 * 33) # 33 MB > KEEP_BLOCK_SIZE/2
big.close(flush=False)
self.assertEqual(
c.manifest_text("."),
'. 2d303c138c118af809f39319e5d507e9+34603008 a8430a058b8fbf408e1931b794dbd6fb+13 0:34603008:bigfile.txt 34603008:10:count.txt 34603018:3:foo.txt\n')
+    def test_flush_after_small_block_packing(self):
+        # Closing small files with flush=False packs them into one block;
+        # reopening one and flushing must not repack or change the manifest.
+        c = Collection()
+        # Write a couple of small files,
+        f = c.open("count.txt", "wb")
+        f.write(b"0123456789")
+        f.close(flush=False)
+        foo = c.open("foo.txt", "wb")
+        foo.write(b"foo")
+        foo.close(flush=False)
+
+        self.assertEqual(
+            c.manifest_text(),
+            '. a8430a058b8fbf408e1931b794dbd6fb+13 0:10:count.txt 10:3:foo.txt\n')
+
+        f = c.open("count.txt", "rb+")
+        f.close(flush=True)
+
+        self.assertEqual(
+            c.manifest_text(),
+            '. a8430a058b8fbf408e1931b794dbd6fb+13 0:10:count.txt 10:3:foo.txt\n')
+
+    def test_write_after_small_block_packing2(self):
+        # Overwriting the start of a packed file should produce a new block
+        # for the changed range while the untouched tail still references
+        # the originally packed block.
+        c = Collection()
+        # Write a couple of small files,
+        f = c.open("count.txt", "wb")
+        f.write(b"0123456789")
+        f.close(flush=False)
+        foo = c.open("foo.txt", "wb")
+        foo.write(b"foo")
+        foo.close(flush=False)
+
+        self.assertEqual(
+            c.manifest_text(),
+            '. a8430a058b8fbf408e1931b794dbd6fb+13 0:10:count.txt 10:3:foo.txt\n')
+
+        f = c.open("count.txt", "rb+")
+        f.write(b"abc")
+        f.close(flush=False)
+
+        self.assertEqual(
+            c.manifest_text(),
+            '. 900150983cd24fb0d6963f7d28e17f72+3 a8430a058b8fbf408e1931b794dbd6fb+13 0:3:count.txt 6:7:count.txt 13:3:foo.txt\n')
+
+
+    def test_small_block_packing_with_overwrite(self):
+        # Overwriting one small file in place must keep both the collection
+        # manifest and each file's individual manifest consistent.
+        c = Collection()
+        c.open("b1", "wb").close()
+        c["b1"].writeto(0, b"b1", 0)
+
+        c.open("b2", "wb").close()
+        c["b2"].writeto(0, b"b2", 0)
+
+        c["b1"].writeto(0, b"1b", 0)
+
+        self.assertEqual(c.manifest_text(), ". ed4f3f67c70b02b29c50ce1ea26666bd+4 0:2:b1 2:2:b2\n")
+        self.assertEqual(c["b1"].manifest_text(), ". ed4f3f67c70b02b29c50ce1ea26666bd+4 0:2:b1\n")
+        self.assertEqual(c["b2"].manifest_text(), ". ed4f3f67c70b02b29c50ce1ea26666bd+4 2:2:b2\n")
+
class CollectionCreateUpdateTest(run_test_server.TestCaseWithServers):
MAIN_SERVER = {}
self.assertEqual(c.portable_data_hash(), "d41d8cd98f00b204e9800998ecf8427e+0")
self.assertEqual(c.api_response()["portable_data_hash"], "d41d8cd98f00b204e9800998ecf8427e+0" )
- with c.open("count.txt", "w") as f:
+ with c.open("count.txt", "wb") as f:
f.write(b"0123456789")
self.assertEqual(c.portable_manifest_text(), ". 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt\n")
def test_create_and_save(self):
c = self.create_count_txt()
- c.save()
+ c.save(properties={'type' : 'Intermediate'},
+ storage_classes=['archive'],
+ trash_at=datetime.datetime(2111, 1, 1, 11, 11, 11, 111111))
+
self.assertRegex(
c.manifest_text(),
r"^\. 781e5e245d69b566979b86e28d23f2c7\+10\+A[a-f0-9]{40}@[a-f0-9]{8} 0:10:count\.txt$",)
+ self.assertEqual(c.api_response()["storage_classes_desired"], ['archive'])
+ self.assertEqual(c.api_response()["properties"], {'type' : 'Intermediate'})
+ self.assertEqual(c.api_response()["trash_at"], '2111-01-01T11:11:11.111111000Z')
+
def test_create_and_save_new(self):
c = self.create_count_txt()
- c.save_new()
+ c.save_new(properties={'type' : 'Intermediate'},
+ storage_classes=['archive'],
+ trash_at=datetime.datetime(2111, 1, 1, 11, 11, 11, 111111))
+
self.assertRegex(
c.manifest_text(),
r"^\. 781e5e245d69b566979b86e28d23f2c7\+10\+A[a-f0-9]{40}@[a-f0-9]{8} 0:10:count\.txt$",)
+ self.assertEqual(c.api_response()["storage_classes_desired"], ['archive'])
+ self.assertEqual(c.api_response()["properties"], {'type' : 'Intermediate'})
+ self.assertEqual(c.api_response()["trash_at"], '2111-01-01T11:11:11.111111000Z')
+
+    def test_create_and_save_after_commiting(self):
+        # A second save() should replace properties, storage classes and
+        # trash_at with the newly supplied values on the stored record.
+        c = self.create_count_txt()
+        c.save(properties={'type' : 'Intermediate'},
+               storage_classes=['hot'],
+               trash_at=datetime.datetime(2111, 1, 1, 11, 11, 11, 111111))
+        c.save(properties={'type' : 'Output'},
+               storage_classes=['cold'],
+               trash_at=datetime.datetime(2222, 2, 2, 22, 22, 22, 222222))
+
+        self.assertEqual(c.api_response()["storage_classes_desired"], ['cold'])
+        self.assertEqual(c.api_response()["properties"], {'type' : 'Output'})
+        self.assertEqual(c.api_response()["trash_at"], '2222-02-02T22:22:22.222222000Z')
def test_create_diff_apply(self):
c1 = self.create_count_txt()
c1.save()
c2 = Collection(c1.manifest_locator())
- with c2.open("count.txt", "w") as f:
+ with c2.open("count.txt", "wb") as f:
f.write(b"abcdefg")
diff = c1.diff(c2)
c1.save()
c2 = arvados.collection.Collection(c1.manifest_locator())
- with c2.open("count.txt", "w") as f:
+ with c2.open("count.txt", "wb") as f:
f.write(b"abcdefg")
c2.save()
c1 = self.create_count_txt()
c1.save()
- with c1.open("count.txt", "w") as f:
+ with c1.open("count.txt", "wb") as f:
f.write(b"XYZ")
c2 = arvados.collection.Collection(c1.manifest_locator())
- with c2.open("count.txt", "w") as f:
+ with c2.open("count.txt", "wb") as f:
f.write(b"abcdefg")
c2.save()
c1.manifest_text(),
r"\. e65075d550f9b5bf9992fa1d71a131be\+3\S* 7ac66c0f148de9519b8bd264312c4d64\+7\S* 0:3:count\.txt 3:7:count\.txt~\d\d\d\d\d\d\d\d-\d\d\d\d\d\d~conflict~$")
+    def test_pdh_is_native_str(self):
+        # portable_data_hash() must return the platform-native str type,
+        # i.e. the same type as a '' literal.
+        c1 = self.create_count_txt()
+        pdh = c1.portable_data_hash()
+        self.assertEqual(type(''), type(pdh))
+
if __name__ == '__main__':
unittest.main()