return mock.patch.object(api_client._http, 'request', side_effect=queue_with((
(fake_httplib2_response(code, **headers), body) for code in codes)))
def str_keep_locator(s):
    """Return the Keep locator string for *s*: '<md5 hexdigest>+<byte length>'.

    Accepts either bytes or text; text is UTF-8-encoded before hashing,
    since hashlib.md5 requires bytes on Python 3.  The length component is
    the encoded byte length, matching Keep locator semantics.
    """
    if not isinstance(s, bytes):
        s = s.encode('utf-8')
    return '{}+{}'.format(hashlib.md5(s).hexdigest(), len(s))
class FakeCurl:
@classmethod
def __init__(self, name='.', *data):
self._name = name
self._data = ''.join(data)
- self._data_locators = ['{}+{}'.format(hashlib.md5(d).hexdigest(),
- len(d)) for d in data]
+ self._data_locators = [str_keep_locator(d) for d in data]
self.num_retries = 0
def name(self):
import arvados
import arvados_testutil as tutil
-import hashlib
class ManifestExamples(object):
def make_manifest(self,
files_per_stream=1,
streams=1):
datablip = 'x' * bytes_per_block
- data_loc = '{}+{}'.format(hashlib.md5(datablip).hexdigest(),
- bytes_per_block)
+ data_loc = tutil.str_keep_locator(datablip)
with tutil.mock_keep_responses(data_loc, 200):
coll = arvados.CollectionWriter()
for si in range(0, streams):
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-import hashlib
import io
import random
import arvados.commands.ls as arv_ls
import run_test_server
+from arvados_testutil import str_keep_locator
+
class ArvLsTestCase(run_test_server.TestCaseWithServers):
FAKE_UUID = 'zzzzz-4zz18-12345abcde12345'
def mock_api_for_manifest(self, manifest_lines, uuid=FAKE_UUID):
manifest_text = self.newline_join(manifest_lines)
- pdh = '{}+{}'.format(hashlib.md5(manifest_text).hexdigest(),
- len(manifest_text))
+ pdh = str_keep_locator(manifest_text)
coll_info = {'uuid': uuid,
'portable_data_hash': pdh,
'manifest_text': manifest_text}
import mock
import os
import unittest
-import hashlib
import time
import arvados
self.requests.append(locator)
return self.blocks.get(locator)
def put(self, data, num_retries=None):
- pdh = "%s+%i" % (hashlib.md5(data).hexdigest(), len(data))
+ pdh = tutil.str_keep_locator(data)
self.blocks[pdh] = str(data)
return pdh
n = 0
blocks = {}
for d in ['01234', '34567', '67890']:
- loc = '{}+{}'.format(hashlib.md5(d).hexdigest(), len(d))
+ loc = tutil.str_keep_locator(d)
blocks[loc] = d
stream.append(Range(loc, n, len(d)))
n += len(d)
import arvados
import copy
-import hashlib
import mock
import os
import pprint
def test_write_directory_tree_with_zero_recursion(self):
cwriter = arvados.CollectionWriter(self.api_client)
content = 'd1/d2/f3d1/f2f1'
- blockhash = hashlib.md5(content).hexdigest() + '+' + str(len(content))
+ blockhash = tutil.str_keep_locator(content)
cwriter.write_directory_tree(
self.build_directory_tree(['f1', 'd1/f2', 'd1/d2/f3']),
max_manifest_depth=0)
self.assertEqual('.', writer.current_stream_name())
self.assertEqual('out', writer.current_file_name())
out_file.write('test data')
- data_loc = hashlib.md5('test data').hexdigest() + '+9'
+ data_loc = tutil.str_keep_locator('test data')
self.assertTrue(out_file.closed, "writer file not closed after context")
self.assertRaises(ValueError, out_file.write, 'extra text')
with self.mock_keep(data_loc, 200) as keep_mock:
writer = arvados.CollectionWriter(client)
with writer.open('six') as out_file:
out_file.writelines(['12', '34', '56'])
- data_loc = hashlib.md5('123456').hexdigest() + '+6'
+ data_loc = tutil.str_keep_locator('123456')
with self.mock_keep(data_loc, 200) as keep_mock:
self.assertEqual(". {} 0:6:six\n".format(data_loc),
writer.manifest_text())
def test_open_flush(self):
client = self.api_client_mock()
- data_loc1 = hashlib.md5('flush1').hexdigest() + '+6'
- data_loc2 = hashlib.md5('flush2').hexdigest() + '+6'
+ data_loc1 = tutil.str_keep_locator('flush1')
+ data_loc2 = tutil.str_keep_locator('flush2')
with self.mock_keep((data_loc1, 200), (data_loc2, 200)) as keep_mock:
writer = arvados.CollectionWriter(client)
with writer.open('flush_test') as out_file:
out_file.write('1st')
with writer.open('.', '2') as out_file:
out_file.write('2nd')
- data_loc = hashlib.md5('1st2nd').hexdigest() + '+6'
+ data_loc = tutil.str_keep_locator('1st2nd')
with self.mock_keep(data_loc, 200) as keep_mock:
self.assertEqual(". {} 0:3:1 3:3:2\n".format(data_loc),
writer.manifest_text())
def test_two_opens_two_streams(self):
client = self.api_client_mock()
- data_loc1 = hashlib.md5('file').hexdigest() + '+4'
- data_loc2 = hashlib.md5('indir').hexdigest() + '+5'
+ data_loc1 = tutil.str_keep_locator('file')
+ data_loc2 = tutil.str_keep_locator('indir')
with self.mock_keep((data_loc1, 200), (data_loc2, 200)) as keep_mock:
writer = arvados.CollectionWriter(client)
with writer.open('file') as out_file:
def test_put_error_does_not_include_successful_puts(self):
data = 'partial failure test'
- data_loc = '{}+{}'.format(hashlib.md5(data).hexdigest(), len(data))
+ data_loc = tutil.str_keep_locator(data)
api_client = self.mock_keep_services(count=3)
with tutil.mock_keep_responses(data_loc, 200, 500, 500) as req_mock, \
self.assertRaises(arvados.errors.KeepWriteError) as exc_check:
def test_proxy_put_with_no_writable_services(self):
data = 'test with no writable services'
- data_loc = '{}+{}'.format(hashlib.md5(data).hexdigest(), len(data))
+ data_loc = tutil.str_keep_locator(data)
api_client = self.mock_keep_services(service_type='proxy', read_only=True, count=1)
with tutil.mock_keep_responses(data_loc, 200, 500, 500) as req_mock, \
self.assertRaises(arvados.errors.KeepWriteError) as exc_check:
def check_64_zeros_error_order(self, verb, exc_class):
data = '0' * 64
if verb == 'get':
- data = hashlib.md5(data).hexdigest() + '+1234'
+ data = tutil.str_keep_locator(data)
# Arbitrary port number:
aport = random.randint(1024,65535)
api_client = self.mock_keep_services(service_port=aport, count=self.services)