+# -*- coding: utf-8 -*-
+
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
from builtins import range
from functools import partial
import apiclient
+import ciso8601
import datetime
import hashlib
import json
def test_cache_is_locked(self):
with tempfile.NamedTemporaryFile() as cachefile:
- cache = arv_put.ResumeCache(cachefile.name)
+ _ = arv_put.ResumeCache(cachefile.name)
self.assertRaises(arv_put.ResumeCacheConflict,
arv_put.ResumeCache, cachefile.name)
def test_passing_nonexistant_path_raise_exception(self):
uuid_str = str(uuid.uuid4())
with self.assertRaises(arv_put.PathDoesNotExistError):
- cwriter = arv_put.ArvPutUploadJob(["/this/path/does/not/exist/{}".format(uuid_str)])
+ arv_put.ArvPutUploadJob(["/this/path/does/not/exist/{}".format(uuid_str)])
def test_writer_works_without_cache(self):
cwriter = arv_put.ArvPutUploadJob(['/dev/null'], resume=False)
fake_httplib2_response(403), b'{}')
with mock.patch('arvados.collection.Collection.save_new',
new=coll_save_mock):
- with self.assertRaises(SystemExit) as exc_test:
+ with self.assertRaises(SystemExit):
self.call_main_with_args(['/dev/null'])
self.assertRegex(
self.main_stderr.getvalue(), matcher)
class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
ArvadosBaseTestCase):
- def _getKeepServerConfig():
- for config_file, mandatory in [
- ['application.yml', False], ['application.default.yml', True]]:
- path = os.path.join(run_test_server.SERVICES_SRC_DIR,
- "api", "config", config_file)
- if not mandatory and not os.path.exists(path):
- continue
- with open(path) as f:
- rails_config = yaml.load(f.read())
- for config_section in ['test', 'common']:
- try:
- key = rails_config[config_section]["blob_signing_key"]
- except (KeyError, TypeError):
- pass
- else:
- return {'blob_signing_key': key,
- 'enforce_permissions': True}
- return {'blog_signing_key': None, 'enforce_permissions': False}
-
MAIN_SERVER = {}
- KEEP_SERVER = _getKeepServerConfig()
+ KEEP_SERVER = {'blob_signing': True}
PROJECT_UUID = run_test_server.fixture('groups')['aproject']['uuid']
@classmethod
BAD_UUID = 'zzzzz-tpzed-zzzzzzzzzzzzzzz'
self.authorize_with('active')
with self.assertRaises(apiclient.errors.HttpError):
- result = arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
+ arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
0)
def test_short_put_from_stdin(self):
# we're about to create is not present in our test fixture.
manifest_uuid = "00b4e9f40ac4dd432ef89749f1c01e74+47"
with self.assertRaises(apiclient.errors.HttpError):
- notfound = arv_put.api_client.collections().get(
+ arv_put.api_client.collections().get(
uuid=manifest_uuid).execute()
datadir = self.make_tmpdir()
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(err.decode(), r'INFO: Collection saved as ')
self.assertEqual(p.returncode, 0)
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(err.decode(), r'INFO: Creating new cache file at ')
self.assertEqual(p.returncode, 0)
cache_filepath = re.search(r'INFO: Creating new cache file at (.*)',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(
err.decode(),
r'INFO: Cache expired, starting from scratch.*')
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(err.decode(), r'INFO: Creating new cache file at ')
self.assertEqual(p.returncode, 0)
cache_filepath = re.search(r'INFO: Creating new cache file at (.*)',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(
err.decode(),
r'ERROR: arv-put: Resume cache contains invalid signature.*')
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(err.decode(), r'INFO: Creating new cache file at ')
self.assertEqual(p.returncode, 0)
cache_filepath = re.search(r'INFO: Creating new cache file at (.*)',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=self.ENVIRON)
- (out, err) = p.communicate()
+ (_, err) = p.communicate()
self.assertRegex(
err.decode(),
r'WARNING: Uploaded file \'.*barfile.txt\' access token expired, will re-upload it from scratch')
c = arv_put.api_client.collections().get(uuid=updated_col['uuid']).execute()
self.assertRegex(c['manifest_text'], r'^\..* .*:44:file2\n')
+ def test_put_collection_with_utc_expiring_datetime(self):
+ tmpdir = self.make_tmpdir()
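+        # Expiry 90 days from now, formatted as a UTC timestamp (YYYYMMDDTHHMMZ);
+        # the stored trash_at should match it exactly.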
+ trash_at = (datetime.datetime.utcnow() + datetime.timedelta(days=90)).strftime('%Y%m%dT%H%MZ')
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ col = self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-at', trash_at, tmpdir])
+ self.assertNotEqual(None, col['uuid'])
+ c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+ self.assertEqual(ciso8601.parse_datetime(trash_at),
+ ciso8601.parse_datetime(c['trash_at']))
+
+ def test_put_collection_with_timezone_aware_expiring_datetime(self):
+ tmpdir = self.make_tmpdir()
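+        # Same expiry, but with an explicit -0300 UTC offset; the stored
+        # trash_at should be the equivalent UTC time, i.e. three hours later.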
+ trash_at = (datetime.datetime.utcnow() + datetime.timedelta(days=90)).strftime('%Y%m%dT%H%M-0300')
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ col = self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-at', trash_at, tmpdir])
+ self.assertNotEqual(None, col['uuid'])
+ c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+ self.assertEqual(
+ ciso8601.parse_datetime(trash_at).replace(tzinfo=None) + datetime.timedelta(hours=3),
+ ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None))
+
+ def test_put_collection_with_timezone_naive_expiring_datetime(self):
+ tmpdir = self.make_tmpdir()
+ trash_at = (datetime.datetime.utcnow() + datetime.timedelta(days=90)).strftime('%Y%m%dT%H%M')
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ col = self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-at', trash_at, tmpdir])
+ self.assertNotEqual(None, col['uuid'])
+ c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
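+        # A timezone-naive --trash-at should be interpreted as local time,
+        # so add the local UTC offset before comparing with the stored value.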
+ if time.daylight:
+ offset = datetime.timedelta(seconds=time.altzone)
+ else:
+ offset = datetime.timedelta(seconds=time.timezone)
+ self.assertEqual(
+ ciso8601.parse_datetime(trash_at) + offset,
+ ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None))
+
+ def test_put_collection_with_expiring_date_only(self):
+ tmpdir = self.make_tmpdir()
+ trash_at = '2140-01-01'
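+        # A date-only --trash-at should expire at the end of that day
+        # (23:59:59), local time.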
+ end_of_day = datetime.timedelta(hours=23, minutes=59, seconds=59)
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ col = self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-at', trash_at, tmpdir])
+ self.assertNotEqual(None, col['uuid'])
+ c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
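+        # As with a naive datetime, convert the expected local-time expiry
+        # to UTC before comparing with the stored value.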
+ if time.daylight:
+ offset = datetime.timedelta(seconds=time.altzone)
+ else:
+ offset = datetime.timedelta(seconds=time.timezone)
+ self.assertEqual(
+ ciso8601.parse_datetime(trash_at) + end_of_day + offset,
+ ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None))
+
+ def test_put_collection_with_invalid_absolute_expiring_datetimes(self):
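+        # Incomplete or ambiguous datetimes should be rejected by arv-put;
+        # run_and_find_collection surfaces the failed upload as an AssertionError.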
+        cases = ['2100', '210010', '2100-10', '2100-Oct']
+ tmpdir = self.make_tmpdir()
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ for test_datetime in cases:
+ with self.assertRaises(AssertionError):
+ self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-at', test_datetime, tmpdir])
+
+ def test_put_collection_with_relative_expiring_datetime(self):
+ expire_after = 7
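+        # --trash-after is given in days; take timestamps before and after
+        # the upload so the stored trash_at can be bounded between them.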
+ dt_before = datetime.datetime.utcnow() + datetime.timedelta(days=expire_after)
+ tmpdir = self.make_tmpdir()
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ col = self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-after', str(expire_after), tmpdir])
+ self.assertNotEqual(None, col['uuid'])
+ dt_after = datetime.datetime.utcnow() + datetime.timedelta(days=expire_after)
+ c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+ trash_at = ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None)
+ self.assertTrue(dt_before < trash_at)
+ self.assertTrue(dt_after > trash_at)
+
+ def test_put_collection_with_invalid_relative_expiring_datetime(self):
+ expire_after = 0 # Must be >= 1
+ tmpdir = self.make_tmpdir()
+ with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+ f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+ with self.assertRaises(AssertionError):
+ self.run_and_find_collection(
+ "",
+ ['--no-progress', '--trash-after', str(expire_after), tmpdir])
+
def test_upload_directory_reference_without_trailing_slash(self):
tmpdir1 = self.make_tmpdir()
tmpdir2 = self.make_tmpdir()
r'^\./%s.*:file2.txt' % os.path.basename(tmpdir))
self.assertRegex(c['manifest_text'], r'^.*:file3.txt')
+ def test_unicode_on_filename(self):
+ tmpdir = self.make_tmpdir()
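+        # The non-ASCII file name should appear in the saved collection's manifest.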
+ fname = u"iā¤arvados.txt"
+ with open(os.path.join(tmpdir, fname), 'w') as f:
+ f.write("This is a unicode named file")
+ col = self.run_and_find_collection("", ['--no-progress', tmpdir])
+ self.assertNotEqual(None, col['uuid'])
+ c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+ self.assertTrue(fname in c['manifest_text'], u"{} does not include {}".format(c['manifest_text'], fname))
+
def test_silent_mode_no_errors(self):
self.authorize_with('active')
tmpdir = self.make_tmpdir()