data = args[1]
# Exit only on last block
if len(data) < arvados.config.KEEP_BLOCK_SIZE:
+ # Simulate a checkpoint before quitting, so the in-flight block is committed.
+ self.writer._update(final=True)
raise SystemExit("Simulated error")
return self.arvfile_write(*args, **kwargs)
mocked_write.side_effect = wrapped_write
writer = arv_put.ArvPutUploadJob([self.large_file_name],
replication_desired=1)
+ # We'll be accessing from inside the wrapper
+ self.writer = writer
with self.assertRaises(SystemExit):
writer.start(save_collection=False)
# Confirm that the file was partially uploaded
self.assertEqual(writer.bytes_written + writer2.bytes_written - writer2.bytes_skipped,
os.path.getsize(self.large_file_name))
writer2.destroy_cache()
+ del(self.writer)
# Test for bug #11002
def test_graceful_exit_while_repacking_small_blocks(self):
- def wrapped_delete(*args, **kwargs):
+ def wrapped_commit(*args, **kwargs):
raise SystemExit("Simulated error")
- with mock.patch('arvados.arvfile._BlockManager._delete_bufferblock',
- autospec=True) as mocked_delete:
- mocked_delete.side_effect = wrapped_delete
- # Upload a little more than 1 block, wrapped_delete will make the first block
+ with mock.patch('arvados.arvfile._BlockManager.commit_bufferblock',
+ autospec=True) as mocked_commit:
+ mocked_commit.side_effect = wrapped_commit
+ # Upload a little more than 1 block; wrapped_commit will cause the first block
# commit to fail.
# arv-put should not exit with an exception by trying to commit the collection
# as it's in an inconsistent state.
writer = arv_put.ArvPutUploadJob([self.small_files_dir],
replication_desired=1)
try:
- writer.start(save_collection=False)
- except:
+ with self.assertRaises(SystemExit):
+ writer.start(save_collection=False)
+ except arvados.arvfile.UnownedBlockError:
self.fail("arv-put command is trying to use a corrupted BlockManager. See https://dev.arvados.org/issues/11002")
writer.destroy_cache()
data = args[1]
# Exit only on last block
if len(data) < arvados.config.KEEP_BLOCK_SIZE:
+ # Simulate a checkpoint before quitting.
+ self.writer._update()
raise SystemExit("Simulated error")
return self.arvfile_write(*args, **kwargs)
mocked_write.side_effect = wrapped_write
writer = arv_put.ArvPutUploadJob([self.large_file_name],
replication_desired=1)
+ # We'll be accessing from inside the wrapper
+ self.writer = writer
with self.assertRaises(SystemExit):
writer.start(save_collection=False)
# Confirm that the file was partially uploaded
self.assertEqual(writer2.bytes_written,
os.path.getsize(self.large_file_name))
writer2.destroy_cache()
+ del(self.writer)
def test_no_resume_when_no_cache(self):
def wrapped_write(*args, **kwargs):
data = args[1]
# Exit only on last block
if len(data) < arvados.config.KEEP_BLOCK_SIZE:
+ # Simulate a checkpoint before quitting.
+ self.writer._update()
raise SystemExit("Simulated error")
return self.arvfile_write(*args, **kwargs)
mocked_write.side_effect = wrapped_write
writer = arv_put.ArvPutUploadJob([self.large_file_name],
replication_desired=1)
+ # We'll be accessing from inside the wrapper
+ self.writer = writer
with self.assertRaises(SystemExit):
writer.start(save_collection=False)
# Confirm that the file was partially uploaded
self.assertEqual(writer2.bytes_written,
os.path.getsize(self.large_file_name))
writer2.destroy_cache()
-
+ del(self.writer)
def test_dry_run_feature(self):
def wrapped_write(*args, **kwargs):
data = args[1]
# Exit only on last block
if len(data) < arvados.config.KEEP_BLOCK_SIZE:
+ # Simulate a checkpoint before quitting.
+ self.writer._update()
raise SystemExit("Simulated error")
return self.arvfile_write(*args, **kwargs)
mocked_write.side_effect = wrapped_write
writer = arv_put.ArvPutUploadJob([self.large_file_name],
replication_desired=1)
+ # We'll be accessing from inside the wrapper
+ self.writer = writer
with self.assertRaises(SystemExit):
writer.start(save_collection=False)
# Confirm that the file was partially uploaded
replication_desired=1,
dry_run=True,
resume=False)
-
+ del(self.writer)
class ArvadosExpectedBytesTest(ArvadosBaseTestCase):
TEST_SIZE = os.path.getsize(__file__)