2752: arv-put works when it can't write a cache file.
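
Cache setup now happens inside a try block in main(): if setup_user_cache() or the
ResumeCache constructor raises IOError/OSError, arv-put continues with an uncached
ArvPutCollectionWriter instead of crashing, and skips resume_cache.destroy() at exit.
The signal handler moves to module level and no longer checkpoints the writer
explicitly, the cache_state() calls around the upload loop are gone, and the final
locator is printed from the collections().create() response instead of calling
writer.finish() a second time.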
diff --git a/sdk/python/arvados/commands/put.py b/sdk/python/arvados/commands/put.py
index d9e401dcbbdfedec60e573346859cb829cc51eea..01bae2feade3e7345c1e57c352eba713cafe4a81 100644
--- a/sdk/python/arvados/commands/put.py
+++ b/sdk/python/arvados/commands/put.py
@@ -325,8 +325,10 @@ def progress_writer(progress_func, outfile=sys.stderr):
         outfile.write(progress_func(bytes_written, bytes_expected))
     return write_progress
 
+def exit_signal_handler(sigcode, frame):
+    sys.exit(-sigcode)
+
 def main(arguments=None):
-    ResumeCache.setup_user_cache()
     args = parse_arguments(arguments)
 
     if args.progress:
@@ -335,24 +337,27 @@ def main(arguments=None):
         reporter = progress_writer(machine_progress)
     else:
         reporter = None
+    bytes_expected = expected_bytes_for(args.paths)
 
     try:
+        ResumeCache.setup_user_cache()
         resume_cache = ResumeCache(ResumeCache.make_path(args))
-        if not args.resume:
-            resume_cache.restart()
+    except (IOError, OSError):
+        # Couldn't open cache directory/file.  Continue without it.
+        resume_cache = None
+        writer = ArvPutCollectionWriter(resume_cache, reporter, bytes_expected)
     except ResumeCacheConflict:
         print "arv-put: Another process is already uploading this data."
         sys.exit(1)
+    else:
+        if not args.resume:
+            resume_cache.restart()
+        writer = ArvPutCollectionWriter.from_cache(
+            resume_cache, reporter, bytes_expected)
 
-    writer = ArvPutCollectionWriter.from_cache(
-        resume_cache, reporter, expected_bytes_for(args.paths))
-
-    def signal_handler(sigcode, frame):
-        writer.cache_state()
-        sys.exit(-sigcode)
     # Install our signal handler for each code in CAUGHT_SIGNALS, and save
     # the originals.
-    orig_signal_handlers = {sigcode: signal.signal(sigcode, signal_handler)
+    orig_signal_handlers = {sigcode: signal.signal(sigcode, exit_signal_handler)
                             for sigcode in CAUGHT_SIGNALS}
 
     if writer.bytes_written > 0:  # We're resuming a previous upload.
@@ -361,19 +366,15 @@ def main(arguments=None):
                 "         Use the --no-resume option to start over."])
         writer.report_progress()
 
-    try:
-        writer.do_queued_work()  # Do work resumed from cache.
-        for path in args.paths:  # Copy file data to Keep.
-            if os.path.isdir(path):
-                writer.write_directory_tree(
-                    path, max_manifest_depth=args.max_manifest_depth)
-            else:
-                writer.start_new_stream()
-                writer.write_file(path, args.filename or os.path.basename(path))
-        writer.finish_current_stream()
-    except Exception:
-        writer.cache_state()
-        raise
+    writer.do_queued_work()  # Do work resumed from cache.
+    for path in args.paths:  # Copy file data to Keep.
+        if os.path.isdir(path):
+            writer.write_directory_tree(
+                path, max_manifest_depth=args.max_manifest_depth)
+        else:
+            writer.start_new_stream()
+            writer.write_file(path, args.filename or os.path.basename(path))
+    writer.finish_current_stream()
 
     if args.progress:  # Print newline to split stderr from stdout for humans.
         print >>sys.stderr
@@ -384,7 +385,7 @@ def main(arguments=None):
         print ','.join(writer.data_locators())
     else:
         # Register the resulting collection in Arvados.
-        arvados.api().collections().create(
+        collection = arvados.api().collections().create(
             body={
                 'uuid': writer.finish(),
                 'manifest_text': writer.manifest_text(),
@@ -392,12 +393,13 @@ def main(arguments=None):
             ).execute()
 
         # Print the locator (uuid) of the new collection.
-        print writer.finish()
+        print collection['uuid']
 
     for sigcode, orig_handler in orig_signal_handlers.items():
         signal.signal(sigcode, orig_handler)
 
-    resume_cache.destroy()
+    if resume_cache is not None:
+        resume_cache.destroy()
 
 if __name__ == '__main__':
     main()
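
The heart of this commit is the try/except/else flow around cache setup: an IOError
or OSError while opening the per-user cache (for example, an unwritable home
directory) downgrades arv-put to an uncached, non-resumable upload, while a
ResumeCacheConflict still exits immediately, and the else clause ensures restart()
and from_cache() only run against a cache that actually opened. A minimal sketch of
the same pattern, using hypothetical Cache/Writer stand-ins rather than the real
ResumeCache and ArvPutCollectionWriter classes:

    import sys

    class CacheConflict(Exception):
        """Stand-in for ResumeCacheConflict: another process holds the lock."""

    class Cache(object):
        """Hypothetical stand-in for ResumeCache; opening the cache file may
        raise IOError/OSError if the directory is missing or unwritable."""
        def __init__(self, path):
            self.cache_file = open(path, 'a+')

        def destroy(self):
            self.cache_file.close()

    class Writer(object):
        """Hypothetical stand-in for ArvPutCollectionWriter."""
        def __init__(self, cache=None):
            self.cache = cache

        @classmethod
        def from_cache(cls, cache):
            return cls(cache)

    def setup_writer(cache_path):
        # Mirror the commit's control flow: cache I/O errors degrade
        # gracefully, a lock conflict is fatal, and only a successful open
        # goes through from_cache() with resume support.
        try:
            cache = Cache(cache_path)
        except (IOError, OSError):
            cache = None               # continue without resume support
            writer = Writer(cache)
        except CacheConflict:
            sys.exit("arv-put: Another process is already uploading this data.")
        else:
            writer = Writer.from_cache(cache)
        return cache, writer

    cache, writer = setup_writer('/nonexistent-dir/arv-put.cache')
    assert cache is None and writer.cache is None  # fell back to uncached mode

One subtlety of this except ordering: it works because the conflict exception is a
plain Exception subclass rather than an OSError. If it inherited from OSError, the
first except clause would swallow the conflict and silently continue uncached
instead of refusing to race another uploader.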