X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/221c7d2f37a7a83a424597b11e13b638d6e1c21a..0ecea550fde014578e71004360b700cdfeae4909:/sdk/python/arvados/commands/put.py

diff --git a/sdk/python/arvados/commands/put.py b/sdk/python/arvados/commands/put.py
index d57b2b775d..42510754ab 100644
--- a/sdk/python/arvados/commands/put.py
+++ b/sdk/python/arvados/commands/put.py
@@ -23,6 +23,8 @@ import sys
 import tempfile
 import threading
 import time
+import traceback
+
 from apiclient import errors as apiclient_errors
 from arvados._version import __version__
@@ -382,14 +384,16 @@ class ArvPutUploadJob(object):
         self._collection_lock = threading.Lock()
         self._remote_collection = None # Collection being updated (if asked)
         self._local_collection = None # Collection from previous run manifest
-        self._file_paths = [] # Files to be updated in remote collection
+        self._file_paths = set() # Files to be updated in remote collection
         self._stop_checkpointer = threading.Event()
         self._checkpointer = threading.Thread(target=self._update_task)
         self._checkpointer.daemon = True
         self._update_task_time = update_time # How many seconds wait between update runs
         self._files_to_upload = FileUploadList(dry_run=dry_run)
+        self._upload_started = False
         self.logger = logger
         self.dry_run = dry_run
+        self._checkpoint_before_quit = True
 
         if not self.use_cache and self.resume:
             raise ArvPutArgumentConflict('resume cannot be True when use_cache is False')
@@ -444,17 +448,28 @@ class ArvPutUploadJob(object):
             # report initial progress.
             self._update()
             # Actual file upload
+            self._upload_started = True # Used by the update thread to start checkpointing
             self._upload_files()
+        except (SystemExit, Exception) as e:
+            self._checkpoint_before_quit = False
+            # Log stack trace only when Ctrl-C isn't pressed (SIGINT)
+            # Note: We're expecting SystemExit instead of KeyboardInterrupt because
+            # we have a custom signal handler in place that raises SystemExit with
+            # the catched signal's code.
+            if not isinstance(e, SystemExit) or e.code != -2:
+                self.logger.warning("Abnormal termination:\n{}".format(traceback.format_exc(e)))
+            raise
         finally:
             if not self.dry_run:
                 # Stop the thread before doing anything else
                 self._stop_checkpointer.set()
                 self._checkpointer.join()
-                # Commit all pending blocks & one last _update()
-                self._local_collection.manifest_text()
-                self._update(final=True)
-                if save_collection:
-                    self.save_collection()
+                if self._checkpoint_before_quit:
+                    # Commit all pending blocks & one last _update()
+                    self._local_collection.manifest_text()
+                    self._update(final=True)
+                    if save_collection:
+                        self.save_collection()
             if self.use_cache:
                 self._cache_file.close()
 
@@ -506,28 +521,34 @@ class ArvPutUploadJob(object):
         Periodically called support task. File uploading is
         asynchronous so we poll status from the collection.
         """
-        while not self._stop_checkpointer.wait(self._update_task_time):
+        while not self._stop_checkpointer.wait(1 if not self._upload_started else self._update_task_time):
             self._update()
 
     def _update(self, final=False):
         """
         Update cached manifest text and report progress.
         """
-        with self._collection_lock:
-            self.bytes_written = self._collection_size(self._local_collection)
+        if self._upload_started:
+            with self._collection_lock:
+                self.bytes_written = self._collection_size(self._local_collection)
+                if self.use_cache:
+                    if final:
+                        manifest = self._local_collection.manifest_text()
+                    else:
+                        # Get the manifest text without comitting pending blocks
+                        manifest = self._local_collection.manifest_text(strip=False,
+                                                                        normalize=False,
+                                                                        only_committed=True)
+                    # Update cache
+                    with self._state_lock:
+                        self._state['manifest'] = manifest
         if self.use_cache:
-            if final:
-                manifest = self._local_collection.manifest_text()
-            else:
-                # Get the manifest text without comitting pending blocks
-                manifest = self._local_collection.manifest_text(".", strip=False,
-                                                                normalize=False,
-                                                                only_committed=True)
-            # Update cache
-            with self._state_lock:
-                self._state['manifest'] = manifest
-        if self.use_cache:
-            self._save_state()
+            try:
+                self._save_state()
+            except Exception as e:
+                self.logger.error("Unexpected error trying to save cache file: {}".format(e))
+        else:
+            self.bytes_written = self.bytes_skipped
         # Call the reporter, if any
         self.report_progress()
 
@@ -546,7 +567,7 @@ class ArvPutUploadJob(object):
         should_upload = False
         new_file_in_cache = False
         # Record file path for updating the remote collection before exiting
-        self._file_paths.append(filename)
+        self._file_paths.add(filename)
 
         with self._state_lock:
             # If no previous cached data on this file, store it for an eventual
@@ -655,10 +676,12 @@ class ArvPutUploadJob(object):
             cache_filepath = os.path.join(
                 arv_cmd.make_home_conf_dir(self.CACHE_DIR, 0o700, 'raise'),
                 cache_filename)
-            if self.resume:
+            if self.resume and os.path.exists(cache_filepath):
+                self.logger.info("Resuming upload from cache file {}".format(cache_filepath))
                 self._cache_file = open(cache_filepath, 'a+')
             else:
                 # --no-resume means start with a empty cache file.
+                self.logger.info("Creating new cache file at {}".format(cache_filepath))
                 self._cache_file = open(cache_filepath, 'w+')
             self._cache_filename = self._cache_file.name
             self._lock_file(self._cache_file)
@@ -675,6 +698,7 @@ class ArvPutUploadJob(object):
                 # Cache file empty, set up new cache
                 self._state = copy.deepcopy(self.EMPTY_STATE)
         else:
+            self.logger.info("No cache usage requested for this run.")
             # No cache file, set empty state
             self._state = copy.deepcopy(self.EMPTY_STATE)
         # Load the previous manifest so we can check if files were modified remotely.
@@ -701,17 +725,19 @@ class ArvPutUploadJob(object):
         """
         Atomically save current state into cache.
        """
+        with self._state_lock:
+            # We're not using copy.deepcopy() here because it's a lot slower
+            # than json.dumps(), and we're already needing JSON format to be
+            # saved on disk.
+            state = json.dumps(self._state)
         try:
-            with self._state_lock:
-                state = copy.deepcopy(self._state)
-            new_cache_fd, new_cache_name = tempfile.mkstemp(
-                dir=os.path.dirname(self._cache_filename))
-            self._lock_file(new_cache_fd)
-            new_cache = os.fdopen(new_cache_fd, 'r+')
-            json.dump(state, new_cache)
+            new_cache = tempfile.NamedTemporaryFile(
+                dir=os.path.dirname(self._cache_filename), delete=False)
+            self._lock_file(new_cache)
+            new_cache.write(state)
             new_cache.flush()
             os.fsync(new_cache)
-            os.rename(new_cache_name, self._cache_filename)
+            os.rename(new_cache.name, self._cache_filename)
         except (IOError, OSError, ResumeCacheConflict) as error:
             self.logger.error("There was a problem while saving the cache file: {}".format(error))
             try:
@@ -729,7 +755,14 @@ class ArvPutUploadJob(object):
         return self._my_collection().manifest_locator()
 
     def portable_data_hash(self):
-        return self._my_collection().portable_data_hash()
+        pdh = self._my_collection().portable_data_hash()
+        m = self._my_collection().stripped_manifest()
+        local_pdh = hashlib.md5(m).hexdigest() + '+' + str(len(m))
+        if pdh != local_pdh:
+            logger.warning("\n".join([
+                "arv-put: API server provided PDH differs from local manifest.",
+                "         This should not happen; showing API server version."]))
+        return pdh
 
     def manifest_text(self, stream_name=".", strip=False, normalize=False):
         return self._my_collection().manifest_text(stream_name, strip, normalize)
@@ -816,6 +849,7 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr):
     global api_client
 
     logger = logging.getLogger('arvados.arv_put')
+    logger.setLevel(logging.INFO)
     args = parse_arguments(arguments)
     status = 0
     if api_client is None:
@@ -855,6 +889,10 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr):
     else:
         reporter = None
 
+    # If this is used by a human, and there's at least one directory to be
+    # uploaded, the expected bytes calculation can take a moment.
+    if args.progress and any([os.path.isdir(f) for f in args.paths]):
+        logger.info("Calculating upload size, this could take some time...")
    bytes_expected = expected_bytes_for(args.paths)
 
     try:
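
The _update_task() change above makes the checkpointer thread poll every second until the upload has actually started, then fall back to the configured update interval. The mechanism is a threading.Event.wait() loop whose timeout depends on state; below is a rough standalone sketch of that pattern, assuming an illustrative Checkpointer class and do_update callback that are not names from arv-put.

import threading
import time

class Checkpointer(object):
    def __init__(self, do_update, update_interval=20.0):
        self._do_update = do_update              # callback that saves progress
        self._update_interval = update_interval  # seconds between checkpoints
        self._upload_started = False
        self._stop = threading.Event()
        self._thread = threading.Thread(target=self._run)
        self._thread.daemon = True

    def start(self):
        self._thread.start()

    def mark_upload_started(self):
        self._upload_started = True

    def stop(self):
        self._stop.set()
        self._thread.join()

    def _run(self):
        # Poll quickly (1s) until the upload begins, then at the normal rate.
        # Event.wait() returns True as soon as stop() sets the event.
        while not self._stop.wait(1 if not self._upload_started else self._update_interval):
            self._do_update()

if __name__ == '__main__':
    cp = Checkpointer(lambda: print("checkpoint"), update_interval=2.0)
    cp.start()
    cp.mark_upload_started()
    time.sleep(5)   # a couple of checkpoints should fire in this window
    cp.stop()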
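The _save_state() change serializes the state with json.dumps() while holding the lock (cheaper than copy.deepcopy(), and the cache is JSON on disk anyway), writes it to a temporary file in the cache's directory, flushes and fsyncs, then renames it over the old cache file so a crash never leaves a half-written cache. A minimal sketch of that atomic-write pattern follows; the CacheWriter class is illustrative (not part of arv-put) and the file-locking done by _lock_file() in the real code is omitted.

import json
import os
import tempfile
import threading

class CacheWriter(object):
    def __init__(self, cache_filename):
        self._cache_filename = cache_filename
        self._state_lock = threading.Lock()
        self._state = {'manifest': '', 'files': {}}

    def save_state(self):
        # Serialize under the lock; the cache format on disk is JSON anyway,
        # so json.dumps() doubles as a cheap snapshot of the state.
        with self._state_lock:
            state = json.dumps(self._state)
        # Write to a temp file in the same directory so os.rename() stays on
        # one filesystem and replaces the old cache atomically.
        new_cache = tempfile.NamedTemporaryFile(
            mode='w',
            dir=os.path.dirname(self._cache_filename) or '.',
            delete=False)
        try:
            new_cache.write(state)
            new_cache.flush()
            os.fsync(new_cache.fileno())
            new_cache.close()
            os.rename(new_cache.name, self._cache_filename)
        except (IOError, OSError):
            os.unlink(new_cache.name)
            raise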
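The portable_data_hash() change cross-checks the server-reported PDH against one computed locally from the stripped manifest: the MD5 hex digest of the manifest text, a '+', and the manifest's length in bytes, which is what the new local_pdh expression does. A small standalone illustration of that computation, using a hypothetical one-line manifest string:

import hashlib

def compute_pdh(stripped_manifest):
    # Portable data hash: md5 of the stripped manifest plus '+' and its size in bytes.
    data = stripped_manifest.encode('utf-8')
    return hashlib.md5(data).hexdigest() + '+' + str(len(data))

# Hypothetical single-stream manifest with one empty file.
manifest = ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:empty.txt\n"
print(compute_pdh(manifest))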