X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/5ab34436e2c47cebedd81fe6d38bdcbc40d6c373..4ffe3382ff35cebce873668dfdfad2eef2def3d3:/sdk/python/arvados/collection.py

diff --git a/sdk/python/arvados/collection.py b/sdk/python/arvados/collection.py
index c000c80718..55be40fa04 100644
--- a/sdk/python/arvados/collection.py
+++ b/sdk/python/arvados/collection.py
@@ -1296,8 +1296,8 @@ class Collection(RichCollectionBase):
 
         :storage_classes_desired:
           A list of storage class names where to upload the data. If None,
-          the keepstores are expected to store the data into their default
-          storage class.
+          the keep client is expected to store the data into the cluster's
+          default storage class(es).
 
         """
 
@@ -1344,8 +1344,8 @@ class Collection(RichCollectionBase):
 
             try:
                 self._populate()
-            except (IOError, errors.SyntaxError) as e:
-                raise errors.ArgumentError("Error processing manifest text: %s", e)
+            except errors.SyntaxError as e:
+                raise errors.ArgumentError("Error processing manifest text: %s", str(e)) from None
 
     def storage_classes_desired(self):
         return self._storage_classes_desired or []
@@ -1424,7 +1424,6 @@ class Collection(RichCollectionBase):
         if self._block_manager is None:
             copies = (self.replication_desired or
                       self._my_api()._rootDesc.get('defaultCollectionReplication', 2))
-            classes = self.storage_classes_desired or []
             self._block_manager = _BlockManager(self._my_keep(), copies=copies, put_threads=self.put_threads, num_retries=self.num_retries, storage_classes_func=self.storage_classes_desired)
         return self._block_manager
@@ -1547,7 +1546,8 @@ class Collection(RichCollectionBase):
              storage_classes=None,
              trash_at=None,
              merge=True,
-             num_retries=None):
+             num_retries=None,
+             preserve_version=False):
         """Save collection to an existing collection record.
 
         Commit pending buffer blocks to Keep, merge with remote record (if
@@ -1577,6 +1577,13 @@ class Collection(RichCollectionBase):
         :num_retries:
           Retry count on API calls (if None, use the collection default)
 
+        :preserve_version:
+          If True, indicate that the collection content being saved right now
+          should be preserved in a version snapshot if the collection record is
+          updated in the future. Requires that the API server has
+          Collections.CollectionVersioning enabled, if not, setting this will
+          raise an exception.
+
         """
         if properties and type(properties) is not dict:
             raise errors.ArgumentError("properties must be dictionary type.")
@@ -1589,6 +1596,9 @@ class Collection(RichCollectionBase):
         if trash_at and type(trash_at) is not datetime.datetime:
             raise errors.ArgumentError("trash_at must be datetime type.")
 
+        if preserve_version and not self._my_api().config()['Collections'].get('CollectionVersioning', False):
+            raise errors.ArgumentError("preserve_version is not supported when CollectionVersioning is not enabled.")
+
         body={}
         if properties:
             body["properties"] = properties
@@ -1597,6 +1607,8 @@ class Collection(RichCollectionBase):
         if trash_at:
             t = trash_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
             body["trash_at"] = t
+        if preserve_version:
+            body["preserve_version"] = preserve_version
 
         if not self.committed():
             if self._has_remote_blocks:
@@ -1642,7 +1654,8 @@ class Collection(RichCollectionBase):
                  storage_classes=None,
                  trash_at=None,
                  ensure_unique_name=False,
-                 num_retries=None):
+                 num_retries=None,
+                 preserve_version=False):
         """Save collection to a new collection record.
 
         Commit pending buffer blocks to Keep and, when create_collection_record
@@ -1681,6 +1694,13 @@ class Collection(RichCollectionBase):
         :num_retries:
           Retry count on API calls (if None, use the collection default)
 
+        :preserve_version:
+          If True, indicate that the collection content being saved right now
+          should be preserved in a version snapshot if the collection record is
+          updated in the future. Requires that the API server has
+          Collections.CollectionVersioning enabled, if not, setting this will
+          raise an exception.
+
         """
         if properties and type(properties) is not dict:
             raise errors.ArgumentError("properties must be dictionary type.")
@@ -1691,6 +1711,9 @@ class Collection(RichCollectionBase):
         if trash_at and type(trash_at) is not datetime.datetime:
             raise errors.ArgumentError("trash_at must be datetime type.")
 
+        if preserve_version and not self._my_api().config()['Collections'].get('CollectionVersioning', False):
+            raise errors.ArgumentError("preserve_version is not supported when CollectionVersioning is not enabled.")
+
         if self._has_remote_blocks:
             # Copy any remote blocks to the local cluster.
             self._copy_remote_blocks(remote_blocks={})
@@ -1719,6 +1742,8 @@ class Collection(RichCollectionBase):
         if trash_at:
             t = trash_at.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
             body["trash_at"] = t
+        if preserve_version:
+            body["preserve_version"] = preserve_version
 
         self._remember_api_response(self._my_api().collections().create(ensure_unique_name=ensure_unique_name, body=body).execute(num_retries=num_retries))
         text = self._api_response["manifest_text"]
@@ -1791,7 +1816,13 @@ class Collection(RichCollectionBase):
                             self.find_or_create(os.path.join(stream_name, name[:-2]), COLLECTION)
                     else:
                         filepath = os.path.join(stream_name, name)
-                        afile = self.find_or_create(filepath, FILE)
+                        try:
+                            afile = self.find_or_create(filepath, FILE)
+                        except IOError as e:
+                            if e.errno == errno.ENOTDIR:
+                                raise errors.SyntaxError("Dir part of %s conflicts with file of the same name.", filepath) from None
+                            else:
+                                raise e from None
                         if isinstance(afile, ArvadosFile):
                             afile.add_segment(blocks, pos, size)
                         else:
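
Usage note (not part of the diff above): a minimal sketch of how the preserve_version argument added here and the storage_classes_desired constructor option might be used from the Python SDK. The collection name, file name, and the 'default' storage class below are illustrative assumptions, not taken from this change; preserve_version requires Collections.CollectionVersioning to be enabled on the API server, otherwise save()/save_new() raise errors.ArgumentError as implemented in this diff.

    # Sketch only; assumes ARVADOS_API_HOST/ARVADOS_API_TOKEN are configured
    # and the cluster has Collections.CollectionVersioning enabled.
    import arvados
    import arvados.collection

    api = arvados.api('v1')

    # storage_classes_desired tells the keep client which storage classes to
    # request when writing blocks; with None the cluster's default class is used.
    # The class name 'default' is an illustrative assumption.
    c = arvados.collection.Collection(api_client=api,
                                      storage_classes_desired=['default'])

    with c.open('example.txt', 'w') as f:
        f.write('hello world\n')

    # preserve_version=True asks the API server to keep this content as a
    # version snapshot if the collection record is updated later; it raises
    # errors.ArgumentError when CollectionVersioning is disabled.
    c.save_new(name='preserve_version example',
               ensure_unique_name=True,
               preserve_version=True)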