X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/21cdd10032c60451743b6e6294d2e75a356ee61c..04f5f75659e963f02441a566afcc6d22683d0485:/sdk/python/arvados/collection.py diff --git a/sdk/python/arvados/collection.py b/sdk/python/arvados/collection.py index 496136ebe3..ea18123f65 100644 --- a/sdk/python/arvados/collection.py +++ b/sdk/python/arvados/collection.py @@ -1,187 +1,288 @@ -import gflags -import httplib -import httplib2 +import functools import logging import os -import pprint -import sys -import types -import subprocess -import json -import UserDict import re -import hashlib -import string -import bz2 -import zlib -import fcntl -import time -import threading +import errno from collections import deque from stat import * +from .arvfile import ArvadosFileBase, split, ArvadosFile, ArvadosFileWriter, ArvadosFileReader, BlockManager from keep import * -from stream import * +from .stream import StreamReader, normalize_stream, locator_block_size +from .ranges import Range, LocatorAndRange import config import errors import util _logger = logging.getLogger('arvados.collection') -def normalize_stream(s, stream): - stream_tokens = [s] - sortedfiles = list(stream.keys()) - sortedfiles.sort() - - blocks = {} - streamoffset = 0L - for f in sortedfiles: - for b in stream[f]: - if b[arvados.LOCATOR] not in blocks: - stream_tokens.append(b[arvados.LOCATOR]) - blocks[b[arvados.LOCATOR]] = streamoffset - streamoffset += b[arvados.BLOCKSIZE] - - if len(stream_tokens) == 1: - stream_tokens.append(config.EMPTY_BLOCK_LOCATOR) - - for f in sortedfiles: - current_span = None - fout = f.replace(' ', '\\040') - for segment in stream[f]: - segmentoffset = blocks[segment[arvados.LOCATOR]] + segment[arvados.OFFSET] - if current_span == None: - current_span = [segmentoffset, segmentoffset + segment[arvados.SEGMENTSIZE]] - else: - if segmentoffset == current_span[1]: - current_span[1] += segment[arvados.SEGMENTSIZE] - else: - stream_tokens.append("{0}:{1}:{2}".format(current_span[0], current_span[1] - current_span[0], fout)) - current_span = [segmentoffset, segmentoffset + segment[arvados.SEGMENTSIZE]] - - if current_span != None: - stream_tokens.append("{0}:{1}:{2}".format(current_span[0], current_span[1] - current_span[0], fout)) - - if len(stream[f]) == 0: - stream_tokens.append("0:0:{0}".format(fout)) - - return stream_tokens - -def normalize(collection): - streams = {} - for s in collection.all_streams(): - for f in s.all_files(): - filestream = s.name() + "/" + f.name() - r = filestream.rindex("/") - streamname = filestream[:r] - filename = filestream[r+1:] - if streamname not in streams: - streams[streamname] = {} - if filename not in streams[streamname]: - streams[streamname][filename] = [] - for r in f.segments: - streams[streamname][filename].extend(s.locators_and_ranges(r[0], r[1])) - - normalized_streams = [] - sortedstreams = list(streams.keys()) - sortedstreams.sort() - for s in sortedstreams: - normalized_streams.append(normalize_stream(s, streams[s])) - return normalized_streams - - -class CollectionReader(object): - def __init__(self, manifest_locator_or_text, api_client=None, keep_client=None): +class CollectionBase(object): + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + def _my_keep(self): + if self._keep_client is None: + self._keep_client = KeepClient(api_client=self._api_client, + num_retries=self.num_retries) + return self._keep_client + + def stripped_manifest(self): + """ + Return the manifest for the current collection 
with all + non-portable hints (i.e., permission signatures and other + hints other than size hints) removed from the locators. + """ + raw = self.manifest_text() + clean = [] + for line in raw.split("\n"): + fields = line.split() + if fields: + clean_fields = fields[:1] + [ + (re.sub(r'\+[^\d][^\+]*', '', x) + if re.match(util.keep_locator_pattern, x) + else x) + for x in fields[1:]] + clean += [' '.join(clean_fields), "\n"] + return ''.join(clean) + + +class CollectionReader(CollectionBase): + def __init__(self, manifest_locator_or_text, api_client=None, + keep_client=None, num_retries=0): + """Instantiate a CollectionReader. + + This class parses Collection manifests to provide a simple interface + to read its underlying files. + + Arguments: + * manifest_locator_or_text: One of a Collection UUID, portable data + hash, or full manifest text. + * api_client: The API client to use to look up Collections. If not + provided, CollectionReader will build one from available Arvados + configuration. + * keep_client: The KeepClient to use to download Collection data. + If not provided, CollectionReader will build one from available + Arvados configuration. + * num_retries: The default number of times to retry failed + service requests. Default 0. You may change this value + after instantiation, but note those changes may not + propagate to related objects like the Keep client. + """ self._api_client = api_client self._keep_client = keep_client - if re.match(r'[a-f0-9]{32}(\+\d+)?(\+\S+)*$', manifest_locator_or_text): + self.num_retries = num_retries + if re.match(util.keep_locator_pattern, manifest_locator_or_text): self._manifest_locator = manifest_locator_or_text self._manifest_text = None - elif re.match(r'[a-z0-9]{5}-[a-z0-9]{5}-[a-z0-9]{15}$', manifest_locator_or_text): + elif re.match(util.collection_uuid_pattern, manifest_locator_or_text): self._manifest_locator = manifest_locator_or_text self._manifest_text = None - elif re.match(r'((\S+)( +[a-f0-9]{32}(\+\d+)(\+\S+)*)+( +\d+:\d+:\S+)+$)+', manifest_locator_or_text, re.MULTILINE): + elif re.match(util.manifest_pattern, manifest_locator_or_text): self._manifest_text = manifest_locator_or_text self._manifest_locator = None else: raise errors.ArgumentError( "Argument to CollectionReader must be a manifest or a collection UUID") + self._api_response = None self._streams = None - def __enter__(self): - pass + def _populate_from_api_server(self): + # As in KeepClient itself, we must wait until the last + # possible moment to instantiate an API client, in order to + # avoid tripping up clients that don't have access to an API + # server. If we do build one, make sure our Keep client uses + # it. If instantiation fails, we'll fall back to the except + # clause, just like any other Collection lookup + # failure. Return an exception, or None if successful. + try: + if self._api_client is None: + self._api_client = arvados.api('v1') + self._keep_client = None # Make a new one with the new api. + self._api_response = self._api_client.collections().get( + uuid=self._manifest_locator).execute( + num_retries=self.num_retries) + self._manifest_text = self._api_response['manifest_text'] + return None + except Exception as e: + return e - def __exit__(self): - pass + def _populate_from_keep(self): + # Retrieve a manifest directly from Keep. This has a chance of + # working if [a] the locator includes a permission signature + # or [b] the Keep services are operating in world-readable + # mode. Return an exception, or None if successful. 
+ try: + self._manifest_text = self._my_keep().get( + self._manifest_locator, num_retries=self.num_retries) + except Exception as e: + return e def _populate(self): - if self._streams is not None: - return - if not self._manifest_text: - try: - # As in KeepClient itself, we must wait until the last possible - # moment to instantiate an API client, in order to avoid - # tripping up clients that don't have access to an API server. - # If we do build one, make sure our Keep client uses it. - # If instantiation fails, we'll fall back to the except clause, - # just like any other Collection lookup failure. - if self._api_client is None: - self._api_client = arvados.api('v1') - self._keep_client = KeepClient(api_client=self._api_client) - if self._keep_client is None: - self._keep_client = KeepClient(api_client=self._api_client) - c = self._api_client.collections().get( - uuid=self._manifest_locator).execute() - self._manifest_text = c['manifest_text'] - except Exception as e: - if not util.portable_data_hash_pattern.match( - self._manifest_locator): - raise - _logger.warning("API lookup failed for collection %s (%s: %s)", - self._manifest_locator, type(e), str(e)) - if self._keep_client is None: - self._keep_client = KeepClient(api_client=self._api_client) - self._manifest_text = self._keep_client.get(self._manifest_locator) - self._streams = [] - for stream_line in self._manifest_text.split("\n"): - if stream_line != '': - stream_tokens = stream_line.split() - self._streams += [stream_tokens] - self._streams = normalize(self) - - # now regenerate the manifest text based on the normalized stream - - #print "normalizing", self._manifest_text - self._manifest_text = ''.join([StreamReader(stream, keep=self._keep_client).manifest_text() for stream in self._streams]) - #print "result", self._manifest_text + error_via_api = None + error_via_keep = None + should_try_keep = ((self._manifest_text is None) and + util.keep_locator_pattern.match( + self._manifest_locator)) + if ((self._manifest_text is None) and + util.signed_locator_pattern.match(self._manifest_locator)): + error_via_keep = self._populate_from_keep() + if self._manifest_text is None: + error_via_api = self._populate_from_api_server() + if error_via_api is not None and not should_try_keep: + raise error_via_api + if ((self._manifest_text is None) and + not error_via_keep and + should_try_keep): + # Looks like a keep locator, and we didn't already try keep above + error_via_keep = self._populate_from_keep() + if self._manifest_text is None: + # Nothing worked! + raise arvados.errors.NotFoundError( + ("Failed to retrieve collection '{}' " + + "from either API server ({}) or Keep ({})." + ).format( + self._manifest_locator, + error_via_api, + error_via_keep)) + self._streams = [sline.split() + for sline in self._manifest_text.split("\n") + if sline] + + def _populate_first(orig_func): + # Decorator for methods that read actual Collection data. + @functools.wraps(orig_func) + def wrapper(self, *args, **kwargs): + if self._streams is None: + self._populate() + return orig_func(self, *args, **kwargs) + return wrapper + + @_populate_first + def api_response(self): + """api_response() -> dict or None + Returns information about this Collection fetched from the API server. + If the Collection exists in Keep but not the API server, currently + returns None. Future versions may provide a synthetic response. 
+ """ + return self._api_response + + @_populate_first + def normalize(self): + # Rearrange streams + streams = {} + for s in self.all_streams(): + for f in s.all_files(): + streamname, filename = split(s.name() + "/" + f.name()) + if streamname not in streams: + streams[streamname] = {} + if filename not in streams[streamname]: + streams[streamname][filename] = [] + for r in f.segments: + streams[streamname][filename].extend(s.locators_and_ranges(r.locator, r.range_size)) + + self._streams = [normalize_stream(s, streams[s]) + for s in sorted(streams)] + + # Regenerate the manifest text based on the normalized streams + self._manifest_text = ''.join( + [StreamReader(stream, keep=self._my_keep()).manifest_text() + for stream in self._streams]) + + @_populate_first + def open(self, streampath, filename=None): + """open(streampath[, filename]) -> file-like object + + Pass in the path of a file to read from the Collection, either as a + single string or as two separate stream name and file name arguments. + This method returns a file-like object to read that file. + """ + if filename is None: + streampath, filename = split(streampath) + keep_client = self._my_keep() + for stream_s in self._streams: + stream = StreamReader(stream_s, keep_client, + num_retries=self.num_retries) + if stream.name() == streampath: + break + else: + raise ValueError("stream '{}' not found in Collection". + format(streampath)) + try: + return stream.files()[filename] + except KeyError: + raise ValueError("file '{}' not found in Collection stream '{}'". + format(filename, streampath)) + @_populate_first def all_streams(self): - self._populate() - resp = [] - for s in self._streams: - resp.append(StreamReader(s, keep=self._keep_client)) - return resp + return [StreamReader(s, self._my_keep(), num_retries=self.num_retries) + for s in self._streams] def all_files(self): for s in self.all_streams(): for f in s.all_files(): yield f - def manifest_text(self, strip=False): - self._populate() - if strip: - m = ''.join([StreamReader(stream, keep=self._keep_client).manifest_text(strip=True) for stream in self._streams]) - return m + @_populate_first + def manifest_text(self, strip=False, normalize=False): + if normalize: + cr = CollectionReader(self.manifest_text()) + cr.normalize() + return cr.manifest_text(strip=strip, normalize=False) + elif strip: + return self.stripped_manifest() else: return self._manifest_text -class CollectionWriter(object): - KEEP_BLOCK_SIZE = 2**26 - def __init__(self, api_client=None): +class _WriterFile(ArvadosFileBase): + def __init__(self, coll_writer, name): + super(_WriterFile, self).__init__(name, 'wb') + self.dest = coll_writer + + def close(self): + super(_WriterFile, self).close() + self.dest.finish_current_file() + + @ArvadosFileBase._before_close + def write(self, data): + self.dest.write(data) + + @ArvadosFileBase._before_close + def writelines(self, seq): + for data in seq: + self.write(data) + + @ArvadosFileBase._before_close + def flush(self): + self.dest.flush_data() + + +class CollectionWriter(CollectionBase): + def __init__(self, api_client=None, num_retries=0): + """Instantiate a CollectionWriter. + + CollectionWriter lets you build a new Arvados Collection from scratch. + Write files to it. The CollectionWriter will upload data to Keep as + appropriate, and provide you with the Collection manifest text when + you're finished. + + Arguments: + * api_client: The API client to use to look up Collections. 
If not + provided, CollectionReader will build one from available Arvados + configuration. + * num_retries: The default number of times to retry failed + service requests. Default 0. You may change this value + after instantiation, but note those changes may not + propagate to related objects like the Keep client. + """ self._api_client = api_client + self.num_retries = num_retries self._keep_client = None self._data_buffer = [] self._data_buffer_len = 0 @@ -196,16 +297,11 @@ class CollectionWriter(object): self._queued_file = None self._queued_dirents = deque() self._queued_trees = deque() + self._last_open = None - def __enter__(self): - pass - - def __exit__(self): - self.finish() - - def _prep_keep_client(self): - if self._keep_client is None: - self._keep_client = KeepClient(api_client=self._api_client) + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + self.finish() def do_queued_work(self): # The work queue consists of three pieces: @@ -233,7 +329,7 @@ class CollectionWriter(object): def _work_file(self): while True: - buf = self._queued_file.read(self.KEEP_BLOCK_SIZE) + buf = self._queued_file.read(config.KEEP_BLOCK_SIZE) if not buf: break self.write(buf) @@ -262,10 +358,9 @@ class CollectionWriter(object): def _work_trees(self): path, stream_name, max_manifest_depth = self._queued_trees[0] - make_dirents = (util.listdir_recursive if (max_manifest_depth == 0) - else os.listdir) - d = make_dirents(path) - if len(d) > 0: + d = util.listdir_recursive( + path, max_depth = (None if max_manifest_depth == 0 else 0)) + if d: self._queue_dirents(stream_name, d) else: self._queued_trees.popleft() @@ -303,19 +398,47 @@ class CollectionWriter(object): for s in newdata: self.write(s) return - self._data_buffer += [newdata] + self._data_buffer.append(newdata) self._data_buffer_len += len(newdata) self._current_stream_length += len(newdata) - while self._data_buffer_len >= self.KEEP_BLOCK_SIZE: + while self._data_buffer_len >= config.KEEP_BLOCK_SIZE: self.flush_data() + def open(self, streampath, filename=None): + """open(streampath[, filename]) -> file-like object + + Pass in the path of a file to write to the Collection, either as a + single string or as two separate stream name and file name arguments. + This method returns a file-like object you can write to add it to the + Collection. + + You may only have one file object from the Collection open at a time, + so be sure to close the object when you're done. 
Using the object in + a with statement makes that easy:: + + with cwriter.open('./doc/page1.txt') as outfile: + outfile.write(page1_data) + with cwriter.open('./doc/page2.txt') as outfile: + outfile.write(page2_data) + """ + if filename is None: + streampath, filename = split(streampath) + if self._last_open and not self._last_open.closed: + raise errors.AssertionError( + "can't open '{}' when '{}' is still open".format( + filename, self._last_open.name)) + if streampath != self.current_stream_name(): + self.start_new_stream(streampath) + self.set_current_file_name(filename) + self._last_open = _WriterFile(self, filename) + return self._last_open + def flush_data(self): data_buffer = ''.join(self._data_buffer) - if data_buffer != '': - self._prep_keep_client() + if data_buffer: self._current_stream_locators.append( - self._keep_client.put(data_buffer[0:self.KEEP_BLOCK_SIZE])) - self._data_buffer = [data_buffer[self.KEEP_BLOCK_SIZE:]] + self._my_keep().put(data_buffer[0:config.KEEP_BLOCK_SIZE])) + self._data_buffer = [data_buffer[config.KEEP_BLOCK_SIZE:]] self._data_buffer_len = len(self._data_buffer[0]) def start_new_file(self, newfilename=None): @@ -327,13 +450,17 @@ class CollectionWriter(object): raise errors.AssertionError( "Manifest filenames cannot contain whitespace: %s" % newfilename) + elif re.search(r'\x00', newfilename): + raise errors.AssertionError( + "Manifest filenames cannot contain NUL characters: %s" % + newfilename) self._current_file_name = newfilename def current_file_name(self): return self._current_file_name def finish_current_file(self): - if self._current_file_name == None: + if self._current_file_name is None: if self._current_file_pos == self._current_stream_length: return raise errors.AssertionError( @@ -342,10 +469,12 @@ class CollectionWriter(object): (self._current_stream_length - self._current_file_pos, self._current_file_pos, self._current_stream_name)) - self._current_stream_files += [[self._current_file_pos, - self._current_stream_length - self._current_file_pos, - self._current_file_name]] + self._current_stream_files.append([ + self._current_file_pos, + self._current_stream_length - self._current_file_pos, + self._current_file_name]) self._current_file_pos = self._current_stream_length + self._current_file_name = None def start_new_stream(self, newstreamname='.'): self.finish_current_stream() @@ -363,18 +492,18 @@ class CollectionWriter(object): def finish_current_stream(self): self.finish_current_file() self.flush_data() - if len(self._current_stream_files) == 0: + if not self._current_stream_files: pass - elif self._current_stream_name == None: + elif self._current_stream_name is None: raise errors.AssertionError( "Cannot finish an unnamed stream (%d bytes in %d files)" % (self._current_stream_length, len(self._current_stream_files))) else: - if len(self._current_stream_locators) == 0: - self._current_stream_locators += [config.EMPTY_BLOCK_LOCATOR] - self._finished_streams += [[self._current_stream_name, - self._current_stream_locators, - self._current_stream_files]] + if not self._current_stream_locators: + self._current_stream_locators.append(config.EMPTY_BLOCK_LOCATOR) + self._finished_streams.append([self._current_stream_name, + self._current_stream_locators, + self._current_stream_files]) self._current_stream_files = [] self._current_stream_length = 0 self._current_stream_locators = [] @@ -384,23 +513,11 @@ class CollectionWriter(object): def finish(self): # Store the manifest in Keep and return its locator. 
- self._prep_keep_client() - return self._keep_client.put(self.manifest_text()) + return self._my_keep().put(self.manifest_text()) - def stripped_manifest(self): - """ - Return the manifest for the current collection with all permission - hints removed from the locators in the manifest. - """ - raw = self.manifest_text() - clean = '' - for line in raw.split("\n"): - fields = line.split() - if len(fields) > 0: - locators = [ re.sub(r'\+A[a-z0-9@_-]+', '', x) - for x in fields[1:-1] ] - clean += fields[0] + ' ' + ' '.join(locators) + ' ' + fields[-1] + "\n" - return clean + def portable_data_hash(self): + stripped = self.stripped_manifest() + return hashlib.md5(stripped).hexdigest() + '+' + str(len(stripped)) def manifest_text(self): self.finish_current_stream() @@ -414,10 +531,7 @@ class CollectionWriter(object): manifest += ' ' + ' '.join("%d:%d:%s" % (sfile[0], sfile[1], sfile[2].replace(' ', '\\040')) for sfile in stream[2]) manifest += "\n" - if len(manifest) > 0: - return CollectionReader(manifest).manifest_text() - else: - return "" + return manifest def data_locators(self): ret = [] @@ -433,9 +547,10 @@ class ResumableCollectionWriter(CollectionWriter): '_data_buffer', '_dependencies', '_finished_streams', '_queued_dirents', '_queued_trees'] - def __init__(self, api_client=None): + def __init__(self, api_client=None, num_retries=0): self._dependencies = {} - super(ResumableCollectionWriter, self).__init__(api_client) + super(ResumableCollectionWriter, self).__init__( + api_client, num_retries=num_retries) @classmethod def from_state(cls, state, *init_args, **init_kwargs): @@ -523,3 +638,413 @@ class ResumableCollectionWriter(CollectionWriter): raise errors.AssertionError( "resumable writer can't accept unsourced data") return super(ResumableCollectionWriter, self).write(data) + + +class Collection(CollectionBase): + def __init__(self, manifest_locator_or_text=None, parent=None, api_client=None, + keep_client=None, num_retries=0, block_manager=None): + + self.parent = parent + self._items = None + self._api_client = api_client + self._keep_client = keep_client + self._block_manager = block_manager + + self.num_retries = num_retries + self._manifest_locator = None + self._manifest_text = None + self._api_response = None + + if manifest_locator_or_text: + if re.match(util.keep_locator_pattern, manifest_locator_or_text): + self._manifest_locator = manifest_locator_or_text + elif re.match(util.collection_uuid_pattern, manifest_locator_or_text): + self._manifest_locator = manifest_locator_or_text + elif re.match(util.manifest_pattern, manifest_locator_or_text): + self._manifest_text = manifest_locator_or_text + else: + raise errors.ArgumentError( + "Argument to CollectionReader must be a manifest or a collection UUID") + + def _my_api(self): + if self._api_client is None: + if self.parent is not None: + return self.parent._my_api() + self._api_client = arvados.api('v1') + self._keep_client = None # Make a new one with the new api. 
+ return self._api_client + + def _my_keep(self): + if self._keep_client is None: + if self.parent is not None: + return self.parent._my_keep() + self._keep_client = KeepClient(api_client=self._my_api(), + num_retries=self.num_retries) + return self._keep_client + + def _my_block_manager(self): + if self._block_manager is None: + if self.parent is not None: + return self.parent._my_block_manager() + self._block_manager = BlockManager(self._my_keep()) + return self._block_manager + + def _populate_from_api_server(self): + # As in KeepClient itself, we must wait until the last + # possible moment to instantiate an API client, in order to + # avoid tripping up clients that don't have access to an API + # server. If we do build one, make sure our Keep client uses + # it. If instantiation fails, we'll fall back to the except + # clause, just like any other Collection lookup + # failure. Return an exception, or None if successful. + try: + self._api_response = self._my_api().collections().get( + uuid=self._manifest_locator).execute( + num_retries=self.num_retries) + self._manifest_text = self._api_response['manifest_text'] + return None + except Exception as e: + return e + + def _populate_from_keep(self): + # Retrieve a manifest directly from Keep. This has a chance of + # working if [a] the locator includes a permission signature + # or [b] the Keep services are operating in world-readable + # mode. Return an exception, or None if successful. + try: + self._manifest_text = self._my_keep().get( + self._manifest_locator, num_retries=self.num_retries) + except Exception as e: + return e + + def _populate(self): + self._items = {} + if self._manifest_locator is None and self._manifest_text is None: + return + error_via_api = None + error_via_keep = None + should_try_keep = ((self._manifest_text is None) and + util.keep_locator_pattern.match( + self._manifest_locator)) + if ((self._manifest_text is None) and + util.signed_locator_pattern.match(self._manifest_locator)): + error_via_keep = self._populate_from_keep() + if self._manifest_text is None: + error_via_api = self._populate_from_api_server() + if error_via_api is not None and not should_try_keep: + raise error_via_api + if ((self._manifest_text is None) and + not error_via_keep and + should_try_keep): + # Looks like a keep locator, and we didn't already try keep above + error_via_keep = self._populate_from_keep() + if self._manifest_text is None: + # Nothing worked! + raise arvados.errors.NotFoundError( + ("Failed to retrieve collection '{}' " + + "from either API server ({}) or Keep ({})." + ).format( + self._manifest_locator, + error_via_api, + error_via_keep)) + # populate + import_manifest(self._manifest_text, self) + + def _populate_first(orig_func): + # Decorator for methods that read actual Collection data. 
+ @functools.wraps(orig_func) + def wrapper(self, *args, **kwargs): + if self._items is None: + self._populate() + return orig_func(self, *args, **kwargs) + return wrapper + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.save(no_locator=True) + if self._block_manager is not None: + self._block_manager.stop_threads() + + @_populate_first + def find(self, path, create=False, create_collection=False): + p = path.split("/") + if p[0] == '.': + del p[0] + + if len(p) > 0: + item = self._items.get(p[0]) + if len(p) == 1: + # item must be a file + if item is None and create: + # create new file + if create_collection: + item = Collection(parent=self, num_retries=self.num_retries) + else: + item = ArvadosFile(self) + self._items[p[0]] = item + return item + else: + if item is None and create: + # create new collection + item = Collection(parent=self, num_retries=self.num_retries) + self._items[p[0]] = item + del p[0] + return item.find("/".join(p), create=create) + else: + return self + + @_populate_first + def api_response(self): + """api_response() -> dict or None + + Returns information about this Collection fetched from the API server. + If the Collection exists in Keep but not the API server, currently + returns None. Future versions may provide a synthetic response. + """ + return self._api_response + + def open(self, path, mode): + mode = mode.replace("b", "") + if len(mode) == 0 or mode[0] not in ("r", "w", "a"): + raise ArgumentError("Bad mode '%s'" % mode) + create = (mode != "r") + + f = self.find(path, create=create) + if f is None: + raise IOError((errno.ENOENT, "File not found")) + if not isinstance(f, ArvadosFile): + raise IOError((errno.EISDIR, "Path must refer to a file.")) + + if mode[0] == "w": + f.truncate(0) + + if mode == "r": + return ArvadosFileReader(f, path, mode) + else: + return ArvadosFileWriter(f, path, mode) + + @_populate_first + def modified(self): + for k,v in self._items.items(): + if v.modified(): + return True + return False + + @_populate_first + def set_unmodified(self): + for k,v in self._items.items(): + v.set_unmodified() + + @_populate_first + def __iter__(self): + return self._items.iterkeys() + + @_populate_first + def iterkeys(self): + return self._items.iterkeys() + + @_populate_first + def __getitem__(self, k): + return self._items[k] + + @_populate_first + def __contains__(self, k): + return k in self._items + + @_populate_first + def __len__(self): + return len(self._items) + + @_populate_first + def __delitem__(self, p): + del self._items[p] + + @_populate_first + def keys(self): + return self._items.keys() + + @_populate_first + def values(self): + return self._items.values() + + @_populate_first + def items(self): + return self._items.items() + + @_populate_first + def exists(self, path): + return self.find(path) != None + + @_populate_first + def remove(self, path): + p = path.split("/") + if p[0] == '.': + del p[0] + + if len(p) > 0: + item = self._items.get(p[0]) + if item is None: + raise IOError((errno.ENOENT, "File not found")) + if len(p) == 1: + del self._items[p[0]] + else: + del p[0] + item.remove("/".join(p)) + else: + raise IOError((errno.ENOENT, "File not found")) + + @_populate_first + def manifest_text(self, strip=False, normalize=False): + if self.modified() or self._manifest_text is None or normalize: + return export_manifest(self, stream_name=".", portable_locators=strip) + else: + if strip: + return self.stripped_manifest() + else: + return self._manifest_text + + def 
portable_data_hash(self): + stripped = self.manifest_text(strip=True) + return hashlib.md5(stripped).hexdigest() + '+' + str(len(stripped)) + + @_populate_first + def save(self, no_locator=False): + if self.modified(): + self._my_block_manager().commit_all() + self._my_keep().put(self.manifest_text(strip=True)) + if self._manifest_locator is not None and re.match(util.collection_uuid_pattern, self._manifest_locator): + self._api_response = self._my_api().collections().update( + uuid=self._manifest_locator, + body={'manifest_text': self.manifest_text(strip=False)} + ).execute( + num_retries=self.num_retries) + elif not no_locator: + raise AssertionError("Collection manifest_locator must be a collection uuid. Use save_as() for new collections.") + self.set_unmodified() + + @_populate_first + def save_as(self, name, owner_uuid=None, ensure_unique_name=False): + self._my_block_manager().commit_all() + self._my_keep().put(self.manifest_text(strip=True)) + body = {"manifest_text": self.manifest_text(strip=False), + "name": name} + if owner_uuid: + body["owner_uuid"] = owner_uuid + self._api_response = self._my_api().collections().create(ensure_unique_name=ensure_unique_name, body=body).execute(num_retries=self.num_retries) + self._manifest_locator = self._api_response["uuid"] + self.set_unmodified() + + @_populate_first + def rename(self, old, new): + old_path, old_fn = os.path.split(old) + old_col = self.find(path) + if old_col is None: + raise IOError((errno.ENOENT, "File not found")) + if not isinstance(old_p, Collection): + raise IOError((errno.ENOTDIR, "Parent in path is a file, not a directory")) + if old_fn in old_col: + new_path, new_fn = os.path.split(new) + new_col = self.find(new_path, create=True, create_collection=True) + if not isinstance(new_col, Collection): + raise IOError((errno.ENOTDIR, "Destination is a file, not a directory")) + ent = old_col[old_fn] + del old_col[old_fn] + ent.parent = new_col + new_col[new_fn] = ent + else: + raise IOError((errno.ENOENT, "File not found")) + +def import_manifest(manifest_text, into_collection=None, api_client=None, keep=None, num_retries=None): + if into_collection is not None: + if len(into_collection) > 0: + raise ArgumentError("Can only import manifest into an empty collection") + c = into_collection + else: + c = Collection(api_client=api_client, keep_client=keep, num_retries=num_retries) + + STREAM_NAME = 0 + BLOCKS = 1 + SEGMENTS = 2 + + stream_name = None + state = STREAM_NAME + + for n in re.finditer(r'(\S+)(\s+|$)', manifest_text): + tok = n.group(1) + sep = n.group(2) + + if state == STREAM_NAME: + # starting a new stream + stream_name = tok.replace('\\040', ' ') + blocks = [] + segments = [] + streamoffset = 0L + state = BLOCKS + continue + + if state == BLOCKS: + s = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', tok) + if s: + blocksize = long(s.group(1)) + blocks.append(Range(tok, streamoffset, blocksize)) + streamoffset += blocksize + else: + state = SEGMENTS + + if state == SEGMENTS: + s = re.search(r'^(\d+):(\d+):(\S+)', tok) + if s: + pos = long(s.group(1)) + size = long(s.group(2)) + name = s.group(3).replace('\\040', ' ') + f = c.find("%s/%s" % (stream_name, name), create=True) + f.add_segment(blocks, pos, size) + else: + # error! 
+ raise errors.SyntaxError("Invalid manifest format") + + if sep == "\n": + stream_name = None + state = STREAM_NAME + + c.set_unmodified() + return c + +def export_manifest(item, stream_name=".", portable_locators=False): + buf = "" + if isinstance(item, Collection): + stream = {} + sorted_keys = sorted(item.keys()) + for k in [s for s in sorted_keys if isinstance(item[s], ArvadosFile)]: + v = item[k] + st = [] + for s in v.segments: + loc = s.locator + if loc.startswith("bufferblock"): + loc = v.parent._my_block_manager()._bufferblocks[loc].locator() + if portable_locators: + loc = KeepLocator(loc).stripped() + st.append(LocatorAndRange(loc, locator_block_size(loc), + s.segment_offset, s.range_size)) + stream[k] = st + if stream: + buf += ' '.join(normalize_stream(stream_name, stream)) + buf += "\n" + for k in [s for s in sorted_keys if isinstance(item[s], Collection)]: + buf += export_manifest(item[k], stream_name=os.path.join(stream_name, k), portable_locators=portable_locators) + elif isinstance(item, ArvadosFile): + st = [] + for s in item.segments: + loc = s.locator + if loc.startswith("bufferblock"): + loc = item._bufferblocks[loc].calculate_locator() + if portable_locators: + loc = KeepLocator(loc).stripped() + st.append(LocatorAndRange(loc, locator_block_size(loc), + s.segment_offset, s.range_size)) + stream[stream_name] = st + buf += ' '.join(normalize_stream(stream_name, stream)) + buf += "\n" + return buf
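
A minimal usage sketch of the reader/writer open() interface introduced above. The collection UUID, file names, and data are placeholders (not values from this diff), and a configured Arvados client environment is assumed; treat it as an illustration of the new API surface, not part of the change itself.

    import arvados

    # Build a new collection, writing one file at a time through the
    # file-like object returned by CollectionWriter.open().
    cwriter = arvados.CollectionWriter(num_retries=3)
    with cwriter.open('./doc/page1.txt') as outfile:
        outfile.write('hello from page 1\n')
    manifest = cwriter.manifest_text()      # manifest text for the written data
    pdh = cwriter.portable_data_hash()      # content address of that manifest

    # Read a file back out of an existing collection (placeholder UUID).
    creader = arvados.CollectionReader('zzzzz-4zz18-xxxxxxxxxxxxxxx',
                                       num_retries=3)
    infile = creader.open('./doc/page1.txt')
    data = infile.read(65536)               # read up to 64 KiB of the file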