import os
import re
import errno
+import hashlib
import time
+import threading
from collections import deque
from stat import *
-from .arvfile import split, FileLikeObjectBase, ArvadosFile, ArvadosFileWriter, ArvadosFileReader, _BlockManager, synchronized, must_be_writable, SYNC_READONLY, SYNC_EXPLICIT, NoopLock
-from keep import *
-from .stream import StreamReader, normalize_stream, locator_block_size
-from .ranges import Range, LocatorAndRange
+from .arvfile import split, _FileLikeObjectBase, ArvadosFile, ArvadosFileWriter, ArvadosFileReader, _BlockManager, synchronized, must_be_writable, NoopLock
+from keep import KeepLocator, KeepClient
+from .stream import StreamReader
+from ._normalize_stream import normalize_stream
+from ._ranges import Range, LocatorAndRange
from .safeapi import ThreadSafeApiCache
import config
import errors
return self._keep_client
def stripped_manifest(self):
- """
+ """Get the manifest with locator hints stripped.
+
Return the manifest for the current collection with all
non-portable hints (i.e., permission signatures and other
hints other than size hints) removed from the locators.
return ''.join(clean)
-class _WriterFile(FileLikeObjectBase):
+class _WriterFile(_FileLikeObjectBase):
def __init__(self, coll_writer, name):
super(_WriterFile, self).__init__(name, 'wb')
self.dest = coll_writer
super(_WriterFile, self).close()
self.dest.finish_current_file()
- @FileLikeObjectBase._before_close
+ @_FileLikeObjectBase._before_close
def write(self, data):
self.dest.write(data)
- @FileLikeObjectBase._before_close
+ @_FileLikeObjectBase._before_close
def writelines(self, seq):
for data in seq:
self.write(data)
- @FileLikeObjectBase._before_close
+ @_FileLikeObjectBase._before_close
def flush(self):
self.dest.flush_data()
def _my_block_manager(self):
raise NotImplementedError()
- def _populate(self):
- raise NotImplementedError()
-
- def sync_mode(self):
+ def writable(self):
raise NotImplementedError()
def root_collection(self):
def notify(self, event, collection, name, item):
raise NotImplementedError()
+    def stream_name(self):
+        # Abstract: subclasses return the manifest stream name ("." for the
+        # root collection, "./sub/dir" style paths for subcollections).
+        raise NotImplementedError()
+
@must_be_writable
@synchronized
def find_or_create(self, path, create_type):
the path.
:create_type:
- One of `arvado.collection.FILE` or
- `arvado.collection.COLLECTION`. If the path is not found, and value
+ One of `arvados.collection.FILE` or
+ `arvados.collection.COLLECTION`. If the path is not found, and value
of create_type is FILE then create and return a new ArvadosFile for
the last path component. If COLLECTION, then create and return a new
Collection for the last path component.
"""
- pathcomponents = path.split("/")
- if pathcomponents[0] == '.':
- del pathcomponents[0]
-
- if pathcomponents and pathcomponents[0]:
+ pathcomponents = path.split("/", 1)
+ if pathcomponents[0]:
item = self._items.get(pathcomponents[0])
if len(pathcomponents) == 1:
- # item must be a file
if item is None:
# create new file
if create_type == COLLECTION:
self._items[pathcomponents[0]] = item
self._modified = True
self.notify(ADD, self, pathcomponents[0], item)
- del pathcomponents[0]
if isinstance(item, SynchronizedCollectionBase):
- return item.find_or_create("/".join(pathcomponents), create_type)
+ return item.find_or_create(pathcomponents[1], create_type)
else:
- raise errors.ArgumentError("Interior path components must be subcollection")
+ raise IOError((errno.ENOTDIR, "Interior path components must be subcollection"))
else:
return self
found.
"""
- pathcomponents = path.split("/")
- if pathcomponents[0] == '.':
- del pathcomponents[0]
+ if not path:
+ raise errors.ArgumentError("Parameter 'path' must not be empty.")
- if pathcomponents and pathcomponents[0]:
- item = self._items.get(pathcomponents[0])
- if len(pathcomponents) == 1:
- # item must be a file
- return item
- else:
- del pathcomponents[0]
- if isinstance(item, SynchronizedCollectionBase):
- return item.find("/".join(pathcomponents))
- else:
- raise errors.ArgumentError("Interior path components must be subcollection")
+ pathcomponents = path.split("/", 1)
+ item = self._items.get(pathcomponents[0])
+ if len(pathcomponents) == 1:
+ return item
else:
- return self
+ if isinstance(item, SynchronizedCollectionBase):
+ if pathcomponents[1]:
+ return item.find(pathcomponents[1])
+ else:
+ return item
+ else:
+ raise IOError((errno.ENOTDIR, "Interior path components must be subcollection"))
def mkdirs(path):
"""Recursive subcollection create.
"""
return self.find_or_create(path, COLLECTION)
- def open(self, path, mode):
+ def open(self, path, mode="r"):
"""Open a file-like object for access.
:path:
"""
mode = mode.replace("b", "")
if len(mode) == 0 or mode[0] not in ("r", "w", "a"):
- raise ArgumentError("Bad mode '%s'" % mode)
+ raise errors.ArgumentError("Bad mode '%s'" % mode)
create = (mode != "r")
- if create and self.sync_mode() == SYNC_READONLY:
+ if create and not self.writable():
raise IOError((errno.EROFS, "Collection is read only"))
if create:
if mode[0] == "w":
arvfile.truncate(0)
+ name = os.path.basename(path)
+
if mode == "r":
- return ArvadosFileReader(arvfile, path, mode, num_retries=self.num_retries)
+ return ArvadosFileReader(arvfile, name, mode, num_retries=self.num_retries)
else:
- return ArvadosFileWriter(arvfile, path, mode, num_retries=self.num_retries)
+ return ArvadosFileWriter(arvfile, name, mode, num_retries=self.num_retries)
@synchronized
def modified(self):
- """Test if the collection (or any subcollection or file) has been modified
- since it was created."""
+ """Test if the collection (or any subcollection or file) has been modified."""
if self._modified:
return True
for k,v in self._items.items():
"""Iterate over names of files and collections contained in this collection."""
return iter(self._items.keys())
- @synchronized
- def iterkeys(self):
- """Iterate over names of files and collections directly contained in this collection."""
- return self._items.keys()
-
@synchronized
def __getitem__(self, k):
- """Get a file or collection that is directly contained by this collection. If
- you want to search a path, use `find()` instead.
+ """Get a file or collection that is directly contained by this collection.
+
+ If you want to search a path, use `find()` instead.
+
"""
return self._items[k]
    @synchronized
    def __contains__(self, k):
-        """If there is a file or collection a directly contained by this collection
-        with name `k`."""
+        """Test if a file or collection named `k` is directly contained in this collection."""
        return k in self._items
@synchronized
:recursive:
Specify whether to remove non-empty subcollections (True), or raise an error (False).
"""
- pathcomponents = path.split("/")
- if pathcomponents[0] == '.':
- # Remove '.' from the front of the path
- del pathcomponents[0]
- if len(pathcomponents) > 0:
- item = self._items.get(pathcomponents[0])
- if item is None:
- raise IOError((errno.ENOENT, "File not found"))
- if len(pathcomponents) == 1:
- if isinstance(self._items[pathcomponents[0]], SynchronizedCollectionBase) and len(self._items[pathcomponents[0]]) > 0 and not recursive:
- raise IOError((errno.ENOTEMPTY, "Subcollection not empty"))
- deleteditem = self._items[pathcomponents[0]]
- del self._items[pathcomponents[0]]
- self._modified = True
- self.notify(DEL, self, pathcomponents[0], deleteditem)
- else:
- del pathcomponents[0]
- item.remove("/".join(pathcomponents))
- else:
+ if not path:
+ raise errors.ArgumentError("Parameter 'path' must not be empty.")
+
+ pathcomponents = path.split("/", 1)
+ item = self._items.get(pathcomponents[0])
+ if item is None:
raise IOError((errno.ENOENT, "File not found"))
+ if len(pathcomponents) == 1:
+ if isinstance(self._items[pathcomponents[0]], SynchronizedCollectionBase) and len(self._items[pathcomponents[0]]) > 0 and not recursive:
+ raise IOError((errno.ENOTEMPTY, "Subcollection not empty"))
+ deleteditem = self._items[pathcomponents[0]]
+ del self._items[pathcomponents[0]]
+ self._modified = True
+ self.notify(DEL, self, pathcomponents[0], deleteditem)
+ else:
+ item.remove(pathcomponents[1])
- def _cloneinto(self, target):
- for k,v in self._items.items():
- target._items[k] = v.clone(target)
+    def _clonefrom(self, source):
+        # Deep-copy every child of `source` into this collection, re-parenting
+        # each clone to self.
+        for k,v in source.items():
+            self._items[k] = v.clone(self)
def clone(self):
raise NotImplementedError()
+    @must_be_writable
+    @synchronized
+    def add(self, source_obj, target_name, overwrite=False):
+        """Copy a file or subcollection to this collection.
+
+        :source_obj:
+          An ArvadosFile, or Subcollection object
+
+        :target_name:
+          Destination item name.  If the target name already exists and is a
+          file, this will raise an error unless you specify `overwrite=True`.
+
+        :overwrite:
+          Whether to overwrite target file if it already exists.
+
+        """
+
+        if target_name in self and not overwrite:
+            raise IOError((errno.EEXIST, "File already exists"))
+
+        # Remember the item being replaced (if any) so the MOD notification
+        # below can report both the old and the new objects.
+        modified_from = None
+        if target_name in self:
+            modified_from = self[target_name]
+
+        # Actually make the copy.
+        dup = source_obj.clone(self)
+        self._items[target_name] = dup
+        self._modified = True
+
+        if modified_from:
+            self.notify(MOD, self, target_name, (modified_from, dup))
+        else:
+            self.notify(ADD, self, target_name, dup)
+
+
@must_be_writable
@synchronized
def copy(self, source, target_path, source_collection=None, overwrite=False):
"""Copy a file or subcollection to a new path in this collection.
:source:
- An ArvadosFile, Subcollection, or string with a path to source file or subcollection
+ A string with a path to source file or subcollection, or an actual ArvadosFile or Subcollection object.
:target_path:
Destination file or path. If the target path already exists and is a
target_dir = self.find_or_create("/".join(targetcomponents[0:-1]), COLLECTION)
- with target_dir.lock:
- if target_name in target_dir:
- if isinstance(target_dir[target_name], SynchronizedCollectionBase) and sourcecomponents:
- target_dir = target_dir[target_name]
- target_name = sourcecomponents[-1]
- elif not overwrite:
- raise IOError((errno.EEXIST, "File already exists"))
-
- modified_from = None
- if target_name in target_dir:
- modified_from = target_dir[target_name]
+ if target_name in target_dir and isinstance(self[target_name], SynchronizedCollectionBase) and sourcecomponents:
+ target_dir = target_dir[target_name]
+ target_name = sourcecomponents[-1]
- # Actually make the copy.
- dup = source_obj.clone(target_dir)
- target_dir._items[target_name] = dup
- target_dir._modified = True
-
- if modified_from:
- self.notify(MOD, target_dir, target_name, (modified_from, dup))
- else:
- self.notify(ADD, target_dir, target_name, dup)
+ target_dir.add(source_obj, target_name, overwrite)
@synchronized
def manifest_text(self, stream_name=".", strip=False, normalize=False):
"""
- portable_locators = strip
if self.modified() or self._manifest_text is None or normalize:
item = self
stream = {}
+ buf = []
sorted_keys = sorted(item.keys())
for filename in [s for s in sorted_keys if isinstance(item[s], ArvadosFile)]:
# Create a stream per file `k`
loc = segment.locator
if arvfile.parent._my_block_manager().is_bufferblock(loc):
loc = arvfile.parent._my_block_manager().get_bufferblock(loc).locator()
- if portable_locators:
+ if strip:
loc = KeepLocator(loc).stripped()
- filestream.append(LocatorAndRange(loc, locator_block_size(loc),
+ filestream.append(LocatorAndRange(loc, KeepLocator(loc).size,
segment.segment_offset, segment.range_size))
stream[filename] = filestream
if stream:
- buf += ' '.join(normalize_stream(stream_name, stream))
- buf += "\n"
+ buf.append(" ".join(normalize_stream(stream_name, stream)) + "\n")
for dirname in [s for s in sorted_keys if isinstance(item[s], SynchronizedCollectionBase)]:
- buf += item[dirname].manifest_text(stream_name=os.path.join(stream_name, dirname), portable_locators=portable_locators)
- return buf
+ buf.append(item[dirname].manifest_text(stream_name=os.path.join(stream_name, dirname), strip=strip))
+ return "".join(buf)
else:
- if portable_locators:
+ if strip:
return self.stripped_manifest()
else:
return self._manifest_text
@synchronized
def diff(self, end_collection, prefix=".", holding_collection=None):
- """
- Generate list of add/modify/delete actions which, when given to `apply`, will
- change `self` to match `end_collection`
+ """Generate list of add/modify/delete actions.
+
+    When given to `apply`, these actions will change `self` to match `end_collection`.
+
"""
changes = []
if holding_collection is None:
- holding_collection = Collection(api_client=self._my_api(), keep_client=self._my_keep(), sync=SYNC_EXPLICIT)
+ holding_collection = Collection(api_client=self._my_api(), keep_client=self._my_keep())
for k in self:
if k not in end_collection:
changes.append((DEL, os.path.join(prefix, k), self[k].clone(holding_collection)))
class Collection(SynchronizedCollectionBase):
- """Represents the root of an Arvados Collection, which may be associated with
- an API server Collection record.
+ """Represents the root of an Arvados Collection.
+
+ This class is threadsafe. The root collection object, all subcollections
+ and files are protected by a single lock (i.e. each access locks the entire
+ collection).
- Brief summary of useful methods:
+    Brief summary of useful methods:
:To read an existing file:
`c.open("myfile", "r")`
:To merge remote changes into this object:
`c.update()`
- This class is threadsafe. The root collection object, all subcollections
- and files are protected by a single lock (i.e. each access locks the entire
- collection).
+ Must be associated with an API server Collection record (during
+ initialization, or using `save_new`) to use `save` or `update`
"""
def __init__(self, manifest_locator_or_text=None,
- parent=None,
- apiconfig=None,
api_client=None,
keep_client=None,
num_retries=None,
+ parent=None,
+ apiconfig=None,
block_manager=None):
"""Collection constructor.
else:
self._config = config.settings()
- self.num_retries = num_retries if num_retries is not None else 2
+ self.num_retries = num_retries if num_retries is not None else 0
self._manifest_locator = None
self._manifest_text = None
self._api_response = None
- self._sync = SYNC_EXPLICIT
- if not self.lock:
- self.lock = threading.RLock()
+ self.lock = threading.RLock()
self.callbacks = []
self.events = None
raise errors.ArgumentError(
"Argument to CollectionReader must be a manifest or a collection UUID")
- self._populate()
-
+ try:
+ self._populate()
+ except (IOError, errors.SyntaxError) as e:
+ raise errors.ArgumentError("Error processing manifest text: %s", e)
def root_collection(self):
return self
- def sync_mode(self):
- return self._sync
+    def stream_name(self):
+        # The root collection's manifest stream name is always ".".
+        return "."
+
+    def writable(self):
+        # A root Collection object is always writable.
+        return True
@synchronized
@retry_method
def update(self, other=None, num_retries=None):
- """Fetch the latest collection record on the API server and merge it with the
- current collection contents.
+ """Merge the latest collection on the API server with the current collection."""
- """
if other is None:
if self._manifest_locator is None:
raise errors.ArgumentError("`other` is None but collection does not have a manifest_locator uuid")
response = self._my_api().collections().get(uuid=self._manifest_locator).execute(num_retries=num_retries)
- other = import_manifest(response["manifest_text"])
- baseline = import_manifest(self._manifest_text)
+ other = CollectionReader(response["manifest_text"])
+ baseline = CollectionReader(self._manifest_text)
self.apply(baseline.diff(other))
@synchronized
if self._api_client is None:
self._my_api()
else:
- self._keep_client = KeepClient(api=self._api_client)
+ self._keep_client = KeepClient(api_client=self._api_client)
return self._keep_client
@synchronized
error_via_keep = self._populate_from_keep()
if self._manifest_text is None:
# Nothing worked!
- raise arvados.errors.NotFoundError(
+ raise errors.NotFoundError(
("Failed to retrieve collection '{}' " +
"from either API server ({}) or Keep ({})."
).format(
error_via_keep))
# populate
self._baseline_manifest = self._manifest_text
- import_manifest(self._manifest_text, self)
+ self._import_manifest(self._manifest_text)
def _has_collection_uuid(self):
def __exit__(self, exc_type, exc_value, traceback):
"""Support scoped auto-commit in a with: block."""
- if self._sync != SYNC_READONLY and self._has_collection_uuid():
- self.save()
+ if exc_type is not None:
+ if self.writable() and self._has_collection_uuid():
+ self.save()
if self._block_manager is not None:
self._block_manager.stop_threads()
@synchronized
- def clone(self, new_parent=None, new_sync=SYNC_READONLY, new_config=None):
+    def manifest_locator(self):
+        """Get the manifest locator associated with this collection.
+
+        May be None when the collection was built from manifest text alone.
+        """
+        return self._manifest_locator
+
+ @synchronized
+ def clone(self, new_parent=None, readonly=False, new_config=None):
if new_config is None:
new_config = self._config
- newcollection = Collection(parent=new_parent, apiconfig=new_config, sync=SYNC_EXPLICIT)
- if new_sync == SYNC_READONLY:
- newcollection.lock = NoopLock()
- self._cloneinto(newcollection)
- newcollection._sync = new_sync
+ if readonly:
+ newcollection = CollectionReader(parent=new_parent, apiconfig=new_config)
+ else:
+ newcollection = Collection(parent=new_parent, apiconfig=new_config)
+
+ newcollection._clonefrom(self)
return newcollection
@synchronized
"""
return self._api_response
+    def find_or_create(self, path, create_type):
+        """See `SynchronizedCollectionBase.find_or_create`"""
+        # "." means the root collection itself; a leading "./" is stripped
+        # before delegating to the base-class implementation.
+        if path == ".":
+            return self
+        else:
+            return super(Collection, self).find_or_create(path[2:] if path.startswith("./") else path, create_type)
+
+    def find(self, path):
+        """See `SynchronizedCollectionBase.find`"""
+        # Same "." / "./" normalization as find_or_create above.
+        if path == ".":
+            return self
+        else:
+            return super(Collection, self).find(path[2:] if path.startswith("./") else path)
+
+    def remove(self, path, recursive=False):
+        """See `SynchronizedCollectionBase.remove`"""
+        # Removing the root collection itself is not meaningful.
+        if path == ".":
+            raise errors.ArgumentError("Cannot remove '.'")
+        else:
+            return super(Collection, self).remove(path[2:] if path.startswith("./") else path, recursive)
+
+
@must_be_writable
@synchronized
@retry_method
def save(self, merge=True, num_retries=None):
- """Commit pending buffer blocks to Keep, merge with remote record (if
+ """Save collection to an existing collection record.
+
+ Commit pending buffer blocks to Keep, merge with remote record (if
update=True), write the manifest to Keep, and update the collection
record.
the API server. If you want to save a manifest to Keep only, see
`save_new()`.
- :update:
+ :merge:
Update and merge remote changes before saving. Otherwise, any
remote changes will be ignored and overwritten.
+ :num_retries:
+ Retry count on API calls (if None, use the collection default)
+
"""
if self.modified():
if not self._has_collection_uuid():
@synchronized
@retry_method
def save_new(self, name=None, create_collection_record=True, owner_uuid=None, ensure_unique_name=False, num_retries=None):
- """Commit pending buffer blocks to Keep, write the manifest to Keep, and create
- a new collection record (if create_collection_record True).
+ """Save collection to a new collection record.
+ Commit pending buffer blocks to Keep, write the manifest to Keep, and
+ create a new collection record (if create_collection_record True).
After creating a new collection record, this Collection object will be
associated with the new record used by `save()`.
:name:
The collection name.
- :keep_only:
- Only save the manifest to keep, do not create a collection record.
+ :create_collection_record:
+ If True, create a collection record. If False, only save the manifest to keep.
:owner_uuid:
the user, or project uuid that will own this collection.
if it conflicts with a collection with the same name and owner. If
False, a name conflict will result in an error.
+ :num_retries:
+ Retry count on API calls (if None, use the collection default)
+
"""
self._my_block_manager().commit_all()
self._my_keep().put(self.manifest_text(strip=True), num_retries=num_retries)
if len(self) > 0:
raise ArgumentError("Can only import manifest into an empty collection")
- into_collection = self
- save_sync = into_collection.sync_mode()
- into_collection._sync = None
-
STREAM_NAME = 0
BLOCKS = 1
SEGMENTS = 2
stream_name = None
state = STREAM_NAME
- for n in re.finditer(r'(\S+)(\s+|$)', manifest_text):
- tok = n.group(1)
- sep = n.group(2)
+ for token_and_separator in re.finditer(r'(\S+)(\s+|$)', manifest_text):
+ tok = token_and_separator.group(1)
+ sep = token_and_separator.group(2)
if state == STREAM_NAME:
# starting a new stream
continue
if state == BLOCKS:
- s = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', tok)
- if s:
- blocksize = long(s.group(1))
+ block_locator = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', tok)
+ if block_locator:
+ blocksize = long(block_locator.group(1))
blocks.append(Range(tok, streamoffset, blocksize))
streamoffset += blocksize
else:
state = SEGMENTS
if state == SEGMENTS:
- s = re.search(r'^(\d+):(\d+):(\S+)', tok)
- if s:
- pos = long(s.group(1))
- size = long(s.group(2))
- name = s.group(3).replace('\\040', ' ')
- f = into_collection.find_or_create("%s/%s" % (stream_name, name), FILE)
- f.add_segment(blocks, pos, size)
+ file_segment = re.search(r'^(\d+):(\d+):(\S+)', tok)
+ if file_segment:
+ pos = long(file_segment.group(1))
+ size = long(file_segment.group(2))
+ name = file_segment.group(3).replace('\\040', ' ')
+ filepath = os.path.join(stream_name, name)
+ afile = self.find_or_create(filepath, FILE)
+ if isinstance(afile, ArvadosFile):
+ afile.add_segment(blocks, pos, size)
+ else:
+ raise errors.SyntaxError("File %s conflicts with stream of the same name.", filepath)
else:
# error!
raise errors.SyntaxError("Invalid manifest format")
stream_name = None
state = STREAM_NAME
- into_collection.set_unmodified()
- into_collection._sync = save_sync
+ self.set_unmodified()
class Subcollection(SynchronizedCollectionBase):
def __init__(self, parent):
super(Subcollection, self).__init__(parent)
self.lock = self.root_collection().lock
+ self._manifest_text = None
def root_collection(self):
return self.parent.root_collection()
- def sync_mode(self):
- return self.root_collection().sync_mode()
+ def writable(self):
+ return self.root_collection().writable()
def _my_api(self):
return self.root_collection()._my_api()
def _my_block_manager(self):
return self.root_collection()._my_block_manager()
- def _populate(self):
- self.root_collection()._populate()
-
def notify(self, event, collection, name, item):
return self.root_collection().notify(event, collection, name, item)
+    def stream_name(self):
+        # Derive this subcollection's stream name by locating ourselves among
+        # the parent's items and prefixing the parent's stream name.
+        for k, v in self.parent.items():
+            if v is self:
+                return os.path.join(self.parent.stream_name(), k)
+        # Not reachable through the parent (detached); fall back to root name.
+        return '.'
+
@synchronized
def clone(self, new_parent):
c = Subcollection(new_parent)
- self._cloneinto(c)
+ c._clonefrom(self)
return c
class CollectionReader(Collection):
- """A read-only collection object from an api collection record locator,
- a portable data hash of a manifest, or raw manifest text.
+ """A read-only collection object.
- See `Collection` constructor for detailed options.
+ Initialize from an api collection record locator, a portable data hash of a
+ manifest, or raw manifest text. See `Collection` constructor for detailed
+ options.
"""
- def __init__(self, *args, **kwargs):
- if not args and not kwargs.get("manifest_locator_or_text"):
- raise errors.ArgumentError("Must provide manifest locator or text to initialize ReadOnlyCollection")
+ def __init__(self, manifest_locator_or_text, *args, **kwargs):
+ self._in_init = True
+ super(CollectionReader, self).__init__(manifest_locator_or_text, *args, **kwargs)
+ self._in_init = False
# Forego any locking since it should never change once initialized.
self.lock = NoopLock()
- super(ReadOnlyCollection, self).__init__(*args, **kwargs)
+ # Backwards compatability with old CollectionReader
+ # all_streams() and all_files()
+ self._streams = None
+
+    def writable(self):
+        # Writable only while __init__ is populating from the manifest;
+        # afterwards the reader is read-only.
+        return self._in_init
+
+    def _populate_streams(orig_func):
+        # Decorator (not an instance method): lazily builds self._streams from
+        # the manifest text the first time a legacy stream accessor is called.
+        # NOTE(review): relies on `functools`, which is not imported in this
+        # patch — confirm `import functools` exists at module top.
+        @functools.wraps(orig_func)
+        def populate_streams_wrapper(self, *args, **kwargs):
+            # Defer populating self._streams until needed since it creates a copy of the manifest.
+            if self._streams is None:
+                if self._manifest_text:
+                    self._streams = [sline.split()
+                                     for sline in self._manifest_text.split("\n")
+                                     if sline]
+                else:
+                    self._streams = []
+            return orig_func(self, *args, **kwargs)
+        return populate_streams_wrapper
- self._sync = SYNC_READONLY
+ @_populate_streams
+ def normalize(self):
+ """Normalize the streams returned by `all_streams`.
- self._streams = [sline.split()
- for sline in self._manifest_text.split("\n")
- if sline]
+ This method is kept for backwards compatability and only affects the
+ behavior of `all_streams()` and `all_files()`
+
+ """
- def normalize(self):
# Rearrange streams
streams = {}
for s in self.all_streams():
self._streams = [normalize_stream(s, streams[s])
for s in sorted(streams)]
-
+ @_populate_streams
def all_streams(self):
return [StreamReader(s, self._my_keep(), num_retries=self.num_retries)
for s in self._streams]
+ @_populate_streams
def all_files(self):
for s in self.all_streams():
for f in s.all_files():