4084: Merge branch 'master' into 4084-log-pane-refresh-TC
diff --git a/sdk/python/arvados/collection.py b/sdk/python/arvados/collection.py
index 64b2aa706b499a04db4e645b67456bf523abbd33..3a90d6d3b0fb64d2ff49f4262b6e6c5ff3924cc9 100644
@@ -51,7 +51,7 @@ def normalize_stream(s, stream):
         fout = f.replace(' ', '\\040')
         for segment in stream[f]:
             segmentoffset = blocks[segment[arvados.LOCATOR]] + segment[arvados.OFFSET]
-            if current_span == None:
+            if current_span is None:
                 current_span = [segmentoffset, segmentoffset + segment[arvados.SEGMENTSIZE]]
             else:
                 if segmentoffset == current_span[1]:
@@ -60,10 +60,10 @@ def normalize_stream(s, stream):
                     stream_tokens.append("{0}:{1}:{2}".format(current_span[0], current_span[1] - current_span[0], fout))
                     current_span = [segmentoffset, segmentoffset + segment[arvados.SEGMENTSIZE]]
 
-        if current_span != None:
+        if current_span is not None:
             stream_tokens.append("{0}:{1}:{2}".format(current_span[0], current_span[1] - current_span[0], fout))
 
-        if len(stream[f]) == 0:
+        if not stream[f]:
             stream_tokens.append("0:0:{0}".format(fout))
 
     return stream_tokens
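
The `is None` / `is not None` comparisons above (and the `not stream[f]` emptiness test) follow the standard Python idiom: `==` can be redefined by a class's `__eq__`, while `is` compares identity, and empty containers are simply falsy. A minimal illustration, with a made-up class name:

    class AlwaysEqual(object):
        def __eq__(self, other):
            return True            # claims equality with everything, including None

    x = AlwaysEqual()
    x == None   # True, even though x is a real object
    x is None   # False -- identity comparison cannot be overridden
    not []      # True -- idiomatic replacement for len(seq) == 0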
@@ -89,14 +89,18 @@ class CollectionBase(object):
         hints other than size hints) removed from the locators.
         """
         raw = self.manifest_text()
-        clean = ''
+        clean = []
         for line in raw.split("\n"):
             fields = line.split()
-            if len(fields) > 0:
-                locators = [ (re.sub(r'\+[^\d][^\+]*', '', x) if re.match(util.keep_locator_pattern, x) else x)
-                             for x in fields[1:-1] ]
-                clean += fields[0] + ' ' + ' '.join(locators) + ' ' + fields[-1] + "\n"
-        return clean
+            if fields:
+                clean_fields = fields[:1] + [
+                    (re.sub(r'\+[^\d][^\+]*', '', x)
+                     if re.match(util.keep_locator_pattern, x)
+                     else x)
+                    for x in fields[1:]]
+                clean += [' '.join(clean_fields), "\n"]
+        return ''.join(clean)
+
 
 class CollectionReader(CollectionBase):
     def __init__(self, manifest_locator_or_text, api_client=None,
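
Collecting the cleaned manifest pieces in a list and calling `''.join()` once is the usual way to avoid re-copying a growing string on every `+=`; the regex still drops any locator hint that does not start with a digit, i.e. everything but the size hint. A small illustration (the `+Khint` suffix is invented; the hash is the MD5 of an empty file):

    import re
    re.sub(r'\+[^\d][^\+]*', '', 'd41d8cd98f00b204e9800998ecf8427e+0+Khint')
    # -> 'd41d8cd98f00b204e9800998ecf8427e+0'

    parts = []
    for n in range(3):
        parts += ['0:0:file%d' % n, '\n']   # cheap list appends inside the loop
    text = ''.join(parts)                   # one concatenation at the end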
@@ -181,7 +185,7 @@ class CollectionReader(CollectionBase):
             error_via_keep = self._populate_from_keep()
         if not self._manifest_text:
             error_via_api = self._populate_from_api_server()
-            if error_via_api != None and not should_try_keep:
+            if error_via_api is not None and not should_try_keep:
                 raise error_via_api
         if (not self._manifest_text and
             not error_via_keep and
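
For context, `_populate()` keeps the first error instead of raising it immediately and only surfaces it once the fallback cannot help either. The shape of that pattern, reduced to a sketch with hypothetical fetch callables (not the real `_populate()` logic):

    def fetch_with_fallback(sources):
        first_error = None
        for fetch in sources:
            try:
                result = fetch()
                if result is not None:
                    return result
            except Exception as exc:
                if first_error is None:
                    first_error = exc      # remember it, keep trying
        if first_error is not None:
            raise first_error              # every source failed
        return None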
@@ -219,16 +223,13 @@ class CollectionReader(CollectionBase):
                 for r in f.segments:
                     streams[streamname][filename].extend(s.locators_and_ranges(r[0], r[1]))
 
-        self._streams = []
-        sortedstreams = list(streams.keys())
-        sortedstreams.sort()
-        for s in sortedstreams:
-            self._streams.append(normalize_stream(s, streams[s]))
+        self._streams = [normalize_stream(s, streams[s])
+                         for s in sorted(streams)]
 
         # Regenerate the manifest text based on the normalized streams
-        self._manifest_text = ''.join([StreamReader(stream, keep=self._my_keep()).manifest_text() for stream in self._streams])
-
-        return self
+        self._manifest_text = ''.join(
+            [StreamReader(stream, keep=self._my_keep()).manifest_text()
+             for stream in self._streams])
 
     def all_streams(self):
         self._populate()
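
`sorted(streams)` iterates the dict's keys in sorted order, so the copy-sort-append loop collapses into a single comprehension; with `return self` gone, `normalize()` is now called purely for its side effect (as `manifest_text(normalize=True)` below does). The key-iteration idiom in isolation:

    streams = {'./sub': ['tokens2'], '.': ['tokens1']}
    [name for name in sorted(streams)]   # ['.', './sub'] -- keys, in sorted order
    # equivalent to the removed form:
    names = list(streams.keys())
    names.sort()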
@@ -240,8 +241,12 @@ class CollectionReader(CollectionBase):
             for f in s.all_files():
                 yield f
 
-    def manifest_text(self, strip=False):
-        if strip:
+    def manifest_text(self, strip=False, normalize=False):
+        if normalize:
+            cr = CollectionReader(self.manifest_text())
+            cr.normalize()
+            return cr.manifest_text(strip=strip, normalize=False)
+        elif strip:
             return self.stripped_manifest()
         else:
             self._populate()
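
The new `normalize=True` path builds a scratch CollectionReader from the current text, normalizes that copy, and returns its manifest, leaving this reader's cached text untouched. Typical use would look roughly like this (the input value is a placeholder):

    reader = CollectionReader(manifest_locator_or_text)   # placeholder input
    raw = reader.manifest_text()                          # manifest as stored
    tidy = reader.manifest_text(normalize=True)           # streams merged and sorted
    bare = reader.manifest_text(strip=True)               # locators reduced to hash+size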
@@ -343,10 +348,9 @@ class CollectionWriter(CollectionBase):
 
     def _work_trees(self):
         path, stream_name, max_manifest_depth = self._queued_trees[0]
-        make_dirents = (util.listdir_recursive if (max_manifest_depth == 0)
-                        else os.listdir)
-        d = make_dirents(path)
-        if len(d) > 0:
+        d = util.listdir_recursive(
+            path, max_depth = (None if max_manifest_depth == 0 else 0))
+        if d:
             self._queue_dirents(stream_name, d)
         else:
             self._queued_trees.popleft()
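
This rewrite assumes `util.listdir_recursive()` accepts a `max_depth` argument: `None` recurses without limit (the old `util.listdir_recursive` branch for `max_manifest_depth == 0`), while `0` lists only the immediate entries (the old `os.listdir` branch). A sketch of what such a helper could look like, not the actual util implementation:

    import os

    def listdir_recursive(dirname, base=None, max_depth=None):
        allfiles = []
        for ent in sorted(os.listdir(dirname)):
            ent_path = os.path.join(dirname, ent)
            ent_base = os.path.join(base, ent) if base else ent
            if os.path.isdir(ent_path) and (max_depth is None or max_depth > 0):
                allfiles += listdir_recursive(
                    ent_path, base=ent_base,
                    max_depth=(None if max_depth is None else max_depth - 1))
            else:
                allfiles.append(ent_base)   # files, or dirs at the depth limit
        return allfiles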
@@ -413,7 +417,7 @@ class CollectionWriter(CollectionBase):
         return self._current_file_name
 
     def finish_current_file(self):
-        if self._current_file_name == None:
+        if self._current_file_name is None:
             if self._current_file_pos == self._current_stream_length:
                 return
             raise errors.AssertionError(
@@ -484,10 +488,7 @@ class CollectionWriter(CollectionBase):
             manifest += ' ' + ' '.join("%d:%d:%s" % (sfile[0], sfile[1], sfile[2].replace(' ', '\\040')) for sfile in stream[2])
             manifest += "\n"
 
-        if manifest:
-            return manifest
-        else:
-            return ""
+        return manifest
 
     def data_locators(self):
         ret = []
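
The removed `if manifest:` branch above was a no-op: `manifest` is always a str at that point, and an empty str is already `""`, so both paths returned the same value:

    manifest = ''
    (manifest if manifest else "") == manifest   # True for any str value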