10 from .arvfile import StreamFileReader
11 from arvados.retry import retry_method
def locator_block_size(loc):
    """Return the block size encoded in a Keep locator.

    A locator looks like '<md5 hex digest>+<size>[+hints...]'; the size
    is the decimal field immediately after the digest.  Raises
    AttributeError if 'loc' does not match that shape.
    """
    match = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', loc)
    return long(match.group(1))
20 def normalize_stream(s, stream):
23 stream is a dict mapping each filename to a list in the form [block locator, block size, segment offset (from beginning of block), segment size]
24 returns the stream as a list of tokens
27 sortedfiles = list(stream.keys())
32 # Go through each file and add each referenced block exactly once.
35 if b.locator not in blocks:
36 stream_tokens.append(b.locator)
37 blocks[b.locator] = streamoffset
38 streamoffset += locator_block_size(b.locator)
40 # Add the empty block if the stream is otherwise empty.
41 if len(stream_tokens) == 1:
42 stream_tokens.append(config.EMPTY_BLOCK_LOCATOR)
45 # Add in file segments
47 fout = f.replace(' ', '\\040')
48 for segment in stream[f]:
49 # Collapse adjacent segments
50 streamoffset = blocks[segment.locator] + segment.segment_offset
51 if current_span is None:
52 current_span = [streamoffset, streamoffset + segment.segment_size]
54 if streamoffset == current_span[1]:
55 current_span[1] += segment.segment_size
57 stream_tokens.append("{0}:{1}:{2}".format(current_span[0], current_span[1] - current_span[0], fout))
58 current_span = [streamoffset, streamoffset + segment.segment_size]
60 if current_span is not None:
61 stream_tokens.append("{0}:{1}:{2}".format(current_span[0], current_span[1] - current_span[0], fout))
64 stream_tokens.append("0:0:{0}".format(fout))
69 class StreamReader(object):
70 def __init__(self, tokens, keep=None, debug=False, _empty=False,
72 self._stream_name = None
73 self._data_locators = []
74 self._files = collections.OrderedDict()
76 self.num_retries = num_retries
82 if debug: print 'tok', tok
83 if self._stream_name is None:
84 self._stream_name = tok.replace('\\040', ' ')
87 s = re.match(r'^[0-9a-f]{32}\+(\d+)(\+\S+)*$', tok)
89 blocksize = long(s.group(1))
90 self._data_locators.append(Range(tok, streamoffset, blocksize))
91 streamoffset += blocksize
94 s = re.search(r'^(\d+):(\d+):(\S+)', tok)
96 pos = long(s.group(1))
97 size = long(s.group(2))
98 name = s.group(3).replace('\\040', ' ')
99 if name not in self._files:
100 self._files[name] = StreamFileReader(self, [Range(pos, 0, size)], name)
102 filereader = self._files[name]
103 filereader.segments.append(Range(pos, filereader.size(), size))
106 raise errors.SyntaxError("Invalid manifest format")
109 return self._stream_name
115 return self._files.values()
118 n = self._data_locators[-1]
119 return n.range_start + n.range_size
124 def locators_and_ranges(self, range_start, range_size):
125 return locators_and_ranges(self._data_locators, range_start, range_size)
128 def _keepget(self, locator, num_retries=None):
129 return self._keep.get(locator, num_retries=num_retries)
132 def readfrom(self, start, size, num_retries=None):
133 return self._readfrom(start, size, num_retries=num_retries)
136 def _readfrom(self, start, size, num_retries=None):
137 """Read up to 'size' bytes from the stream, starting at 'start'"""
140 if self._keep is None:
141 self._keep = KeepClient(num_retries=self.num_retries)
143 for lr in locators_and_ranges(self._data_locators, start, size):
144 data.append(self._keepget(lr.locator, num_retries=num_retries)[lr.segment_offset:lr.segment_offset+lr.segment_size])
147 def manifest_text(self, strip=False):
148 manifest_text = [self.name().replace(' ', '\\040')]
150 for d in self._data_locators:
151 m = re.match(r'^[0-9a-f]{32}\+\d+', d.locator)
152 manifest_text.append(m.group(0))
154 manifest_text.extend([d.locator for d in self._data_locators])
155 manifest_text.extend([' '.join(["{}:{}:{}".format(seg.locator, seg.range_size, f.name.replace(' ', '\\040'))
156 for seg in f.segments])
157 for f in self._files.values()])
158 return ' '.join(manifest_text) + '\n'