[arvados.git] sdk/python/arvados/stream.py
import collections
import hashlib
import os
import re
import threading
import functools
import copy

from ._ranges import locators_and_ranges, Range
from .arvfile import StreamFileReader
from arvados.retry import retry_method
from .keep import *
from . import config
from . import errors
from ._normalize_stream import normalize_stream

class StreamReader(object):
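    """Parse one stream of a Keep manifest.

    A stream is a sequence of manifest tokens: the stream name, one or
    more Keep block locators ("<md5 hex>+<size>[+<hints>]"), and one or
    more file segment tokens ("<position>:<size>:<filename>").  The
    block locators are kept as a list of Ranges, and each named file is
    exposed as a StreamFileReader.
    """
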
    def __init__(self, tokens, keep=None, debug=False, _empty=False,
                 num_retries=0):
        self._stream_name = None
        self._data_locators = []
        self._files = collections.OrderedDict()
        self._keep = keep
        self.num_retries = num_retries

        streamoffset = 0L

        # parse stream
        for tok in tokens:
            if debug:
                print 'tok', tok
            if self._stream_name is None:
                # The first token is the stream name; '\040' escapes a space.
                self._stream_name = tok.replace('\\040', ' ')
                continue

            s = re.match(r'^[0-9a-f]{32}\+(\d+)(\+\S+)*$', tok)
            if s:
                # Block locator token ("<md5>+<size>[+<hints>]"): record its
                # offset within the stream so byte ranges can be mapped back
                # to blocks.
                blocksize = long(s.group(1))
                self._data_locators.append(Range(tok, streamoffset, blocksize))
                streamoffset += blocksize
                continue

            s = re.search(r'^(\d+):(\d+):(\S+)', tok)
            if s:
                # File segment token ("<position>:<size>:<name>"): a file may
                # span several segments, so append to any existing reader.
                pos = long(s.group(1))
                size = long(s.group(2))
                name = s.group(3).replace('\\040', ' ')
                if name not in self._files:
                    self._files[name] = StreamFileReader(self, [Range(pos, 0, size)], name)
                else:
                    filereader = self._files[name]
                    filereader.segments.append(Range(pos, filereader.size(), size))
                continue

            raise errors.SyntaxError("Invalid manifest format")

    def name(self):
        return self._stream_name

    def files(self):
        return self._files

    def all_files(self):
        return self._files.values()

    def _size(self):
        # The stream's size is the end offset of its last data block.
        n = self._data_locators[-1]
        return n.range_start + n.range_size

    def size(self):
        return self._size()

    def locators_and_ranges(self, range_start, range_size):
        return locators_and_ranges(self._data_locators, range_start, range_size)

    @retry_method
    def _keepget(self, locator, num_retries=None):
        return self._keep.get(locator, num_retries=num_retries)

    @retry_method
    def readfrom(self, start, size, num_retries=None):
        return self._readfrom(start, size, num_retries=num_retries)

    @retry_method
    def _readfrom(self, start, size, num_retries=None):
        """Read up to 'size' bytes from the stream, starting at 'start'"""
        if size == 0:
            return ''
        if self._keep is None:
            self._keep = KeepClient(num_retries=self.num_retries)
        data = []
        for lr in locators_and_ranges(self._data_locators, start, size):
            block = self._keepget(lr.locator, num_retries=num_retries)
            data.append(block[lr.segment_offset:lr.segment_offset + lr.segment_size])
        return ''.join(data)

    def manifest_text(self, strip=False):
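        """Return the manifest text for this stream.

        If strip is true, each block locator is reduced to its bare
        "<md5 hex>+<size>" form, dropping any trailing hints (such as
        permission signatures).
        """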
        manifest_text = [self.name().replace(' ', '\\040')]
        if strip:
            for d in self._data_locators:
                m = re.match(r'^[0-9a-f]{32}\+\d+', d.locator)
                manifest_text.append(m.group(0))
        else:
            manifest_text.extend([d.locator for d in self._data_locators])
        manifest_text.extend([' '.join(["{}:{}:{}".format(seg.locator, seg.range_size, f.name.replace(' ', '\\040'))
                                        for seg in f.segments])
                              for f in self._files.values()])
        return ' '.join(manifest_text) + '\n'
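
# Example usage (an illustrative sketch, not part of this module): read back a
# whole file from a single-stream manifest.  The block locator and file name
# below are made up, and a working Keep configuration (ARVADOS_API_HOST and
# ARVADOS_API_TOKEN) is assumed for the KeepClient.
#
#     tokens = ". 930625b054ce894ac40596c3f5a0d947+33 0:33:md5sum.txt".split()
#     reader = StreamReader(tokens, keep=KeepClient())
#     print reader.files().keys()               # ['md5sum.txt']
#     data = reader.readfrom(0, reader.size())  # fetches the block from Keep
#     print reader.manifest_text(strip=True)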