# Module-level cache for the process-wide shared Keep client instance.
# It starts as None and is lazily replaced with a KeepClient by
# Keep.global_client_object() on first use.
# NOTE(review): this excerpt embeds original-file line numbers as a prefix
# on each line and omits many lines (gaps in the numbering).
23 global_client_object = None
# Parser/serializer for a Keep block locator string of the general form
#   <md5sum>[+<size>][+A<signature>@<hex timestamp>][+K<location hint>]...
# The visible code splits the string on '+', validates hex fields, and can
# re-emit the locator and its permission hint.
# NOTE(review): several interior lines are missing from this excerpt
# (numbering gaps), so comments below describe only what is visible.
29 class KeepLocator(object):
# Reference epoch; permission_hint() subtracts this from the stored
# datetime to recover an integer Unix timestamp.
30 EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0)
# One or more hex digits (full-string match via ^...$).
31 HEX_RE = re.compile(r'^[0-9a-fA-F]+$')
# Parse locator_str; first '+'-separated piece is the md5 hash, the
# remaining pieces are hints dispatched on their leading letter.
33 def __init__(self, locator_str):
37 self._perm_expiry = None
38 pieces = iter(locator_str.split('+'))
39 self.md5sum = next(pieces)
# 'A' hints carry a permission signature + expiry timestamp.
41 if hint.startswith('A'):
42 self.parse_permission_hint(hint)
# 'K' hints are location hints; stored raw rather than parsed.
43 elif hint.startswith('K'):
44 self.loc_hint = hint # FIXME
# Anything else is rejected outright.
48 raise ValueError("unrecognized hint data {}".format(hint))
# (Part of the string serializer: joins the non-empty components back
# into a '+'-separated locator.)
52 str(s) for s in [self.md5sum, self.size, self.loc_hint,
53 self.permission_hint()]
# True iff s is all-hex and its length matches size_spec: one arg means
# exact length, two args mean an inclusive [min, max] range.
56 def _is_hex_length(self, s, *size_spec):
57 if len(size_spec) == 1:
58 good_len = (len(s) == size_spec[0])
60 good_len = (size_spec[0] <= len(s) <= size_spec[1])
# NOTE(review): returns the re match object (truthy), not a bool.
61 return good_len and self.HEX_RE.match(s)
# Property factory: builds a property backed by '_<name>' whose setter
# enforces an exact-length hex string.
63 def _make_hex_prop(name, length):
64 # Build and return a new property with the given name that
65 # must be a hex string of the given length.
66 data_name = '_{}'.format(name)
68 return getattr(self, data_name)
69 def setter(self, hex_str):
70 if not self._is_hex_length(hex_str, length):
71 raise ValueError("{} must be a {}-digit hex string: {}".
72 format(name, length, hex_str))
73 setattr(self, data_name, hex_str)
74 return property(getter, setter)
# md5sum: 32 hex digits; perm_sig: 40 hex digits (signature).
76 md5sum = _make_hex_prop('md5sum', 32)
77 perm_sig = _make_hex_prop('perm_sig', 40)
# perm_expiry getter: the stored expiry as a (naive, UTC-based) datetime,
# or None when no permission hint was parsed.
80 def perm_expiry(self):
81 return self._perm_expiry
# perm_expiry setter: accepts a 1-8 digit hex Unix timestamp and stores
# it converted to a datetime via utcfromtimestamp.
84 def perm_expiry(self, value):
85 if not self._is_hex_length(value, 1, 8):
87 "permission timestamp must be a hex Unix timestamp: {}".
89 self._perm_expiry = datetime.datetime.utcfromtimestamp(int(value, 16))
# Re-serialize the permission hint as "A<sig>@<8-digit hex timestamp>".
91 def permission_hint(self):
92 data = [self.perm_sig, self.perm_expiry]
# Convert the datetime back to integer seconds since the epoch.
95 data[1] = int((data[1] - self.EPOCH_DATETIME).total_seconds())
96 return "A{}@{:08x}".format(*data)
# Split "A<sig>@<timestamp>" into its two fields; the property setters
# perform validation and raise ValueError on bad input.
98 def parse_permission_hint(self, s):
100 self.perm_sig, self.perm_expiry = s[1:].split('@', 1)
102 raise ValueError("bad permission hint {}".format(s))
# True when the permission signature has expired as of as_of_dt
# (default: now). No expiry recorded means never expired.
104 def permission_expired(self, as_of_dt=None):
105 if self.perm_expiry is None:
107 elif as_of_dt is None:
# NOTE(review): datetime.now() is naive *local* time, but perm_expiry
# was built with utcfromtimestamp (naive UTC) — comparing the two is
# skewed by the local UTC offset. Presumably should be utcnow(); confirm.
108 as_of_dt = datetime.datetime.now()
109 return self.perm_expiry <= as_of_dt
def global_client_object():
    """Return the process-wide shared KeepClient, creating it on first use.

    Lazily instantiates a single KeepClient and caches it in the
    module-level ``global_client_object`` variable so that subsequent
    calls (e.g. from Keep.get / Keep.put) reuse the same client.

    NOTE(review): not thread-safe — two threads racing through the None
    check may each construct a KeepClient; the last assignment wins.
    """
    global global_client_object
    # PEP 8: compare against the None singleton with `is`, not `==`
    # (also avoids invoking __eq__ on an existing client object).
    if global_client_object is None:
        global_client_object = KeepClient()
    return global_client_object
def get(locator, **kwargs):
    """Fetch the block named by *locator* via the shared Keep client.

    Convenience wrapper: all positional and keyword arguments are
    forwarded unchanged to KeepClient.get().
    """
    client = Keep.global_client_object()
    return client.get(locator, **kwargs)
def put(data, **kwargs):
    """Store *data* in Keep via the shared Keep client.

    Convenience wrapper: all positional and keyword arguments are
    forwarded unchanged to KeepClient.put().
    """
    client = Keep.global_client_object()
    return client.put(data, **kwargs)
# Client for reading/writing data blocks against a set of Keep storage
# servers, with a bounded in-memory read cache and threaded writes.
# NOTE(review): this excerpt is heavily elided (numbering gaps) and is
# truncated inside local_store_get at the end; comments describe only
# what is visible.
128 class KeepClient(object):
# Coordinates the writer threads: lets at most `todo` run concurrently
# and lets threads ask whether enough successes have been reported.
130 class ThreadLimiter(object):
132 Limit the number of threads running at a given time to
133 {desired successes} minus {successes reported}. When successes
134 reported == desired, wake up the remaining threads and tell
137 Should be used in a "with" block.
139 def __init__(self, todo):
# Semaphore caps concurrency at `todo`; Lock guards the done counter.
142 self._todo_lock = threading.Semaphore(todo)
143 self._done_lock = threading.Lock()
# __enter__ acquires a semaphore slot...
146 self._todo_lock.acquire()
# ...and __exit__ releases it regardless of outcome.
149 def __exit__(self, type, value, traceback):
150 self._todo_lock.release()
152 def shall_i_proceed(self):
154 Return true if the current thread should do stuff. Return
155 false if the current thread should just stop.
157 with self._done_lock:
158 return (self._done < self._todo)
160 def increment_done(self):
162 Report that the current thread was successful.
164 with self._done_lock:
# (done(): reads the success counter under the same lock.)
169 Return how many successes were reported.
171 with self._done_lock:
# One PUT attempt to a single Keep server, run on its own thread; calls
# limiter.increment_done() on a 2xx response.
174 class KeepWriterThread(threading.Thread):
176 Write a blob of data to the given Keep server. Call
177 increment_done() of the given ThreadLimiter if the write
# Keyword arguments (data, data_hash, service_root, thread_limiter)
# are stashed in self.args for run().
180 def __init__(self, **kwargs):
181 super(KeepClient.KeepWriterThread, self).__init__()
# Entering the limiter blocks until a concurrency slot is free.
185 with self.args['thread_limiter'] as limiter:
186 if not limiter.shall_i_proceed():
187 # My turn arrived, but the job has been done without
190 logging.debug("KeepWriterThread %s proceeding %s %s" %
191 (str(threading.current_thread()),
192 self.args['data_hash'],
193 self.args['service_root']))
# PUT <service_root><data_hash> with an OAuth2 bearer token header.
195 url = self.args['service_root'] + self.args['data_hash']
196 api_token = config.get('ARVADOS_API_TOKEN')
197 headers = {'Authorization': "OAuth2 %s" % api_token}
199 resp, content = h.request(url.encode('utf-8'), 'PUT',
201 body=self.args['data'])
# Legacy-server fallback: a 401 "Timestamp verification failed"
# response triggers a retry with a PGP-signed body.
202 if (resp['status'] == '401' and
203 re.match(r'Timestamp verification failed', content)):
204 body = KeepClient.sign_for_old_server(
205 self.args['data_hash'],
208 resp, content = h.request(url.encode('utf-8'), 'PUT',
# Any 2xx status counts as success.
211 if re.match(r'^2\d\d$', resp['status']):
212 logging.debug("KeepWriterThread %s succeeded %s %s" %
213 (str(threading.current_thread()),
214 self.args['data_hash'],
215 self.args['service_root']))
216 return limiter.increment_done()
217 logging.warning("Request fail: PUT %s => %s %s" %
218 (url, resp['status'], content))
# Network-level failures are logged, not raised: best-effort per
# server; overall success is judged by the ThreadLimiter count.
219 except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
220 logging.warning("Request fail: PUT %s => %s: %s" %
221 (url, type(e), str(e)))
# (KeepClient.__init__, header elided:) lazy service_roots, a lock for
# them, a cache lock, and the cache size cap.
224 self.lock = threading.Lock()
225 self.service_roots = None
226 self._cache_lock = threading.Lock()
228 # default 256 megabyte cache
229 self.cache_max = 256 * 1024 * 1024
# Return the Keep service URLs in a per-hash pseudo-random probe order,
# so all clients agree on the preferred servers for a given block.
231 def shuffled_service_roots(self, hash):
# Lazily discover servers from the API on first call.
# NOTE(review): PEP 8 — should be `is None`, not `== None`.
232 if self.service_roots == None:
235 keep_disks = arvados.api().keep_disks().list().execute()['items']
236 roots = (("http%s://%s:%d/" %
237 ('s' if f['service_ssl_flag'] else '',
# sorted(set(...)) deduplicates and gives a stable base ordering.
241 self.service_roots = sorted(set(roots))
242 logging.debug(str(self.service_roots))
246 # Build an ordering with which to query the Keep servers based on the
247 # contents of the hash.
248 # "hash" is a hex-encoded number at least 8 digits
251 # seed used to calculate the next keep server from 'pool'
252 # to be added to 'pseq'
255 # Keep servers still to be added to the ordering
256 pool = self.service_roots[:]
258 # output probe sequence
261 # iterate while there are servers left to be assigned
264 # ran out of digits in the seed
265 if len(pseq) < len(hash) / 4:
266 # the number of servers added to the probe sequence is less
267 # than the number of 4-digit slices in 'hash' so refill the
268 # seed with the last 4 digits and then append the contents
270 seed = hash[-4:] + hash
272 # refill the seed with the contents of 'hash'
275 # Take the next 8 digits (32 bytes) and interpret as an integer,
276 # then modulus with the size of the remaining pool to get the next
278 probe = int(seed[0:8], 16) % len(pool)
# NOTE(review): Python 2 print statement — looks like leftover debug
# output to stdout; should presumably be logging.debug or removed.
280 print seed[0:8], int(seed[0:8], 16), len(pool), probe
282 # Append the selected server to the probe sequence and remove it
284 pseq += [pool[probe]]
285 pool = pool[:probe] + pool[probe+1:]
287 # Remove the digits just used from the seed
289 logging.debug(str(pseq))
# One cache entry: a locator plus an Event that readers wait on until
# the content has been fetched and set().
292 class CacheSlot(object):
293 def __init__(self, locator):
294 self.locator = locator
295 self.ready = threading.Event()
302 def set(self, value):
# size(): 0 for empty/failed slots, else the content length.
307 if self.content == None:
310 return len(self.content)
# (cap_cache, header elided:) evict entries until total size fits.
313 '''Cap the cache size to self.cache_max'''
# NOTE(review): manual acquire/release with no try/finally — an
# exception here would leave _cache_lock held. A `with` block (or the
# missing lines) may handle this; confirm against the full source.
314 self._cache_lock.acquire()
# Drop slots that finished with no content (failed fetches).
# NOTE(review): relies on Python 2 filter() returning a list.
316 self._cache = filter(lambda c: not (c.ready.is_set() and c.content == None), self._cache)
317 sm = sum([slot.size() for slot in self._cache])
318 while sm > self.cache_max:
# NOTE(review): bug — loop variable `a` is unused and `slot` is the
# stale variable from line 317's comprehension, so this sums
# len(cache) copies of one slot's size; should be `a.size() for a`.
320 sm = sum([slot.size() for a in self._cache])
322 self._cache_lock.release()
324 def reserve_cache(self, locator):
325 '''Reserve a cache slot for the specified locator,
326 or return the existing slot.'''
327 self._cache_lock.acquire()
329 # Test if the locator is already in the cache
330 for i in xrange(0, len(self._cache)):
331 if self._cache[i].locator == locator:
334 # move it to the front
336 self._cache.insert(0, n)
339 # Add a new cache slot for the locator
340 n = KeepClient.CacheSlot(locator)
341 self._cache.insert(0, n)
344 self._cache_lock.release()
# Fetch one block by locator; tries the cache, the shuffled service
# roots, then any +K@ location hints, else raises NotFoundError.
346 def get(self, locator):
347 #logging.debug("Keep.get %s" % (locator))
# A comma-separated list of locators is fetched recursively and the
# blocks concatenated.
349 if re.search(r',', locator):
350 return ''.join(self.get(x) for x in locator.split(','))
# Test/dev mode: read from a local directory instead of the network.
351 if 'KEEP_LOCAL_STORE' in os.environ:
352 return KeepClient.local_store_get(locator)
# Strip '+...' hints: the cache and URLs use the bare md5 only.
353 expect_hash = re.sub(r'\+.*', '', locator)
355 slot, first = self.reserve_cache(expect_hash)
356 #logging.debug("%s %s %s" % (slot, first, expect_hash))
# Probe servers in the hash-determined order.
363 for service_root in self.shuffled_service_roots(expect_hash):
364 url = service_root + expect_hash
365 api_token = config.get('ARVADOS_API_TOKEN')
366 headers = {'Authorization': "OAuth2 %s" % api_token,
367 'Accept': 'application/octet-stream'}
368 blob = self.get_url(url, headers, expect_hash)
# Fallback: follow +K@<instance> hints to the federated cluster's
# public keep proxy (no auth header here).
374 for location_hint in re.finditer(r'\+K@([a-z0-9]+)', locator):
375 instance = location_hint.group(1)
376 url = 'http://keep.' + instance + '.arvadosapi.com/' + expect_hash
377 blob = self.get_url(url, {}, expect_hash)
389 raise arvados.errors.NotFoundError("Block not found: %s" % expect_hash)
# GET one URL and verify the payload's md5 against expect_hash;
# failures (network or checksum) are logged and treated as a miss.
391 def get_url(self, url, headers, expect_hash):
394 logging.info("Request: GET %s" % (url))
395 with timer.Timer() as t:
396 resp, content = h.request(url.encode('utf-8'), 'GET',
398 logging.info("Received %s bytes in %s msec (%s MiB/sec)" % (len(content),
400 (len(content)/(1024*1024))/t.secs))
401 if re.match(r'^2\d\d$', resp['status']):
402 m = hashlib.new('md5')
# Only return the blob if its checksum matches the locator.
405 if md5 == expect_hash:
407 logging.warning("Checksum fail: md5(%s) = %s" % (url, md5))
# Broad catch: any per-URL failure is logged and the caller moves on
# to the next candidate server.
408 except Exception as e:
409 logging.info("Request fail: GET %s => %s: %s" %
410 (url, type(e), str(e)))
# Store `data`, writing to servers in probe order until `copies`
# (default 2) writers report success; returns "<md5>+<size>".
413 def put(self, data, **kwargs):
414 if 'KEEP_LOCAL_STORE' in os.environ:
415 return KeepClient.local_store_put(data)
416 m = hashlib.new('md5')
418 data_hash = m.hexdigest()
420 want_copies = kwargs.get('copies', 2)
# copies <= 0: nothing to do (early-out branch, body elided).
421 if not (want_copies > 0):
424 thread_limiter = KeepClient.ThreadLimiter(want_copies)
# One writer thread per candidate server; the limiter stops extra
# threads once enough copies have been written.
425 for service_root in self.shuffled_service_roots(data_hash):
426 t = KeepClient.KeepWriterThread(data=data,
428 service_root=service_root,
429 thread_limiter=thread_limiter)
434 have_copies = thread_limiter.done()
435 if have_copies == want_copies:
436 return (data_hash + '+' + str(len(data)))
437 raise arvados.errors.KeepWriteError(
438 "Write fail for %s: wanted %d but wrote %d" %
439 (data_hash, want_copies, have_copies))
# Wrap `data` in a cleartext PGP-signed-message envelope carrying the
# current Unix time and the hash, for pre-token Keep servers.
442 def sign_for_old_server(data_hash, data):
443 return (("-----BEGIN PGP SIGNED MESSAGE-----\n\n\n%d %s\n-----BEGIN PGP SIGNATURE-----\n\n-----END PGP SIGNATURE-----\n" % (int(time.time()), data_hash)) + data)
# Local-store put: write to "<md5>.tmp" then rename into place so the
# final file appears atomically; returns "<md5>+<size>".
447 def local_store_put(data):
448 m = hashlib.new('md5')
451 locator = '%s+%d' % (md5, len(data))
452 with open(os.path.join(os.environ['KEEP_LOCAL_STORE'], md5 + '.tmp'), 'w') as f:
454 os.rename(os.path.join(os.environ['KEEP_LOCAL_STORE'], md5 + '.tmp'),
455 os.path.join(os.environ['KEEP_LOCAL_STORE'], md5))
# Local-store get: validate the locator's leading 32+ hex digits,
# special-case the empty-block locator, then read the named file.
459 def local_store_get(locator):
460 r = re.search('^([0-9a-f]{32,})', locator)
462 raise arvados.errors.NotFoundError(
463 "Invalid data locator: '%s'" % locator)
464 if r.group(0) == config.EMPTY_BLOCK_LOCATOR.split('+')[0]:
466 with open(os.path.join(os.environ['KEEP_LOCAL_STORE'], r.group(0)), 'r') as f: