+ def deserialize(self, content):
+ # This is a very slightly modified version of the parent class'
+ # implementation. Copyright (c) 2010 Google.
+ # Decode the raw response bytes, then parse as JSON while preserving
+ # the server's key order (object_pairs_hook=OrderedDict) -- this is
+ # the modification relative to the parent class.
+ content = content.decode('utf-8')
+ body = json.loads(content, object_pairs_hook=collections.OrderedDict)
+ # If this model wraps payloads in a top-level 'data' envelope,
+ # unwrap it so callers receive the payload itself.
+ if self._data_wrapper and isinstance(body, dict) and 'data' in body:
+ body = body['data']
+ return body
+
+
+def _intercept_http_request(self, uri, **kwargs):
+    # Wrapper around the HTTP client's request method: enforces the API
+    # server's published request-size limit, injects Arvados auth headers,
+    # and sets up retries for idempotent methods.  NOTE(review): the retry
+    # loop body continues beyond this excerpt.
+    if (self.max_request_size and
+        kwargs.get('body') and
+        self.max_request_size < len(kwargs['body'])):
+        raise apiclient_errors.MediaUploadSizeError("Request size %i bytes exceeds published limit of %i bytes" % (len(kwargs['body']), self.max_request_size))
+
+    if 'headers' not in kwargs:
+        kwargs['headers'] = {}
+
+    # Flag requests originating outside the cluster when the client is
+    # configured as external, so the API server can distinguish them.
+    if config.get("ARVADOS_EXTERNAL_CLIENT", "") == "true":
+        kwargs['headers']['X-External-Client'] = '1'
+
+    kwargs['headers']['Authorization'] = 'OAuth2 %s' % self.arvados_api_token
+
+    # Only idempotent HTTP methods are safe to retry automatically;
+    # everything else (e.g. POST) gets zero retries.
+    retryable = kwargs.get('method', 'GET') in [
+        'DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT']
+    retry_count = self._retry_count if retryable else 0
+
+    # For non-retryable requests after a long idle period, drop all cached
+    # connections up front: a stale keepalive connection would make the
+    # (unretried) request fail spuriously.
+    if (not retryable and
+        time.time() - self._last_request_time > self._max_keepalive_idle):
+        # High probability of failure due to connection atrophy. Make
+        # sure this request [re]opens a new connection by closing and
+        # forgetting all cached connections first.
+        for conn in self.connections.values():
+            conn.close()
+        self.connections.clear()
+
+    delay = self._retry_delay_initial
+    for _ in range(retry_count):
+        self._last_request_time = time.time()