projects
/
arvados.git
/ blobdiff
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
10111: Merge branch 'master' into 10111-collection-labels
[arvados.git]
/
sdk
/
python
/
arvados
/
api.py
diff --git a/sdk/python/arvados/api.py b/sdk/python/arvados/api.py
index 543725b516beada820f9b3e001d1267024436b02..6581a8e9acb59bbd81eb57997550d82543887b53 100644
(file)
--- a/sdk/python/arvados/api.py
+++ b/sdk/python/arvados/api.py
@@ -1,5 +1,9 @@
+from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
import collections
import collections
-import httplib
+import http.client
import httplib2
import json
import logging
import httplib2
import json
import logging
@@ -12,10 +16,10 @@
import types
import apiclient
from apiclient import discovery as apiclient_discovery
from apiclient import errors as apiclient_errors
import apiclient
from apiclient import discovery as apiclient_discovery
from apiclient import errors as apiclient_errors
-import config
-import errors
-import util
-import cache
+from . import config
+from . import errors
+from . import util
+from . import cache
_logger = logging.getLogger('arvados.api')
_logger = logging.getLogger('arvados.api')
@@ -67,7 +71,7 @@
def _intercept_http_request(self, uri, method="GET", **kwargs):
# High probability of failure due to connection atrophy. Make
# sure this request [re]opens a new connection by closing and
# forgetting all cached connections first.
# High probability of failure due to connection atrophy. Make
# sure this request [re]opens a new connection by closing and
# forgetting all cached connections first.
-        for conn in self.connections.itervalues():
+        for conn in self.connections.values():
conn.close()
self.connections.clear()
conn.close()
self.connections.clear()
@@ -76,7 +80,7 @@
def _intercept_http_request(self, uri, method="GET", **kwargs):
self._last_request_time = time.time()
try:
return self.orig_http_request(uri, method, **kwargs)
self._last_request_time = time.time()
try:
return self.orig_http_request(uri, method, **kwargs)
-        except httplib.HTTPException:
+        except http.client.HTTPException:
_logger.debug("Retrying API request in %d s after HTTP error",
delay, exc_info=True)
except socket.error:
_logger.debug("Retrying API request in %d s after HTTP error",
delay, exc_info=True)
except socket.error:
@@ -87,7 +91,7 @@
def _intercept_http_request(self, uri, method="GET", **kwargs):
# httplib2 reopens connections when needed.
_logger.debug("Retrying API request in %d s after socket error",
delay, exc_info=True)
# httplib2 reopens connections when needed.
_logger.debug("Retrying API request in %d s after socket error",
delay, exc_info=True)
-        for conn in self.connections.itervalues():
+        for conn in self.connections.values():
conn.close()
time.sleep(delay)
delay = delay * self._retry_delay_backoff
conn.close()
time.sleep(delay)
delay = delay * self._retry_delay_backoff
@@ -113,6 +117,7 @@
_cast_orig = apiclient_discovery._cast
def _cast_objects_too(value, schema_type):
global _cast_orig
if (type(value) != type('') and
def _cast_objects_too(value, schema_type):
global _cast_orig
if (type(value) != type('') and
+ type(value) != type(b'') and
(schema_type == 'object' or schema_type == 'array')):
return json.dumps(value)
else:
(schema_type == 'object' or schema_type == 'array')):
return json.dumps(value)
else: