projects
/
arvados.git
/ commitdiff
summary
|
shortlog
|
log
|
commit
| commitdiff |
tree
raw
|
patch
|
inline
| side by side (parent:
5e46c19
)
11308: Futurize stage2.
author
Tom Clegg <tom@curoverse.com>
Sat, 1 Apr 2017 05:41:10 +0000
(
01:41
-0400)
committer
Tom Clegg <tom@curoverse.com>
Sat, 1 Apr 2017 21:43:54 +0000
(17:43 -0400)
38 files changed:
sdk/python/arvados/__init__.py
patch
|
blob
|
history
sdk/python/arvados/_ranges.py
patch
|
blob
|
history
sdk/python/arvados/api.py
patch
|
blob
|
history
sdk/python/arvados/arvfile.py
patch
|
blob
|
history
sdk/python/arvados/cache.py
patch
|
blob
|
history
sdk/python/arvados/collection.py
patch
|
blob
|
history
sdk/python/arvados/commands/arv_copy.py
patch
|
blob
|
history
sdk/python/arvados/commands/keepdocker.py
patch
|
blob
|
history
sdk/python/arvados/commands/ls.py
patch
|
blob
|
history
sdk/python/arvados/commands/migrate19.py
patch
|
blob
|
history
sdk/python/arvados/commands/put.py
patch
|
blob
|
history
sdk/python/arvados/commands/run.py
patch
|
blob
|
history
sdk/python/arvados/crunch.py
patch
|
blob
|
history
sdk/python/arvados/errors.py
patch
|
blob
|
history
sdk/python/arvados/events.py
patch
|
blob
|
history
sdk/python/arvados/keep.py
patch
|
blob
|
history
sdk/python/arvados/retry.py
patch
|
blob
|
history
sdk/python/arvados/safeapi.py
patch
|
blob
|
history
sdk/python/arvados/stream.py
patch
|
blob
|
history
sdk/python/arvados/timer.py
patch
|
blob
|
history
sdk/python/arvados/util.py
patch
|
blob
|
history
sdk/python/tests/arvados_testutil.py
patch
|
blob
|
history
sdk/python/tests/keepstub.py
patch
|
blob
|
history
sdk/python/tests/manifest_examples.py
patch
|
blob
|
history
sdk/python/tests/performance/test_a_sample.py
patch
|
blob
|
history
sdk/python/tests/run_test_server.py
patch
|
blob
|
history
sdk/python/tests/test_api.py
patch
|
blob
|
history
sdk/python/tests/test_arv_ls.py
patch
|
blob
|
history
sdk/python/tests/test_arv_put.py
patch
|
blob
|
history
sdk/python/tests/test_arvfile.py
patch
|
blob
|
history
sdk/python/tests/test_cache.py
patch
|
blob
|
history
sdk/python/tests/test_collections.py
patch
|
blob
|
history
sdk/python/tests/test_events.py
patch
|
blob
|
history
sdk/python/tests/test_keep_client.py
patch
|
blob
|
history
sdk/python/tests/test_keep_locator.py
patch
|
blob
|
history
sdk/python/tests/test_retry.py
patch
|
blob
|
history
sdk/python/tests/test_retry_job_helpers.py
patch
|
blob
|
history
sdk/python/tests/test_stream.py
patch
|
blob
|
history
diff --git
a/sdk/python/arvados/__init__.py
b/sdk/python/arvados/__init__.py
index 36d54e5cf6b36ee23ded14b260883dab4bf6b09d..5acb961dde63fcc43cc2a084219befa9358b19b0 100644
(file)
--- a/
sdk/python/arvados/__init__.py
+++ b/
sdk/python/arvados/__init__.py
@@
-1,7
+1,10
@@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import print_function
from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
import gflags
import gflags
-import httplib
+import http.client
import httplib2
import logging
import os
import httplib2
import logging
import os
@@
-108,7
+111,7
@@
class JobTask(object):
def __init__(self, parameters=dict(), runtime_constraints=dict()):
print("init jobtask %s %s" % (parameters, runtime_constraints))
def __init__(self, parameters=dict(), runtime_constraints=dict()):
print("init jobtask %s %s" % (parameters, runtime_constraints))
-class job_setup:
+class job_setup(object):
@staticmethod
def one_task_per_input_file(if_sequence=0, and_end_task=True, input_as_path=False, api_client=None):
if if_sequence != current_task()['sequence']:
@staticmethod
def one_task_per_input_file(if_sequence=0, and_end_task=True, input_as_path=False, api_client=None):
if if_sequence != current_task()['sequence']:
diff --git
a/sdk/python/arvados/_ranges.py
b/sdk/python/arvados/_ranges.py
index e0fe61509f9373d5e43bc5a6cd4d150ccea9805d..5c8b00fc9d7e6a19ef66a04eb32500225651030d 100644
(file)
--- a/
sdk/python/arvados/_ranges.py
+++ b/
sdk/python/arvados/_ranges.py
@@
-1,3
+1,6
@@
+from __future__ import division
+from past.utils import old_div
+from builtins import object
import logging
_logger = logging.getLogger('arvados.ranges')
import logging
_logger = logging.getLogger('arvados.ranges')
@@
-31,7
+34,7
@@
def first_block(data_locators, range_start):
hi = len(data_locators)
lo = 0
hi = len(data_locators)
lo = 0
- i = int((hi + lo) / 2)
+ i = int(old_div((hi + lo), 2))
block_size = data_locators[i].range_size
block_start = data_locators[i].range_start
block_end = block_start + block_size
block_size = data_locators[i].range_size
block_start = data_locators[i].range_start
block_end = block_start + block_size
@@
-47,7
+50,7
@@
def first_block(data_locators, range_start):
lo = i
else:
hi = i
lo = i
else:
hi = i
- i = int((hi + lo) / 2)
+ i = int(old_div((hi + lo), 2))
block_size = data_locators[i].range_size
block_start = data_locators[i].range_start
block_end = block_start + block_size
block_size = data_locators[i].range_size
block_start = data_locators[i].range_start
block_end = block_start + block_size
diff --git
a/sdk/python/arvados/api.py
b/sdk/python/arvados/api.py
index 65aadbd6b725922f4b540fa8ed3059e42f00810d..59a73b45e558d5433776eed89a8f198345fd65b2 100644
(file)
--- a/
sdk/python/arvados/api.py
+++ b/
sdk/python/arvados/api.py
@@
-1,6
+1,9
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
import collections
import collections
-import httplib
+import http.client
import httplib2
import json
import logging
import httplib2
import json
import logging
@@
-68,7
+71,7
@@
def _intercept_http_request(self, uri, **kwargs):
# High probability of failure due to connection atrophy. Make
# sure this request [re]opens a new connection by closing and
# forgetting all cached connections first.
# High probability of failure due to connection atrophy. Make
# sure this request [re]opens a new connection by closing and
# forgetting all cached connections first.
- for conn in self.connections.itervalues():
+ for conn in self.connections.values():
conn.close()
self.connections.clear()
conn.close()
self.connections.clear()
@@
-77,7
+80,7
@@
def _intercept_http_request(self, uri, **kwargs):
self._last_request_time = time.time()
try:
return self.orig_http_request(uri, **kwargs)
self._last_request_time = time.time()
try:
return self.orig_http_request(uri, **kwargs)
- except httplib.HTTPException:
+ except http.client.HTTPException:
_logger.debug("Retrying API request in %d s after HTTP error",
delay, exc_info=True)
except socket.error:
_logger.debug("Retrying API request in %d s after HTTP error",
delay, exc_info=True)
except socket.error:
@@
-88,7
+91,7
@@
def _intercept_http_request(self, uri, **kwargs):
# httplib2 reopens connections when needed.
_logger.debug("Retrying API request in %d s after socket error",
delay, exc_info=True)
# httplib2 reopens connections when needed.
_logger.debug("Retrying API request in %d s after socket error",
delay, exc_info=True)
- for conn in self.connections.itervalues():
+ for conn in self.connections.values():
conn.close()
time.sleep(delay)
delay = delay * self._retry_delay_backoff
conn.close()
time.sleep(delay)
delay = delay * self._retry_delay_backoff
diff --git
a/sdk/python/arvados/arvfile.py
b/sdk/python/arvados/arvfile.py
index 2c44c349e00de03605367c60b87711a13b39c88e..ab1c64532a130d2880043c518a2c9f9d14b7fc05 100644
(file)
--- a/
sdk/python/arvados/arvfile.py
+++ b/
sdk/python/arvados/arvfile.py
@@
-1,4
+1,10
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from past.utils import old_div
+from builtins import object
import functools
import os
import zlib
import functools
import os
import zlib
@@
-6,7
+12,7
@@
import bz2
from . import config
import hashlib
import threading
from . import config
import hashlib
import threading
-import Queue
+import queue
import copy
import errno
import re
import copy
import errno
import re
@@
-508,10
+514,10
@@
class _BlockManager(object):
# blocks pending. If they are full 64 MiB blocks, that means up to
# 256 MiB of internal buffering, which is the same size as the
# default download block cache in KeepClient.
# blocks pending. If they are full 64 MiB blocks, that means up to
# 256 MiB of internal buffering, which is the same size as the
# default download block cache in KeepClient.
- self._put_queue = Queue.Queue(maxsize=2)
+ self._put_queue = queue.Queue(maxsize=2)
self._put_threads = []
self._put_threads = []
- for i in xrange(0, self.num_put_threads):
+ for i in range(0, self.num_put_threads):
thread = threading.Thread(target=self._commit_bufferblock_worker)
self._put_threads.append(thread)
thread.daemon = True
thread = threading.Thread(target=self._commit_bufferblock_worker)
self._put_threads.append(thread)
thread.daemon = True
@@
-531,9
+537,9
@@
class _BlockManager(object):
@synchronized
def start_get_threads(self):
if self._prefetch_threads is None:
@synchronized
def start_get_threads(self):
if self._prefetch_threads is None:
- self._prefetch_queue = Queue.Queue()
+ self._prefetch_queue = queue.Queue()
self._prefetch_threads = []
self._prefetch_threads = []
- for i in xrange(0, self.num_get_threads):
+ for i in range(0, self.num_get_threads):
thread = threading.Thread(target=self._block_prefetch_worker)
self._prefetch_threads.append(thread)
thread.daemon = True
thread = threading.Thread(target=self._block_prefetch_worker)
self._prefetch_threads.append(thread)
thread.daemon = True
@@
-579,7
+585,7
@@
class _BlockManager(object):
# A WRITABLE block with its owner.closed() implies that it's
# size is <= KEEP_BLOCK_SIZE/2.
try:
# A WRITABLE block with its owner.closed() implies that it's
# size is <= KEEP_BLOCK_SIZE/2.
try:
- small_blocks = [b for b in self._bufferblocks.values() if b.state() == _BufferBlock.WRITABLE and b.owner.closed()]
+ small_blocks = [b for b in list(self._bufferblocks.values()) if b.state() == _BufferBlock.WRITABLE and b.owner.closed()]
except AttributeError:
# Writable blocks without owner shouldn't exist.
raise UnownedBlockError()
except AttributeError:
# Writable blocks without owner shouldn't exist.
raise UnownedBlockError()
@@
-692,7
+698,7
@@
class _BlockManager(object):
self.repack_small_blocks(force=True, sync=True)
with self.lock:
self.repack_small_blocks(force=True, sync=True)
with self.lock:
- items = self._bufferblocks.items()
+ items = list(self._bufferblocks.items())
for k,v in items:
if v.state() != _BufferBlock.COMMITTED and v.owner:
for k,v in items:
if v.state() != _BufferBlock.COMMITTED and v.owner:
@@
-824,7
+830,7
@@
class ArvadosFile(object):
with self.lock:
if len(self._segments) != len(othersegs):
return False
with self.lock:
if len(self._segments) != len(othersegs):
return False
- for i in xrange(0, len(othersegs)):
+ for i in range(0, len(othersegs)):
seg1 = self._segments[i]
seg2 = othersegs[i]
loc1 = seg1.locator
seg1 = self._segments[i]
seg2 = othersegs[i]
loc1 = seg1.locator
@@
-884,7
+890,7
@@
class ArvadosFile(object):
"""
self._writers.remove(writer)
"""
self._writers.remove(writer)
- if flush or self.size() > config.KEEP_BLOCK_SIZE / 2:
+ if flush or self.size() > old_div(config.KEEP_BLOCK_SIZE, 2):
# File writer closed, not small enough for repacking
self.flush()
elif self.closed():
# File writer closed, not small enough for repacking
self.flush()
elif self.closed():
diff --git
a/sdk/python/arvados/cache.py
b/sdk/python/arvados/cache.py
index ac6d18463c540ad2af84d3b76a9db2129cfa333e..ee1c51fdae324c7c7820ce37db86949554a573dc 100644
(file)
--- a/
sdk/python/arvados/cache.py
+++ b/
sdk/python/arvados/cache.py
@@
-1,3
+1,4
@@
+from builtins import object
import errno
import md5
import os
import errno
import md5
import os
diff --git
a/sdk/python/arvados/collection.py
b/sdk/python/arvados/collection.py
index 1a427814cf4d5bc13ffbeca75f7c22c87134962c..0d88084340dbe227d81bef0a2537618ed8fd66c2 100644
(file)
--- a/
sdk/python/arvados/collection.py
+++ b/
sdk/python/arvados/collection.py
@@
-1,4
+1,7
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from builtins import str
+from past.builtins import basestring
+from builtins import object
import functools
import logging
import os
import functools
import logging
import os
@@
-419,7
+422,7
@@
class ResumableCollectionWriter(CollectionWriter):
return writer
def check_dependencies(self):
return writer
def check_dependencies(self):
- for path, orig_stat in self._dependencies.items():
+ for path, orig_stat in list(self._dependencies.items()):
if not S_ISREG(orig_stat[ST_MODE]):
raise errors.StaleWriterStateError("{} not file".format(path))
try:
if not S_ISREG(orig_stat[ST_MODE]):
raise errors.StaleWriterStateError("{} not file".format(path))
try:
@@
-673,7
+676,7
@@
class RichCollectionBase(CollectionBase):
if value == self._committed:
return
if value:
if value == self._committed:
return
if value:
- for k,v in self._items.items():
+ for k,v in list(self._items.items()):
v.set_committed(True)
self._committed = True
else:
v.set_committed(True)
self._committed = True
else:
@@
-684,7
+687,7
@@
class RichCollectionBase(CollectionBase):
@synchronized
def __iter__(self):
"""Iterate over names of files and collections contained in this collection."""
@synchronized
def __iter__(self):
"""Iterate over names of files and collections contained in this collection."""
- return iter(self._items.keys())
+ return iter(list(self._items.keys()))
@synchronized
def __getitem__(self, k):
@synchronized
def __getitem__(self, k):
@@
-716,17
+719,17
@@
class RichCollectionBase(CollectionBase):
@synchronized
def keys(self):
"""Get a list of names of files and collections directly contained in this collection."""
@synchronized
def keys(self):
"""Get a list of names of files and collections directly contained in this collection."""
- return self._items.keys()
+ return list(self._items.keys())
@synchronized
def values(self):
"""Get a list of files and collection objects directly contained in this collection."""
@synchronized
def values(self):
"""Get a list of files and collection objects directly contained in this collection."""
- return self._items.values()
+ return list(self._items.values())
@synchronized
def items(self):
"""Get a list of (name, object) tuples directly contained in this collection."""
@synchronized
def items(self):
"""Get a list of (name, object) tuples directly contained in this collection."""
- return self._items.items()
+ return list(self._items.items())
def exists(self, path):
"""Test if there is a file or collection at `path`."""
def exists(self, path):
"""Test if there is a file or collection at `path`."""
@@
-759,7
+762,7
@@
class RichCollectionBase(CollectionBase):
item.remove(pathcomponents[1])
def _clonefrom(self, source):
item.remove(pathcomponents[1])
def _clonefrom(self, source):
- for k,v in source.items():
+ for k,v in list(source.items()):
self._items[k] = v.clone(self, k)
def clone(self):
self._items[k] = v.clone(self, k)
def clone(self):
@@
-1117,7
+1120,7
@@
class RichCollectionBase(CollectionBase):
@synchronized
def flush(self):
"""Flush bufferblocks to Keep."""
@synchronized
def flush(self):
"""Flush bufferblocks to Keep."""
- for e in self.values():
+ for e in list(self.values()):
e.flush()
e.flush()
@@
-1584,7
+1587,7
@@
class Collection(RichCollectionBase):
if state == BLOCKS:
block_locator = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', tok)
if block_locator:
if state == BLOCKS:
block_locator = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', tok)
if block_locator:
- blocksize = long(block_locator.group(1))
+ blocksize = int(block_locator.group(1))
blocks.append(Range(tok, streamoffset, blocksize, 0))
streamoffset += blocksize
else:
blocks.append(Range(tok, streamoffset, blocksize, 0))
streamoffset += blocksize
else:
@@
-1593,8
+1596,8
@@
class Collection(RichCollectionBase):
if state == SEGMENTS:
file_segment = re.search(r'^(\d+):(\d+):(\S+)', tok)
if file_segment:
if state == SEGMENTS:
file_segment = re.search(r'^(\d+):(\d+):(\S+)', tok)
if file_segment:
- pos = long(file_segment.group(1))
- size = long(file_segment.group(2))
+ pos = int(file_segment.group(1))
+ size = int(file_segment.group(2))
name = file_segment.group(3).replace('\\040', ' ')
filepath = os.path.join(stream_name, name)
afile = self.find_or_create(filepath, FILE)
name = file_segment.group(3).replace('\\040', ' ')
filepath = os.path.join(stream_name, name)
afile = self.find_or_create(filepath, FILE)
diff --git
a/sdk/python/arvados/commands/arv_copy.py
b/sdk/python/arvados/commands/arv_copy.py
index 5c5192860ccd0ed6b079c98d76b164c2ed3800a6..c5d74efe550a152a1e1f96e97d440084cbebd63f 100755
(executable)
--- a/
sdk/python/arvados/commands/arv_copy.py
+++ b/
sdk/python/arvados/commands/arv_copy.py
@@
-16,6
+16,12
@@
# instances src and dst. If either of these files is not found,
# arv-copy will issue an error.
# instances src and dst. If either of these files is not found,
# arv-copy will issue an error.
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from past.builtins import basestring
+from builtins import object
+from past.utils import old_div
import argparse
import contextlib
import getpass
import argparse
import contextlib
import getpass
@@
-25,7
+31,7
@@
import shutil
import sys
import logging
import tempfile
import sys
import logging
import tempfile
-import urlparse
+import urllib.parse
import arvados
import arvados.config
import arvados
import arvados.config
@@
-152,7
+158,7
@@
def main():
abort("cannot copy object {} of type {}".format(args.object_uuid, t))
# Clean up any outstanding temp git repositories.
abort("cannot copy object {} of type {}".format(args.object_uuid, t))
# Clean up any outstanding temp git repositories.
- for d in local_repo_dir.values():
+ for d in list(local_repo_dir.values()):
shutil.rmtree(d, ignore_errors=True)
# If no exception was thrown and the response does not have an
shutil.rmtree(d, ignore_errors=True)
# If no exception was thrown and the response does not have an
@@
-344,7
+350,7
@@
def migrate_components_filters(template_components, dst_git_repo):
be None if that is not known.
"""
errors = []
be None if that is not known.
"""
errors = []
- for cname, cspec in template_components.iteritems():
+ for cname, cspec in template_components.items():
def add_error(errmsg):
errors.append("{}: {}".format(cname, errmsg))
if not isinstance(cspec, dict):
def add_error(errmsg):
errors.append("{}: {}".format(cname, errmsg))
if not isinstance(cspec, dict):
@@
-553,7
+559,7
@@
def migrate_jobspec(jobspec, src, dst, dst_repo, args):
# names. The return value is undefined.
#
def copy_git_repos(p, src, dst, dst_repo, args):
# names. The return value is undefined.
#
def copy_git_repos(p, src, dst, dst_repo, args):
- for component in p['components'].itervalues():
+ for component in p['components'].values():
migrate_jobspec(component, src, dst, dst_repo, args)
if 'job' in component:
migrate_jobspec(component['job'], src, dst, dst_repo, args)
migrate_jobspec(component, src, dst, dst_repo, args)
if 'job' in component:
migrate_jobspec(component['job'], src, dst, dst_repo, args)
@@
-774,8
+780,8
@@
def select_git_url(api, repo_name, retries, allow_insecure_http, allow_insecure_
git_url = None
for url in priority:
if url.startswith("http"):
git_url = None
for url in priority:
if url.startswith("http"):
- u = urlparse.urlsplit(url)
- baseurl = urlparse.urlunsplit((u.scheme, u.netloc, "", "", ""))
+ u = urllib.parse.urlsplit(url)
+ baseurl = urllib.parse.urlunsplit((u.scheme, u.netloc, "", "", ""))
git_config = ["-c", "credential.%s/.username=none" % baseurl,
"-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl]
else:
git_config = ["-c", "credential.%s/.username=none" % baseurl,
"-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl]
else:
@@
-859,7
+865,7
@@
def copy_docker_images(pipeline, src, dst, args):
runtime_constraints field from src to dst."""
logger.debug('copy_docker_images: {}'.format(pipeline['uuid']))
runtime_constraints field from src to dst."""
logger.debug('copy_docker_images: {}'.format(pipeline['uuid']))
- for c_name, c_info in pipeline['components'].iteritems():
+ for c_name, c_info in pipeline['components'].items():
if ('runtime_constraints' in c_info and
'docker_image' in c_info['runtime_constraints']):
copy_docker_image(
if ('runtime_constraints' in c_info and
'docker_image' in c_info['runtime_constraints']):
copy_docker_image(
@@
-948,7
+954,7
@@
def human_progress(obj_uuid, bytes_written, bytes_expected):
return "\r{}: {}M / {}M {:.1%} ".format(
obj_uuid,
bytes_written >> 20, bytes_expected >> 20,
return "\r{}: {}M / {}M {:.1%} ".format(
obj_uuid,
bytes_written >> 20, bytes_expected >> 20,
- float(bytes_written) / bytes_expected)
+ old_div(float(bytes_written), bytes_expected))
else:
return "\r{}: {} ".format(obj_uuid, bytes_written)
else:
return "\r{}: {} ".format(obj_uuid, bytes_written)
diff --git
a/sdk/python/arvados/commands/keepdocker.py
b/sdk/python/arvados/commands/keepdocker.py
index 57832483236fe5e404e9c64b7d3158336e88cd2d..0c491389ab6bc438b6ccfc48903430ef528a8049 100644
(file)
--- a/
sdk/python/arvados/commands/keepdocker.py
+++ b/
sdk/python/arvados/commands/keepdocker.py
@@
-1,5
+1,6
@@
#!/usr/bin/env python
#!/usr/bin/env python
+from builtins import next
import argparse
import collections
import datetime
import argparse
import collections
import datetime
@@
-315,7
+316,7
@@
def list_images_in_arv(api_client, num_retries, image_name=None, image_tag=None)
# and add image listings for them, retaining the API server preference
# sorting.
images_start_size = len(images)
# and add image listings for them, retaining the API server preference
# sorting.
images_start_size = len(images)
- for collection_uuid, link in hash_link_map.iteritems():
+ for collection_uuid, link in hash_link_map.items():
if not seen_image_names[collection_uuid]:
images.append(_new_image_listing(link, link['name']))
if len(images) > images_start_size:
if not seen_image_names[collection_uuid]:
images.append(_new_image_listing(link, link['name']))
if len(images) > images_start_size:
diff --git
a/sdk/python/arvados/commands/ls.py
b/sdk/python/arvados/commands/ls.py
index a2f2e542754f7e2e44edbd5673cf36d2c5d130af..ea93eff9d8c8ec681e94843a13ef38b877d19233 100755
(executable)
--- a/
sdk/python/arvados/commands/ls.py
+++ b/
sdk/python/arvados/commands/ls.py
@@
-1,7
+1,9
@@
#!/usr/bin/env python
from __future__ import print_function
#!/usr/bin/env python
from __future__ import print_function
+from __future__ import division
+from past.utils import old_div
import argparse
import sys
import argparse
import sys
@@
-26,7
+28,7
@@
def parse_args(args):
return parser.parse_args(args)
def size_formatter(coll_file):
return parser.parse_args(args)
def size_formatter(coll_file):
- return "{:>10}".format((coll_file.size() + 1023) / 1024)
+ return "{:>10}".format(old_div((coll_file.size() + 1023), 1024))
def name_formatter(coll_file):
return "{}/{}".format(coll_file.stream_name(), coll_file.name)
def name_formatter(coll_file):
return "{}/{}".format(coll_file.stream_name(), coll_file.name)
diff --git
a/sdk/python/arvados/commands/migrate19.py
b/sdk/python/arvados/commands/migrate19.py
index 802744ba6872192811a1fab31ffcafc8f66faf9b..349e57b11e11a0268b2be6ca509801f5b6a56aef 100644
(file)
--- a/
sdk/python/arvados/commands/migrate19.py
+++ b/
sdk/python/arvados/commands/migrate19.py
@@
-1,4
+1,6
@@
from __future__ import print_function
from __future__ import print_function
+from __future__ import division
+from past.utils import old_div
import argparse
import time
import sys
import argparse
import time
import sys
@@
-122,8
+124,8
@@
def main(arguments=None):
if pdh not in already_migrated and (only_migrate is None or pdh in only_migrate):
need_migrate[pdh] = img
with CollectionReader(i["manifest_text"]) as c:
if pdh not in already_migrated and (only_migrate is None or pdh in only_migrate):
need_migrate[pdh] = img
with CollectionReader(i["manifest_text"]) as c:
- if c.values()[0].size() > biggest:
- biggest = c.values()[0].size()
+ if list(c.values())[0].size() > biggest:
+ biggest = list(c.values())[0].size()
if args.print_unmigrated:
only_migrate = set()
if args.print_unmigrated:
only_migrate = set()
@@
-134,7
+136,7
@@
def main(arguments=None):
logger.info("Already migrated %i images", len(already_migrated))
logger.info("Need to migrate %i images", len(need_migrate))
logger.info("Using tempdir %s", tempfile.gettempdir())
logger.info("Already migrated %i images", len(already_migrated))
logger.info("Need to migrate %i images", len(need_migrate))
logger.info("Using tempdir %s", tempfile.gettempdir())
- logger.info("Biggest image is about %i MiB, tempdir needs at least %i MiB free", biggest/(2**20), (biggest*2)/(2**20))
+ logger.info("Biggest image is about %i MiB, tempdir needs at least %i MiB free", old_div(biggest,(2**20)), old_div((biggest*2),(2**20)))
if args.dry_run:
return
if args.dry_run:
return
@@
-142,15
+144,15
@@
def main(arguments=None):
success = []
failures = []
count = 1
success = []
failures = []
count = 1
- for old_image in need_migrate.values():
+ for old_image in list(need_migrate.values()):
if uuid_to_collection[old_image["collection"]]["portable_data_hash"] in already_migrated:
continue
oldcol = CollectionReader(uuid_to_collection[old_image["collection"]]["manifest_text"])
if uuid_to_collection[old_image["collection"]]["portable_data_hash"] in already_migrated:
continue
oldcol = CollectionReader(uuid_to_collection[old_image["collection"]]["manifest_text"])
- tarfile = oldcol.keys()[0]
+ tarfile = list(oldcol.keys())[0]
logger.info("[%i/%i] Migrating %s:%s (%s) (%i MiB)", count, len(need_migrate), old_image["repo"],
logger.info("[%i/%i] Migrating %s:%s (%s) (%i MiB)", count, len(need_migrate), old_image["repo"],
- old_image["tag"], old_image["collection"], oldcol.values()[0].size()/(2**20))
+ old_image["tag"], old_image["collection"], old_div(list(oldcol.values())[0].size(),(2**20)))
count += 1
start = time.time()
count += 1
start = time.time()
diff --git
a/sdk/python/arvados/commands/put.py
b/sdk/python/arvados/commands/put.py
index 32d5fef6a8588e1f2785517435949082dd5b3534..681e78eed68cd2a926aefd003f9cff88263fb15e 100644
(file)
--- a/
sdk/python/arvados/commands/put.py
+++ b/
sdk/python/arvados/commands/put.py
@@
-3,6
+3,10
@@
# TODO:
# --md5sum - display md5 of each file as read from disk
# TODO:
# --md5sum - display md5 of each file as read from disk
+from __future__ import division
+from builtins import str
+from past.utils import old_div
+from builtins import object
import argparse
import arvados
import arvados.collection
import argparse
import arvados
import arvados.collection
@@
-205,7
+209,7
@@
def parse_arguments(arguments):
if len(args.paths) == 0:
args.paths = ['-']
if len(args.paths) == 0:
args.paths = ['-']
- args.paths = map(lambda x: "-" if x == "/dev/stdin" else x, args.paths)
+ args.paths = ["-" if x == "/dev/stdin" else x for x in args.paths]
if len(args.paths) != 1 or os.path.isdir(args.paths[0]):
if args.filename:
if len(args.paths) != 1 or os.path.isdir(args.paths[0]):
if args.filename:
@@
-509,7
+513,7
@@
class ArvPutUploadJob(object):
Recursively get the total size of the collection
"""
size = 0
Recursively get the total size of the collection
"""
size = 0
- for item in collection.values():
+ for item in list(collection.values()):
if isinstance(item, arvados.collection.Collection) or isinstance(item, arvados.collection.Subcollection):
size += self._collection_size(item)
else:
if isinstance(item, arvados.collection.Collection) or isinstance(item, arvados.collection.Subcollection):
size += self._collection_size(item)
else:
@@
-701,7
+705,7
@@
class ArvPutUploadJob(object):
def collection_file_paths(self, col, path_prefix='.'):
"""Return a list of file paths by recursively go through the entire collection `col`"""
file_paths = []
def collection_file_paths(self, col, path_prefix='.'):
"""Return a list of file paths by recursively go through the entire collection `col`"""
file_paths = []
- for name, item in col.items():
+ for name, item in list(col.items()):
if isinstance(item, arvados.arvfile.ArvadosFile):
file_paths.append(os.path.join(path_prefix, name))
elif isinstance(item, arvados.collection.Subcollection):
if isinstance(item, arvados.arvfile.ArvadosFile):
file_paths.append(os.path.join(path_prefix, name))
elif isinstance(item, arvados.collection.Subcollection):
@@
-778,7
+782,7
@@
class ArvPutUploadJob(object):
locators.append(loc)
return locators
elif isinstance(item, arvados.collection.Collection):
locators.append(loc)
return locators
elif isinstance(item, arvados.collection.Collection):
- l = [self._datablocks_on_item(x) for x in item.values()]
+ l = [self._datablocks_on_item(x) for x in list(item.values())]
# Fast list flattener method taken from:
# http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
return [loc for sublist in l for loc in sublist]
# Fast list flattener method taken from:
# http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
return [loc for sublist in l for loc in sublist]
@@
-817,7
+821,7
@@
def human_progress(bytes_written, bytes_expected):
if bytes_expected:
return "\r{}M / {}M {:.1%} ".format(
bytes_written >> 20, bytes_expected >> 20,
if bytes_expected:
return "\r{}M / {}M {:.1%} ".format(
bytes_written >> 20, bytes_expected >> 20,
- float(bytes_written) / bytes_expected)
+ old_div(float(bytes_written), bytes_expected))
else:
return "\r{} ".format(bytes_written)
else:
return "\r{} ".format(bytes_written)
@@
-982,7
+986,7
@@
def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr):
if not output.endswith('\n'):
stdout.write('\n')
if not output.endswith('\n'):
stdout.write('\n')
- for sigcode, orig_handler in orig_signal_handlers.items():
+ for sigcode, orig_handler in list(orig_signal_handlers.items()):
signal.signal(sigcode, orig_handler)
if status != 0:
signal.signal(sigcode, orig_handler)
if status != 0:
diff --git
a/sdk/python/arvados/commands/run.py
b/sdk/python/arvados/commands/run.py
index 2f3e0427d9eb41137cd4f87721f88b811031b0fe..01a18e51723d435c08218388f378c538ac8becc5 100644
(file)
--- a/
sdk/python/arvados/commands/run.py
+++ b/
sdk/python/arvados/commands/run.py
@@
-2,6
+2,9
@@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import print_function
from __future__ import absolute_import
+from builtins import range
+from past.builtins import basestring
+from builtins import object
import arvados
import arvados.commands.ws as ws
import argparse
import arvados
import arvados.commands.ws as ws
import argparse
@@
-301,7
+304,7
@@
def main(arguments=None):
if files:
uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
if files:
uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
- for i in
x
range(1, len(slots)):
+ for i in range(1, len(slots)):
slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
component = {
slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
component = {
@@
-321,8
+324,8
@@
def main(arguments=None):
group_parser.add_argument('-b', '--batch-size', type=int)
group_parser.add_argument('args', nargs=argparse.REMAINDER)
group_parser.add_argument('-b', '--batch-size', type=int)
group_parser.add_argument('args', nargs=argparse.REMAINDER)
- for s in
x
range(2, len(slots)):
- for i in
x
range(0, len(slots[s])):
+ for s in range(2, len(slots)):
+ for i in range(0, len(slots[s])):
if slots[s][i] == '--':
inp = "input%i" % (s-2)
groupargs = group_parser.parse_args(slots[2][i+1:])
if slots[s][i] == '--':
inp = "input%i" % (s-2)
groupargs = group_parser.parse_args(slots[2][i+1:])
diff --git
a/sdk/python/arvados/crunch.py
b/sdk/python/arvados/crunch.py
index c184e6ac7432b159e31ca65153886b8de4b04d18..f4651cd0a5cb8294e281192c29dde7479bd008fc 100644
(file)
--- a/
sdk/python/arvados/crunch.py
+++ b/
sdk/python/arvados/crunch.py
@@
-1,3
+1,4
@@
+from builtins import object
import json
import os
import json
import os
diff --git
a/sdk/python/arvados/errors.py
b/sdk/python/arvados/errors.py
index bfd471ba52bee712a1e1768c91327ce28a9c6603..c9eda2d1c9c6eb306daedba17fc63c3ec89348d2 100644
(file)
--- a/
sdk/python/arvados/errors.py
+++ b/
sdk/python/arvados/errors.py
@@
-37,7
+37,7
@@
class KeepRequestError(Exception):
self._request_errors = OrderedDict(request_errors)
if self._request_errors:
exc_reports = [self._format_error(*err_pair)
self._request_errors = OrderedDict(request_errors)
if self._request_errors:
exc_reports = [self._format_error(*err_pair)
- for err_pair in self._request_errors.ite
rite
ms()]
+ for err_pair in self._request_errors.items()]
base_msg = "{}: {}".format(message, "; ".join(exc_reports))
else:
base_msg = message
base_msg = "{}: {}".format(message, "; ".join(exc_reports))
else:
base_msg = message
diff --git
a/sdk/python/arvados/events.py
b/sdk/python/arvados/events.py
index b385761f7b7b1575dc576e783b3ae61e45eeb2cf..c58abe52be48730e56edb70d5e5109a0e3847592 100644
(file)
--- a/
sdk/python/arvados/events.py
+++ b/
sdk/python/arvados/events.py
@@
-1,4
+1,8
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import object
import arvados
from . import config
from . import errors
import arvados
from . import config
from . import errors
@@
-6,7
+10,7
@@
from .retry import RetryLoop
import logging
import json
import logging
import json
-import thread
+import
_
thread
import threading
import time
import os
import threading
import time
import os
@@
-116,7
+120,7
@@
class EventClient(object):
self.on_event_cb(m)
except Exception as e:
_logger.exception("Unexpected exception from event callback.")
self.on_event_cb(m)
except Exception as e:
_logger.exception("Unexpected exception from event callback.")
- thread.interrupt_main()
+
_
thread.interrupt_main()
def on_closed(self):
if not self.is_closed.is_set():
def on_closed(self):
if not self.is_closed.is_set():
@@
-131,7
+135,7
@@
class EventClient(object):
if tries_left == 0:
_logger.exception("EventClient thread could not contact websocket server.")
self.is_closed.set()
if tries_left == 0:
_logger.exception("EventClient thread could not contact websocket server.")
self.is_closed.set()
- thread.interrupt_main()
+
_
thread.interrupt_main()
return
def run_forever(self):
return
def run_forever(self):
@@
-226,7
+230,7
@@
class PollClient(threading.Thread):
_logger.exception("PollClient thread could not contact API server.")
with self._closing_lock:
self._closing.set()
_logger.exception("PollClient thread could not contact API server.")
with self._closing_lock:
self._closing.set()
- thread.interrupt_main()
+
_
thread.interrupt_main()
return
for i in items["items"]:
skip_old_events = [["id", ">", str(i["id"])]]
return
for i in items["items"]:
skip_old_events = [["id", ">", str(i["id"])]]
@@
-237,7
+241,7
@@
class PollClient(threading.Thread):
self.on_event(i)
except Exception as e:
_logger.exception("Unexpected exception from event callback.")
self.on_event(i)
except Exception as e:
_logger.exception("Unexpected exception from event callback.")
- thread.interrupt_main()
+
_
thread.interrupt_main()
if items["items_available"] > len(items["items"]):
moreitems = True
if not moreitems:
if items["items_available"] > len(items["items"]):
moreitems = True
if not moreitems:
diff --git
a/sdk/python/arvados/keep.py
b/sdk/python/arvados/keep.py
index 4efa6982a1fa346e3ae6c55a0e72f892968173f1..b709473b78c275ca02e1eb2c7015f3e28bf2ffb0 100644
(file)
--- a/
sdk/python/arvados/keep.py
+++ b/
sdk/python/arvados/keep.py
@@
-1,19
+1,27
@@
from __future__ import absolute_import
from __future__ import absolute_import
-import cStringIO
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import next
+from builtins import str
+from builtins import range
+from past.utils import old_div
+from builtins import object
+import io
import datetime
import hashlib
import logging
import math
import os
import pycurl
import datetime
import hashlib
import logging
import math
import os
import pycurl
-import
Q
ueue
+import
q
ueue
import re
import socket
import ssl
import sys
import threading
from . import timer
import re
import socket
import ssl
import sys
import threading
from . import timer
-import urlparse
+import url
lib.
parse
import arvados
import arvados.config as config
import arvados
import arvados.config as config
@@
-191,7
+199,7
@@
class KeepBlockCache(object):
self._cache = [c for c in self._cache if not (c.ready.is_set() and c.content is None)]
sm = sum([slot.size() for slot in self._cache])
while len(self._cache) > 0 and sm > self.cache_max:
self._cache = [c for c in self._cache if not (c.ready.is_set() and c.content is None)]
sm = sum([slot.size() for slot in self._cache])
while len(self._cache) > 0 and sm > self.cache_max:
- for i in
x
range(len(self._cache)-1, -1, -1):
+ for i in range(len(self._cache)-1, -1, -1):
if self._cache[i].ready.is_set():
del self._cache[i]
break
if self._cache[i].ready.is_set():
del self._cache[i]
break
@@
-199,7
+207,7
@@
class KeepBlockCache(object):
def _get(self, locator):
# Test if the locator is already in the cache
def _get(self, locator):
# Test if the locator is already in the cache
- for i in
x
range(0, len(self._cache)):
+ for i in range(0, len(self._cache)):
if self._cache[i].locator == locator:
n = self._cache[i]
if i != 0:
if self._cache[i].locator == locator:
n = self._cache[i]
if i != 0:
@@
-270,7
+278,7
@@
class KeepClient(object):
arvados.errors.HttpError,
)
arvados.errors.HttpError,
)
- def __init__(self, root, user_agent_pool=
Q
ueue.LifoQueue(),
+ def __init__(self, root, user_agent_pool=
q
ueue.LifoQueue(),
upload_counter=None,
download_counter=None, **headers):
self.root = root
upload_counter=None,
download_counter=None, **headers):
self.root = root
@@
-298,7
+306,7
@@
class KeepClient(object):
def _get_user_agent(self):
try:
return self._user_agent_pool.get(block=False)
def _get_user_agent(self):
try:
return self._user_agent_pool.get(block=False)
- except
Q
ueue.Empty:
+ except
q
ueue.Empty:
return pycurl.Curl()
def _put_user_agent(self, ua):
return pycurl.Curl()
def _put_user_agent(self, ua):
@@
-328,12
+336,12
@@
class KeepClient(object):
try:
with timer.Timer() as t:
self._headers = {}
try:
with timer.Timer() as t:
self._headers = {}
- response_body =
cStringIO
.StringIO()
+ response_body =
io
.StringIO()
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
curl.setopt(pycurl.URL, url.encode('utf-8'))
curl.setopt(pycurl.HTTPHEADER, [
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
curl.setopt(pycurl.URL, url.encode('utf-8'))
curl.setopt(pycurl.HTTPHEADER, [
- '{}: {}'.format(k,v) for k,v in self.get_headers.ite
rite
ms()])
+ '{}: {}'.format(k,v) for k,v in self.get_headers.items()])
curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
if method == "HEAD":
curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
if method == "HEAD":
@@
-384,7
+392,7
@@
class KeepClient(object):
self._result['status_code'],
len(self._result['body']),
t.msecs,
self._result['status_code'],
len(self._result['body']),
t.msecs,
-
(len(self._result['body'])/(1024.0*1024))/t.secs
if t.secs > 0 else 0)
+
old_div((old_div(len(self._result['body']),(1024.0*1024))),t.secs)
if t.secs > 0 else 0)
if self.download_counter:
self.download_counter.add(len(self._result['body']))
if self.download_counter:
self.download_counter.add(len(self._result['body']))
@@
-405,8
+413,8
@@
class KeepClient(object):
try:
with timer.Timer() as t:
self._headers = {}
try:
with timer.Timer() as t:
self._headers = {}
- body_reader =
cStringIO
.StringIO(body)
- response_body =
cStringIO
.StringIO()
+ body_reader =
io
.StringIO(body)
+ response_body =
io
.StringIO()
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
curl.setopt(pycurl.URL, url.encode('utf-8'))
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
curl.setopt(pycurl.URL, url.encode('utf-8'))
@@
-420,7
+428,7
@@
class KeepClient(object):
curl.setopt(pycurl.INFILESIZE, len(body))
curl.setopt(pycurl.READFUNCTION, body_reader.read)
curl.setopt(pycurl.HTTPHEADER, [
curl.setopt(pycurl.INFILESIZE, len(body))
curl.setopt(pycurl.READFUNCTION, body_reader.read)
curl.setopt(pycurl.HTTPHEADER, [
- '{}: {}'.format(k,v) for k,v in self.put_headers.ite
rite
ms()])
+ '{}: {}'.format(k,v) for k,v in self.put_headers.items()])
curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
self._setcurltimeouts(curl, timeout)
curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
self._setcurltimeouts(curl, timeout)
@@
-457,7
+465,7
@@
class KeepClient(object):
self._result['status_code'],
len(body),
t.msecs,
self._result['status_code'],
len(body),
t.msecs,
-
(len(body)/(1024.0*1024))/t.secs
if t.secs > 0 else 0)
+
old_div((old_div(len(body),(1024.0*1024))),t.secs)
if t.secs > 0 else 0)
if self.upload_counter:
self.upload_counter.add(len(body))
return True
if self.upload_counter:
self.upload_counter.add(len(body))
return True
@@
-498,9
+506,9
@@
class KeepClient(object):
# Returning None implies all bytes were written
# Returning None implies all bytes were written
- class KeepWriterQueue(
Q
ueue.Queue):
+ class KeepWriterQueue(
q
ueue.Queue):
def __init__(self, copies):
def __init__(self, copies):
-
Q
ueue.Queue.__init__(self) # Old-style superclass
+
q
ueue.Queue.__init__(self) # Old-style superclass
self.wanted_copies = copies
self.successful_copies = 0
self.response = None
self.wanted_copies = copies
self.successful_copies = 0
self.response = None
@@
-548,7
+556,7
@@
class KeepClient(object):
return service, service_root
elif self.empty():
self.pending_tries_notification.notify_all()
return service, service_root
elif self.empty():
self.pending_tries_notification.notify_all()
- raise
Q
ueue.Empty
+ raise
q
ueue.Empty
else:
self.pending_tries_notification.wait()
else:
self.pending_tries_notification.wait()
@@
-560,7
+568,7
@@
class KeepClient(object):
if (not max_service_replicas) or (max_service_replicas >= copies):
num_threads = 1
else:
if (not max_service_replicas) or (max_service_replicas >= copies):
num_threads = 1
else:
- num_threads = int(math.ceil(
float(copies) / max_service_replicas
))
+ num_threads = int(math.ceil(
old_div(float(copies), max_service_replicas)
))
_logger.debug("Pool max threads is %d", num_threads)
self.workers = []
self.queue = KeepClient.KeepWriterQueue(copies)
_logger.debug("Pool max threads is %d", num_threads)
self.workers = []
self.queue = KeepClient.KeepWriterQueue(copies)
@@
-602,7
+610,7
@@
class KeepClient(object):
while True:
try:
service, service_root = self.queue.get_next_task()
while True:
try:
service, service_root = self.queue.get_next_task()
- except
Q
ueue.Empty:
+ except
q
ueue.Empty:
return
try:
locator, copies = self.do_task(service, service_root)
return
try:
locator, copies = self.do_task(service, service_root)
@@
-720,7
+728,7
@@
class KeepClient(object):
self.block_cache = block_cache if block_cache else KeepBlockCache()
self.timeout = timeout
self.proxy_timeout = proxy_timeout
self.block_cache = block_cache if block_cache else KeepBlockCache()
self.timeout = timeout
self.proxy_timeout = proxy_timeout
- self._user_agent_pool =
Q
ueue.LifoQueue()
+ self._user_agent_pool =
q
ueue.LifoQueue()
self.upload_counter = Counter()
self.download_counter = Counter()
self.put_counter = Counter()
self.upload_counter = Counter()
self.download_counter = Counter()
self.put_counter = Counter()
@@
-741,7
+749,7
@@
class KeepClient(object):
if not proxy_uris[i].endswith('/'):
proxy_uris[i] += '/'
# URL validation
if not proxy_uris[i].endswith('/'):
proxy_uris[i] += '/'
# URL validation
- url = urlparse.urlparse(proxy_uris[i])
+ url = url
lib.
parse.urlparse(proxy_uris[i])
if not (url.scheme and url.netloc):
raise arvados.errors.ArgumentError("Invalid proxy URI: {}".format(proxy_uris[i]))
self.api_token = api_token
if not (url.scheme and url.netloc):
raise arvados.errors.ArgumentError("Invalid proxy URI: {}".format(proxy_uris[i]))
self.api_token = api_token
@@
-807,7
+815,7
@@
class KeepClient(object):
raise arvados.errors.NoKeepServersError()
# Precompute the base URI for each service.
raise arvados.errors.NoKeepServersError()
# Precompute the base URI for each service.
- for r in self._gateway_services.
iter
values():
+ for r in self._gateway_services.values():
host = r['service_host']
if not host.startswith('[') and host.find(':') >= 0:
# IPv6 URIs must be formatted like http://[::1]:80/...
host = r['service_host']
if not host.startswith('[') and host.find(':') >= 0:
# IPv6 URIs must be formatted like http://[::1]:80/...
@@
-819,7
+827,7
@@
class KeepClient(object):
_logger.debug(str(self._gateway_services))
self._keep_services = [
_logger.debug(str(self._gateway_services))
self._keep_services = [
- ks for ks in self._gateway_services.
iter
values()
+ ks for ks in self._gateway_services.values()
if not ks.get('service_type', '').startswith('gateway:')]
self._writable_services = [ks for ks in self._keep_services
if not ks.get('read_only')]
if not ks.get('service_type', '').startswith('gateway:')]
self._writable_services = [ks for ks in self._keep_services
if not ks.get('read_only')]
@@
-1059,7
+1067,7
@@
class KeepClient(object):
KeepClient is initialized.
"""
KeepClient is initialized.
"""
- if isinstance(data,
unicode
):
+ if isinstance(data,
str
):
data = data.encode("ascii")
elif not isinstance(data, str):
raise arvados.errors.ArgumentError("Argument 'data' to KeepClient.put is not type 'str'")
data = data.encode("ascii")
elif not isinstance(data, str):
raise arvados.errors.ArgumentError("Argument 'data' to KeepClient.put is not type 'str'")
diff --git
a/sdk/python/arvados/retry.py
b/sdk/python/arvados/retry.py
index 5ba4f4ea41016a6225ebb3fca194265e56b56a0b..168bd3910fc799fd3ddc4d8bfec63f31b2e261bb 100644
(file)
--- a/
sdk/python/arvados/retry.py
+++ b/
sdk/python/arvados/retry.py
@@
-1,5
+1,7
@@
#!/usr/bin/env python
#!/usr/bin/env python
+from builtins import range
+from builtins import object
import functools
import inspect
import pycurl
import functools
import inspect
import pycurl
@@
-9,7
+11,7
@@
from collections import deque
import arvados.errors
import arvados.errors
-_HTTP_SUCCESSES = set(
x
range(200, 300))
+_HTTP_SUCCESSES = set(range(200, 300))
_HTTP_CAN_RETRY = set([408, 409, 422, 423, 500, 502, 503, 504])
class RetryLoop(object):
_HTTP_CAN_RETRY = set([408, 409, 422, 423, 500, 502, 503, 504])
class RetryLoop(object):
@@
-69,7
+71,7
@@
class RetryLoop(object):
def running(self):
return self._running and (self._success is None)
def running(self):
return self._running and (self._success is None)
- def
next
(self):
+ def
__next__
(self):
if self._running is None:
self._running = True
if (self.tries_left < 1) or not self.running():
if self._running is None:
self._running = True
if (self.tries_left < 1) or not self.running():
diff --git
a/sdk/python/arvados/safeapi.py
b/sdk/python/arvados/safeapi.py
index a9ca978865375eea1e34d44db08bb462388811de..488b758a42f69534ae0bc2584d64c660baa7532e 100644
(file)
--- a/
sdk/python/arvados/safeapi.py
+++ b/
sdk/python/arvados/safeapi.py
@@
-1,5
+1,6
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from builtins import object
import copy
import threading
import copy
import threading
diff --git
a/sdk/python/arvados/stream.py
b/sdk/python/arvados/stream.py
index 042ed47e40edb495cf1a91073ea75cc4d08b5a01..59558162b41891d77ae148b9f407d2359035fb5f 100644
(file)
--- a/
sdk/python/arvados/stream.py
+++ b/
sdk/python/arvados/stream.py
@@
-1,5
+1,6
@@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import print_function
from __future__ import absolute_import
+from builtins import object
import collections
import hashlib
import os
import collections
import hashlib
import os
@@
-36,15
+37,15
@@
class StreamReader(object):
s = re.match(r'^[0-9a-f]{32}\+(\d+)(\+\S+)*$', tok)
if s:
s = re.match(r'^[0-9a-f]{32}\+(\d+)(\+\S+)*$', tok)
if s:
- blocksize =
long
(s.group(1))
+ blocksize =
int
(s.group(1))
self._data_locators.append(Range(tok, streamoffset, blocksize, 0))
streamoffset += blocksize
continue
s = re.search(r'^(\d+):(\d+):(\S+)', tok)
if s:
self._data_locators.append(Range(tok, streamoffset, blocksize, 0))
streamoffset += blocksize
continue
s = re.search(r'^(\d+):(\d+):(\S+)', tok)
if s:
- pos =
long
(s.group(1))
- size =
long
(s.group(2))
+ pos =
int
(s.group(1))
+ size =
int
(s.group(2))
name = s.group(3).replace('\\040', ' ')
if name not in self._files:
self._files[name] = StreamFileReader(self, [Range(pos, 0, size, 0)], name)
name = s.group(3).replace('\\040', ' ')
if name not in self._files:
self._files[name] = StreamFileReader(self, [Range(pos, 0, size, 0)], name)
@@
-62,7
+63,7
@@
class StreamReader(object):
return self._files
def all_files(self):
return self._files
def all_files(self):
- return
self._files.values(
)
+ return
list(self._files.values()
)
def size(self):
n = self._data_locators[-1]
def size(self):
n = self._data_locators[-1]
@@
-97,5
+98,5
@@
class StreamReader(object):
manifest_text.extend([d.locator for d in self._data_locators])
manifest_text.extend([' '.join(["{}:{}:{}".format(seg.locator, seg.range_size, f.name.replace(' ', '\\040'))
for seg in f.segments])
manifest_text.extend([d.locator for d in self._data_locators])
manifest_text.extend([' '.join(["{}:{}:{}".format(seg.locator, seg.range_size, f.name.replace(' ', '\\040'))
for seg in f.segments])
- for f in
self._files.values(
)])
+ for f in
list(self._files.values()
)])
return ' '.join(manifest_text) + '\n'
return ' '.join(manifest_text) + '\n'
diff --git
a/sdk/python/arvados/timer.py
b/sdk/python/arvados/timer.py
index 6d4a73f25917c16aab5b49cb21d01c94fff0425f..f3bf839dd5f8ebf9fa23a20e6cdd0ee7beb9452d 100644
(file)
--- a/
sdk/python/arvados/timer.py
+++ b/
sdk/python/arvados/timer.py
@@
-1,4
+1,5
@@
from __future__ import print_function
from __future__ import print_function
+from builtins import object
import time
class Timer(object):
import time
class Timer(object):
diff --git
a/sdk/python/arvados/util.py
b/sdk/python/arvados/util.py
index 522a95e3e4ff276f7b189de395e0d6f95fe5124b..3f50553c4b0251092d5758a33bc3c6ff19bf93ed 100644
(file)
--- a/
sdk/python/arvados/util.py
+++ b/
sdk/python/arvados/util.py
@@
-124,7
+124,7
@@
def tarball_extract(tarball, path):
raise arvados.errors.CommandFailedError(
"tar exited %d" % p.returncode)
os.symlink(tarball, os.path.join(path, '.locator'))
raise arvados.errors.CommandFailedError(
"tar exited %d" % p.returncode)
os.symlink(tarball, os.path.join(path, '.locator'))
- tld_extracts =
filter(lambda f: f != '.locator', os.listdir(path))
+ tld_extracts =
[f for f in os.listdir(path) if f != '.locator']
lockfile.close()
if len(tld_extracts) == 1:
return os.path.join(path, tld_extracts[0])
lockfile.close()
if len(tld_extracts) == 1:
return os.path.join(path, tld_extracts[0])
@@
-190,7
+190,7
@@
def zipball_extract(zipball, path):
"unzip exited %d" % p.returncode)
os.unlink(zip_filename)
os.symlink(zipball, os.path.join(path, '.locator'))
"unzip exited %d" % p.returncode)
os.unlink(zip_filename)
os.symlink(zipball, os.path.join(path, '.locator'))
- tld_extracts =
filter(lambda f: f != '.locator', os.listdir(path))
+ tld_extracts =
[f for f in os.listdir(path) if f != '.locator']
lockfile.close()
if len(tld_extracts) == 1:
return os.path.join(path, tld_extracts[0])
lockfile.close()
if len(tld_extracts) == 1:
return os.path.join(path, tld_extracts[0])
diff --git
a/sdk/python/tests/arvados_testutil.py
b/sdk/python/tests/arvados_testutil.py
index dae3dd3b7b19c923ff53381e9f3ebef8c5abae49..51e85c85bd4727e41b733a3cec0b8f78521eabbc 100644
(file)
--- a/
sdk/python/tests/arvados_testutil.py
+++ b/
sdk/python/tests/arvados_testutil.py
@@
-1,16
+1,21
@@
#!/usr/bin/env python
#!/usr/bin/env python
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from builtins import object
import arvados
import contextlib
import errno
import hashlib
import arvados
import contextlib
import errno
import hashlib
-import http
lib
+import http
.client
import httplib2
import io
import mock
import os
import pycurl
import httplib2
import io
import mock
import os
import pycurl
-import
Q
ueue
+import
q
ueue
import shutil
import sys
import tempfile
import shutil
import sys
import tempfile
@@
-29,7
+34,7
@@
def queue_with(items):
given, it will be consumed to fill the queue before queue_with()
returns.
"""
given, it will be consumed to fill the queue before queue_with()
returns.
"""
- queue =
Q
ueue.Queue()
+ queue =
q
ueue.Queue()
for val in items:
queue.put(val)
return lambda *args, **kwargs: queue.get(block=False)
for val in items:
queue.put(val)
return lambda *args, **kwargs: queue.get(block=False)
@@
-38,7
+43,7
@@
def queue_with(items):
# mock calls to httplib2.Http.request()
def fake_httplib2_response(code, **headers):
headers.update(status=str(code),
# mock calls to httplib2.Http.request()
def fake_httplib2_response(code, **headers):
headers.update(status=str(code),
- reason=http
lib
.responses.get(code, "Unknown Response"))
+ reason=http
.client
.responses.get(code, "Unknown Response"))
return httplib2.Response(headers)
def mock_responses(body, *codes, **headers):
return httplib2.Response(headers)
def mock_responses(body, *codes, **headers):
@@
-63,7
+68,7
@@
def redirected_streams(stdout=None, stderr=None):
sys.stderr = orig_stderr
sys.stderr = orig_stderr
-class FakeCurl:
+class FakeCurl
(object)
:
@classmethod
def make(cls, code, body='', headers={}):
return mock.Mock(spec=cls, wraps=cls(code, body, headers))
@classmethod
def make(cls, code, body='', headers={}):
return mock.Mock(spec=cls, wraps=cls(code, body, headers))
@@
-96,7
+101,7
@@
class FakeCurl:
raise ValueError
if self._headerfunction:
self._headerfunction("HTTP/1.1 {} Status".format(self._resp_code))
raise ValueError
if self._headerfunction:
self._headerfunction("HTTP/1.1 {} Status".format(self._resp_code))
- for k, v in self._resp_headers.ite
rite
ms():
+ for k, v in self._resp_headers.items():
self._headerfunction(k + ': ' + str(v))
if type(self._resp_body) is not bool:
self._writer(self._resp_body)
self._headerfunction(k + ': ' + str(v))
if type(self._resp_body) is not bool:
self._writer(self._resp_body)
diff --git
a/sdk/python/tests/keepstub.py
b/sdk/python/tests/keepstub.py
index d79788c07e3ef4d26d055e6d72a8a59f755c66ae..965bf299b86d9bb431e82dec94c46317922a1222 100644
(file)
--- a/
sdk/python/tests/keepstub.py
+++ b/
sdk/python/tests/keepstub.py
@@
-1,11
+1,16
@@
-import BaseHTTPServer
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.utils import old_div
+import http.server
import hashlib
import os
import re
import hashlib
import os
import re
-import
SocketS
erver
+import
sockets
erver
import time
import time
-class Server(
SocketServer.ThreadingMixIn, BaseHTTPS
erver.HTTPServer, object):
+class Server(
socketserver.ThreadingMixIn, http.s
erver.HTTPServer, object):
allow_reuse_address = 1
allow_reuse_address = 1
@@
-32,7
+37,7
@@
class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer, object):
def setdelays(self, **kwargs):
"""In future requests, induce delays at the given checkpoints."""
def setdelays(self, **kwargs):
"""In future requests, induce delays at the given checkpoints."""
- for (k, v) in kwargs.ite
rite
ms():
+ for (k, v) in kwargs.items():
self.delays.get(k) # NameError if unknown key
self.delays[k] = v
self.delays.get(k) # NameError if unknown key
self.delays[k] = v
@@
-54,12
+59,12
@@
class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer, object):
self._sleep_at_least(self.delays[k])
self._sleep_at_least(self.delays[k])
-class Handler(
BaseHTTPS
erver.BaseHTTPRequestHandler, object):
+class Handler(
http.s
erver.BaseHTTPRequestHandler, object):
def wfile_bandwidth_write(self, data_to_write):
if self.server.bandwidth == None and self.server.delays['mid_write'] == 0:
self.wfile.write(data_to_write)
else:
def wfile_bandwidth_write(self, data_to_write):
if self.server.bandwidth == None and self.server.delays['mid_write'] == 0:
self.wfile.write(data_to_write)
else:
- BYTES_PER_WRITE = int(
self.server.bandwidth/4.0
) or 32768
+ BYTES_PER_WRITE = int(
old_div(self.server.bandwidth,4.0)
) or 32768
outage_happened = False
num_bytes = len(data_to_write)
num_sent_bytes = 0
outage_happened = False
num_bytes = len(data_to_write)
num_sent_bytes = 0
@@
-75,7
+80,7
@@
class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
num_sent_bytes:num_sent_bytes+num_write_bytes])
num_sent_bytes += num_write_bytes
if self.server.bandwidth is not None:
num_sent_bytes:num_sent_bytes+num_write_bytes])
num_sent_bytes += num_write_bytes
if self.server.bandwidth is not None:
- target_time +=
num_write_bytes / self.server.bandwidth
+ target_time +=
old_div(num_write_bytes, self.server.bandwidth)
self.server._sleep_at_least(target_time - time.time())
return None
self.server._sleep_at_least(target_time - time.time())
return None
@@
-83,7
+88,7
@@
class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
if self.server.bandwidth == None and self.server.delays['mid_read'] == 0:
return self.rfile.read(bytes_to_read)
else:
if self.server.bandwidth == None and self.server.delays['mid_read'] == 0:
return self.rfile.read(bytes_to_read)
else:
- BYTES_PER_READ = int(
self.server.bandwidth/4.0
) or 32768
+ BYTES_PER_READ = int(
old_div(self.server.bandwidth,4.0)
) or 32768
data = ''
outage_happened = False
bytes_read = 0
data = ''
outage_happened = False
bytes_read = 0
@@
-98,7
+103,7
@@
class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
data += self.rfile.read(next_bytes_to_read)
bytes_read += next_bytes_to_read
if self.server.bandwidth is not None:
data += self.rfile.read(next_bytes_to_read)
bytes_read += next_bytes_to_read
if self.server.bandwidth is not None:
- target_time +=
next_bytes_to_read / self.server.bandwidth
+ target_time +=
old_div(next_bytes_to_read, self.server.bandwidth)
self.server._sleep_at_least(target_time - time.time())
return data
self.server._sleep_at_least(target_time - time.time())
return data
diff --git
a/sdk/python/tests/manifest_examples.py
b/sdk/python/tests/manifest_examples.py
index f20d9090229dbd90b1466d6c3f065e6296ccdbb2..91c37e6c66fbffae85c9d39c58bfe09f8ed56017 100644
(file)
--- a/
sdk/python/tests/manifest_examples.py
+++ b/
sdk/python/tests/manifest_examples.py
@@
-1,4
+1,6
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from builtins import range
+from builtins import object
import arvados
from . import arvados_testutil as tutil
import arvados
from . import arvados_testutil as tutil
diff --git
a/sdk/python/tests/performance/test_a_sample.py
b/sdk/python/tests/performance/test_a_sample.py
index b99ca64a3e8d959d87d783bdaf0052d0e9f21ac1..0c5fe3adc8819dfb5670318464679f27ae95d660 100644
(file)
--- a/
sdk/python/tests/performance/test_a_sample.py
+++ b/
sdk/python/tests/performance/test_a_sample.py
@@
-1,5
+1,6
@@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import print_function
from __future__ import absolute_import
+from builtins import range
import unittest
from .performance_profiler import profiled
import unittest
from .performance_profiler import profiled
diff --git
a/sdk/python/tests/run_test_server.py
b/sdk/python/tests/run_test_server.py
index 8d7e708de933e3091d4d11d1e6247b1211865d92..d96612631885f9d1bf4f2321f457787fb3daa112 100644
(file)
--- a/
sdk/python/tests/run_test_server.py
+++ b/
sdk/python/tests/run_test_server.py
@@
-1,6
+1,10
@@
#!/usr/bin/env python
from __future__ import print_function
#!/usr/bin/env python
from __future__ import print_function
+from __future__ import division
+from builtins import str
+from builtins import range
+from past.utils import old_div
import argparse
import atexit
import errno
import argparse
import atexit
import errno
@@
-96,7
+100,7
@@
def kill_server_pid(pidfile, wait=10, passenger_root=False):
# Use up to half of the +wait+ period waiting for "passenger
# stop" to work. If the process hasn't exited by then, start
# sending TERM signals.
# Use up to half of the +wait+ period waiting for "passenger
# stop" to work. If the process hasn't exited by then, start
# sending TERM signals.
- startTERM +=
wait/2
+ startTERM +=
old_div(wait,2)
server_pid = None
while now <= deadline and server_pid is None:
server_pid = None
while now <= deadline and server_pid is None:
@@
-439,7
+443,7
@@
def _start_keep(n, keep_args):
"-listen=:{}".format(port),
"-pid="+_pidfile('keep{}'.format(n))]
"-listen=:{}".format(port),
"-pid="+_pidfile('keep{}'.format(n))]
- for arg, val in keep_args.ite
rite
ms():
+ for arg, val in keep_args.items():
keep_cmd.append("{}={}".format(arg, val))
logf = open(_fifo2stderr('keep{}'.format(n)), 'w')
keep_cmd.append("{}={}".format(arg, val))
logf = open(_fifo2stderr('keep{}'.format(n)), 'w')
@@
-736,7
+740,7
@@
class TestCaseWithServers(unittest.TestCase):
@staticmethod
def _restore_dict(src, dest):
@staticmethod
def _restore_dict(src, dest):
- for key in
dest.keys(
):
+ for key in
list(dest.keys()
):
if key not in src:
del dest[key]
dest.update(src)
if key not in src:
del dest[key]
dest.update(src)
diff --git
a/sdk/python/tests/test_api.py
b/sdk/python/tests/test_api.py
index b6b2b563a74f8a0f8fee7ab1459d2f178c33f2b3..a2dcaa0b2a6cdcdddbd0474cfb865ac3ba2bad3b 100644
(file)
--- a/
sdk/python/tests/test_api.py
+++ b/
sdk/python/tests/test_api.py
@@
-1,6
+1,8
@@
#!/usr/bin/env python
from __future__ import absolute_import
#!/usr/bin/env python
from __future__ import absolute_import
+from builtins import str
+from builtins import range
import arvados
import collections
import httplib2
import arvados
import collections
import httplib2
@@
-106,7
+108,7
@@
class ArvadosApiTest(run_test_server.TestCaseWithServers):
api = arvados.api('v1',
requestBuilder=req_builder, model=OrderedJsonModel())
result = api.humans().get(uuid='test').execute()
api = arvados.api('v1',
requestBuilder=req_builder, model=OrderedJsonModel())
result = api.humans().get(uuid='test').execute()
- self.assertEqual(string.hexdigits, ''.join(
result.keys(
)))
+ self.assertEqual(string.hexdigits, ''.join(
list(result.keys()
)))
class RetryREST(unittest.TestCase):
class RetryREST(unittest.TestCase):
@@
-167,7
+169,7
@@
class RetryREST(unittest.TestCase):
mock_conns = {str(i): mock.MagicMock() for i in range(2)}
self.api._http.connections = mock_conns.copy()
self.api.users().create(body={}).execute()
mock_conns = {str(i): mock.MagicMock() for i in range(2)}
self.api._http.connections = mock_conns.copy()
self.api.users().create(body={}).execute()
-        for c in mock_conns.itervalues():
+        for c in mock_conns.values():
self.assertEqual(c.close.call_count, expect)
@mock.patch('time.sleep')
self.assertEqual(c.close.call_count, expect)
@mock.patch('time.sleep')
diff --git
a/sdk/python/tests/test_arv_ls.py
b/sdk/python/tests/test_arv_ls.py
index 8e5c5ad12a10f8851a5ba4ac016db67ed8d4b152..ae26ae79ceb6b3513caa73ef673a42203e431af9 100644
(file)
--- a/
sdk/python/tests/test_arv_ls.py
+++ b/
sdk/python/tests/test_arv_ls.py
@@
-2,6
+2,8
@@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# -*- coding: utf-8 -*-
from __future__ import absolute_import
+from builtins import str
+from builtins import range
import io
import os
import random
import io
import os
import random
diff --git
a/sdk/python/tests/test_arv_put.py
b/sdk/python/tests/test_arv_put.py
index 5f314bd3078af98ea5a465d07a24fde4282a6310..5abf38854abc900146723fd27a549823eaff5882 100644
(file)
--- a/
sdk/python/tests/test_arv_put.py
+++ b/
sdk/python/tests/test_arv_put.py
@@
-2,6
+2,12
@@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# -*- coding: utf-8 -*-
from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from past.utils import old_div
import apiclient
import io
import mock
import apiclient
import io
import mock
@@
-19,7
+25,7
@@
import threading
import hashlib
import random
import hashlib
import random
-from cStringIO import StringIO
+from io import StringIO
import arvados
import arvados.commands.put as arv_put
import arvados
import arvados.commands.put as arv_put
@@
-258,7
+264,7
@@
class ArvPutUploadJobTest(run_test_server.TestCaseWithServers,
_, self.large_file_name = tempfile.mkstemp()
fileobj = open(self.large_file_name, 'w')
# Make sure to write just a little more than one block
_, self.large_file_name = tempfile.mkstemp()
fileobj = open(self.large_file_name, 'w')
# Make sure to write just a little more than one block
-        for _ in range((arvados.config.KEEP_BLOCK_SIZE/(1024*1024))+1):
+        for _ in range((old_div(arvados.config.KEEP_BLOCK_SIZE,(1024*1024)))+1):
data = random.choice(['x', 'y', 'z']) * 1024 * 1024 # 1 MB
fileobj.write(data)
fileobj.close()
data = random.choice(['x', 'y', 'z']) * 1024 * 1024 # 1 MB
fileobj.write(data)
fileobj.close()
@@
-525,7
+531,7
@@
class ArvadosPutReportTest(ArvadosBaseTestCase):
def test_known_human_progress(self):
for count, total in [(0, 1), (2, 4), (45, 60)]:
def test_known_human_progress(self):
for count, total in [(0, 1), (2, 4), (45, 60)]:
-            expect = '{:.1%}'.format(float(count) / total)
+            expect = '{:.1%}'.format(old_div(float(count), total))
actual = arv_put.human_progress(count, total)
self.assertTrue(actual.startswith('\r'))
self.assertIn(expect, actual)
actual = arv_put.human_progress(count, total)
self.assertTrue(actual.startswith('\r'))
self.assertIn(expect, actual)
diff --git
a/sdk/python/tests/test_arvfile.py
b/sdk/python/tests/test_arvfile.py
index 6c4976ee0de1633ca114423624f3ca6c1b9a3546..f4bcd8a49794d71358dc7f8d85c0a36b56d42e70 100644
(file)
--- a/
sdk/python/tests/test_arvfile.py
+++ b/
sdk/python/tests/test_arvfile.py
@@
-1,6
+1,10
@@
#!/usr/bin/env python
from __future__ import absolute_import
#!/usr/bin/env python
from __future__ import absolute_import
+from builtins import hex
+from builtins import str
+from builtins import range
+from builtins import object
import bz2
import datetime
import gzip
import bz2
import datetime
import gzip
@@
-242,7
+246,7
@@
class ArvadosFileWriterTestCase(unittest.TestCase):
api_client=api, keep_client=keep) as c:
writer = c.open("count.txt", "r+")
text = "0123456789" * 100
api_client=api, keep_client=keep) as c:
writer = c.open("count.txt", "r+")
text = "0123456789" * 100
-            for b in xrange(0, 100000):
+            for b in range(0, 100000):
writer.write(text)
self.assertEqual(writer.size(), 100000000)
writer.write(text)
self.assertEqual(writer.size(), 100000000)
@@
-274,7
+278,7
@@
class ArvadosFileWriterTestCase(unittest.TestCase):
with Collection('. ' + arvados.config.EMPTY_BLOCK_LOCATOR + ' 0:0:count.txt',
keep_client=keep) as c:
writer = c.open("count.txt", "r+")
with Collection('. ' + arvados.config.EMPTY_BLOCK_LOCATOR + ' 0:0:count.txt',
keep_client=keep) as c:
writer = c.open("count.txt", "r+")
-            for b in xrange(0, 10):
+            for b in range(0, 10):
writer.seek(0, os.SEEK_SET)
writer.write("0123456789")
writer.seek(0, os.SEEK_SET)
writer.write("0123456789")
@@
-291,7
+295,7
@@
class ArvadosFileWriterTestCase(unittest.TestCase):
with Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt',
keep_client=keep) as c:
writer = c.open("count.txt", "r+")
with Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt',
keep_client=keep) as c:
writer = c.open("count.txt", "r+")
-            for b in xrange(0, 10):
+            for b in range(0, 10):
writer.seek(10, os.SEEK_SET)
writer.write("abcdefghij")
writer.seek(10, os.SEEK_SET)
writer.write("abcdefghij")
@@
-309,7
+313,7
@@
class ArvadosFileWriterTestCase(unittest.TestCase):
with Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt',
keep_client=keep) as c:
writer = c.open("count.txt", "r+")
with Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt',
keep_client=keep) as c:
writer = c.open("count.txt", "r+")
-            for b in xrange(0, 10):
+            for b in range(0, 10):
writer.seek(5, os.SEEK_SET)
writer.write("abcdefghij")
writer.seek(5, os.SEEK_SET)
writer.write("abcdefghij")
@@
-334,8
+338,8
@@
class ArvadosFileWriterTestCase(unittest.TestCase):
with Collection('. ' + arvados.config.EMPTY_BLOCK_LOCATOR + ' 0:0:count.txt',
api_client=api, keep_client=keep) as c:
writer = c.open("count.txt", "r+")
with Collection('. ' + arvados.config.EMPTY_BLOCK_LOCATOR + ' 0:0:count.txt',
api_client=api, keep_client=keep) as c:
writer = c.open("count.txt", "r+")
-            text = ''.join(["0123456789" for a in xrange(0, 100)])
-            for b in xrange(0, 100000):
+            text = ''.join(["0123456789" for a in range(0, 100)])
+            for b in range(0, 100000):
writer.write(text)
writer.seek(0, os.SEEK_SET)
writer.write("foo")
writer.write(text)
writer.seek(0, os.SEEK_SET)
writer.write("foo")
diff --git
a/sdk/python/tests/test_cache.py
b/sdk/python/tests/test_cache.py
index 93388e6de8d80b56d09038403cdc0bc4f1e4ea38..ea34c8051d6580685ab125d50fd04e659dd16485 100644
(file)
--- a/
sdk/python/tests/test_cache.py
+++ b/
sdk/python/tests/test_cache.py
@@
-1,6
+1,8
@@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import print_function
from __future__ import absolute_import
+from builtins import str
+from builtins import range
import md5
import mock
import os
import md5
import mock
import os
@@
-17,7
+19,7
@@
from . import run_test_server
def _random(n):
def _random(n):
-    return bytearray(random.getrandbits(8) for _ in xrange(n))
+    return bytearray(random.getrandbits(8) for _ in range(n))
class CacheTestThread(threading.Thread):
class CacheTestThread(threading.Thread):
diff --git
a/sdk/python/tests/test_collections.py
b/sdk/python/tests/test_collections.py
index 46682deaa660171a3e6878c98b23c198a4578fd3..259c5aa177cc87a8578480001948ac1a2c985e44 100644
(file)
--- a/
sdk/python/tests/test_collections.py
+++ b/
sdk/python/tests/test_collections.py
@@
-3,6
+3,7
@@
from __future__ import absolute_import
#
# ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
#
# ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
+from builtins import object
import arvados
import copy
import mock
import arvados
import copy
import mock
diff --git
a/sdk/python/tests/test_events.py
b/sdk/python/tests/test_events.py
index 73b9f2254497b0c90cdb51ebb2ea4dd70cb46f18..2cca77339d1b8be66420db60f7605ec436beef33 100644
(file)
--- a/
sdk/python/tests/test_events.py
+++ b/
sdk/python/tests/test_events.py
@@
-1,10
+1,16
@@
from __future__ import print_function
from __future__ import absolute_import
from __future__ import print_function
from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from builtins import object
+from past.utils import old_div
import arvados
import io
import logging
import mock
import arvados
import io
import logging
import mock
-import Queue
+import queue
from . import run_test_server
import threading
import time
from . import run_test_server
import threading
import time
@@
-35,7
+41,7
@@
class WebsocketTest(run_test_server.TestCaseWithServers):
def _test_subscribe(self, poll_fallback, expect_type, start_time=None, expected=1):
run_test_server.authorize_with('active')
def _test_subscribe(self, poll_fallback, expect_type, start_time=None, expected=1):
run_test_server.authorize_with('active')
-        events = Queue.Queue(100)
+        events = queue.Queue(100)
# Create ancestor before subscribing.
# When listening with start_time in the past, this should also be retrieved.
# Create ancestor before subscribing.
# When listening with start_time in the past, this should also be retrieved.
@@
-65,7
+71,7
@@
class WebsocketTest(run_test_server.TestCaseWithServers):
log_object_uuids.append(events.get(True, 5)['object_uuid'])
if expected < 2:
log_object_uuids.append(events.get(True, 5)['object_uuid'])
if expected < 2:
-            with self.assertRaises(Queue.Empty):
+            with self.assertRaises(queue.Empty):
# assertEqual just serves to show us what unexpected
# thing comes out of the queue when the assertRaises
# fails; when the test passes, this assertEqual
# assertEqual just serves to show us what unexpected
# thing comes out of the queue when the assertRaises
# fails; when the test passes, this assertEqual
@@
-145,16
+151,16
@@
class WebsocketTest(run_test_server.TestCaseWithServers):
return time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(t))
def localiso(self, t):
return time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(t))
def localiso(self, t):
-        return time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(t)) + self.isotz(-time.timezone/60)
+        return time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(t)) + self.isotz(old_div(-time.timezone,60))
def isotz(self, offset):
"""Convert minutes-east-of-UTC to RFC3339- and ISO-compatible time zone designator"""
def isotz(self, offset):
"""Convert minutes-east-of-UTC to RFC3339- and ISO-compatible time zone designator"""
-        return '{:+03d}:{:02d}'.format(offset/60, offset%60)
+        return '{:+03d}:{:02d}'.format(old_div(offset,60), offset%60)
# Test websocket reconnection on (un)execpted close
def _test_websocket_reconnect(self, close_unexpected):
run_test_server.authorize_with('active')
# Test websocket reconnection on (un)execpted close
def _test_websocket_reconnect(self, close_unexpected):
run_test_server.authorize_with('active')
-        events = Queue.Queue(100)
+        events = queue.Queue(100)
logstream = io.BytesIO()
rootLogger = logging.getLogger()
logstream = io.BytesIO()
rootLogger = logging.getLogger()
@@
-176,7
+182,7
@@
class WebsocketTest(run_test_server.TestCaseWithServers):
# expect an event
self.assertIn(human['uuid'], events.get(True, 5)['object_uuid'])
# expect an event
self.assertIn(human['uuid'], events.get(True, 5)['object_uuid'])
-        with self.assertRaises(Queue.Empty):
+        with self.assertRaises(queue.Empty):
self.assertEqual(events.get(True, 2), None)
# close (im)properly
self.assertEqual(events.get(True, 2), None)
# close (im)properly
@@
-195,12
+201,12
@@
class WebsocketTest(run_test_server.TestCaseWithServers):
event = events.get(True, 5)
if event.get('object_uuid') != None:
log_object_uuids.append(event['object_uuid'])
event = events.get(True, 5)
if event.get('object_uuid') != None:
log_object_uuids.append(event['object_uuid'])
-            with self.assertRaises(Queue.Empty):
+            with self.assertRaises(queue.Empty):
self.assertEqual(events.get(True, 2), None)
self.assertNotIn(human['uuid'], log_object_uuids)
self.assertIn(human2['uuid'], log_object_uuids)
else:
self.assertEqual(events.get(True, 2), None)
self.assertNotIn(human['uuid'], log_object_uuids)
self.assertIn(human2['uuid'], log_object_uuids)
else:
-            with self.assertRaises(Queue.Empty):
+            with self.assertRaises(queue.Empty):
self.assertEqual(events.get(True, 2), None)
# verify log message to ensure that an (un)expected close
self.assertEqual(events.get(True, 2), None)
# verify log message to ensure that an (un)expected close
@@
-230,7
+236,7
@@
class WebsocketTest(run_test_server.TestCaseWithServers):
rootLogger.addHandler(streamHandler)
run_test_server.authorize_with('active')
rootLogger.addHandler(streamHandler)
run_test_server.authorize_with('active')
-        events = Queue.Queue(100)
+        events = queue.Queue(100)
filters = [['object_uuid', 'is_a', 'arvados#human']]
self.ws = arvados.events.subscribe(
filters = [['object_uuid', 'is_a', 'arvados#human']]
self.ws = arvados.events.subscribe(
diff --git
a/sdk/python/tests/test_keep_client.py
b/sdk/python/tests/test_keep_client.py
index bb6e983185ccb354ac1774340db0387784b7c653..b69563f94238aee5b9e53713e7ab5df7b59ba582 100644
(file)
--- a/
sdk/python/tests/test_keep_client.py
+++ b/
sdk/python/tests/test_keep_client.py
@@
-1,4
+1,11
@@
from __future__ import absolute_import
from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from past.utils import old_div
+from builtins import object
import hashlib
import mock
import os
import hashlib
import mock
import os
@@
-9,7
+16,7
@@
import socket
import threading
import time
import unittest
import threading
import time
import unittest
-import urlparse
+import urllib.parse
import arvados
import arvados.retry
import arvados
import arvados.retry
@@
-292,7
+299,7
@@
class KeepClientServiceTestCase(unittest.TestCase, tutil.ApiClientMock):
def get_service_roots(self, api_client):
keep_client = arvados.KeepClient(api_client=api_client)
services = keep_client.weighted_service_roots(arvados.KeepLocator('0'*32))
def get_service_roots(self, api_client):
keep_client = arvados.KeepClient(api_client=api_client)
services = keep_client.weighted_service_roots(arvados.KeepLocator('0'*32))
-        return [urlparse.urlparse(url) for url in sorted(services)]
+        return [urllib.parse.urlparse(url) for url in sorted(services)]
def test_ssl_flag_respected_in_roots(self):
for ssl_flag in [False, True]:
def test_ssl_flag_respected_in_roots(self):
for ssl_flag in [False, True]:
@@
-443,7
+450,7
@@
class KeepClientServiceTestCase(unittest.TestCase, tutil.ApiClientMock):
num_retries=3)
self.assertEqual([403, 403], [
getattr(error, 'status_code', None)
num_retries=3)
self.assertEqual([403, 403], [
getattr(error, 'status_code', None)
-                          for error in err_check.exception.request_errors().itervalues()])
+                          for error in err_check.exception.request_errors().values()])
def test_get_error_reflects_last_retry(self):
self.check_errors_from_last_retry('get', arvados.errors.KeepReadError)
def test_get_error_reflects_last_retry(self):
self.check_errors_from_last_retry('get', arvados.errors.KeepReadError)
@@
-649,7
+656,7
@@
class KeepClientRendezvousTestCase(unittest.TestCase, tutil.ApiClientMock):
self.assertRaises(exc_class) as err_check:
curl_mock.return_value.side_effect = socket.timeout
getattr(keep_client, verb)(data)
self.assertRaises(exc_class) as err_check:
curl_mock.return_value.side_effect = socket.timeout
getattr(keep_client, verb)(data)
-        urls = [urlparse.urlparse(url)
+        urls = [urllib.parse.urlparse(url)
for url in err_check.exception.request_errors()]
self.assertEqual([('keep0x' + c, aport) for c in '3eab2d5fc9681074'],
[(url.hostname, url.port) for url in urls])
for url in err_check.exception.request_errors()]
self.assertEqual([('keep0x' + c, aport) for c in '3eab2d5fc9681074'],
[(url.hostname, url.port) for url in urls])
@@
-1118,35
+1125,35
@@
class AvoidOverreplication(unittest.TestCase, tutil.ApiClientMock):
def test_only_write_enough_on_success(self):
for i in range(10):
def test_only_write_enough_on_success(self):
for i in range(10):
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies)
def test_only_write_enough_on_partial_success(self):
for i in range(5):
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies)
def test_only_write_enough_on_partial_success(self):
for i in range(5):
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=False)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=False)
self.pool.add_task(ks, None)
self.pool.add_task(ks, None)
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies)
def test_only_write_enough_when_some_crash(self):
for i in range(5):
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies)
def test_only_write_enough_when_some_crash(self):
for i in range(5):
-            ks = self.FakeKeepService(delay=i/10.0, will_raise=Exception())
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_raise=Exception())
self.pool.add_task(ks, None)
self.pool.add_task(ks, None)
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies)
def test_fail_when_too_many_crash(self):
for i in range(self.copies+1):
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies)
def test_fail_when_too_many_crash(self):
for i in range(self.copies+1):
-            ks = self.FakeKeepService(delay=i/10.0, will_raise=Exception())
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_raise=Exception())
self.pool.add_task(ks, None)
for i in range(self.copies-1):
self.pool.add_task(ks, None)
for i in range(self.copies-1):
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies-1)
self.pool.add_task(ks, None)
self.pool.join()
self.assertEqual(self.pool.done(), self.copies-1)
diff --git
a/sdk/python/tests/test_keep_locator.py
b/sdk/python/tests/test_keep_locator.py
index 273992aba7f1dd7d793a255fa98f861a3980e140..2a90316e4cdf8500d2549bf34acb04c657112ed1 100644
(file)
--- a/
sdk/python/tests/test_keep_locator.py
+++ b/
sdk/python/tests/test_keep_locator.py
@@
-1,6
+1,10
@@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from builtins import next
+from builtins import zip
+from builtins import str
+from builtins import range
import datetime
import itertools
import random
import datetime
import itertools
import random
@@
-14,7
+18,7
@@
class ArvadosKeepLocatorTest(unittest.TestCase):
def numstrs(fmtstr, base, exponent):
def genstrs(self, count=None):
return (fmtstr.format(random.randint(0, base ** exponent))
def numstrs(fmtstr, base, exponent):
def genstrs(self, count=None):
return (fmtstr.format(random.randint(0, base ** exponent))
-                    for c in xrange(count or self.DEFAULT_TEST_COUNT))
+                    for c in range(count or self.DEFAULT_TEST_COUNT))
return genstrs
checksums = numstrs('{:032x}', 16, 32)
return genstrs
checksums = numstrs('{:032x}', 16, 32)
@@
-24,17
+28,17
@@
class ArvadosKeepLocatorTest(unittest.TestCase):
def base_locators(self, count=DEFAULT_TEST_COUNT):
return ('+'.join(pair) for pair in
def base_locators(self, count=DEFAULT_TEST_COUNT):
return ('+'.join(pair) for pair in
-                itertools.izip(self.checksums(count), self.sizes(count)))
+                zip(self.checksums(count), self.sizes(count)))
def perm_hints(self, count=DEFAULT_TEST_COUNT):
def perm_hints(self, count=DEFAULT_TEST_COUNT):
-        for sig, ts in itertools.izip(self.signatures(count),
+        for sig, ts in zip(self.signatures(count),
self.timestamps(count)):
yield 'A{}@{}'.format(sig, ts)
def test_good_locators_returned(self):
for hint_gens in [(), (self.sizes(),),
(self.sizes(), self.perm_hints())]:
self.timestamps(count)):
yield 'A{}@{}'.format(sig, ts)
def test_good_locators_returned(self):
for hint_gens in [(), (self.sizes(),),
(self.sizes(), self.perm_hints())]:
-            for loc_data in itertools.izip(self.checksums(), *hint_gens):
+            for loc_data in zip(self.checksums(), *hint_gens):
locator = '+'.join(loc_data)
self.assertEqual(locator, str(KeepLocator(locator)))
locator = '+'.join(loc_data)
self.assertEqual(locator, str(KeepLocator(locator)))
diff --git
a/sdk/python/tests/test_retry.py
b/sdk/python/tests/test_retry.py
index cc12f39a355ef9b97a85a34ee5989e3bae38a744..82725c2790b0863479c8fdce2ba05f1c48af1b8f 100644
(file)
--- a/
sdk/python/tests/test_retry.py
+++ b/
sdk/python/tests/test_retry.py
@@
-1,5
+1,8
@@
#!/usr/bin/env python
#!/usr/bin/env python
+from builtins import zip
+from builtins import range
+from builtins import object
import itertools
import unittest
import itertools
import unittest
@@
-25,7
+28,7
@@
class RetryLoopTestMixin(object):
responses = itertools.chain(results, itertools.repeat(None))
retrier = arv_retry.RetryLoop(num_retries, self.loop_success,
**kwargs)
responses = itertools.chain(results, itertools.repeat(None))
retrier = arv_retry.RetryLoop(num_retries, self.loop_success,
**kwargs)
-        for tries_left, response in itertools.izip(retrier, responses):
+        for tries_left, response in zip(retrier, responses):
retrier.save_result(response)
return retrier
retrier.save_result(response)
return retrier
@@
-166,11
+169,11
@@
class CheckHTTPResponseSuccessTestCase(unittest.TestCase):
check_is_not = check('assertIsNot')
def test_obvious_successes(self):
check_is_not = check('assertIsNot')
def test_obvious_successes(self):
-        self.check_is(True, *range(200, 207))
+        self.check_is(True, *list(range(200, 207)))
def test_obvious_stops(self):
self.check_is(False, 424, 426, 428, 431,
def test_obvious_stops(self):
self.check_is(False, 424, 426, 428, 431,
-                      *range(400, 408) + range(410, 420))
+                      *list(range(400, 408)) + list(range(410, 420)))
def test_obvious_retries(self):
self.check_is(None, 500, 502, 503, 504)
def test_obvious_retries(self):
self.check_is(None, 500, 502, 503, 504)
@@
-179,13
+182,13
@@
class CheckHTTPResponseSuccessTestCase(unittest.TestCase):
self.check_is(None, 408, 409, 422, 423)
def test_5xx_failures(self):
self.check_is(None, 408, 409, 422, 423)
def test_5xx_failures(self):
-        self.check_is(False, 501, *range(505, 512))
+        self.check_is(False, 501, *list(range(505, 512)))
def test_1xx_not_retried(self):
self.check_is_not(None, 100, 101)
def test_redirects_not_retried(self):
def test_1xx_not_retried(self):
self.check_is_not(None, 100, 101)
def test_redirects_not_retried(self):
-        self.check_is_not(None, *range(300, 309))
+        self.check_is_not(None, *list(range(300, 309)))
def test_wacky_code_retries(self):
self.check_is(None, 0, 99, 600, -200)
def test_wacky_code_retries(self):
self.check_is(None, 0, 99, 600, -200)
diff --git
a/sdk/python/tests/test_retry_job_helpers.py
b/sdk/python/tests/test_retry_job_helpers.py
index 3ccaa37218f76f26d25f86af1b094164ad643d50..9ad957a3cc71d6cf3e445e27e47882f73056e582 100644
(file)
--- a/
sdk/python/tests/test_retry_job_helpers.py
+++ b/
sdk/python/tests/test_retry_job_helpers.py
@@
-1,6
+1,7
@@
#!/usr/bin/env python
from __future__ import absolute_import
#!/usr/bin/env python
from __future__ import absolute_import
+from builtins import object
import mock
import os
import unittest
import mock
import os
import unittest
diff --git
a/sdk/python/tests/test_stream.py
b/sdk/python/tests/test_stream.py
index 9e35bcf9c8327266c84cfeb25faa2a38b656dfc9..7277628122f31205dd477b20ad593c25a294df46 100644
(file)
--- a/
sdk/python/tests/test_stream.py
+++ b/
sdk/python/tests/test_stream.py
@@
-1,6
+1,7
@@
#!/usr/bin/env python
from __future__ import absolute_import
#!/usr/bin/env python
from __future__ import absolute_import
+from builtins import object
import bz2
import gzip
import io
import bz2
import gzip
import io