11308: Futurize stage2.
author Tom Clegg <tom@curoverse.com>
Sat, 1 Apr 2017 05:41:10 +0000 (01:41 -0400)
committer Tom Clegg <tom@curoverse.com>
Sat, 1 Apr 2017 21:43:54 +0000 (17:43 -0400)
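This commit applies the second-stage pass of python-future's `futurize` tool to the Python SDK: Python-2-only idioms (`httplib`, `Queue`, `xrange`, `iteritems()`, `unicode`, old-style classes, integer `/`) are mechanically rewritten so the same sources run under both Python 2 and Python 3 via the `future`/`past` compatibility layers. A sketch of the preamble the tool prepends to each touched module, assembled from the hunks below:

```python
from __future__ import absolute_import, division, print_function
from future import standard_library
standard_library.install_aliases()        # Py3 module names (queue, http.client,
                                          # urllib.parse, ...) become importable on Py2
from builtins import object, range, str   # Py3-semantics builtins, backported to Py2
from past.utils import old_div            # Py2-style division where `/` was relied on
```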
38 files changed:
sdk/python/arvados/__init__.py
sdk/python/arvados/_ranges.py
sdk/python/arvados/api.py
sdk/python/arvados/arvfile.py
sdk/python/arvados/cache.py
sdk/python/arvados/collection.py
sdk/python/arvados/commands/arv_copy.py
sdk/python/arvados/commands/keepdocker.py
sdk/python/arvados/commands/ls.py
sdk/python/arvados/commands/migrate19.py
sdk/python/arvados/commands/put.py
sdk/python/arvados/commands/run.py
sdk/python/arvados/crunch.py
sdk/python/arvados/errors.py
sdk/python/arvados/events.py
sdk/python/arvados/keep.py
sdk/python/arvados/retry.py
sdk/python/arvados/safeapi.py
sdk/python/arvados/stream.py
sdk/python/arvados/timer.py
sdk/python/arvados/util.py
sdk/python/tests/arvados_testutil.py
sdk/python/tests/keepstub.py
sdk/python/tests/manifest_examples.py
sdk/python/tests/performance/test_a_sample.py
sdk/python/tests/run_test_server.py
sdk/python/tests/test_api.py
sdk/python/tests/test_arv_ls.py
sdk/python/tests/test_arv_put.py
sdk/python/tests/test_arvfile.py
sdk/python/tests/test_cache.py
sdk/python/tests/test_collections.py
sdk/python/tests/test_events.py
sdk/python/tests/test_keep_client.py
sdk/python/tests/test_keep_locator.py
sdk/python/tests/test_retry.py
sdk/python/tests/test_retry_job_helpers.py
sdk/python/tests/test_stream.py

diff --git a/sdk/python/arvados/__init__.py b/sdk/python/arvados/__init__.py
index 36d54e5cf6b36ee23ded14b260883dab4bf6b09d..5acb961dde63fcc43cc2a084219befa9358b19b0 100644 (file)
@@ -1,7 +1,10 @@
 from __future__ import print_function
 from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
 import gflags
-import httplib
+import http.client
 import httplib2
 import logging
 import os
@@ -108,7 +111,7 @@ class JobTask(object):
     def __init__(self, parameters=dict(), runtime_constraints=dict()):
         print("init jobtask %s %s" % (parameters, runtime_constraints))
 
-class job_setup:
+class job_setup(object):
     @staticmethod
     def one_task_per_input_file(if_sequence=0, and_end_task=True, input_as_path=False, api_client=None):
         if if_sequence != current_task()['sequence']:
diff --git a/sdk/python/arvados/_ranges.py b/sdk/python/arvados/_ranges.py
index e0fe61509f9373d5e43bc5a6cd4d150ccea9805d..5c8b00fc9d7e6a19ef66a04eb32500225651030d 100644 (file)
@@ -1,3 +1,6 @@
+from __future__ import division
+from past.utils import old_div
+from builtins import object
 import logging
 
 _logger = logging.getLogger('arvados.ranges')
@@ -31,7 +34,7 @@ def first_block(data_locators, range_start):
 
     hi = len(data_locators)
     lo = 0
-    i = int((hi + lo) / 2)
+    i = int(old_div((hi + lo), 2))
     block_size = data_locators[i].range_size
     block_start = data_locators[i].range_start
     block_end = block_start + block_size
@@ -47,7 +50,7 @@ def first_block(data_locators, range_start):
             lo = i
         else:
             hi = i
-        i = int((hi + lo) / 2)
+        i = int(old_div((hi + lo), 2))
         block_size = data_locators[i].range_size
         block_start = data_locators[i].range_start
         block_end = block_start + block_size
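`old_div(a, b)` (from `past.utils`) reproduces Python 2 division: floor division when both operands are integers, true division otherwise. For the non-negative midpoint above it is equivalent to `(hi + lo) // 2`, which also makes the surrounding `int(...)` redundant. A quick check of the semantics:

```python
from past.utils import old_div

assert old_div(7, 2) == 7 // 2 == 3   # both ints: floor division, like Py2 `/`
assert old_div(7.0, 2) == 3.5         # any float operand: true division
```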
diff --git a/sdk/python/arvados/api.py b/sdk/python/arvados/api.py
index 65aadbd6b725922f4b540fa8ed3059e42f00810d..59a73b45e558d5433776eed89a8f198345fd65b2 100644 (file)
@@ -1,6 +1,9 @@
 from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
 import collections
-import httplib
+import http.client
 import httplib2
 import json
 import logging
@@ -68,7 +71,7 @@ def _intercept_http_request(self, uri, **kwargs):
         # High probability of failure due to connection atrophy. Make
         # sure this request [re]opens a new connection by closing and
         # forgetting all cached connections first.
-        for conn in self.connections.itervalues():
+        for conn in self.connections.values():
             conn.close()
         self.connections.clear()
 
@@ -77,7 +80,7 @@ def _intercept_http_request(self, uri, **kwargs):
         self._last_request_time = time.time()
         try:
             return self.orig_http_request(uri, **kwargs)
-        except httplib.HTTPException:
+        except http.client.HTTPException:
             _logger.debug("Retrying API request in %d s after HTTP error",
                           delay, exc_info=True)
         except socket.error:
@@ -88,7 +91,7 @@ def _intercept_http_request(self, uri, **kwargs):
             # httplib2 reopens connections when needed.
             _logger.debug("Retrying API request in %d s after socket error",
                           delay, exc_info=True)
-            for conn in self.connections.itervalues():
+            for conn in self.connections.values():
                 conn.close()
         time.sleep(delay)
         delay = delay * self._retry_delay_backoff
diff --git a/sdk/python/arvados/arvfile.py b/sdk/python/arvados/arvfile.py
index 2c44c349e00de03605367c60b87711a13b39c88e..ab1c64532a130d2880043c518a2c9f9d14b7fc05 100644 (file)
@@ -1,4 +1,10 @@
 from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from past.utils import old_div
+from builtins import object
 import functools
 import os
 import zlib
@@ -6,7 +12,7 @@ import bz2
 from . import config
 import hashlib
 import threading
-import Queue
+import queue
 import copy
 import errno
 import re
@@ -508,10 +514,10 @@ class _BlockManager(object):
                 # blocks pending.  If they are full 64 MiB blocks, that means up to
                 # 256 MiB of internal buffering, which is the same size as the
                 # default download block cache in KeepClient.
-                self._put_queue = Queue.Queue(maxsize=2)
+                self._put_queue = queue.Queue(maxsize=2)
 
                 self._put_threads = []
-                for i in xrange(0, self.num_put_threads):
+                for i in range(0, self.num_put_threads):
                     thread = threading.Thread(target=self._commit_bufferblock_worker)
                     self._put_threads.append(thread)
                     thread.daemon = True
@@ -531,9 +537,9 @@ class _BlockManager(object):
     @synchronized
     def start_get_threads(self):
         if self._prefetch_threads is None:
-            self._prefetch_queue = Queue.Queue()
+            self._prefetch_queue = queue.Queue()
             self._prefetch_threads = []
-            for i in xrange(0, self.num_get_threads):
+            for i in range(0, self.num_get_threads):
                 thread = threading.Thread(target=self._block_prefetch_worker)
                 self._prefetch_threads.append(thread)
                 thread.daemon = True
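`from builtins import range` gives Python 2 a lazy, Python-3-style `range` (future's `newrange` type), so every `xrange(...)` call can be rewritten to `range(...)` without materializing a list on either interpreter:

```python
from builtins import range   # on Py2 this is future's lazy newrange type

r = range(10**9)             # constant memory on both Python 2 and 3
assert len(r) == 10**9 and r[5] == 5
```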
@@ -579,7 +585,7 @@ class _BlockManager(object):
             # A WRITABLE block with its owner.closed() implies that it's
             # size is <= KEEP_BLOCK_SIZE/2.
             try:
-                small_blocks = [b for b in self._bufferblocks.values() if b.state() == _BufferBlock.WRITABLE and b.owner.closed()]
+                small_blocks = [b for b in list(self._bufferblocks.values()) if b.state() == _BufferBlock.WRITABLE and b.owner.closed()]
             except AttributeError:
                 # Writable blocks without owner shouldn't exist.
                 raise UnownedBlockError()
@@ -692,7 +698,7 @@ class _BlockManager(object):
         self.repack_small_blocks(force=True, sync=True)
 
         with self.lock:
-            items = self._bufferblocks.items()
+            items = list(self._bufferblocks.items())
 
         for k,v in items:
             if v.state() != _BufferBlock.COMMITTED and v.owner:
@@ -824,7 +830,7 @@ class ArvadosFile(object):
         with self.lock:
             if len(self._segments) != len(othersegs):
                 return False
-            for i in xrange(0, len(othersegs)):
+            for i in range(0, len(othersegs)):
                 seg1 = self._segments[i]
                 seg2 = othersegs[i]
                 loc1 = seg1.locator
@@ -884,7 +890,7 @@ class ArvadosFile(object):
         """
         self._writers.remove(writer)
 
-        if flush or self.size() > config.KEEP_BLOCK_SIZE / 2:
+        if flush or self.size() > old_div(config.KEEP_BLOCK_SIZE, 2):
             # File writer closed, not small enough for repacking
             self.flush()
         elif self.closed():
diff --git a/sdk/python/arvados/cache.py b/sdk/python/arvados/cache.py
index ac6d18463c540ad2af84d3b76a9db2129cfa333e..ee1c51fdae324c7c7820ce37db86949554a573dc 100644 (file)
@@ -1,3 +1,4 @@
+from builtins import object
 import errno
 import md5
 import os
diff --git a/sdk/python/arvados/collection.py b/sdk/python/arvados/collection.py
index 1a427814cf4d5bc13ffbeca75f7c22c87134962c..0d88084340dbe227d81bef0a2537618ed8fd66c2 100644 (file)
@@ -1,4 +1,7 @@
 from __future__ import absolute_import
+from builtins import str
+from past.builtins import basestring
+from builtins import object
 import functools
 import logging
 import os
@@ -419,7 +422,7 @@ class ResumableCollectionWriter(CollectionWriter):
         return writer
 
     def check_dependencies(self):
-        for path, orig_stat in self._dependencies.items():
+        for path, orig_stat in list(self._dependencies.items()):
             if not S_ISREG(orig_stat[ST_MODE]):
                 raise errors.StaleWriterStateError("{} not file".format(path))
             try:
@@ -673,7 +676,7 @@ class RichCollectionBase(CollectionBase):
         if value == self._committed:
             return
         if value:
-            for k,v in self._items.items():
+            for k,v in list(self._items.items()):
                 v.set_committed(True)
             self._committed = True
         else:
@@ -684,7 +687,7 @@ class RichCollectionBase(CollectionBase):
     @synchronized
     def __iter__(self):
         """Iterate over names of files and collections contained in this collection."""
-        return iter(self._items.keys())
+        return iter(list(self._items.keys()))
 
     @synchronized
     def __getitem__(self, k):
@@ -716,17 +719,17 @@ class RichCollectionBase(CollectionBase):
     @synchronized
     def keys(self):
         """Get a list of names of files and collections directly contained in this collection."""
-        return self._items.keys()
+        return list(self._items.keys())
 
     @synchronized
     def values(self):
         """Get a list of files and collection objects directly contained in this collection."""
-        return self._items.values()
+        return list(self._items.values())
 
     @synchronized
     def items(self):
         """Get a list of (name, object) tuples directly contained in this collection."""
-        return self._items.items()
+        return list(self._items.items())
 
     def exists(self, path):
         """Test if there is a file or collection at `path`."""
@@ -759,7 +762,7 @@ class RichCollectionBase(CollectionBase):
             item.remove(pathcomponents[1])
 
     def _clonefrom(self, source):
-        for k,v in source.items():
+        for k,v in list(source.items()):
             self._items[k] = v.clone(self, k)
 
     def clone(self):
@@ -1117,7 +1120,7 @@ class RichCollectionBase(CollectionBase):
     @synchronized
     def flush(self):
         """Flush bufferblocks to Keep."""
-        for e in self.values():
+        for e in list(self.values()):
             e.flush()
 
 
@@ -1584,7 +1587,7 @@ class Collection(RichCollectionBase):
             if state == BLOCKS:
                 block_locator = re.match(r'[0-9a-f]{32}\+(\d+)(\+\S+)*', tok)
                 if block_locator:
-                    blocksize = long(block_locator.group(1))
+                    blocksize = int(block_locator.group(1))
                     blocks.append(Range(tok, streamoffset, blocksize, 0))
                     streamoffset += blocksize
                 else:
@@ -1593,8 +1596,8 @@ class Collection(RichCollectionBase):
             if state == SEGMENTS:
                 file_segment = re.search(r'^(\d+):(\d+):(\S+)', tok)
                 if file_segment:
-                    pos = long(file_segment.group(1))
-                    size = long(file_segment.group(2))
+                    pos = int(file_segment.group(1))
+                    size = int(file_segment.group(2))
                     name = file_segment.group(3).replace('\\040', ' ')
                     filepath = os.path.join(stream_name, name)
                     afile = self.find_or_create(filepath, FILE)
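Two mechanical patterns recur in this file: `long(...)` becomes `int(...)` (Python 3 unified the integer types, and Python 2's `int` auto-promotes on overflow), and `.keys()`/`.values()`/`.items()` results get wrapped in `list(...)`, which preserves these methods' documented list return type on Python 3, where dict methods return live views, and keeps loops like `flush()` safe if `_items` is mutated mid-iteration. The view-vs-list difference, run on Python 3:

```python
d = {"a": 1, "b": 2}
view = d.values()            # Py3: a live view that tracks later changes
snapshot = list(d.values())  # the futurized form: a real list (Py2 semantics)
d["c"] = 3
assert len(view) == 3 and len(snapshot) == 2
```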
diff --git a/sdk/python/arvados/commands/arv_copy.py b/sdk/python/arvados/commands/arv_copy.py
index 5c5192860ccd0ed6b079c98d76b164c2ed3800a6..c5d74efe550a152a1e1f96e97d440084cbebd63f 100755 (executable)
 # instances src and dst.  If either of these files is not found,
 # arv-copy will issue an error.
 
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from past.builtins import basestring
+from builtins import object
+from past.utils import old_div
 import argparse
 import contextlib
 import getpass
@@ -25,7 +31,7 @@ import shutil
 import sys
 import logging
 import tempfile
-import urlparse
+import urllib.parse
 
 import arvados
 import arvados.config
@@ -152,7 +158,7 @@ def main():
         abort("cannot copy object {} of type {}".format(args.object_uuid, t))
 
     # Clean up any outstanding temp git repositories.
-    for d in local_repo_dir.values():
+    for d in list(local_repo_dir.values()):
         shutil.rmtree(d, ignore_errors=True)
 
     # If no exception was thrown and the response does not have an
@@ -344,7 +350,7 @@ def migrate_components_filters(template_components, dst_git_repo):
     be None if that is not known.
     """
     errors = []
-    for cname, cspec in template_components.iteritems():
+    for cname, cspec in template_components.items():
         def add_error(errmsg):
             errors.append("{}: {}".format(cname, errmsg))
         if not isinstance(cspec, dict):
@@ -553,7 +559,7 @@ def migrate_jobspec(jobspec, src, dst, dst_repo, args):
 #    names.  The return value is undefined.
 #
 def copy_git_repos(p, src, dst, dst_repo, args):
-    for component in p['components'].itervalues():
+    for component in p['components'].values():
         migrate_jobspec(component, src, dst, dst_repo, args)
         if 'job' in component:
             migrate_jobspec(component['job'], src, dst, dst_repo, args)
@@ -774,8 +780,8 @@ def select_git_url(api, repo_name, retries, allow_insecure_http, allow_insecure_
     git_url = None
     for url in priority:
         if url.startswith("http"):
-            u = urlparse.urlsplit(url)
-            baseurl = urlparse.urlunsplit((u.scheme, u.netloc, "", "", ""))
+            u = urllib.parse.urlsplit(url)
+            baseurl = urllib.parse.urlunsplit((u.scheme, u.netloc, "", "", ""))
             git_config = ["-c", "credential.%s/.username=none" % baseurl,
                           "-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl]
         else:
@@ -859,7 +865,7 @@ def copy_docker_images(pipeline, src, dst, args):
     runtime_constraints field from src to dst."""
 
     logger.debug('copy_docker_images: {}'.format(pipeline['uuid']))
-    for c_name, c_info in pipeline['components'].iteritems():
+    for c_name, c_info in pipeline['components'].items():
         if ('runtime_constraints' in c_info and
             'docker_image' in c_info['runtime_constraints']):
             copy_docker_image(
@@ -948,7 +954,7 @@ def human_progress(obj_uuid, bytes_written, bytes_expected):
         return "\r{}: {}M / {}M {:.1%} ".format(
             obj_uuid,
             bytes_written >> 20, bytes_expected >> 20,
-            float(bytes_written) / bytes_expected)
+            old_div(float(bytes_written), bytes_expected))
     else:
         return "\r{}: {} ".format(obj_uuid, bytes_written)
 
diff --git a/sdk/python/arvados/commands/keepdocker.py b/sdk/python/arvados/commands/keepdocker.py
index 57832483236fe5e404e9c64b7d3158336e88cd2d..0c491389ab6bc438b6ccfc48903430ef528a8049 100644 (file)
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+from builtins import next
 import argparse
 import collections
 import datetime
@@ -315,7 +316,7 @@ def list_images_in_arv(api_client, num_retries, image_name=None, image_tag=None)
     # and add image listings for them, retaining the API server preference
     # sorting.
     images_start_size = len(images)
-    for collection_uuid, link in hash_link_map.iteritems():
+    for collection_uuid, link in hash_link_map.items():
         if not seen_image_names[collection_uuid]:
             images.append(_new_image_listing(link, link['name']))
     if len(images) > images_start_size:
diff --git a/sdk/python/arvados/commands/ls.py b/sdk/python/arvados/commands/ls.py
index a2f2e542754f7e2e44edbd5673cf36d2c5d130af..ea93eff9d8c8ec681e94843a13ef38b877d19233 100755 (executable)
@@ -1,7 +1,9 @@
 #!/usr/bin/env python
 
 from __future__ import print_function
+from __future__ import division
 
+from past.utils import old_div
 import argparse
 import sys
 
@@ -26,7 +28,7 @@ def parse_args(args):
     return parser.parse_args(args)
 
 def size_formatter(coll_file):
-    return "{:>10}".format((coll_file.size() + 1023) / 1024)
+    return "{:>10}".format(old_div((coll_file.size() + 1023), 1024))
 
 def name_formatter(coll_file):
     return "{}/{}".format(coll_file.stream_name(), coll_file.name)
diff --git a/sdk/python/arvados/commands/migrate19.py b/sdk/python/arvados/commands/migrate19.py
index 802744ba6872192811a1fab31ffcafc8f66faf9b..349e57b11e11a0268b2be6ca509801f5b6a56aef 100644 (file)
@@ -1,4 +1,6 @@
 from __future__ import print_function
+from __future__ import division
+from past.utils import old_div
 import argparse
 import time
 import sys
@@ -122,8 +124,8 @@ def main(arguments=None):
         if pdh not in already_migrated and (only_migrate is None or pdh in only_migrate):
             need_migrate[pdh] = img
             with CollectionReader(i["manifest_text"]) as c:
-                if c.values()[0].size() > biggest:
-                    biggest = c.values()[0].size()
+                if list(c.values())[0].size() > biggest:
+                    biggest = list(c.values())[0].size()
 
     if args.print_unmigrated:
         only_migrate = set()
@@ -134,7 +136,7 @@ def main(arguments=None):
     logger.info("Already migrated %i images", len(already_migrated))
     logger.info("Need to migrate %i images", len(need_migrate))
     logger.info("Using tempdir %s", tempfile.gettempdir())
-    logger.info("Biggest image is about %i MiB, tempdir needs at least %i MiB free", biggest/(2**20), (biggest*2)/(2**20))
+    logger.info("Biggest image is about %i MiB, tempdir needs at least %i MiB free", old_div(biggest,(2**20)), old_div((biggest*2),(2**20)))
 
     if args.dry_run:
         return
@@ -142,15 +144,15 @@ def main(arguments=None):
     success = []
     failures = []
     count = 1
-    for old_image in need_migrate.values():
+    for old_image in list(need_migrate.values()):
         if uuid_to_collection[old_image["collection"]]["portable_data_hash"] in already_migrated:
             continue
 
         oldcol = CollectionReader(uuid_to_collection[old_image["collection"]]["manifest_text"])
-        tarfile = oldcol.keys()[0]
+        tarfile = list(oldcol.keys())[0]
 
         logger.info("[%i/%i] Migrating %s:%s (%s) (%i MiB)", count, len(need_migrate), old_image["repo"],
-                    old_image["tag"], old_image["collection"], oldcol.values()[0].size()/(2**20))
+                    old_image["tag"], old_image["collection"], old_div(list(oldcol.values())[0].size(),(2**20)))
         count += 1
         start = time.time()
 
diff --git a/sdk/python/arvados/commands/put.py b/sdk/python/arvados/commands/put.py
index 32d5fef6a8588e1f2785517435949082dd5b3534..681e78eed68cd2a926aefd003f9cff88263fb15e 100644 (file)
@@ -3,6 +3,10 @@
 # TODO:
 # --md5sum - display md5 of each file as read from disk
 
+from __future__ import division
+from builtins import str
+from past.utils import old_div
+from builtins import object
 import argparse
 import arvados
 import arvados.collection
@@ -205,7 +209,7 @@ def parse_arguments(arguments):
     if len(args.paths) == 0:
         args.paths = ['-']
 
-    args.paths = map(lambda x: "-" if x == "/dev/stdin" else x, args.paths)
+    args.paths = ["-" if x == "/dev/stdin" else x for x in args.paths]
 
     if len(args.paths) != 1 or os.path.isdir(args.paths[0]):
         if args.filename:
@@ -509,7 +513,7 @@ class ArvPutUploadJob(object):
         Recursively get the total size of the collection
         """
         size = 0
-        for item in collection.values():
+        for item in list(collection.values()):
             if isinstance(item, arvados.collection.Collection) or isinstance(item, arvados.collection.Subcollection):
                 size += self._collection_size(item)
             else:
@@ -701,7 +705,7 @@ class ArvPutUploadJob(object):
     def collection_file_paths(self, col, path_prefix='.'):
         """Return a list of file paths by recursively going through the entire collection `col`"""
         file_paths = []
-        for name, item in col.items():
+        for name, item in list(col.items()):
             if isinstance(item, arvados.arvfile.ArvadosFile):
                 file_paths.append(os.path.join(path_prefix, name))
             elif isinstance(item, arvados.collection.Subcollection):
@@ -778,7 +782,7 @@ class ArvPutUploadJob(object):
                     locators.append(loc)
                 return locators
         elif isinstance(item, arvados.collection.Collection):
-            l = [self._datablocks_on_item(x) for x in item.values()]
+            l = [self._datablocks_on_item(x) for x in list(item.values())]
             # Fast list flattener method taken from:
             # http://stackoverflow.com/questions/952914/making-a-flat-list-out-of-list-of-lists-in-python
             return [loc for sublist in l for loc in sublist]
@@ -817,7 +821,7 @@ def human_progress(bytes_written, bytes_expected):
     if bytes_expected:
         return "\r{}M / {}M {:.1%} ".format(
             bytes_written >> 20, bytes_expected >> 20,
-            float(bytes_written) / bytes_expected)
+            old_div(float(bytes_written), bytes_expected))
     else:
         return "\r{} ".format(bytes_written)
 
@@ -982,7 +986,7 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr):
         if not output.endswith('\n'):
             stdout.write('\n')
 
-    for sigcode, orig_handler in orig_signal_handlers.items():
+    for sigcode, orig_handler in list(orig_signal_handlers.items()):
         signal.signal(sigcode, orig_handler)
 
     if status != 0:
diff --git a/sdk/python/arvados/commands/run.py b/sdk/python/arvados/commands/run.py
index 2f3e0427d9eb41137cd4f87721f88b811031b0fe..01a18e51723d435c08218388f378c538ac8becc5 100644 (file)
@@ -2,6 +2,9 @@
 
 from __future__ import print_function
 from __future__ import absolute_import
+from builtins import range
+from past.builtins import basestring
+from builtins import object
 import arvados
 import arvados.commands.ws as ws
 import argparse
@@ -301,7 +304,7 @@ def main(arguments=None):
     if files:
         uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
 
-    for i in xrange(1, len(slots)):
+    for i in range(1, len(slots)):
         slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
 
     component = {
@@ -321,8 +324,8 @@ def main(arguments=None):
     group_parser.add_argument('-b', '--batch-size', type=int)
     group_parser.add_argument('args', nargs=argparse.REMAINDER)
 
-    for s in xrange(2, len(slots)):
-        for i in xrange(0, len(slots[s])):
+    for s in range(2, len(slots)):
+        for i in range(0, len(slots[s])):
             if slots[s][i] == '--':
                 inp = "input%i" % (s-2)
                 groupargs = group_parser.parse_args(slots[2][i+1:])
diff --git a/sdk/python/arvados/crunch.py b/sdk/python/arvados/crunch.py
index c184e6ac7432b159e31ca65153886b8de4b04d18..f4651cd0a5cb8294e281192c29dde7479bd008fc 100644 (file)
@@ -1,3 +1,4 @@
+from builtins import object
 import json
 import os
 
diff --git a/sdk/python/arvados/errors.py b/sdk/python/arvados/errors.py
index bfd471ba52bee712a1e1768c91327ce28a9c6603..c9eda2d1c9c6eb306daedba17fc63c3ec89348d2 100644 (file)
@@ -37,7 +37,7 @@ class KeepRequestError(Exception):
         self._request_errors = OrderedDict(request_errors)
         if self._request_errors:
             exc_reports = [self._format_error(*err_pair)
-                           for err_pair in self._request_errors.iteritems()]
+                           for err_pair in self._request_errors.items()]
             base_msg = "{}: {}".format(message, "; ".join(exc_reports))
         else:
             base_msg = message
diff --git a/sdk/python/arvados/events.py b/sdk/python/arvados/events.py
index b385761f7b7b1575dc576e783b3ae61e45eeb2cf..c58abe52be48730e56edb70d5e5109a0e3847592 100644 (file)
@@ -1,4 +1,8 @@
 from __future__ import absolute_import
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import object
 import arvados
 from . import config
 from . import errors
@@ -6,7 +10,7 @@ from .retry import RetryLoop
 
 import logging
 import json
-import thread
+import _thread
 import threading
 import time
 import os
@@ -116,7 +120,7 @@ class EventClient(object):
             self.on_event_cb(m)
         except Exception as e:
             _logger.exception("Unexpected exception from event callback.")
-            thread.interrupt_main()
+            _thread.interrupt_main()
 
     def on_closed(self):
         if not self.is_closed.is_set():
@@ -131,7 +135,7 @@ class EventClient(object):
             if tries_left == 0:
                 _logger.exception("EventClient thread could not contact websocket server.")
                 self.is_closed.set()
-                thread.interrupt_main()
+                _thread.interrupt_main()
                 return
 
     def run_forever(self):
@@ -226,7 +230,7 @@ class PollClient(threading.Thread):
                     _logger.exception("PollClient thread could not contact API server.")
                     with self._closing_lock:
                         self._closing.set()
-                    thread.interrupt_main()
+                    _thread.interrupt_main()
                     return
                 for i in items["items"]:
                     skip_old_events = [["id", ">", str(i["id"])]]
@@ -237,7 +241,7 @@ class PollClient(threading.Thread):
                             self.on_event(i)
                         except Exception as e:
                             _logger.exception("Unexpected exception from event callback.")
-                            thread.interrupt_main()
+                            _thread.interrupt_main()
                 if items["items_available"] > len(items["items"]):
                     moreitems = True
             if not moreitems:
diff --git a/sdk/python/arvados/keep.py b/sdk/python/arvados/keep.py
index 4efa6982a1fa346e3ae6c55a0e72f892968173f1..b709473b78c275ca02e1eb2c7015f3e28bf2ffb0 100644 (file)
@@ -1,19 +1,27 @@
 from __future__ import absolute_import
-import cStringIO
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import next
+from builtins import str
+from builtins import range
+from past.utils import old_div
+from builtins import object
+import io
 import datetime
 import hashlib
 import logging
 import math
 import os
 import pycurl
-import Queue
+import queue
 import re
 import socket
 import ssl
 import sys
 import threading
 from . import timer
-import urlparse
+import urllib.parse
 
 import arvados
 import arvados.config as config
@@ -191,7 +199,7 @@ class KeepBlockCache(object):
             self._cache = [c for c in self._cache if not (c.ready.is_set() and c.content is None)]
             sm = sum([slot.size() for slot in self._cache])
             while len(self._cache) > 0 and sm > self.cache_max:
-                for i in xrange(len(self._cache)-1, -1, -1):
+                for i in range(len(self._cache)-1, -1, -1):
                     if self._cache[i].ready.is_set():
                         del self._cache[i]
                         break
@@ -199,7 +207,7 @@ class KeepBlockCache(object):
 
     def _get(self, locator):
         # Test if the locator is already in the cache
-        for i in xrange(0, len(self._cache)):
+        for i in range(0, len(self._cache)):
             if self._cache[i].locator == locator:
                 n = self._cache[i]
                 if i != 0:
@@ -270,7 +278,7 @@ class KeepClient(object):
             arvados.errors.HttpError,
         )
 
-        def __init__(self, root, user_agent_pool=Queue.LifoQueue(),
+        def __init__(self, root, user_agent_pool=queue.LifoQueue(),
                      upload_counter=None,
                      download_counter=None, **headers):
             self.root = root
@@ -298,7 +306,7 @@ class KeepClient(object):
         def _get_user_agent(self):
             try:
                 return self._user_agent_pool.get(block=False)
-            except Queue.Empty:
+            except queue.Empty:
                 return pycurl.Curl()
 
         def _put_user_agent(self, ua):
@@ -328,12 +336,12 @@ class KeepClient(object):
             try:
                 with timer.Timer() as t:
                     self._headers = {}
-                    response_body = cStringIO.StringIO()
+                    response_body = io.StringIO()
                     curl.setopt(pycurl.NOSIGNAL, 1)
                     curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
                     curl.setopt(pycurl.URL, url.encode('utf-8'))
                     curl.setopt(pycurl.HTTPHEADER, [
-                        '{}: {}'.format(k,v) for k,v in self.get_headers.iteritems()])
+                        '{}: {}'.format(k,v) for k,v in self.get_headers.items()])
                     curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
                     curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
                     if method == "HEAD":
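A caveat with this hunk: pycurl's WRITEFUNCTION delivers raw bytes, and the text-mode `io.StringIO` (unlike the old `cStringIO`) rejects them, so binary Keep blocks will likely need `io.BytesIO` in a follow-up; this commit only swaps the module name. The difference in miniature:

```python
import io

io.BytesIO().write(b"\x00\xff chunk")   # bytes buffer: accepts raw data
try:
    io.StringIO().write(b"chunk")       # text buffer: TypeError, wants text
except TypeError as err:
    print("StringIO rejected bytes:", err)
```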
@@ -384,7 +392,7 @@ class KeepClient(object):
                          self._result['status_code'],
                          len(self._result['body']),
                          t.msecs,
-                         (len(self._result['body'])/(1024.0*1024))/t.secs if t.secs > 0 else 0)
+                         old_div((old_div(len(self._result['body']),(1024.0*1024))),t.secs) if t.secs > 0 else 0)
 
             if self.download_counter:
                 self.download_counter.add(len(self._result['body']))
@@ -405,8 +413,8 @@ class KeepClient(object):
             try:
                 with timer.Timer() as t:
                     self._headers = {}
-                    body_reader = cStringIO.StringIO(body)
-                    response_body = cStringIO.StringIO()
+                    body_reader = io.StringIO(body)
+                    response_body = io.StringIO()
                     curl.setopt(pycurl.NOSIGNAL, 1)
                     curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
                     curl.setopt(pycurl.URL, url.encode('utf-8'))
@@ -420,7 +428,7 @@ class KeepClient(object):
                     curl.setopt(pycurl.INFILESIZE, len(body))
                     curl.setopt(pycurl.READFUNCTION, body_reader.read)
                     curl.setopt(pycurl.HTTPHEADER, [
-                        '{}: {}'.format(k,v) for k,v in self.put_headers.iteritems()])
+                        '{}: {}'.format(k,v) for k,v in self.put_headers.items()])
                     curl.setopt(pycurl.WRITEFUNCTION, response_body.write)
                     curl.setopt(pycurl.HEADERFUNCTION, self._headerfunction)
                     self._setcurltimeouts(curl, timeout)
@@ -457,7 +465,7 @@ class KeepClient(object):
                          self._result['status_code'],
                          len(body),
                          t.msecs,
-                         (len(body)/(1024.0*1024))/t.secs if t.secs > 0 else 0)
+                         old_div((old_div(len(body),(1024.0*1024))),t.secs) if t.secs > 0 else 0)
             if self.upload_counter:
                 self.upload_counter.add(len(body))
             return True
@@ -498,9 +506,9 @@ class KeepClient(object):
             # Returning None implies all bytes were written
     
 
-    class KeepWriterQueue(Queue.Queue):
+    class KeepWriterQueue(queue.Queue):
         def __init__(self, copies):
-            Queue.Queue.__init__(self) # Old-style superclass
+            queue.Queue.__init__(self) # Old-style superclass
             self.wanted_copies = copies
             self.successful_copies = 0
             self.response = None
@@ -548,7 +556,7 @@ class KeepClient(object):
                         return service, service_root
                     elif self.empty():
                         self.pending_tries_notification.notify_all()
-                        raise Queue.Empty
+                        raise queue.Empty
                     else:
                         self.pending_tries_notification.wait()
 
@@ -560,7 +568,7 @@ class KeepClient(object):
             if (not max_service_replicas) or (max_service_replicas >= copies):
                 num_threads = 1
             else:
-                num_threads = int(math.ceil(float(copies) / max_service_replicas))
+                num_threads = int(math.ceil(old_div(float(copies), max_service_replicas)))
             _logger.debug("Pool max threads is %d", num_threads)
             self.workers = []
             self.queue = KeepClient.KeepWriterQueue(copies)
@@ -602,7 +610,7 @@ class KeepClient(object):
             while True:
                 try:
                     service, service_root = self.queue.get_next_task()
-                except Queue.Empty:
+                except queue.Empty:
                     return
                 try:
                     locator, copies = self.do_task(service, service_root)
@@ -720,7 +728,7 @@ class KeepClient(object):
         self.block_cache = block_cache if block_cache else KeepBlockCache()
         self.timeout = timeout
         self.proxy_timeout = proxy_timeout
-        self._user_agent_pool = Queue.LifoQueue()
+        self._user_agent_pool = queue.LifoQueue()
         self.upload_counter = Counter()
         self.download_counter = Counter()
         self.put_counter = Counter()
@@ -741,7 +749,7 @@ class KeepClient(object):
                     if not proxy_uris[i].endswith('/'):
                         proxy_uris[i] += '/'
                     # URL validation
-                    url = urlparse.urlparse(proxy_uris[i])
+                    url = urllib.parse.urlparse(proxy_uris[i])
                     if not (url.scheme and url.netloc):
                         raise arvados.errors.ArgumentError("Invalid proxy URI: {}".format(proxy_uris[i]))
                 self.api_token = api_token
@@ -807,7 +815,7 @@ class KeepClient(object):
                 raise arvados.errors.NoKeepServersError()
 
             # Precompute the base URI for each service.
-            for r in self._gateway_services.itervalues():
+            for r in self._gateway_services.values():
                 host = r['service_host']
                 if not host.startswith('[') and host.find(':') >= 0:
                     # IPv6 URIs must be formatted like http://[::1]:80/...
@@ -819,7 +827,7 @@ class KeepClient(object):
 
             _logger.debug(str(self._gateway_services))
             self._keep_services = [
-                ks for ks in self._gateway_services.itervalues()
+                ks for ks in self._gateway_services.values()
                 if not ks.get('service_type', '').startswith('gateway:')]
             self._writable_services = [ks for ks in self._keep_services
                                        if not ks.get('read_only')]
@@ -1059,7 +1067,7 @@ class KeepClient(object):
           KeepClient is initialized.
         """
 
-        if isinstance(data, unicode):
+        if isinstance(data, str):
             data = data.encode("ascii")
         elif not isinstance(data, str):
             raise arvados.errors.ArgumentError("Argument 'data' to KeepClient.put is not type 'str'")
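After this mechanical `unicode` → `str` rename, both branches test the same type, so the `elif` condition is always true whenever it is reached: any non-text input, including the `bytes` blocks Keep actually stores, now raises. A bytes-aware guard would distinguish the two types explicitly; a hypothetical sketch, not what this commit does:

```python
def _coerce_put_data(data):
    # Hypothetical follow-up: accept text or bytes, reject everything else.
    if isinstance(data, str):
        return data.encode("ascii")
    if not isinstance(data, bytes):
        raise TypeError("data must be str or bytes")
    return data
```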
diff --git a/sdk/python/arvados/retry.py b/sdk/python/arvados/retry.py
index 5ba4f4ea41016a6225ebb3fca194265e56b56a0b..168bd3910fc799fd3ddc4d8bfec63f31b2e261bb 100644 (file)
@@ -1,5 +1,7 @@
 #!/usr/bin/env python
 
+from builtins import range
+from builtins import object
 import functools
 import inspect
 import pycurl
@@ -9,7 +11,7 @@ from collections import deque
 
 import arvados.errors
 
-_HTTP_SUCCESSES = set(xrange(200, 300))
+_HTTP_SUCCESSES = set(range(200, 300))
 _HTTP_CAN_RETRY = set([408, 409, 422, 423, 500, 502, 503, 504])
 
 class RetryLoop(object):
@@ -69,7 +71,7 @@ class RetryLoop(object):
     def running(self):
         return self._running and (self._success is None)
 
-    def next(self):
+    def __next__(self):
         if self._running is None:
             self._running = True
         if (self.tries_left < 1) or not self.running():
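Renaming `next` to `__next__` adopts the Python 3 iterator protocol; on Python 2, future's `newobject` base class (imported above as `object`) supplies a `.next()` shim that delegates to `__next__`, so plain `for` loops keep working. The shape of the change, with that shim written out manually:

```python
class Countdown(object):
    """Sketch of a dual-version iterator after futurize's rename."""
    def __init__(self, n):
        self.n = n
    def __iter__(self):
        return self
    def __next__(self):    # Python 3 protocol method
        if self.n == 0:
            raise StopIteration
        self.n -= 1
        return self.n
    next = __next__        # manual Py2 shim; future's newobject provides this

print(list(Countdown(3)))  # [2, 1, 0]
```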
diff --git a/sdk/python/arvados/safeapi.py b/sdk/python/arvados/safeapi.py
index a9ca978865375eea1e34d44db08bb462388811de..488b758a42f69534ae0bc2584d64c660baa7532e 100644 (file)
@@ -1,5 +1,6 @@
 from __future__ import absolute_import
 
+from builtins import object
 import copy
 import threading
 
diff --git a/sdk/python/arvados/stream.py b/sdk/python/arvados/stream.py
index 042ed47e40edb495cf1a91073ea75cc4d08b5a01..59558162b41891d77ae148b9f407d2359035fb5f 100644 (file)
@@ -1,5 +1,6 @@
 from __future__ import print_function
 from __future__ import absolute_import
+from builtins import object
 import collections
 import hashlib
 import os
@@ -36,15 +37,15 @@ class StreamReader(object):
 
             s = re.match(r'^[0-9a-f]{32}\+(\d+)(\+\S+)*$', tok)
             if s:
-                blocksize = long(s.group(1))
+                blocksize = int(s.group(1))
                 self._data_locators.append(Range(tok, streamoffset, blocksize, 0))
                 streamoffset += blocksize
                 continue
 
             s = re.search(r'^(\d+):(\d+):(\S+)', tok)
             if s:
-                pos = long(s.group(1))
-                size = long(s.group(2))
+                pos = int(s.group(1))
+                size = int(s.group(2))
                 name = s.group(3).replace('\\040', ' ')
                 if name not in self._files:
                     self._files[name] = StreamFileReader(self, [Range(pos, 0, size, 0)], name)
@@ -62,7 +63,7 @@ class StreamReader(object):
         return self._files
 
     def all_files(self):
-        return self._files.values()
+        return list(self._files.values())
 
     def size(self):
         n = self._data_locators[-1]
@@ -97,5 +98,5 @@ class StreamReader(object):
             manifest_text.extend([d.locator for d in self._data_locators])
         manifest_text.extend([' '.join(["{}:{}:{}".format(seg.locator, seg.range_size, f.name.replace(' ', '\\040'))
                                         for seg in f.segments])
-                              for f in self._files.values()])
+                              for f in list(self._files.values())])
         return ' '.join(manifest_text) + '\n'
diff --git a/sdk/python/arvados/timer.py b/sdk/python/arvados/timer.py
index 6d4a73f25917c16aab5b49cb21d01c94fff0425f..f3bf839dd5f8ebf9fa23a20e6cdd0ee7beb9452d 100644 (file)
@@ -1,4 +1,5 @@
 from __future__ import print_function
+from builtins import object
 import time
 
 class Timer(object):
diff --git a/sdk/python/arvados/util.py b/sdk/python/arvados/util.py
index 522a95e3e4ff276f7b189de395e0d6f95fe5124b..3f50553c4b0251092d5758a33bc3c6ff19bf93ed 100644 (file)
@@ -124,7 +124,7 @@ def tarball_extract(tarball, path):
                 raise arvados.errors.CommandFailedError(
                     "tar exited %d" % p.returncode)
         os.symlink(tarball, os.path.join(path, '.locator'))
-    tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
+    tld_extracts = [f for f in os.listdir(path) if f != '.locator']
     lockfile.close()
     if len(tld_extracts) == 1:
         return os.path.join(path, tld_extracts[0])
@@ -190,7 +190,7 @@ def zipball_extract(zipball, path):
                     "unzip exited %d" % p.returncode)
             os.unlink(zip_filename)
         os.symlink(zipball, os.path.join(path, '.locator'))
-    tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
+    tld_extracts = [f for f in os.listdir(path) if f != '.locator']
     lockfile.close()
     if len(tld_extracts) == 1:
         return os.path.join(path, tld_extracts[0])
diff --git a/sdk/python/tests/arvados_testutil.py b/sdk/python/tests/arvados_testutil.py
index dae3dd3b7b19c923ff53381e9f3ebef8c5abae49..51e85c85bd4727e41b733a3cec0b8f78521eabbc 100644 (file)
@@ -1,16 +1,21 @@
 #!/usr/bin/env python
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from builtins import object
 import arvados
 import contextlib
 import errno
 import hashlib
-import httplib
+import http.client
 import httplib2
 import io
 import mock
 import os
 import pycurl
-import Queue
+import queue
 import shutil
 import sys
 import tempfile
@@ -29,7 +34,7 @@ def queue_with(items):
     given, it will be consumed to fill the queue before queue_with()
     returns.
     """
-    queue = Queue.Queue()
+    queue = queue.Queue()
     for val in items:
         queue.put(val)
     return lambda *args, **kwargs: queue.get(block=False)
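A subtlety in this hunk: the assignment `queue = queue.Queue()` makes `queue` local to `queue_with`, so the right-hand side raises `UnboundLocalError` before the module can be read. The usual fix is to rename the local; hypothetically:

```python
import queue

def queue_with(items):
    q = queue.Queue()   # distinct local name keeps the module reachable
    for val in items:
        q.put(val)
    return lambda *args, **kwargs: q.get(block=False)
```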
@@ -38,7 +43,7 @@ def queue_with(items):
 # mock calls to httplib2.Http.request()
 def fake_httplib2_response(code, **headers):
     headers.update(status=str(code),
-                   reason=httplib.responses.get(code, "Unknown Response"))
+                   reason=http.client.responses.get(code, "Unknown Response"))
     return httplib2.Response(headers)
 
 def mock_responses(body, *codes, **headers):
@@ -63,7 +68,7 @@ def redirected_streams(stdout=None, stderr=None):
         sys.stderr = orig_stderr
 
 
-class FakeCurl:
+class FakeCurl(object):
     @classmethod
     def make(cls, code, body='', headers={}):
         return mock.Mock(spec=cls, wraps=cls(code, body, headers))
@@ -96,7 +101,7 @@ class FakeCurl:
             raise ValueError
         if self._headerfunction:
             self._headerfunction("HTTP/1.1 {} Status".format(self._resp_code))
-            for k, v in self._resp_headers.iteritems():
+            for k, v in self._resp_headers.items():
                 self._headerfunction(k + ': ' + str(v))
         if type(self._resp_body) is not bool:
             self._writer(self._resp_body)
diff --git a/sdk/python/tests/keepstub.py b/sdk/python/tests/keepstub.py
index d79788c07e3ef4d26d055e6d72a8a59f755c66ae..965bf299b86d9bb431e82dec94c46317922a1222 100644 (file)
@@ -1,11 +1,16 @@
-import BaseHTTPServer
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.utils import old_div
+import http.server
 import hashlib
 import os
 import re
-import SocketServer
+import socketserver
 import time
 
-class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer, object):
+class Server(socketserver.ThreadingMixIn, http.server.HTTPServer, object):
 
     allow_reuse_address = 1
 
@@ -32,7 +37,7 @@ class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer, object):
 
     def setdelays(self, **kwargs):
         """In future requests, induce delays at the given checkpoints."""
-        for (k, v) in kwargs.iteritems():
+        for (k, v) in kwargs.items():
             self.delays.get(k) # NameError if unknown key
             self.delays[k] = v
 
@@ -54,12 +59,12 @@ class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer, object):
         self._sleep_at_least(self.delays[k])
 
 
-class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
+class Handler(http.server.BaseHTTPRequestHandler, object):
     def wfile_bandwidth_write(self, data_to_write):
         if self.server.bandwidth == None and self.server.delays['mid_write'] == 0:
             self.wfile.write(data_to_write)
         else:
-            BYTES_PER_WRITE = int(self.server.bandwidth/4.0) or 32768
+            BYTES_PER_WRITE = int(old_div(self.server.bandwidth,4.0)) or 32768
             outage_happened = False
             num_bytes = len(data_to_write)
             num_sent_bytes = 0
@@ -75,7 +80,7 @@ class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
                     num_sent_bytes:num_sent_bytes+num_write_bytes])
                 num_sent_bytes += num_write_bytes
                 if self.server.bandwidth is not None:
-                    target_time += num_write_bytes / self.server.bandwidth
+                    target_time += old_div(num_write_bytes, self.server.bandwidth)
                     self.server._sleep_at_least(target_time - time.time())
         return None
 
@@ -83,7 +88,7 @@ class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
         if self.server.bandwidth == None and self.server.delays['mid_read'] == 0:
             return self.rfile.read(bytes_to_read)
         else:
-            BYTES_PER_READ = int(self.server.bandwidth/4.0) or 32768
+            BYTES_PER_READ = int(old_div(self.server.bandwidth,4.0)) or 32768
             data = ''
             outage_happened = False
             bytes_read = 0
@@ -98,7 +103,7 @@ class Handler(BaseHTTPServer.BaseHTTPRequestHandler, object):
                 data += self.rfile.read(next_bytes_to_read)
                 bytes_read += next_bytes_to_read
                 if self.server.bandwidth is not None:
-                    target_time += next_bytes_to_read / self.server.bandwidth
+                    target_time += old_div(next_bytes_to_read, self.server.bandwidth)
                     self.server._sleep_at_least(target_time - time.time())
         return data
 
diff --git a/sdk/python/tests/manifest_examples.py b/sdk/python/tests/manifest_examples.py
index f20d9090229dbd90b1466d6c3f065e6296ccdbb2..91c37e6c66fbffae85c9d39c58bfe09f8ed56017 100644 (file)
@@ -1,4 +1,6 @@
 from __future__ import absolute_import
+from builtins import range
+from builtins import object
 import arvados
 from . import arvados_testutil as tutil
 
diff --git a/sdk/python/tests/performance/test_a_sample.py b/sdk/python/tests/performance/test_a_sample.py
index b99ca64a3e8d959d87d783bdaf0052d0e9f21ac1..0c5fe3adc8819dfb5670318464679f27ae95d660 100644 (file)
@@ -1,5 +1,6 @@
 from __future__ import print_function
 from __future__ import absolute_import
+from builtins import range
 import unittest
 
 from .performance_profiler import profiled
diff --git a/sdk/python/tests/run_test_server.py b/sdk/python/tests/run_test_server.py
index 8d7e708de933e3091d4d11d1e6247b1211865d92..d96612631885f9d1bf4f2321f457787fb3daa112 100644 (file)
@@ -1,6 +1,10 @@
 #!/usr/bin/env python
 
 from __future__ import print_function
+from __future__ import division
+from builtins import str
+from builtins import range
+from past.utils import old_div
 import argparse
 import atexit
 import errno
@@ -96,7 +100,7 @@ def kill_server_pid(pidfile, wait=10, passenger_root=False):
         # Use up to half of the +wait+ period waiting for "passenger
         # stop" to work. If the process hasn't exited by then, start
         # sending TERM signals.
-        startTERM += wait/2
+        startTERM += old_div(wait,2)
 
     server_pid = None
     while now <= deadline and server_pid is None:
@@ -439,7 +443,7 @@ def _start_keep(n, keep_args):
                 "-listen=:{}".format(port),
                 "-pid="+_pidfile('keep{}'.format(n))]
 
-    for arg, val in keep_args.iteritems():
+    for arg, val in keep_args.items():
         keep_cmd.append("{}={}".format(arg, val))
 
     logf = open(_fifo2stderr('keep{}'.format(n)), 'w')
@@ -736,7 +740,7 @@ class TestCaseWithServers(unittest.TestCase):
 
     @staticmethod
     def _restore_dict(src, dest):
-        for key in dest.keys():
+        for key in list(dest.keys()):
             if key not in src:
                 del dest[key]
         dest.update(src)
diff --git a/sdk/python/tests/test_api.py b/sdk/python/tests/test_api.py
index b6b2b563a74f8a0f8fee7ab1459d2f178c33f2b3..a2dcaa0b2a6cdcdddbd0474cfb865ac3ba2bad3b 100644 (file)
@@ -1,6 +1,8 @@
 #!/usr/bin/env python
 
 from __future__ import absolute_import
+from builtins import str
+from builtins import range
 import arvados
 import collections
 import httplib2
@@ -106,7 +108,7 @@ class ArvadosApiTest(run_test_server.TestCaseWithServers):
         api = arvados.api('v1',
                           requestBuilder=req_builder, model=OrderedJsonModel())
         result = api.humans().get(uuid='test').execute()
-        self.assertEqual(string.hexdigits, ''.join(result.keys()))
+        self.assertEqual(string.hexdigits, ''.join(list(result.keys())))
 
 
 class RetryREST(unittest.TestCase):
@@ -167,7 +169,7 @@ class RetryREST(unittest.TestCase):
         mock_conns = {str(i): mock.MagicMock() for i in range(2)}
         self.api._http.connections = mock_conns.copy()
         self.api.users().create(body={}).execute()
-        for c in mock_conns.itervalues():
+        for c in mock_conns.values():
             self.assertEqual(c.close.call_count, expect)
 
     @mock.patch('time.sleep')
diff --git a/sdk/python/tests/test_arv_ls.py b/sdk/python/tests/test_arv_ls.py
index 8e5c5ad12a10f8851a5ba4ac016db67ed8d4b152..ae26ae79ceb6b3513caa73ef673a42203e431af9 100644 (file)
@@ -2,6 +2,8 @@
 # -*- coding: utf-8 -*-
 
 from __future__ import absolute_import
+from builtins import str
+from builtins import range
 import io
 import os
 import random
diff --git a/sdk/python/tests/test_arv_put.py b/sdk/python/tests/test_arv_put.py
index 5f314bd3078af98ea5a465d07a24fde4282a6310..5abf38854abc900146723fd27a549823eaff5882 100644 (file)
@@ -2,6 +2,12 @@
 # -*- coding: utf-8 -*-
 
 from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from past.utils import old_div
 import apiclient
 import io
 import mock
@@ -19,7 +25,7 @@ import threading
 import hashlib
 import random
 
-from cStringIO import StringIO
+from io import StringIO
 
 import arvados
 import arvados.commands.put as arv_put
@@ -258,7 +264,7 @@ class ArvPutUploadJobTest(run_test_server.TestCaseWithServers,
         _, self.large_file_name = tempfile.mkstemp()
         fileobj = open(self.large_file_name, 'w')
         # Make sure to write just a little more than one block
-        for _ in range((arvados.config.KEEP_BLOCK_SIZE/(1024*1024))+1):
+        for _ in range((old_div(arvados.config.KEEP_BLOCK_SIZE,(1024*1024)))+1):
             data = random.choice(['x', 'y', 'z']) * 1024 * 1024 # 1 MB
             fileobj.write(data)
         fileobj.close()
@@ -525,7 +531,7 @@ class ArvadosPutReportTest(ArvadosBaseTestCase):
 
     def test_known_human_progress(self):
         for count, total in [(0, 1), (2, 4), (45, 60)]:
-            expect = '{:.1%}'.format(float(count) / total)
+            expect = '{:.1%}'.format(old_div(float(count), total))
             actual = arv_put.human_progress(count, total)
             self.assertTrue(actual.startswith('\r'))
             self.assertIn(expect, actual)
diff --git a/sdk/python/tests/test_arvfile.py b/sdk/python/tests/test_arvfile.py
index 6c4976ee0de1633ca114423624f3ca6c1b9a3546..f4bcd8a49794d71358dc7f8d85c0a36b56d42e70 100644 (file)
@@ -1,6 +1,10 @@
 #!/usr/bin/env python
 
 from __future__ import absolute_import
+from builtins import hex
+from builtins import str
+from builtins import range
+from builtins import object
 import bz2
 import datetime
 import gzip
@@ -242,7 +246,7 @@ class ArvadosFileWriterTestCase(unittest.TestCase):
                              api_client=api, keep_client=keep) as c:
             writer = c.open("count.txt", "r+")
             text = "0123456789" * 100
-            for b in xrange(0, 100000):
+            for b in range(0, 100000):
                 writer.write(text)
             self.assertEqual(writer.size(), 100000000)
 
@@ -274,7 +278,7 @@ class ArvadosFileWriterTestCase(unittest.TestCase):
         with Collection('. ' + arvados.config.EMPTY_BLOCK_LOCATOR + ' 0:0:count.txt',
                              keep_client=keep) as c:
             writer = c.open("count.txt", "r+")
-            for b in xrange(0, 10):
+            for b in range(0, 10):
                 writer.seek(0, os.SEEK_SET)
                 writer.write("0123456789")
 
@@ -291,7 +295,7 @@ class ArvadosFileWriterTestCase(unittest.TestCase):
         with Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt',
                              keep_client=keep) as c:
             writer = c.open("count.txt", "r+")
-            for b in xrange(0, 10):
+            for b in range(0, 10):
                 writer.seek(10, os.SEEK_SET)
                 writer.write("abcdefghij")
 
@@ -309,7 +313,7 @@ class ArvadosFileWriterTestCase(unittest.TestCase):
         with Collection('. 781e5e245d69b566979b86e28d23f2c7+10 0:10:count.txt',
                              keep_client=keep) as c:
             writer = c.open("count.txt", "r+")
-            for b in xrange(0, 10):
+            for b in range(0, 10):
                 writer.seek(5, os.SEEK_SET)
                 writer.write("abcdefghij")
 
@@ -334,8 +338,8 @@ class ArvadosFileWriterTestCase(unittest.TestCase):
         with Collection('. ' + arvados.config.EMPTY_BLOCK_LOCATOR + ' 0:0:count.txt',
                              api_client=api, keep_client=keep) as c:
             writer = c.open("count.txt", "r+")
-            text = ''.join(["0123456789" for a in xrange(0, 100)])
-            for b in xrange(0, 100000):
+            text = ''.join(["0123456789" for a in range(0, 100)])
+            for b in range(0, 100000):
                 writer.write(text)
             writer.seek(0, os.SEEK_SET)
             writer.write("foo")
diff --git a/sdk/python/tests/test_cache.py b/sdk/python/tests/test_cache.py
index 93388e6de8d80b56d09038403cdc0bc4f1e4ea38..ea34c8051d6580685ab125d50fd04e659dd16485 100644 (file)
@@ -1,6 +1,8 @@
 from __future__ import print_function
 from __future__ import absolute_import
 
+from builtins import str
+from builtins import range
 import md5
 import mock
 import os
@@ -17,7 +19,7 @@ from . import run_test_server
 
 
 def _random(n):
-    return bytearray(random.getrandbits(8) for _ in xrange(n))
+    return bytearray(random.getrandbits(8) for _ in range(n))
 
 
 class CacheTestThread(threading.Thread):
diff --git a/sdk/python/tests/test_collections.py b/sdk/python/tests/test_collections.py
index 46682deaa660171a3e6878c98b23c198a4578fd3..259c5aa177cc87a8578480001948ac1a2c985e44 100644 (file)
@@ -3,6 +3,7 @@ from __future__ import absolute_import
 #
 # ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
 
+from builtins import object
 import arvados
 import copy
 import mock
diff --git a/sdk/python/tests/test_events.py b/sdk/python/tests/test_events.py
index 73b9f2254497b0c90cdb51ebb2ea4dd70cb46f18..2cca77339d1b8be66420db60f7605ec436beef33 100644 (file)
@@ -1,10 +1,16 @@
 from __future__ import print_function
 from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from builtins import object
+from past.utils import old_div
 import arvados
 import io
 import logging
 import mock
-import Queue
+import queue
 from . import run_test_server
 import threading
 import time
@@ -35,7 +41,7 @@ class WebsocketTest(run_test_server.TestCaseWithServers):
 
     def _test_subscribe(self, poll_fallback, expect_type, start_time=None, expected=1):
         run_test_server.authorize_with('active')
-        events = Queue.Queue(100)
+        events = queue.Queue(100)
 
         # Create ancestor before subscribing.
         # When listening with start_time in the past, this should also be retrieved.
@@ -65,7 +71,7 @@ class WebsocketTest(run_test_server.TestCaseWithServers):
             log_object_uuids.append(events.get(True, 5)['object_uuid'])
 
         if expected < 2:
-            with self.assertRaises(Queue.Empty):
+            with self.assertRaises(queue.Empty):
                 # assertEqual just serves to show us what unexpected
                 # thing comes out of the queue when the assertRaises
                 # fails; when the test passes, this assertEqual
@@ -145,16 +151,16 @@ class WebsocketTest(run_test_server.TestCaseWithServers):
         return time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(t))
 
     def localiso(self, t):
-        return time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(t)) + self.isotz(-time.timezone/60)
+        return time.strftime('%Y-%m-%dT%H:%M:%S', time.localtime(t)) + self.isotz(old_div(-time.timezone,60))
 
     def isotz(self, offset):
         """Convert minutes-east-of-UTC to RFC3339- and ISO-compatible time zone designator"""
-        return '{:+03d}:{:02d}'.format(offset/60, offset%60)
+        return '{:+03d}:{:02d}'.format(old_div(offset,60), offset%60)
 
     # Test websocket reconnection on (un)expected close
     def _test_websocket_reconnect(self, close_unexpected):
         run_test_server.authorize_with('active')
-        events = Queue.Queue(100)
+        events = queue.Queue(100)
 
         logstream = io.BytesIO()
         rootLogger = logging.getLogger()
@@ -176,7 +182,7 @@ class WebsocketTest(run_test_server.TestCaseWithServers):
 
         # expect an event
         self.assertIn(human['uuid'], events.get(True, 5)['object_uuid'])
-        with self.assertRaises(Queue.Empty):
+        with self.assertRaises(queue.Empty):
             self.assertEqual(events.get(True, 2), None)
 
         # close (im)properly
@@ -195,12 +201,12 @@ class WebsocketTest(run_test_server.TestCaseWithServers):
                 event = events.get(True, 5)
                 if event.get('object_uuid') != None:
                     log_object_uuids.append(event['object_uuid'])
-            with self.assertRaises(Queue.Empty):
+            with self.assertRaises(queue.Empty):
                 self.assertEqual(events.get(True, 2), None)
             self.assertNotIn(human['uuid'], log_object_uuids)
             self.assertIn(human2['uuid'], log_object_uuids)
         else:
-            with self.assertRaises(Queue.Empty):
+            with self.assertRaises(queue.Empty):
                 self.assertEqual(events.get(True, 2), None)
 
         # verify log message to ensure that an (un)expected close
@@ -230,7 +236,7 @@ class WebsocketTest(run_test_server.TestCaseWithServers):
         rootLogger.addHandler(streamHandler)
 
         run_test_server.authorize_with('active')
-        events = Queue.Queue(100)
+        events = queue.Queue(100)
 
         filters = [['object_uuid', 'is_a', 'arvados#human']]
         self.ws = arvados.events.subscribe(
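
For reviewers new to the future package: `standard_library.install_aliases()` registers the Python 3 stdlib module names on Python 2, which is why Py2's `Queue` module can be imported as `queue` above. A minimal sketch of the behavior this relies on (as documented for future):

    from future import standard_library
    standard_library.install_aliases()  # Py3-style stdlib names usable on Py2

    import queue  # Py2: the old Queue module; Py3: the stdlib queue module

    q = queue.Queue(100)
    q.put('event')
    print(q.get(True, 5))   # -> 'event'
    try:
        q.get(True, 0.1)    # queue is empty now, so this times out
    except queue.Empty:     # same exception class the tests above catch
        print('queue.Empty raised')
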
index bb6e983185ccb354ac1774340db0387784b7c653..b69563f94238aee5b9e53713e7ab5df7b59ba582 100644 (file)
@@ -1,4 +1,11 @@
 from __future__ import absolute_import
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from past.utils import old_div
+from builtins import object
 import hashlib
 import mock
 import os
@@ -9,7 +16,7 @@ import socket
 import threading
 import time
 import unittest
-import urlparse
+import urllib.parse
 
 import arvados
 import arvados.retry
@@ -292,7 +299,7 @@ class KeepClientServiceTestCase(unittest.TestCase, tutil.ApiClientMock):
     def get_service_roots(self, api_client):
         keep_client = arvados.KeepClient(api_client=api_client)
         services = keep_client.weighted_service_roots(arvados.KeepLocator('0'*32))
-        return [urlparse.urlparse(url) for url in sorted(services)]
+        return [urllib.parse.urlparse(url) for url in sorted(services)]
 
     def test_ssl_flag_respected_in_roots(self):
         for ssl_flag in [False, True]:
@@ -443,7 +450,7 @@ class KeepClientServiceTestCase(unittest.TestCase, tutil.ApiClientMock):
                                        num_retries=3)
         self.assertEqual([403, 403], [
                 getattr(error, 'status_code', None)
-                for error in err_check.exception.request_errors().itervalues()])
+                for error in err_check.exception.request_errors().values()])
 
     def test_get_error_reflects_last_retry(self):
         self.check_errors_from_last_retry('get', arvados.errors.KeepReadError)
@@ -649,7 +656,7 @@ class KeepClientRendezvousTestCase(unittest.TestCase, tutil.ApiClientMock):
              self.assertRaises(exc_class) as err_check:
             curl_mock.return_value.side_effect = socket.timeout
             getattr(keep_client, verb)(data)
-        urls = [urlparse.urlparse(url)
+        urls = [urllib.parse.urlparse(url)
                 for url in err_check.exception.request_errors()]
         self.assertEqual([('keep0x' + c, aport) for c in '3eab2d5fc9681074'],
                          [(url.hostname, url.port) for url in urls])
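
`urlparse` is likewise reached through its Python 3 name. A small sketch, assuming `install_aliases()` has already run as in this file's import block:

    import urllib.parse  # Py2: aliased onto urlparse by install_aliases()

    url = 'https://keep0x3e.example:25107/'
    parsed = urllib.parse.urlparse(url)
    print(parsed.hostname, parsed.port)  # -> keep0x3e.example 25107
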
@@ -1118,35 +1125,35 @@ class AvoidOverreplication(unittest.TestCase, tutil.ApiClientMock):
 
     def test_only_write_enough_on_success(self):
         for i in range(10):
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
             self.pool.add_task(ks, None)
         self.pool.join()
         self.assertEqual(self.pool.done(), self.copies)
 
     def test_only_write_enough_on_partial_success(self):
         for i in range(5):
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=False)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=False)
             self.pool.add_task(ks, None)
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
             self.pool.add_task(ks, None)
         self.pool.join()
         self.assertEqual(self.pool.done(), self.copies)
 
     def test_only_write_enough_when_some_crash(self):
         for i in range(5):
-            ks = self.FakeKeepService(delay=i/10.0, will_raise=Exception())
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_raise=Exception())
             self.pool.add_task(ks, None)
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
             self.pool.add_task(ks, None)
         self.pool.join()
         self.assertEqual(self.pool.done(), self.copies)
 
     def test_fail_when_too_many_crash(self):
         for i in range(self.copies+1):
-            ks = self.FakeKeepService(delay=i/10.0, will_raise=Exception())
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_raise=Exception())
             self.pool.add_task(ks, None)
         for i in range(self.copies-1):
-            ks = self.FakeKeepService(delay=i/10.0, will_succeed=True)
+            ks = self.FakeKeepService(delay=old_div(i,10.0), will_succeed=True)
             self.pool.add_task(ks, None)
         self.pool.join()
         self.assertEqual(self.pool.done(), self.copies-1)
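
The `old_div` rewrites are mechanical: futurize cannot infer operand types, so wherever it adds `from __future__ import division` it also routes existing `/` through `past.utils.old_div`, which floors for two ints and divides normally otherwise, exactly Python 2's `/`. A sketch of the difference:

    from __future__ import division
    from past.utils import old_div

    print(7 / 2)             # 3.5: true division once the future import is in
    print(old_div(7, 2))     # 3: floor division for two ints (Py2 behavior)
    print(old_div(3, 10.0))  # 0.3: plain division when either operand is a float

So `old_div(i, 10.0)` above is just `i / 10.0`; the wrapper only matters for the int/int sites such as `old_div(offset, 60)` earlier.
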
index 273992aba7f1dd7d793a255fa98f861a3980e140..2a90316e4cdf8500d2549bf34acb04c657112ed1 100644 (file)
@@ -1,6 +1,10 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
+from builtins import next
+from builtins import zip
+from builtins import str
+from builtins import range
 import datetime
 import itertools
 import random
@@ -14,7 +18,7 @@ class ArvadosKeepLocatorTest(unittest.TestCase):
     def numstrs(fmtstr, base, exponent):
         def genstrs(self, count=None):
             return (fmtstr.format(random.randint(0, base ** exponent))
-                    for c in xrange(count or self.DEFAULT_TEST_COUNT))
+                    for c in range(count or self.DEFAULT_TEST_COUNT))
         return genstrs
 
     checksums = numstrs('{:032x}', 16, 32)
@@ -24,17 +28,17 @@ class ArvadosKeepLocatorTest(unittest.TestCase):
 
     def base_locators(self, count=DEFAULT_TEST_COUNT):
         return ('+'.join(pair) for pair in
-                itertools.izip(self.checksums(count), self.sizes(count)))
+                zip(self.checksums(count), self.sizes(count)))
 
     def perm_hints(self, count=DEFAULT_TEST_COUNT):
-        for sig, ts in itertools.izip(self.signatures(count),
-                                      self.timestamps(count)):
+        for sig, ts in zip(self.signatures(count),
+                           self.timestamps(count)):
             yield 'A{}@{}'.format(sig, ts)
 
     def test_good_locators_returned(self):
         for hint_gens in [(), (self.sizes(),),
                           (self.sizes(), self.perm_hints())]:
-            for loc_data in itertools.izip(self.checksums(), *hint_gens):
+            for loc_data in zip(self.checksums(), *hint_gens):
                 locator = '+'.join(loc_data)
                 self.assertEqual(locator, str(KeepLocator(locator)))
 
index cc12f39a355ef9b97a85a34ee5989e3bae38a744..82725c2790b0863479c8fdce2ba05f1c48af1b8f 100644 (file)
@@ -1,5 +1,8 @@
 #!/usr/bin/env python
 
+from builtins import zip
+from builtins import range
+from builtins import object
 import itertools
 import unittest
 
@@ -25,7 +28,7 @@ class RetryLoopTestMixin(object):
         responses = itertools.chain(results, itertools.repeat(None))
         retrier = arv_retry.RetryLoop(num_retries, self.loop_success,
                                       **kwargs)
-        for tries_left, response in itertools.izip(retrier, responses):
+        for tries_left, response in zip(retrier, responses):
             retrier.save_result(response)
         return retrier
 
@@ -166,11 +169,11 @@ class CheckHTTPResponseSuccessTestCase(unittest.TestCase):
     check_is_not = check('assertIsNot')
 
     def test_obvious_successes(self):
-        self.check_is(True, *range(200, 207))
+        self.check_is(True, *list(range(200, 207)))
 
     def test_obvious_stops(self):
         self.check_is(False, 424, 426, 428, 431,
-                      *range(400, 408) + range(410, 420))
+                      *list(range(400, 408)) + list(range(410, 420)))
 
     def test_obvious_retries(self):
         self.check_is(None, 500, 502, 503, 504)
@@ -179,13 +182,13 @@ class CheckHTTPResponseSuccessTestCase(unittest.TestCase):
         self.check_is(None, 408, 409, 422, 423)
 
     def test_5xx_failures(self):
-        self.check_is(False, 501, *range(505, 512))
+        self.check_is(False, 501, *list(range(505, 512)))
 
     def test_1xx_not_retried(self):
         self.check_is_not(None, 100, 101)
 
     def test_redirects_not_retried(self):
-        self.check_is_not(None, *range(300, 309))
+        self.check_is_not(None, *list(range(300, 309)))
 
     def test_wacky_code_retries(self):
         self.check_is(None, 0, 99, 600, -200)
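
The `list(range(...))` wrapping is needed because Python 3's `range` (and the backport from `builtins`) is a lazy sequence that does not support `+` concatenation the way Py2 lists did. A sketch:

    from builtins import range  # lazy range object on both interpreters

    try:
        codes = range(400, 408) + range(410, 420)  # TypeError under Py3 semantics
    except TypeError:
        codes = list(range(400, 408)) + list(range(410, 420))
    print(codes[0], codes[-1], len(codes))  # -> 400 419 18
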
index 3ccaa37218f76f26d25f86af1b094164ad643d50..9ad957a3cc71d6cf3e445e27e47882f73056e582 100644 (file)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 from __future__ import absolute_import
+from builtins import object
 import mock
 import os
 import unittest
index 9e35bcf9c8327266c84cfeb25faa2a38b656dfc9..7277628122f31205dd477b20ad593c25a294df46 100644 (file)
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 from __future__ import absolute_import
+from builtins import object
 import bz2
 import gzip
 import io
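
Finally, the recurring `from builtins import object` gives classes Python 3 semantics on Python 2: futurize adds it so that, for example, the Py3 spellings of special methods work on both interpreters. A minimal sketch with a hypothetical class:

    from builtins import object  # future's newobject base class on Py2

    class Counter(object):        # hypothetical example class
        def __init__(self):
            self.n = 0
        def __next__(self):       # Py3 spelling; future maps it for Py2's next()
            self.n += 1
            return self.n

    c = Counter()
    print(next(c), next(c))       # -> 1 2 on both interpreters
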