+from __future__ import print_function
+from __future__ import absolute_import
import gflags
import httplib
import httplib2
import threading
from .api import api, http_cache
-from collection import CollectionReader, CollectionWriter, ResumableCollectionWriter
-from keep import *
-from stream import *
-from arvfile import StreamFileReader
-from retry import RetryLoop
-import errors
-import util
+from .collection import CollectionReader, CollectionWriter, ResumableCollectionWriter
+from .keep import *
+from .stream import *
+from .arvfile import StreamFileReader
+from .retry import RetryLoop
+from . import errors
+from . import util
# Set up Arvados logging based on the user's configuration.
# All Arvados code should log under the arvados hierarchy.
class JobTask(object):
def __init__(self, parameters=dict(), runtime_constraints=dict()):
- print "init jobtask %s %s" % (parameters, runtime_constraints)
+ print("init jobtask %s %s" % (parameters, runtime_constraints))
class job_setup:
@staticmethod
-import config
+from __future__ import absolute_import
+from . import config
def normalize_stream(stream_name, stream):
"""Take manifest stream and return a list of tokens in normalized format.
sortedfiles.sort()
blocks = {}
- streamoffset = 0L
+ streamoffset = 0
# Go through each file and add each referenced block exactly once.
for streamfile in sortedfiles:
for segment in stream[streamfile]:
self.segment_offset == other.segment_offset)
def first_block(data_locators, range_start):
- block_start = 0L
+ block_start = 0
# range_start/block_start is the inclusive lower bound
# range_end/block_end is the exclusive upper bound
+from __future__ import absolute_import
import collections
import httplib
import httplib2
import apiclient
from apiclient import discovery as apiclient_discovery
from apiclient import errors as apiclient_errors
-import config
-import errors
-import util
-import cache
+from . import config
+from . import errors
+from . import util
+from . import cache
_logger = logging.getLogger('arvados.api')
+from __future__ import absolute_import
import functools
import os
import zlib
import bz2
-import config
+from . import config
import hashlib
import threading
import Queue
class ArvadosFileReaderBase(_FileLikeObjectBase):
def __init__(self, name, mode, num_retries=None):
super(ArvadosFileReaderBase, self).__init__(name, mode)
- self._filepos = 0L
+ self._filepos = 0
self.num_retries = num_retries
self._readline_cache = (None, None)
pos += self._filepos
elif whence == os.SEEK_END:
pos += self.size()
- self._filepos = min(max(pos, 0L), self.size())
+ self._filepos = min(max(pos, 0), self.size())
def tell(self):
return self._filepos
try:
with open(filename, 'rb') as f:
return f.read()
- except IOError, OSError:
+ except (IOError, OSError):
return None
def set(self, url, content):
+from __future__ import absolute_import
import functools
import logging
import os
from stat import *
from .arvfile import split, _FileLikeObjectBase, ArvadosFile, ArvadosFileWriter, ArvadosFileReader, _BlockManager, synchronized, must_be_writable, NoopLock
-from keep import KeepLocator, KeepClient
+from .keep import KeepLocator, KeepClient
from .stream import StreamReader
from ._normalize_stream import normalize_stream
from ._ranges import Range, LocatorAndRange
from .safeapi import ThreadSafeApiCache
-import config
-import errors
-import util
-import events
+from . import config
+from . import errors
+from . import util
+from . import events
from arvados.retry import retry_method
_logger = logging.getLogger('arvados.collection')
stream_name = tok.replace('\\040', ' ')
blocks = []
segments = []
- streamoffset = 0L
+ streamoffset = 0
state = BLOCKS
self.find_or_create(stream_name, COLLECTION)
continue
+from __future__ import print_function
import argparse
import time
import sys
if args.print_unmigrated:
only_migrate = set()
for pdh in need_migrate:
- print pdh
+ print(pdh)
return
logger.info("Already migrated %i images", len(already_migrated))
#!/usr/bin/env python
+from __future__ import print_function
+from __future__ import absolute_import
import arvados
import arvados.commands.ws as ws
import argparse
import re
import os
import stat
-import put
+from . import put
import time
import subprocess
import logging
#!/usr/bin/env python
+from __future__ import print_function
import sys
import logging
import argparse
elif 'status' in ev and ev['status'] == 200:
pass
else:
- print json.dumps(ev)
+ print(json.dumps(ev))
try:
ws = subscribe(arvados.api('v1'), filters, on_message, poll_fallback=args.poll_interval, last_log_id=last_log_id)
+from __future__ import absolute_import
import arvados
-import config
-import errors
-from retry import RetryLoop
+from . import config
+from . import errors
+from .retry import RetryLoop
import logging
import json
+from __future__ import absolute_import
import cStringIO
import datetime
import hashlib
import ssl
import sys
import threading
-import timer
+from . import timer
import urlparse
import arvados
+from __future__ import absolute_import
import threading
-import api
-import keep
-import config
+from . import api
+from . import keep
+from . import config
import copy
class ThreadSafeApiCache(object):
+from __future__ import print_function
+from __future__ import absolute_import
import collections
import hashlib
import os
from ._ranges import locators_and_ranges, Range
from .arvfile import StreamFileReader
from arvados.retry import retry_method
-from keep import *
-import config
-import errors
-from _normalize_stream import normalize_stream
+from .keep import *
+from . import config
+from . import errors
+from ._normalize_stream import normalize_stream
class StreamReader(object):
def __init__(self, tokens, keep=None, debug=False, _empty=False,
self._keep = keep
self.num_retries = num_retries
- streamoffset = 0L
+ streamoffset = 0
# parse stream
for tok in tokens:
- if debug: print 'tok', tok
+ if debug: print('tok', tok)
if self._stream_name is None:
self._stream_name = tok.replace('\\040', ' ')
continue
+from __future__ import print_function
import time
class Timer(object):
self.secs = self.end - self.start
self.msecs = self.secs * 1000 # millisecs
if self.verbose:
- print 'elapsed time: %f ms' % self.msecs
+ print('elapsed time: %f ms' % self.msecs)
def list_all(fn, num_retries=0, **kwargs):
# Default limit to (effectively) api server's MAX_LIMIT
- kwargs.setdefault('limit', sys.maxint)
+ kwargs.setdefault('limit', sys.maxsize)
items = []
offset = 0
- items_available = sys.maxint
+ items_available = sys.maxsize
while len(items) < items_available:
c = fn(offset=offset, **kwargs).execute(num_retries=num_retries)
items += c['items']
+from __future__ import absolute_import
import arvados
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
class ManifestExamples(object):
def make_manifest(self,
+from __future__ import print_function
+from __future__ import absolute_import
import unittest
-from performance_profiler import profiled
+from .performance_profiler import profiled
class PerformanceTestSample(unittest.TestCase):
def foo(self):
for i in range(0,2**20):
j += i
self.foo()
- print 'Hello'
+ print('Hello')
except OSError as error:
if error.errno != errno.ENOENT:
raise
- os.mkfifo(fifo, 0700)
+ os.mkfifo(fifo, 0o700)
subprocess.Popen(
['stdbuf', '-i0', '-oL', '-eL', 'sed', '-e', 's/^/['+label+'] /', fifo],
stdout=sys.stderr)
#!/usr/bin/env python
+from __future__ import absolute_import
import arvados
import collections
import httplib2
import unittest
import mock
-import run_test_server
+from . import run_test_server
from apiclient import errors as apiclient_errors
from apiclient import http as apiclient_http
from arvados.api import OrderedJsonModel, RETRY_DELAY_INITIAL, RETRY_DELAY_BACKOFF, RETRY_COUNT
-from arvados_testutil import fake_httplib2_response, queue_with
+from .arvados_testutil import fake_httplib2_response, queue_with
if not mimetypes.inited:
mimetypes.init()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import io
import os
import sys
import unittest
import arvados.commands.arv_copy as arv_copy
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
class ArvCopyTestCase(unittest.TestCase):
def run_copy(self, args):
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import arvados
import hashlib
import io
import logging
import arvados.commands.keepdocker as arv_keepdocker
-import arvados_testutil as tutil
-import run_test_server
+from . import arvados_testutil as tutil
+from . import run_test_server
class StopTest(Exception):
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import io
import os
import random
import arvados.errors as arv_error
import arvados.commands.ls as arv_ls
-import run_test_server
+from . import run_test_server
-from arvados_testutil import str_keep_locator, redirected_streams
+from .arvados_testutil import str_keep_locator, redirected_streams
class ArvLsTestCase(run_test_server.TestCaseWithServers):
FAKE_UUID = 'zzzzz-4zz18-12345abcde12345'
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import apiclient
import io
import mock
import arvados
import arvados.commands.put as arv_put
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
-from arvados_testutil import ArvadosBaseTestCase, fake_httplib2_response
-import run_test_server
+from .arvados_testutil import ArvadosBaseTestCase, fake_httplib2_response
+from . import run_test_server
class ArvadosPutResumeCacheTest(ArvadosBaseTestCase):
CACHE_ARGSET = [
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+from __future__ import absolute_import
import io
import os
import sys
import unittest
import arvados.commands.run as arv_run
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
class ArvRunTestCase(unittest.TestCase):
def run_arv_run(self, args):
#!/usr/bin/env python
+from __future__ import absolute_import
import io
import os
import sys
import arvados.errors as arv_error
import arvados.commands.ws as arv_ws
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
class ArvWsTestCase(unittest.TestCase):
def run_ws(self, args):
#!/usr/bin/env python
+from __future__ import absolute_import
import bz2
import datetime
import gzip
from arvados.collection import Collection, CollectionReader
from arvados.arvfile import ArvadosFile, ArvadosFileReader
-import arvados_testutil as tutil
-from test_stream import StreamFileReaderTestCase, StreamRetryTestMixin
+from . import arvados_testutil as tutil
+from .test_stream import StreamFileReaderTestCase, StreamRetryTestMixin
class ArvadosFileWriterTestCase(unittest.TestCase):
class MockKeep(object):
+from __future__ import absolute_import
import arvados
import sys
-import run_test_server
-import arvados_testutil as tutil
-import manifest_examples
-from performance.performance_profiler import profiled
+from . import run_test_server
+from . import arvados_testutil as tutil
+from . import manifest_examples
+from .performance.performance_profiler import profiled
class CollectionBenchmark(run_test_server.TestCaseWithServers,
tutil.ArvadosBaseTestCase,
from __future__ import print_function
+from __future__ import absolute_import
import md5
import mock
import threading
import unittest
-import arvados.cache
import arvados
-import run_test_server
+import arvados.cache
+from . import run_test_server
def _random(n):
+from __future__ import absolute_import
# usage example:
#
# ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
import tempfile
import unittest
-import run_test_server
+from . import run_test_server
from arvados._ranges import Range, LocatorAndRange
from arvados.collection import Collection, CollectionReader
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
class TestResumableWriter(arvados.ResumableCollectionWriter):
KEEP_BLOCK_SIZE = 1024 # PUT to Keep every 1K.
#!/usr/bin/env python
+from __future__ import absolute_import
import traceback
import unittest
import arvados.errors as arv_error
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
class KeepRequestErrorTestCase(unittest.TestCase):
REQUEST_ERRORS = [
+from __future__ import print_function
+from __future__ import absolute_import
import arvados
import io
import logging
import mock
import Queue
-import run_test_server
+from . import run_test_server
import threading
import time
import unittest
-import arvados_testutil
+from . import arvados_testutil
class WebsocketTest(run_test_server.TestCaseWithServers):
MAIN_SERVER = {}
if self.ws:
self.ws.close()
except Exception as e:
- print("Error in teardown: ", e)
+ print("Error in teardown: ", e)
super(WebsocketTest, self).tearDown()
run_test_server.reset()
+from __future__ import absolute_import
import hashlib
import mock
import os
import arvados
import arvados.retry
-import arvados_testutil as tutil
-import keepstub
-import run_test_server
+from . import arvados_testutil as tutil
+from . import keepstub
+from . import run_test_server
class KeepTestCase(run_test_server.TestCaseWithServers):
MAIN_SERVER = {}
+from __future__ import absolute_import
# usage example:
#
# ARVADOS_API_TOKEN=abc ARVADOS_API_HOST=arvados.local python -m unittest discover
import unittest
import arvados
import apiclient
-import run_test_server
+from . import run_test_server
class PipelineTemplateTest(run_test_server.TestCaseWithServers):
MAIN_SERVER = {}
#!/usr/bin/env python
+from __future__ import absolute_import
import mock
import os
import unittest
import hashlib
-import run_test_server
+from . import run_test_server
import json
import arvados
-import arvados_testutil as tutil
+from . import arvados_testutil as tutil
from apiclient import http as apiclient_http
#!/usr/bin/env python
+from __future__ import absolute_import
import bz2
import gzip
import io
from arvados import StreamReader, StreamFileReader
from arvados._ranges import Range
-import arvados_testutil as tutil
-import run_test_server
+from . import arvados_testutil as tutil
+from . import run_test_server
class StreamFileReaderTestCase(unittest.TestCase):
def make_count_reader(self):