Merge branch 'master' into 1646-arv-put
[arvados.git] / sdk / python / arvados.py
index a1b947850d70240c879ec06f19476be20e7e3044..8208e836672461aeadfc31d7788ab2e735c8a2c0 100644
@@ -1,4 +1,5 @@
 import gflags
+import httplib
 import httplib2
 import logging
 import os
@@ -14,11 +15,30 @@ import string
 import bz2
 import zlib
 import fcntl
+import time
+import threading
 
 from apiclient import errors
 from apiclient.discovery import build
 
-class CredentialsFromEnv:
+if 'ARVADOS_DEBUG' in os.environ:
+    logging.basicConfig(level=logging.DEBUG)
+
+class errors:
+    class SyntaxError(Exception):
+        pass
+    class AssertionError(Exception):
+        pass
+    class NotFoundError(Exception):
+        pass
+    class CommandFailedError(Exception):
+        pass
+    class KeepWriteError(Exception):
+        pass
+    class NotImplementedError(Exception):
+        pass
+
+class CredentialsFromEnv(object):
     @staticmethod
     def http_request(self, uri, **kwargs):
         from httplib import BadStatusLine
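[Review note] The new errors class above rebinds the name imported by "from apiclient import errors", so apiclient's exception classes are no longer reachable through this module; callers are expected to catch the SDK's own exception types instead. A minimal sketch of the intended usage (the block locator is hypothetical):

    import sys
    import arvados
    try:
        data = arvados.Keep.get('acbd18db4cc2f85cedef654fccc4a4d8+3')
    except arvados.errors.NotFoundError as e:
        print >> sys.stderr, "block not found: %s" % e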
@@ -43,18 +63,26 @@ class CredentialsFromEnv:
 url = ('https://%s/discovery/v1/apis/'
        '{api}/{apiVersion}/rest' % os.environ['ARVADOS_API_HOST'])
 credentials = CredentialsFromEnv()
-http = httplib2.Http()
+
+# Use system's CA certificates (if we find them) instead of httplib2's
+ca_certs = '/etc/ssl/certs/ca-certificates.crt'
+if not os.path.exists(ca_certs):
+    ca_certs = None             # use httplib2 default
+
+http = httplib2.Http(ca_certs=ca_certs)
 http = credentials.authorize(http)
-http.disable_ssl_certificate_validation=True
+if re.match(r'(?i)^(true|1|yes)$',
+            os.environ.get('ARVADOS_API_HOST_INSECURE', '')):
+    http.disable_ssl_certificate_validation=True
 service = build("arvados", "v1", http=http, discoveryServiceUrl=url)
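[Review note] Client setup is now driven entirely by the environment: ARVADOS_API_HOST names the API server, ARVADOS_API_TOKEN authenticates requests, and certificate validation is skipped only when ARVADOS_API_HOST_INSECURE matches true/1/yes. A sketch of the minimum configuration a caller needs before importing this module (host and token values are placeholders):

    import os
    os.environ['ARVADOS_API_HOST'] = 'api.example.arvadosapi.com'
    os.environ['ARVADOS_API_TOKEN'] = 'xxxxxxxxxxxxxxxxxxxxx'
    # Only for development clusters with self-signed certificates:
    os.environ['ARVADOS_API_HOST_INSECURE'] = 'yes'
    import arvados  # the module-level setup above runs at import time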
 
 def task_set_output(self,s):
     service.job_tasks().update(uuid=self['uuid'],
-                               job_task=json.dumps({
-                'output':s,
-                'success':True,
-                'progress':1.0
-                })).execute()
+                               body={
+            'output':s,
+            'success':True,
+            'progress':1.0
+            }).execute()
 
 _current_task = None
 def current_task():
@@ -79,12 +107,15 @@ def current_job():
     _current_job = t
     return t
 
+def getjobparam(*args):
+    return current_job()['script_parameters'].get(*args)
+
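[Review note] getjobparam is shorthand for reading this job's script_parameters; extra arguments pass through to dict.get(), so callers can supply a default. Parameter names here are hypothetical:

    input_locator = getjobparam('input')
    min_quality = getjobparam('min_quality', 30)  # 30 when the parameter is unset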
 def api():
     return service
 
-class JobTask:
-    def __init__(self, parameters=dict(), resource_limits=dict()):
-        print "init jobtask %s %s" % (parameters, resource_limits)
+class JobTask(object):
+    def __init__(self, parameters=dict(), runtime_constraints=dict()):
+        print "init jobtask %s %s" % (parameters, runtime_constraints)
 
 class job_setup:
     @staticmethod
@@ -104,10 +135,10 @@ class job_setup:
                         'input':task_input
                         }
                     }
-                service.job_tasks().create(job_task=json.dumps(new_task_attrs)).execute()
+                service.job_tasks().create(body=new_task_attrs).execute()
         if and_end_task:
             service.job_tasks().update(uuid=current_task()['uuid'],
-                                       job_task=json.dumps({'success':True})
+                                       body={'success':True}
                                        ).execute()
             exit(0)
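[Review note] This is the fan-out idiom for crunch scripts: called with if_sequence=0, the method queues one new task per input file and, because and_end_task is true, marks the current sequence-0 task successful and exits. A sketch of the conventional call at the top of a script (assuming this method is job_setup.one_task_per_input_file, per the surrounding class):

    import arvados
    arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True)
    this_task = arvados.current_task()  # only sequence-1 tasks reach this line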
 
@@ -127,28 +158,42 @@ class job_setup:
                     'input':task_input
                     }
                 }
-            service.job_tasks().create(job_task=json.dumps(new_task_attrs)).execute()
+            service.job_tasks().create(body=new_task_attrs).execute()
         if and_end_task:
             service.job_tasks().update(uuid=current_task()['uuid'],
-                                       job_task=json.dumps({'success':True})
+                                       body={'success':True}
                                        ).execute()
             exit(0)
 
 class util:
+    @staticmethod
+    def clear_tmpdir(path=None):
+        """
+        Ensure the given directory (or TASK_TMPDIR if none given)
+        exists and is empty.
+        """
+        if path == None:
+            path = current_task().tmpdir
+        if os.path.exists(path):
+            p = subprocess.Popen(['rm', '-rf', path])
+            stdout, stderr = p.communicate(None)
+            if p.returncode != 0:
+                raise Exception('rm -rf %s: %s' % (path, stderr))
+        os.mkdir(path)
+
     @staticmethod
     def run_command(execargs, **kwargs):
-        if 'stdin' not in kwargs:
-            kwargs['stdin'] = subprocess.PIPE
-        if 'stdout' not in kwargs:
-            kwargs['stdout'] = subprocess.PIPE
-        if 'stderr' not in kwargs:
-            kwargs['stderr'] = subprocess.PIPE
-        p = subprocess.Popen(execargs, close_fds=True, shell=False,
-                             **kwargs)
+        kwargs.setdefault('stdin', subprocess.PIPE)
+        kwargs.setdefault('stdout', subprocess.PIPE)
+        kwargs.setdefault('stderr', sys.stderr)
+        kwargs.setdefault('close_fds', True)
+        kwargs.setdefault('shell', False)
+        p = subprocess.Popen(execargs, **kwargs)
         stdoutdata, stderrdata = p.communicate(None)
         if p.returncode != 0:
-            raise Exception("run_command %s exit %d:\n%s" %
-                            (execargs, p.returncode, stderrdata))
+            raise errors.CommandFailedError(
+                "run_command %s exit %d:\n%s" %
+                (execargs, p.returncode, stderrdata))
         return stdoutdata, stderrdata
 
     @staticmethod
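[Review note] With these defaults run_command returns the child's stdout while its stderr now flows straight to the worker's own stderr, so the second value in the returned tuple is None unless the caller overrides stderr. A nonzero exit becomes a catchable CommandFailedError (the command shown is hypothetical):

    try:
        stdout, _ = util.run_command(['md5sum', '/etc/hostname'])
    except errors.CommandFailedError as e:
        print >> sys.stderr, str(e)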
@@ -208,14 +253,14 @@ class util:
 
             for f in CollectionReader(tarball).all_files():
                 if re.search('\.(tbz|tar.bz2)$', f.name()):
-                    p = tar_extractor(path, 'j')
+                    p = util.tar_extractor(path, 'j')
                 elif re.search('\.(tgz|tar.gz)$', f.name()):
-                    p = tar_extractor(path, 'z')
+                    p = util.tar_extractor(path, 'z')
                 elif re.search('\.tar$', f.name()):
-                    p = tar_extractor(path, '')
+                    p = util.tar_extractor(path, '')
                 else:
-                    raise Exception("tarball_extract cannot handle filename %s"
-                                    % f.name())
+                    raise errors.AssertionError(
+                        "tarball_extract cannot handle filename %s" % f.name())
                 while True:
                     buf = f.read(2**20)
                     if len(buf) == 0:
@@ -225,7 +270,8 @@ class util:
                 p.wait()
                 if p.returncode != 0:
                     lockfile.close()
-                    raise Exception("tar exited %d" % p.returncode)
+                    raise errors.CommandFailedError(
+                        "tar exited %d" % p.returncode)
             os.symlink(tarball, os.path.join(path, '.locator'))
         tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
         lockfile.close()
@@ -269,8 +315,8 @@ class util:
 
             for f in CollectionReader(zipball).all_files():
                 if not re.search('\.zip$', f.name()):
-                    raise Exception("zipball_extract cannot handle filename %s"
-                                    % f.name())
+                    raise errors.NotImplementedError(
+                        "zipball_extract cannot handle filename %s" % f.name())
                 zip_filename = os.path.join(path, os.path.basename(f.name()))
                 zip_file = open(zip_filename, 'wb')
                 while True:
@@ -290,7 +336,8 @@ class util:
                 p.wait()
                 if p.returncode != 0:
                     lockfile.close()
-                    raise Exception("unzip exited %d" % p.returncode)
+                    raise errors.CommandFailedError(
+                        "unzip exited %d" % p.returncode)
                 os.unlink(zip_filename)
             os.symlink(zipball, os.path.join(path, '.locator'))
         tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
@@ -308,6 +355,11 @@ class util:
         collection -- collection locator
         path -- where to extract: absolute, or relative to job tmp
         """
+        matches = re.search(r'^([0-9a-f]+)(\+[\w@]+)*$', collection)
+        if matches:
+            collection_hash = matches.group(1)
+        else:
+            collection_hash = hashlib.md5(collection).hexdigest()
         if not re.search('^/', path):
             path = os.path.join(current_job().tmpdir, path)
         lockfile = open(path + '.lock', 'w')
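[Review note] The new regex splits a collection locator into its md5 hash plus optional hint fields, so the '.locator' symlink stays stable regardless of hints; input that does not look like a locator (e.g. raw manifest text) is md5-hashed to get an equally stable cache key. For example:

    m = re.search(r'^([0-9a-f]+)(\+[\w@]+)*$',
                  'acbd18db4cc2f85cedef654fccc4a4d8+3+K@xyzzy')
    m.group(1)  # -> 'acbd18db4cc2f85cedef654fccc4a4d8'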
@@ -318,7 +370,7 @@ class util:
             os.mkdir(path)
         already_have_it = False
         try:
-            if os.readlink(os.path.join(path, '.locator')) == collection:
+            if os.readlink(os.path.join(path, '.locator')) == collection_hash:
                 already_have_it = True
         except OSError:
             pass
@@ -331,23 +383,29 @@ class util:
                 os.unlink(os.path.join(path, '.locator'))
 
         files_got = []
-        for f in CollectionReader(collection).all_files():
-            if (files == [] or
-                ((f.name() not in files_got) and
-                 (f.name() in files or
-                  (decompress and f.decompressed_name() in files)))):
-                outname = f.decompressed_name() if decompress else f.name()
-                files_got += [outname]
-                if os.path.exists(os.path.join(path, outname)):
-                    continue
-                outfile = open(os.path.join(path, outname), 'wb')
-                for buf in (f.readall_decompressed() if decompress
-                            else f.readall()):
-                    outfile.write(buf)
-                outfile.close()
+        for s in CollectionReader(collection).all_streams():
+            stream_name = s.name()
+            for f in s.all_files():
+                if (files == [] or
+                    ((f.name() not in files_got) and
+                     (f.name() in files or
+                      (decompress and f.decompressed_name() in files)))):
+                    outname = f.decompressed_name() if decompress else f.name()
+                    files_got += [outname]
+                    if os.path.exists(os.path.join(path, stream_name, outname)):
+                        continue
+                    util.mkdir_dash_p(os.path.dirname(os.path.join(path, stream_name, outname)))
+                    outfile = open(os.path.join(path, stream_name, outname), 'wb')
+                    for buf in (f.readall_decompressed() if decompress
+                                else f.readall()):
+                        outfile.write(buf)
+                    outfile.close()
         if len(files_got) < len(files):
-            raise Exception("Wanted files %s but only got %s from %s" % (files, files_got, map(lambda z: z.name(), list(CollectionReader(collection).all_files()))))
-        os.symlink(collection, os.path.join(path, '.locator'))
+            raise errors.AssertionError(
+                "Wanted files %s but only got %s from %s" %
+                (files, files_got,
+                 [z.name() for z in CollectionReader(collection).all_files()]))
+        os.symlink(collection_hash, os.path.join(path, '.locator'))
 
         lockfile.close()
         return path
@@ -355,7 +413,7 @@ class util:
     @staticmethod
     def mkdir_dash_p(path):
         if not os.path.exists(path):
-            mkdir_dash_p(os.dirname(path))
+            util.mkdir_dash_p(os.path.dirname(path))
             try:
                 os.mkdir(path)
             except OSError:
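[Review note] This hunk fixes two latent bugs in the recursive case: the bare mkdir_dash_p call raised NameError at runtime (class-body names are not in scope inside a staticmethod), and os.dirname does not exist; the correct name is os.path.dirname. The except OSError around os.mkdir deliberately ignores the race where another process creates the directory first. Usage sketch (path is hypothetical):

    util.mkdir_dash_p('/tmp/job-scratch/logs/2013-04')  # behaves like mkdir -p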
@@ -389,44 +447,33 @@ class util:
                 outname = f.decompressed_name() if decompress else f.name()
                 files_got += [outname]
                 if os.path.exists(os.path.join(path, outname)):
-                    continue
-                mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+                    os.unlink(os.path.join(path, outname))
+                util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
                 outfile = open(os.path.join(path, outname), 'wb')
                 for buf in (f.readall_decompressed() if decompress
                             else f.readall()):
                     outfile.write(buf)
                 outfile.close()
         if len(files_got) < len(files):
-            raise Exception("Wanted files %s but only got %s from %s" %
-                            (files, files_got, map(lambda z: z.name(),
-                                                   list(stream.all_files()))))
+            raise errors.AssertionError(
+                "Wanted files %s but only got %s from %s" %
+                (files, files_got, [z.name() for z in stream.all_files()]))
         lockfile.close()
         return path
 
-class DataReader:
-    def __init__(self, data_locator):
-        self.data_locator = data_locator
-        self.p = subprocess.Popen(["whget", "-r", self.data_locator, "-"],
-                                  stdout=subprocess.PIPE,
-                                  stdin=None, stderr=subprocess.PIPE,
-                                  shell=False, close_fds=True)
-    def __enter__(self):
-        pass
-    def __exit__(self):
-        self.close()
-    def read(self, size, **kwargs):
-        return self.p.stdout.read(size, **kwargs)
-    def close(self):
-        self.p.stdout.close()
-        if not self.p.stderr.closed:
-            for err in self.p.stderr:
-                print >> sys.stderr, err
-            self.p.stderr.close()
-        self.p.wait()
-        if self.p.returncode != 0:
-            raise Exception("whget subprocess exited %d" % self.p.returncode)
-
-class StreamFileReader:
+    @staticmethod
+    def listdir_recursive(dirname, base=None):
+        allfiles = []
+        for ent in sorted(os.listdir(dirname)):
+            ent_path = os.path.join(dirname, ent)
+            ent_base = os.path.join(base, ent) if base else ent
+            if os.path.isdir(ent_path):
+                allfiles += util.listdir_recursive(ent_path, ent_base)
+            else:
+                allfiles += [ent_base]
+        return allfiles
+
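[Review note] listdir_recursive returns sorted, slash-joined paths relative to dirname, which is the shape write_directory_tree (below) needs when flattening a tree into a single stream. For a hypothetical tree containing a/x.txt and b.txt:

    util.listdir_recursive('/tmp/tree')
    # -> ['a/x.txt', 'b.txt']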
+class StreamFileReader(object):
     def __init__(self, stream, pos, size, name):
         self._stream = stream
         self._pos = pos
@@ -498,7 +545,7 @@ class StreamFileReader:
         return string.join(self._stream.tokens_for_range(self._pos, self._size),
                            " ") + "\n"
 
-class StreamReader:
+class StreamReader(object):
     def __init__(self, tokens):
         self._tokens = tokens
         self._current_datablock_data = None
@@ -519,7 +566,7 @@ class StreamReader:
                 pos, size, name = tok.split(':',2)
                 self.files += [[int(pos), int(size), name]]
             else:
-                raise Exception("Invalid manifest format")
+                raise errors.SyntaxError("Invalid manifest format")
 
     def tokens(self):
         return self._tokens
@@ -610,7 +657,7 @@ class StreamReader:
         self._pos += len(data)
         return data
 
-class CollectionReader:
+class CollectionReader(object):
     def __init__(self, manifest_locator_or_text):
         if re.search(r'^\S+( [a-f0-9]{32,}(\+\S+)*)+( \d+:\d+:\S+)+\n', manifest_locator_or_text):
             self._manifest_text = manifest_locator_or_text
@@ -630,8 +677,9 @@ class CollectionReader:
             self._manifest_text = Keep.get(self._manifest_locator)
         self._streams = []
         for stream_line in self._manifest_text.split("\n"):
-            stream_tokens = stream_line.split()
-            self._streams += [stream_tokens]
+            if stream_line != '':
+                stream_tokens = stream_line.split()
+                self._streams += [stream_tokens]
     def all_streams(self):
         self._populate()
         resp = []
@@ -646,7 +694,7 @@ class CollectionReader:
         self._populate()
         return self._manifest_text
 
-class CollectionWriter:
+class CollectionWriter(object):
     KEEP_BLOCK_SIZE = 2**26
     def __init__(self):
         self._data_buffer = []
@@ -662,7 +710,36 @@ class CollectionWriter:
         pass
     def __exit__(self):
         self.finish()
+    def write_directory_tree(self,
+                             path, stream_name='.', max_manifest_depth=-1):
+        self.start_new_stream(stream_name)
+        todo = []
+        if max_manifest_depth == 0:
+            dirents = sorted(util.listdir_recursive(path))
+        else:
+            dirents = sorted(os.listdir(path))
+        for dirent in dirents:
+            target = os.path.join(path, dirent)
+            if os.path.isdir(target):
+                todo += [[target,
+                          os.path.join(stream_name, dirent),
+                          max_manifest_depth-1]]
+            else:
+                self.start_new_file(dirent)
+                with open(target, 'rb') as f:
+                    while True:
+                        buf = f.read(2**26)
+                        if len(buf) == 0:
+                            break
+                        self.write(buf)
+        self.finish_current_stream()
+        map(lambda x: self.write_directory_tree(*x), todo)
+
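[Review note] write_directory_tree maps a directory onto streams: with the default max_manifest_depth=-1 each subdirectory becomes its own stream (via the todo recursion), while max_manifest_depth=0 flattens everything into one stream using listdir_recursive above. A sketch, with a hypothetical output directory:

    cw = CollectionWriter()
    cw.write_directory_tree('/tmp/job-output', stream_name='.')
    print cw.manifest_text()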
     def write(self, newdata):
+        if hasattr(newdata, '__iter__'):
+            for s in newdata:
+                self.write(s)
+            return
         self._data_buffer += [newdata]
         self._data_buffer_len += len(newdata)
         self._current_stream_length += len(newdata)
@@ -678,8 +755,11 @@ class CollectionWriter:
         self.finish_current_file()
         self.set_current_file_name(newfilename)
     def set_current_file_name(self, newfilename):
+        newfilename = re.sub(r' ', '\\\\040', newfilename)
         if re.search(r'[ \t\n]', newfilename):
-            raise AssertionError("Manifest filenames cannot contain whitespace")
+            raise errors.AssertionError(
+                "Manifest filenames cannot contain whitespace: %s" %
+                newfilename)
         self._current_file_name = newfilename
     def current_file_name(self):
         return self._current_file_name
@@ -687,7 +767,12 @@ class CollectionWriter:
         if self._current_file_name == None:
             if self._current_file_pos == self._current_stream_length:
                 return
-            raise Exception("Cannot finish an unnamed file (%d bytes at offset %d in '%s' stream)" % (self._current_stream_length - self._current_file_pos, self._current_file_pos, self._current_stream_name))
+            raise errors.AssertionError(
+                "Cannot finish an unnamed file " +
+                "(%d bytes at offset %d in '%s' stream)" %
+                (self._current_stream_length - self._current_file_pos,
+                 self._current_file_pos,
+                 self._current_stream_name))
         self._current_stream_files += [[self._current_file_pos,
                                        self._current_stream_length - self._current_file_pos,
                                        self._current_file_name]]
@@ -697,8 +782,9 @@ class CollectionWriter:
         self.set_current_stream_name(newstreamname)
     def set_current_stream_name(self, newstreamname):
         if re.search(r'[ \t\n]', newstreamname):
-            raise AssertionError("Manifest stream names cannot contain whitespace")
-        self._current_stream_name = newstreamname
+            raise errors.AssertionError(
+                "Manifest stream names cannot contain whitespace")
+        self._current_stream_name = '.' if newstreamname=='' else newstreamname
     def current_stream_name(self):
         return self._current_stream_name
     def finish_current_stream(self):
@@ -707,7 +793,9 @@ class CollectionWriter:
         if len(self._current_stream_files) == 0:
             pass
         elif self._current_stream_name == None:
-            raise Exception("Cannot finish an unnamed stream (%d bytes in %d files)" % (self._current_stream_length, len(self._current_stream_files)))
+            raise errors.AssertionError(
+                "Cannot finish an unnamed stream (%d bytes in %d files)" %
+                (self._current_stream_length, len(self._current_stream_files)))
         else:
             self._finished_streams += [[self._current_stream_name,
                                        self._current_stream_locators,
@@ -736,41 +824,209 @@ class CollectionWriter:
                 manifest += " %d:%d:%s" % (sfile[0], sfile[1], sfile[2])
             manifest += "\n"
         return manifest
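[Review note] For reference, each finished stream serializes to one manifest line: the stream name, the data block locators (md5 plus byte count), then position:size:filename triples. A one-file collection holding the 3-byte string "foo" looks like:

    . acbd18db4cc2f85cedef654fccc4a4d8+3 0:3:foo.txt

i.e. stream '.', one 3-byte block, and a file foo.txt occupying bytes 0-2 of the stream.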
+    def data_locators(self):
+        ret = []
+        for name, locators, files in self._finished_streams:
+            ret += locators
+        return ret
+
+global_client_object = None
 
 class Keep:
     @staticmethod
-    def put(data):
-        if 'KEEP_LOCAL_STORE' in os.environ:
-            return Keep.local_store_put(data)
-        p = subprocess.Popen(["whput", "-"],
-                             stdout=subprocess.PIPE,
-                             stdin=subprocess.PIPE,
-                             stderr=subprocess.PIPE,
-                             shell=False, close_fds=True)
-        stdoutdata, stderrdata = p.communicate(data)
-        if p.returncode != 0:
-            raise Exception("whput subprocess exited %d - stderr:\n%s" % (p.returncode, stderrdata))
-        return stdoutdata.rstrip()
+    def global_client_object():
+        global global_client_object
+        if global_client_object == None:
+            global_client_object = KeepClient()
+        return global_client_object
+
     @staticmethod
-    def get(locator):
+    def get(locator, **kwargs):
+        return Keep.global_client_object().get(locator, **kwargs)
+
+    @staticmethod
+    def put(data, **kwargs):
+        return Keep.global_client_object().put(data, **kwargs)
+
+class KeepClient(object):
+
+    class ThreadLimiter(object):
+        """
+        Limit the number of threads running at a given time to
+        {desired successes} minus {successes reported}. When successes
+        reported == desired, wake up the remaining threads and tell
+        them to quit.
+
+        Should be used in a "with" block.
+        """
+        def __init__(self, todo):
+            self._todo = todo
+            self._done = 0
+            self._todo_lock = threading.Semaphore(todo)
+            self._done_lock = threading.Lock()
+        def __enter__(self):
+            self._todo_lock.acquire()
+            return self
+        def __exit__(self, type, value, traceback):
+            self._todo_lock.release()
+        def shall_i_proceed(self):
+            """
+            Return True if the current thread should attempt a
+            write. Return False if enough writes have already
+            succeeded and the thread should exit without writing.
+            """
+            with self._done_lock:
+                return (self._done < self._todo)
+        def increment_done(self):
+            """
+            Report that the current thread was successful.
+            """
+            with self._done_lock:
+                self._done += 1
+        def done(self):
+            """
+            Return how many successes were reported.
+            """
+            with self._done_lock:
+                return self._done
+
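[Review note] In effect the semaphore admits at most {desired successes} writers at a time, and shall_i_proceed lets a newly admitted writer skip its PUT once enough earlier writers have succeeded. A minimal sketch of the contract (do_one_write is a hypothetical stand-in for the HTTP PUT):

    limiter = KeepClient.ThreadLimiter(2)  # want two successful writes
    def worker():
        with limiter as l:
            if not l.shall_i_proceed():
                return                     # goal already met; do nothing
            if do_one_write():
                l.increment_done()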
+    class KeepWriterThread(threading.Thread):
+        """
+        Write a blob of data to the given Keep server. Call
+        increment_done() of the given ThreadLimiter if the write
+        succeeds.
+        """
+        def __init__(self, **kwargs):
+            super(KeepClient.KeepWriterThread, self).__init__()
+            self.args = kwargs
+        def run(self):
+            with self.args['thread_limiter'] as limiter:
+                if not limiter.shall_i_proceed():
+                    # My turn arrived, but the job has been done without
+                    # me.
+                    return
+                logging.debug("KeepWriterThread %s proceeding %s %s" %
+                              (str(threading.current_thread()),
+                               self.args['data_hash'],
+                               self.args['service_root']))
+                h = httplib2.Http()
+                url = self.args['service_root'] + self.args['data_hash']
+                api_token = os.environ['ARVADOS_API_TOKEN']
+                headers = {'Authorization': "OAuth2 %s" % api_token}
+                try:
+                    resp, content = h.request(url.encode('utf-8'), 'PUT',
+                                              headers=headers,
+                                              body=self.args['data'])
+                    if (resp['status'] == '401' and
+                        re.match(r'Timestamp verification failed', content)):
+                        body = KeepClient.sign_for_old_server(
+                            self.args['data_hash'],
+                            self.args['data'])
+                        h = httplib2.Http()
+                        resp, content = h.request(url.encode('utf-8'), 'PUT',
+                                                  headers=headers,
+                                                  body=body)
+                    if re.match(r'^2\d\d$', resp['status']):
+                        logging.debug("KeepWriterThread %s succeeded %s %s" %
+                                      (str(threading.current_thread()),
+                                       self.args['data_hash'],
+                                       self.args['service_root']))
+                        return limiter.increment_done()
+                    logging.warning("Request fail: PUT %s => %s %s" %
+                                    (url, resp['status'], content))
+                except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
+                    logging.warning("Request fail: PUT %s => %s: %s" %
+                                    (url, type(e), str(e)))
+
+    def __init__(self):
+        self.lock = threading.Lock()
+        self.service_roots = None
+
+    def shuffled_service_roots(self, hash):
+        if self.service_roots == None:
+            self.lock.acquire()
+            keep_disks = api().keep_disks().list().execute()['items']
+            roots = (("http%s://%s:%d/" %
+                      ('s' if f['service_ssl_flag'] else '',
+                       f['service_host'],
+                       f['service_port']))
+                     for f in keep_disks)
+            self.service_roots = sorted(set(roots))
+            logging.debug(str(self.service_roots))
+            self.lock.release()
+        seed = hash
+        pool = self.service_roots[:]
+        pseq = []
+        while len(pool) > 0:
+            if len(seed) < 8:
+                if len(pseq) < len(hash) / 4: # first time around
+                    seed = hash[-4:] + hash
+                else:
+                    seed += hash
+            probe = int(seed[0:8], 16) % len(pool)
+            pseq += [pool[probe]]
+            pool = pool[:probe] + pool[probe+1:]
+            seed = seed[8:]
+        logging.debug(str(pseq))
+        return pseq
+
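[Review note] The probe order is a deterministic function of the block hash: successive 8-hex-digit windows of the hash (re-seeded from the hash itself once exhausted) index into a shrinking pool of servers, yielding a permutation that looks random but is identical for every client. Readers therefore look for a block on the same servers, in the same order, that writers chose. Worked example with a hypothetical 3-server pool and a hash beginning '3858f622':

    # probe = int('3858f622', 16) % 3 == 945354274 % 3 == 1,
    # so pool[1] is tried first; it is removed from the pool, the next
    # 8 hex digits pick from the remaining 2 servers, and so on.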
+    def get(self, locator):
         if 'KEEP_LOCAL_STORE' in os.environ:
-            return Keep.local_store_get(locator)
-        p = subprocess.Popen(["whget", locator, "-"],
-                             stdout=subprocess.PIPE,
-                             stdin=None,
-                             stderr=subprocess.PIPE,
-                             shell=False, close_fds=True)
-        stdoutdata, stderrdata = p.communicate(None)
-        if p.returncode != 0:
-            raise Exception("whget subprocess exited %d - stderr:\n%s" % (p.returncode, stderrdata))
+            return KeepClient.local_store_get(locator)
+        expect_hash = re.sub(r'\+.*', '', locator)
+        for service_root in self.shuffled_service_roots(expect_hash):
+            h = httplib2.Http()
+            url = service_root + expect_hash
+            api_token = os.environ['ARVADOS_API_TOKEN']
+            headers = {'Authorization': "OAuth2 %s" % api_token,
+                       'Accept': 'application/octet-stream'}
+            try:
+                resp, content = h.request(url.encode('utf-8'), 'GET',
+                                          headers=headers)
+                if re.match(r'^2\d\d$', resp['status']):
+                    m = hashlib.new('md5')
+                    m.update(content)
+                    md5 = m.hexdigest()
+                    if md5 == expect_hash:
+                        return content
+                    logging.warning("Checksum fail: md5(%s) = %s" % (url, md5))
+            except (httplib2.HttpLib2Error, httplib.ResponseNotReady) as e:
+                logging.info("Request fail: GET %s => %s: %s" %
+                             (url, type(e), str(e)))
+        raise errors.NotFoundError("Block not found: %s" % expect_hash)
+
+    def put(self, data, **kwargs):
+        if 'KEEP_LOCAL_STORE' in os.environ:
+            return KeepClient.local_store_put(data)
         m = hashlib.new('md5')
-        m.update(stdoutdata)
-        try:
-            if locator.index(m.hexdigest()) == 0:
-                return stdoutdata
-        except ValueError:
-            pass
-        raise Exception("md5 checksum mismatch: md5(get(%s)) == %s" % (locator, m.hexdigest()))
+        m.update(data)
+        data_hash = m.hexdigest()
+        have_copies = 0
+        want_copies = kwargs.get('copies', 2)
+        if not (want_copies > 0):
+            return data_hash
+        threads = []
+        thread_limiter = KeepClient.ThreadLimiter(want_copies)
+        for service_root in self.shuffled_service_roots(data_hash):
+            t = KeepClient.KeepWriterThread(data=data,
+                                            data_hash=data_hash,
+                                            service_root=service_root,
+                                            thread_limiter=thread_limiter)
+            t.start()
+            threads += [t]
+        for t in threads:
+            t.join()
+        have_copies = thread_limiter.done()
+        if have_copies == want_copies:
+            return (data_hash + '+' + str(len(data)))
+        raise errors.KeepWriteError(
+            "Write fail for %s: wanted %d but wrote %d" %
+            (data_hash, want_copies, have_copies))
+
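[Review note] put() now returns a locator with the byte count appended, and only after at least want_copies servers have accepted the block; fewer successes raise KeepWriteError rather than silently returning a weaker guarantee. Round-trip sketch ('foo' hashes to the locator shown):

    locator = Keep.put('foo', copies=2)
    # -> 'acbd18db4cc2f85cedef654fccc4a4d8+3'
    assert Keep.get(locator) == 'foo'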
+    @staticmethod
+    def sign_for_old_server(data_hash, data):
+        return (("-----BEGIN PGP SIGNED MESSAGE-----\n\n\n%d %s\n-----BEGIN PGP SIGNATURE-----\n\n-----END PGP SIGNATURE-----\n" % (int(time.time()), data_hash)) + data)
+
+
     @staticmethod
     def local_store_put(data):
         m = hashlib.new('md5')
@@ -786,7 +1042,8 @@ class Keep:
     def local_store_get(locator):
         r = re.search('^([0-9a-f]{32,})', locator)
         if not r:
-            raise Exception("Keep.get: invalid data locator '%s'" % locator)
+            raise errors.NotFoundError(
+                "Invalid data locator: '%s'" % locator)
         if r.group(0) == 'd41d8cd98f00b204e9800998ecf8427e':
             return ''
         with open(os.path.join(os.environ['KEEP_LOCAL_STORE'], r.group(0)), 'r') as f:
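[Review note] The local-store fallback keeps tests hermetic: with KEEP_LOCAL_STORE set, put and get become plain file I/O keyed by md5, and no Keep or API servers are contacted for data. Test-setup sketch:

    import os, tempfile
    os.environ['KEEP_LOCAL_STORE'] = tempfile.mkdtemp()
    loc = Keep.put('test data')
    assert Keep.get(loc) == 'test data'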