Add arvados.getjobparam() convenience function.
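
A minimal usage sketch from a crunch script (the parameter names are hypothetical; the optional second argument is passed straight through to dict.get() as a default):

import arvados

# 'input' and 'min_quality' are made-up script parameters
input_locator = arvados.getjobparam('input')
min_quality = arvados.getjobparam('min_quality', 30)   # falls back to 30 if unset
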
diff --git a/sdk/python/arvados.py b/sdk/python/arvados.py
index ebe6bf3121aa07d11292c71d34d9b3ced4074fc4..f4fe0707865c22e37cd6e4eab317d31723d13372 100644
--- a/sdk/python/arvados.py
+++ b/sdk/python/arvados.py
@@ -24,7 +24,7 @@ from apiclient.discovery import build
 if 'ARVADOS_DEBUG' in os.environ:
     logging.basicConfig(level=logging.DEBUG)
 
-class CredentialsFromEnv:
+class CredentialsFromEnv(object):
     @staticmethod
     def http_request(self, uri, **kwargs):
         from httplib import BadStatusLine
@@ -93,10 +93,13 @@ def current_job():
     _current_job = t
     return t
 
+def getjobparam(*args):
+    return current_job()['script_parameters'].get(*args)
+
 def api():
     return service
 
-class JobTask:
+class JobTask(object):
     def __init__(self, parameters=dict(), runtime_constraints=dict()):
         print "init jobtask %s %s" % (parameters, runtime_constraints)
 
@@ -149,16 +152,29 @@ class job_setup:
             exit(0)
 
 class util:
+    @staticmethod
+    def clear_tmpdir(path=None):
+        """
+        Ensure the given directory (or TASK_TMPDIR if none given)
+        exists and is empty.
+        """
+        if path is None:
+            path = current_task().tmpdir
+        if os.path.exists(path):
+            p = subprocess.Popen(['rm', '-rf', path], stderr=subprocess.PIPE)
+            stdout, stderr = p.communicate(None)
+            if p.returncode != 0:
+                raise Exception('rm -rf %s: %s' % (path, stderr))
+        os.mkdir(path)
+
     @staticmethod
     def run_command(execargs, **kwargs):
-        if 'stdin' not in kwargs:
-            kwargs['stdin'] = subprocess.PIPE
-        if 'stdout' not in kwargs:
-            kwargs['stdout'] = subprocess.PIPE
-        if 'stderr' not in kwargs:
-            kwargs['stderr'] = subprocess.PIPE
-        p = subprocess.Popen(execargs, close_fds=True, shell=False,
-                             **kwargs)
+        kwargs.setdefault('stdin', subprocess.PIPE)
+        kwargs.setdefault('stdout', subprocess.PIPE)
+        kwargs.setdefault('stderr', sys.stderr)
+        kwargs.setdefault('close_fds', True)
+        kwargs.setdefault('shell', False)
+        p = subprocess.Popen(execargs, **kwargs)
         stdoutdata, stderrdata = p.communicate(None)
         if p.returncode != 0:
             raise Exception("run_command %s exit %d:\n%s" %
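
To illustrate the new util helpers and run_command defaults (the command and file name below are made up, not part of this commit):

import subprocess
import arvados

# wipe and recreate the task scratch directory before writing output
arvados.util.clear_tmpdir()

# stdin/stdout now default to PIPE, stderr to sys.stderr, close_fds to True;
# individual defaults can still be overridden per call
arvados.util.run_command(['gzip', '-t', 'input.gz'], stderr=subprocess.PIPE)
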
@@ -322,6 +338,11 @@ class util:
         collection -- collection locator
         path -- where to extract: absolute, or relative to job tmp
         """
+        matches = re.search(r'^([0-9a-f]+)(\+[\w@]+)*$', collection)
+        if matches:
+            collection_hash = matches.group(1)
+        else:
+            collection_hash = hashlib.md5(collection).hexdigest()
         if not re.search('^/', path):
             path = os.path.join(current_job().tmpdir, path)
         lockfile = open(path + '.lock', 'w')
@@ -332,7 +353,7 @@ class util:
             os.mkdir(path)
         already_have_it = False
         try:
-            if os.readlink(os.path.join(path, '.locator')) == collection:
+            if os.readlink(os.path.join(path, '.locator')) == collection_hash:
                 already_have_it = True
         except OSError:
             pass
@@ -364,7 +385,7 @@ class util:
                     outfile.close()
         if len(files_got) < len(files):
             raise Exception("Wanted files %s but only got %s from %s" % (files, files_got, map(lambda z: z.name(), list(CollectionReader(collection).all_files()))))
-        os.symlink(collection, os.path.join(path, '.locator'))
+        os.symlink(collection_hash, os.path.join(path, '.locator'))
 
         lockfile.close()
         return path
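
The new locator handling, shown on its own (the locator value is illustrative): a bare or hinted content hash contributes its md5 part to the .locator symlink, while anything else, such as raw manifest text, is reduced to the md5 of the string itself:

import hashlib
import re

collection = 'acbd18db4cc2f85cedef654fccc4a4d8+3+K@example'   # hypothetical locator
matches = re.search(r'^([0-9a-f]+)(\+[\w@]+)*$', collection)
collection_hash = (matches.group(1) if matches
                   else hashlib.md5(collection).hexdigest())
# collection_hash == 'acbd18db4cc2f85cedef654fccc4a4d8'
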
@@ -432,7 +453,7 @@ class util:
                 allfiles += [ent_base]
         return allfiles
 
-class StreamFileReader:
+class StreamFileReader(object):
     def __init__(self, stream, pos, size, name):
         self._stream = stream
         self._pos = pos
@@ -504,7 +525,7 @@ class StreamFileReader:
         return string.join(self._stream.tokens_for_range(self._pos, self._size),
                            " ") + "\n"
 
-class StreamReader:
+class StreamReader(object):
     def __init__(self, tokens):
         self._tokens = tokens
         self._current_datablock_data = None
@@ -616,7 +637,7 @@ class StreamReader:
         self._pos += len(data)
         return data
 
-class CollectionReader:
+class CollectionReader(object):
     def __init__(self, manifest_locator_or_text):
         if re.search(r'^\S+( [a-f0-9]{32,}(\+\S+)*)+( \d+:\d+:\S+)+\n', manifest_locator_or_text):
             self._manifest_text = manifest_locator_or_text
@@ -653,7 +674,7 @@ class CollectionReader:
         self._populate()
         return self._manifest_text
 
-class CollectionWriter:
+class CollectionWriter(object):
     KEEP_BLOCK_SIZE = 2**26
     def __init__(self):
         self._data_buffer = []
@@ -674,7 +695,7 @@ class CollectionWriter:
         self.start_new_stream(stream_name)
         todo = []
         if max_manifest_depth == 0:
-            dirents = util.listdir_recursive(path)
+            dirents = sorted(util.listdir_recursive(path))
         else:
             dirents = sorted(os.listdir(path))
         for dirent in dirents:
@@ -695,6 +716,10 @@ class CollectionWriter:
         map(lambda x: self.write_directory_tree(*x), todo)
 
     def write(self, newdata):
+        if hasattr(newdata, '__iter__'):
+            for s in newdata:
+                self.write(s)
+            return
         self._data_buffer += [newdata]
         self._data_buffer_len += len(newdata)
         self._current_stream_length += len(newdata)
@@ -731,7 +756,7 @@ class CollectionWriter:
     def set_current_stream_name(self, newstreamname):
         if re.search(r'[ \t\n]', newstreamname):
             raise AssertionError("Manifest stream names cannot contain whitespace")
-        self._current_stream_name = newstreamname
+        self._current_stream_name = '.' if newstreamname=='' else newstreamname
     def current_stream_name(self):
         return self._current_stream_name
     def finish_current_stream(self):
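
A short sketch of the two CollectionWriter changes above (output values are illustrative and file-name bookkeeping is omitted): write() now flattens any iterable of strings, and an empty stream name is stored as '.':

import arvados

cw = arvados.CollectionWriter()
cw.set_current_stream_name('')                    # recorded as '.' instead of ''
cw.write(chunk for chunk in ('foo\n', 'bar\n'))   # any iterable of strings
cw.write('baz\n')                                 # a single string still works
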
@@ -781,14 +806,102 @@ class Keep:
         return global_client_object
 
     @staticmethod
-    def get(locator):
-        return Keep.global_client_object().get(locator)
+    def get(locator, **kwargs):
+        return Keep.global_client_object().get(locator, **kwargs)
 
     @staticmethod
-    def put(data):
-        return Keep.global_client_object().put(data)
+    def put(data, **kwargs):
+        return Keep.global_client_object().put(data, **kwargs)
+
+class KeepClient(object):
+
+    class ThreadLimiter(object):
+        """
+        Limit the number of threads running at a given time to
+        {desired successes} minus {successes reported}. When successes
+        reported == desired, wake up the remaining threads and tell
+        them to quit.
+
+        Should be used in a "with" block.
+        """
+        def __init__(self, todo):
+            self._todo = todo
+            self._done = 0
+            self._todo_lock = threading.Semaphore(todo)
+            self._done_lock = threading.Lock()
+        def __enter__(self):
+            self._todo_lock.acquire()
+            return self
+        def __exit__(self, type, value, traceback):
+            self._todo_lock.release()
+        def shall_i_proceed(self):
+            """
+            Return true if the current thread should do stuff. Return
+            false if the current thread should just stop.
+            """
+            with self._done_lock:
+                return (self._done < self._todo)
+        def increment_done(self):
+            """
+            Report that the current thread was successful.
+            """
+            with self._done_lock:
+                self._done += 1
+        def done(self):
+            """
+            Return how many successes were reported.
+            """
+            with self._done_lock:
+                return self._done
+
+    class KeepWriterThread(threading.Thread):
+        """
+        Write a blob of data to the given Keep server. Call
+        increment_done() of the given ThreadLimiter if the write
+        succeeds.
+        """
+        def __init__(self, **kwargs):
+            super(KeepClient.KeepWriterThread, self).__init__()
+            self.args = kwargs
+        def run(self):
+            with self.args['thread_limiter'] as limiter:
+                if not limiter.shall_i_proceed():
+                    # My turn arrived, but the job has been done without
+                    # me.
+                    return
+                logging.debug("KeepWriterThread %s proceeding %s %s" %
+                              (str(threading.current_thread()),
+                               self.args['data_hash'],
+                               self.args['service_root']))
+                h = httplib2.Http()
+                url = self.args['service_root'] + self.args['data_hash']
+                api_token = os.environ['ARVADOS_API_TOKEN']
+                headers = {'Authorization': "OAuth2 %s" % api_token}
+                try:
+                    resp, content = h.request(url.encode('utf-8'), 'PUT',
+                                              headers=headers,
+                                              body=self.args['data'])
+                    if (resp['status'] == '401' and
+                        re.match(r'Timestamp verification failed', content)):
+                        body = KeepClient.sign_for_old_server(
+                            self.args['data_hash'],
+                            self.args['data'])
+                        h = httplib2.Http()
+                        resp, content = h.request(url.encode('utf-8'), 'PUT',
+                                                  headers=headers,
+                                                  body=body)
+                    if re.match(r'^2\d\d$', resp['status']):
+                        logging.debug("KeepWriterThread %s succeeded %s %s" %
+                                      (str(threading.current_thread()),
+                                       self.args['data_hash'],
+                                       self.args['service_root']))
+                        return limiter.increment_done()
+                    logging.warning("Request fail: PUT %s => %s %s" %
+                                    (url, resp['status'], content))
+                except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
+                    logging.warning("Request fail: PUT %s => %s: %s" %
+                                    (url, type(e), str(e)))
 
-class KeepClient:
     def __init__(self):
         self.lock = threading.Lock()
         self.service_roots = None
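
The coordination pattern ThreadLimiter provides, in isolation (the worker body is a placeholder):

limiter = KeepClient.ThreadLimiter(2)     # stop once two successes are reported

def write_one_copy():
    with limiter as lim:
        if not lim.shall_i_proceed():
            return                        # enough copies written by other threads
        # ... attempt a single write here ...
        lim.increment_done()              # report this thread's success
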
@@ -832,7 +945,8 @@ class KeepClient:
             headers = {'Authorization': "OAuth2 %s" % api_token,
                        'Accept': 'application/octet-stream'}
             try:
-                resp, content = h.request(url, 'GET', headers=headers)
+                resp, content = h.request(url.encode('utf-8'), 'GET',
+                                          headers=headers)
                 if re.match(r'^2\d\d$', resp['status']):
                     m = hashlib.new('md5')
                     m.update(content)
@@ -853,36 +967,27 @@ class KeepClient:
         data_hash = m.hexdigest()
         have_copies = 0
         want_copies = kwargs.get('copies', 2)
+        if not (want_copies > 0):
+            return data_hash
+        threads = []
+        thread_limiter = KeepClient.ThreadLimiter(want_copies)
         for service_root in self.shuffled_service_roots(data_hash):
-            h = httplib2.Http()
-            url = service_root + data_hash
-            api_token = os.environ['ARVADOS_API_TOKEN']
-            headers = {'Authorization': "OAuth2 %s" % api_token}
-            try:
-                resp, content = h.request(url, 'PUT',
-                                          headers=headers,
-                                          body=data)
-                if (resp['status'] == '401' and
-                    re.match(r'Timestamp verification failed', content)):
-                    body = self.sign_for_old_server(data_hash, data)
-                    h = httplib2.Http()
-                    resp, content = h.request(url, 'PUT',
-                                              headers=headers,
-                                              body=body)
-                if re.match(r'^2\d\d$', resp['status']):
-                    have_copies += 1
-                    if have_copies == want_copies:
-                        return data_hash + '+' + str(len(data))
-                else:
-                    logging.warning("Request fail: PUT %s => %s %s" %
-                                    (url, resp['status'], content))
-            except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
-                logging.warning("Request fail: PUT %s => %s: %s" %
-                                (url, type(e), str(e)))
+            t = KeepClient.KeepWriterThread(data=data,
+                                            data_hash=data_hash,
+                                            service_root=service_root,
+                                            thread_limiter=thread_limiter)
+            t.start()
+            threads += [t]
+        for t in threads:
+            t.join()
+        have_copies = thread_limiter.done()
+        if have_copies == want_copies:
+            return (data_hash + '+' + str(len(data)))
         raise Exception("Write fail for %s: wanted %d but wrote %d" %
                         (data_hash, want_copies, have_copies))
 
-    def sign_for_old_server(self, data_hash, data):
+    @staticmethod
+    def sign_for_old_server(data_hash, data):
         return (("-----BEGIN PGP SIGNED MESSAGE-----\n\n\n%d %s\n-----BEGIN PGP SIGNATURE-----\n\n-----END PGP SIGNATURE-----\n" % (int(time.time()), data_hash)) + data)
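
With kwargs now passed through the Keep convenience wrappers, a caller can request a replication level directly (the data and copy count are illustrative):

import arvados

# ask for two replicas; on success put() returns '<md5 hash>+<byte count>'
locator = arvados.Keep.put('hello world\n', copies=2)
data = arvados.Keep.get(locator.split('+')[0])    # fetch back by content hash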