X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/0e21fa5e4df50ca201474fed35c4055945beceaf..441533f5d2e2c3c4079ea63ae0408078a0ca1bfa:/sdk/python/arvados.py

diff --git a/sdk/python/arvados.py b/sdk/python/arvados.py
index a1c7de9984..166fbd14ef 100644
--- a/sdk/python/arvados.py
+++ b/sdk/python/arvados.py
@@ -1,4 +1,5 @@
 import gflags
+import httplib
 import httplib2
 import logging
 import os
@@ -14,10 +15,14 @@ import string
 import bz2
 import zlib
 import fcntl
+import time

 from apiclient import errors
 from apiclient.discovery import build

+if 'ARVADOS_DEBUG' in os.environ:
+    logging.basicConfig(level=logging.DEBUG)
+
 class CredentialsFromEnv:
     @staticmethod
     def http_request(self, uri, **kwargs):
@@ -43,9 +48,17 @@ class CredentialsFromEnv:
 url = ('https://%s/discovery/v1/apis/'
        '{api}/{apiVersion}/rest' % os.environ['ARVADOS_API_HOST'])
 credentials = CredentialsFromEnv()
-http = httplib2.Http()
+
+# Use system's CA certificates (if we find them) instead of httplib2's
+ca_certs = '/etc/ssl/certs/ca-certificates.crt'
+if not os.path.exists(ca_certs):
+    ca_certs = None             # use httplib2 default
+
+http = httplib2.Http(ca_certs=ca_certs)
 http = credentials.authorize(http)
-http.disable_ssl_certificate_validation=True
+if re.match(r'(?i)^(true|1|yes)$',
+            os.environ.get('ARVADOS_API_HOST_INSECURE', '')):
+    http.disable_ssl_certificate_validation=True
 service = build("arvados", "v1", http=http, discoveryServiceUrl=url)

 def task_set_output(self,s):
@@ -83,8 +96,8 @@ def api():
     return service

 class JobTask:
-    def __init__(self, parameters=dict(), resource_limits=dict()):
-        print "init jobtask %s %s" % (parameters, resource_limits)
+    def __init__(self, parameters=dict(), runtime_constraints=dict()):
+        print "init jobtask %s %s" % (parameters, runtime_constraints)

 class job_setup:
     @staticmethod
@@ -111,13 +124,39 @@ class job_setup:
                                        ).execute()
             exit(0)

+    @staticmethod
+    def one_task_per_input_stream(if_sequence=0, and_end_task=True):
+        if if_sequence != current_task()['sequence']:
+            return
+        job_input = current_job()['script_parameters']['input']
+        cr = CollectionReader(job_input)
+        for s in cr.all_streams():
+            task_input = s.tokens()
+            new_task_attrs = {
+                'job_uuid': current_job()['uuid'],
+                'created_by_job_task_uuid': current_task()['uuid'],
+                'sequence': if_sequence + 1,
+                'parameters': {
+                    'input':task_input
+                    }
+                }
+            service.job_tasks().create(job_task=json.dumps(new_task_attrs)).execute()
+        if and_end_task:
+            service.job_tasks().update(uuid=current_task()['uuid'],
+                                       job_task=json.dumps({'success':True})
+                                       ).execute()
+            exit(0)
+
 class util:
     @staticmethod
     def run_command(execargs, **kwargs):
+        if 'stdin' not in kwargs:
+            kwargs['stdin'] = subprocess.PIPE
+        if 'stdout' not in kwargs:
+            kwargs['stdout'] = subprocess.PIPE
+        if 'stderr' not in kwargs:
+            kwargs['stderr'] = subprocess.PIPE
         p = subprocess.Popen(execargs, close_fds=True, shell=False,
-                             stdin=subprocess.PIPE,
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE,
                              **kwargs)
         stdoutdata, stderrdata = p.communicate(None)
         if p.returncode != 0:
@@ -136,6 +175,16 @@ class util:
                          cwd=path)
         return path

+    @staticmethod
+    def tar_extractor(path, decompress_flag):
+        return subprocess.Popen(["tar",
+                                 "-C", path,
+                                 ("-x%sf" % decompress_flag),
+                                 "-"],
+                                stdout=None,
+                                stdin=subprocess.PIPE, stderr=sys.stderr,
+                                shell=False, close_fds=True)
+
     @staticmethod
     def tarball_extract(tarball, path):
         """Retrieve a tarball from Keep and extract it to a local
@@ -171,18 +220,15 @@ class util:
                 os.unlink(os.path.join(path, '.locator'))

         for f in CollectionReader(tarball).all_files():
-            decompress_flag = ''
             if re.search('\.(tbz|tar.bz2)$', f.name()):
-                decompress_flag = 'j'
+                p = util.tar_extractor(path, 'j')
             elif re.search('\.(tgz|tar.gz)$', f.name()):
-                decompress_flag = 'z'
-            p = subprocess.Popen(["tar",
-                                  "-C", path,
-                                  ("-x%sf" % decompress_flag),
-                                  "-"],
-                                 stdout=None,
-                                 stdin=subprocess.PIPE, stderr=sys.stderr,
-                                 shell=False, close_fds=True)
+                p = util.tar_extractor(path, 'z')
+            elif re.search('\.tar$', f.name()):
+                p = util.tar_extractor(path, '')
+            else:
+                raise Exception("tarball_extract cannot handle filename %s"
+                                % f.name())
             while True:
                 buf = f.read(2**20)
                 if len(buf) == 0:
@@ -201,13 +247,15 @@ class util:
         return path

     @staticmethod
-    def collection_extract(collection, path, files=[]):
-        """Retrieve a collection from Keep and extract it to a local
-        directory.  Return the absolute path where the collection was
-        extracted.
+    def zipball_extract(zipball, path):
+        """Retrieve a zip archive from Keep and extract it to a local
+        directory.  Return the absolute path where the archive was
+        extracted.  If the top level of the archive contained just one
+        file or directory, return the absolute path of that single
+        item.

-        collection -- collection locator
-        path -- where to extract: absolute, or relative to job tmp
+        zipball -- collection locator
+        path -- where to extract the archive: absolute, or relative to job tmp
         """
         if not re.search('^/', path):
             path = os.path.join(current_job().tmpdir, path)
@@ -219,11 +267,12 @@ class util:
             os.mkdir(path)
         already_have_it = False
         try:
-            if os.readlink(os.path.join(path, '.locator')) == collection:
+            if os.readlink(os.path.join(path, '.locator')) == zipball:
                 already_have_it = True
         except OSError:
             pass

         if not already_have_it:
+            # emulate "rm -f" (i.e., if the file does not exist, we win)
             try:
                 os.unlink(os.path.join(path, '.locator'))
@@ -231,19 +280,157 @@ class util:
             if os.path.exists(os.path.join(path, '.locator')):
                 os.unlink(os.path.join(path, '.locator'))

-        for f in CollectionReader(collection).all_files():
-            if files == [] or f.name() in files:
-                outfile = open(os.path.join(path, f.name()), 'w')
-                while True:
-                    buf = f.read(2**20)
-                    if len(buf) == 0:
-                        break
+        for f in CollectionReader(zipball).all_files():
+            if not re.search('\.zip$', f.name()):
+                raise Exception("zipball_extract cannot handle filename %s"
+                                % f.name())
+            zip_filename = os.path.join(path, os.path.basename(f.name()))
+            zip_file = open(zip_filename, 'wb')
+            while True:
+                buf = f.read(2**20)
+                if len(buf) == 0:
+                    break
+                zip_file.write(buf)
+            zip_file.close()
+
+            p = subprocess.Popen(["unzip",
+                                  "-q", "-o",
+                                  "-d", path,
+                                  zip_filename],
+                                 stdout=None,
+                                 stdin=None, stderr=sys.stderr,
+                                 shell=False, close_fds=True)
+            p.wait()
+            if p.returncode != 0:
+                lockfile.close()
+                raise Exception("unzip exited %d" % p.returncode)
+            os.unlink(zip_filename)
+        os.symlink(zipball, os.path.join(path, '.locator'))
+        tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
+        lockfile.close()
+        if len(tld_extracts) == 1:
+            return os.path.join(path, tld_extracts[0])
+        return path
+
+    @staticmethod
+    def collection_extract(collection, path, files=[], decompress=True):
+        """Retrieve a collection from Keep and extract it to a local
+        directory.  Return the absolute path where the collection was
+        extracted.
+
+        collection -- collection locator
+        path -- where to extract: absolute, or relative to job tmp
+        """
+        if not re.search('^/', path):
+            path = os.path.join(current_job().tmpdir, path)
+        lockfile = open(path + '.lock', 'w')
+        fcntl.flock(lockfile, fcntl.LOCK_EX)
+        try:
+            os.stat(path)
+        except OSError:
+            os.mkdir(path)
+        already_have_it = False
+        try:
+            if os.readlink(os.path.join(path, '.locator')) == collection:
+                already_have_it = True
+        except OSError:
+            pass
+
+        # emulate "rm -f" (i.e., if the file does not exist, we win)
+        try:
+            os.unlink(os.path.join(path, '.locator'))
+        except OSError:
+            if os.path.exists(os.path.join(path, '.locator')):
+                os.unlink(os.path.join(path, '.locator'))
+
+        files_got = []
+        for s in CollectionReader(collection).all_streams():
+            stream_name = s.name()
+            for f in s.all_files():
+                if (files == [] or
+                    ((f.name() not in files_got) and
+                     (f.name() in files or
+                      (decompress and f.decompressed_name() in files)))):
+                    outname = f.decompressed_name() if decompress else f.name()
+                    files_got += [outname]
+                    if os.path.exists(os.path.join(path, stream_name, outname)):
+                        continue
+                    util.mkdir_dash_p(os.path.dirname(os.path.join(path, stream_name, outname)))
+                    outfile = open(os.path.join(path, stream_name, outname), 'wb')
+                    for buf in (f.readall_decompressed() if decompress
+                                else f.readall()):
                         outfile.write(buf)
                     outfile.close()
-        os.symlink(collection, os.path.join(path, '.locator'))
+        if len(files_got) < len(files):
+            raise Exception("Wanted files %s but only got %s from %s" % (files, files_got, map(lambda z: z.name(), list(CollectionReader(collection).all_files()))))
+        os.symlink(collection, os.path.join(path, '.locator'))
+        lockfile.close()
         return path

+    @staticmethod
+    def mkdir_dash_p(path):
+        if not os.path.exists(path):
+            util.mkdir_dash_p(os.path.dirname(path))
+            try:
+                os.mkdir(path)
+            except OSError:
+                if not os.path.exists(path):
+                    os.mkdir(path)
+
+    @staticmethod
+    def stream_extract(stream, path, files=[], decompress=True):
+        """Retrieve a stream from Keep and extract it to a local
+        directory.  Return the absolute path where the stream was
+        extracted.
+
+        stream -- StreamReader object
+        path -- where to extract: absolute, or relative to job tmp
+        """
+        if not re.search('^/', path):
+            path = os.path.join(current_job().tmpdir, path)
+        lockfile = open(path + '.lock', 'w')
+        fcntl.flock(lockfile, fcntl.LOCK_EX)
+        try:
+            os.stat(path)
+        except OSError:
+            os.mkdir(path)
+
+        files_got = []
+        for f in stream.all_files():
+            if (files == [] or
+                ((f.name() not in files_got) and
+                 (f.name() in files or
+                  (decompress and f.decompressed_name() in files)))):
+                outname = f.decompressed_name() if decompress else f.name()
+                files_got += [outname]
+                if os.path.exists(os.path.join(path, outname)):
+                    os.unlink(os.path.join(path, outname))
+                util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+                outfile = open(os.path.join(path, outname), 'wb')
+                for buf in (f.readall_decompressed() if decompress
+                            else f.readall()):
+                    outfile.write(buf)
+                outfile.close()
+        if len(files_got) < len(files):
+            raise Exception("Wanted files %s but only got %s from %s" %
+                            (files, files_got, map(lambda z: z.name(),
+                                                   list(stream.all_files()))))
+        lockfile.close()
+        return path
+
+    @staticmethod
+    def listdir_recursive(dirname, base=None):
+        allfiles = []
+        for ent in sorted(os.listdir(dirname)):
+            ent_path = os.path.join(dirname, ent)
+            ent_base = os.path.join(base, ent) if base else ent
+            if os.path.isdir(ent_path):
+                allfiles += util.listdir_recursive(ent_path, ent_base)
+            else:
+                allfiles += [ent_base]
+        return allfiles
+
 class DataReader:
     def __init__(self, data_locator):
         self.data_locator = data_locator
@@ -287,7 +474,7 @@ class StreamFileReader:
         data = self._stream.read(min(size, self._size - self._filepos))
         self._filepos += len(data)
         return data
-    def readall(self, size, **kwargs):
+    def readall(self, size=2**20, **kwargs):
         while True:
             data = self.read(size, **kwargs)
             if data == '':
@@ -305,14 +492,20 @@ class StreamFileReader:
             data = decompressor.decompress(decompressor.unconsumed_tail + chunk)
             if data and data != '':
                 yield data
-    def readlines(self, decompress=True):
+    def readall_decompressed(self, size=2**20):
         self._stream.seek(self._pos + self._filepos)
-        if decompress and re.search('\.bz2$', self._name):
-            datasource = self.bunzip2(2**10)
-        elif decompress and re.search('\.gz$', self._name):
-            datasource = self.gunzip(2**10)
+        if re.search('\.bz2$', self._name):
+            return self.bunzip2(size)
+        elif re.search('\.gz$', self._name):
+            return self.gunzip(size)
+        else:
+            return self.readall(size)
+    def readlines(self, decompress=True):
+        if decompress:
+            datasource = self.readall_decompressed()
         else:
-            datasource = self.readall(2**10)
+            self._stream.seek(self._pos + self._filepos)
+            datasource = self.readall()
         data = ''
         for newdata in datasource:
             data += newdata
@@ -355,6 +548,9 @@ class StreamReader:
                 self.files += [[int(pos), int(size), name]]
             else:
                 raise Exception("Invalid manifest format")
+
+    def tokens(self):
+        return self._tokens
     def tokens_for_range(self, range_start, range_size):
         resp = [self._stream_name]
         return_all_tokens = False
@@ -462,8 +658,9 @@ class CollectionReader:
             self._manifest_text = Keep.get(self._manifest_locator)
         self._streams = []
         for stream_line in self._manifest_text.split("\n"):
-            stream_tokens = stream_line.split()
-            self._streams += [stream_tokens]
+            if stream_line != '':
+                stream_tokens = stream_line.split()
+                self._streams += [stream_tokens]
     def all_streams(self):
         self._populate()
         resp = []
@@ -474,6 +671,9 @@ class CollectionReader:
         for s in self.all_streams():
             for f in s.all_files():
                 yield f
+    def manifest_text(self):
+        self._populate()
+        return self._manifest_text

 class CollectionWriter:
     KEEP_BLOCK_SIZE = 2**26
@@ -491,6 +691,31 @@ class CollectionWriter:
         pass
     def __exit__(self):
         self.finish()
+    def write_directory_tree(self,
+                             path, stream_name='.', max_manifest_depth=-1):
+        self.start_new_stream(stream_name)
+        todo = []
+        if max_manifest_depth == 0:
+            dirents = util.listdir_recursive(path)
+        else:
+            dirents = sorted(os.listdir(path))
+        for dirent in dirents:
+            target = os.path.join(path, dirent)
+            if os.path.isdir(target):
+                todo += [[target,
+                          os.path.join(stream_name, dirent),
+                          max_manifest_depth-1]]
+            else:
+                self.start_new_file(dirent)
+                with open(target, 'rb') as f:
+                    while True:
+                        buf = f.read(2**26)
+                        if len(buf) == 0:
+                            break
+                        self.write(buf)
+        self.finish_current_stream()
+        map(lambda x: self.write_directory_tree(*x), todo)
+
     def write(self, newdata):
         self._data_buffer += [newdata]
         self._data_buffer_len += len(newdata)
@@ -502,10 +727,12 @@ class CollectionWriter:
         if data_buffer != '':
             self._current_stream_locators += [Keep.put(data_buffer[0:self.KEEP_BLOCK_SIZE])]
             self._data_buffer = [data_buffer[self.KEEP_BLOCK_SIZE:]]
+            self._data_buffer_len = len(self._data_buffer[0])
     def start_new_file(self, newfilename=None):
         self.finish_current_file()
         self.set_current_file_name(newfilename)
     def set_current_file_name(self, newfilename):
+        newfilename = re.sub(r' ', '\\\\040', newfilename)
         if re.search(r'[ \t\n]', newfilename):
             raise AssertionError("Manifest filenames cannot contain whitespace")
         self._current_file_name = newfilename
@@ -520,7 +747,7 @@ class CollectionWriter:
                                    self._current_stream_length - self._current_file_pos,
                                    self._current_file_name]]
         self._current_file_pos = self._current_stream_length
-    def start_new_stream(self, newstreamname=None):
+    def start_new_stream(self, newstreamname='.'):
         self.finish_current_stream()
         self.set_current_stream_name(newstreamname)
     def set_current_stream_name(self, newstreamname):
@@ -552,6 +779,8 @@ class CollectionWriter:
         self.finish_current_stream()
         manifest = ''
         for stream in self._finished_streams:
+            if not re.search(r'^\.(/.*)?$', stream[0]):
+                manifest += './'
             manifest += stream[0]
             if len(stream[1]) == 0:
                 manifest += " d41d8cd98f00b204e9800998ecf8427e+0"
@@ -563,40 +792,119 @@ class CollectionWriter:
             manifest += "\n"
         return manifest

+global_client_object = None
+
 class Keep:
     @staticmethod
-    def put(data):
-        if 'KEEP_LOCAL_STORE' in os.environ:
-            return Keep.local_store_put(data)
-        p = subprocess.Popen(["whput", "-"],
-                             stdout=subprocess.PIPE,
-                             stdin=subprocess.PIPE,
-                             stderr=subprocess.PIPE,
-                             shell=False, close_fds=True)
-        stdoutdata, stderrdata = p.communicate(data)
-        if p.returncode != 0:
-            raise Exception("whput subprocess exited %d - stderr:\n%s" % (p.returncode, stderrdata))
-        return stdoutdata.rstrip()
+    def global_client_object():
+        global global_client_object
+        if global_client_object == None:
+            global_client_object = KeepClient()
+        return global_client_object
+
     @staticmethod
     def get(locator):
+        return Keep.global_client_object().get(locator)
+
+    @staticmethod
+    def put(data):
+        return Keep.global_client_object().put(data)
+
+class KeepClient:
+    def __init__(self):
+        self.service_roots = None
+
+    def shuffled_service_roots(self, hash):
+        if self.service_roots == None:
+            keep_disks = api().keep_disks().list().execute()['items']
+            roots = (("http%s://%s:%d/" %
+                      ('s' if f['service_ssl_flag'] else '',
+                       f['service_host'],
+                       f['service_port']))
+                     for f in keep_disks)
+            self.service_roots = sorted(set(roots))
+            logging.debug(str(self.service_roots))
+        seed = hash
+        pool = self.service_roots[:]
+        pseq = []
+        while len(pool) > 0:
+            if len(seed) < 8:
+                if len(pseq) < len(hash) / 4: # first time around
+                    seed = hash[-4:] + hash
+                else:
+                    seed += hash
+            probe = int(seed[0:8], 16) % len(pool)
+            pseq += [pool[probe]]
+            pool = pool[:probe] + pool[probe+1:]
+            seed = seed[8:]
+        logging.debug(str(pseq))
+        return pseq
+
+    def get(self, locator):
         if 'KEEP_LOCAL_STORE' in os.environ:
-            return Keep.local_store_get(locator)
-        p = subprocess.Popen(["whget", locator, "-"],
-                             stdout=subprocess.PIPE,
-                             stdin=None,
-                             stderr=subprocess.PIPE,
-                             shell=False, close_fds=True)
-        stdoutdata, stderrdata = p.communicate(None)
-        if p.returncode != 0:
-            raise Exception("whget subprocess exited %d - stderr:\n%s" % (p.returncode, stderrdata))
+            return KeepClient.local_store_get(locator)
+        expect_hash = re.sub(r'\+.*', '', locator)
+        for service_root in self.shuffled_service_roots(expect_hash):
+            h = httplib2.Http()
+            url = service_root + expect_hash
+            api_token = os.environ['ARVADOS_API_TOKEN']
+            headers = {'Authorization': "OAuth2 %s" % api_token,
+                       'Accept': 'application/octet-stream'}
+            try:
+                resp, content = h.request(url, 'GET', headers=headers)
+                if re.match(r'^2\d\d$', resp['status']):
+                    m = hashlib.new('md5')
+                    m.update(content)
+                    md5 = m.hexdigest()
+                    if md5 == expect_hash:
+                        return content
+                    logging.warning("Checksum fail: md5(%s) = %s" % (url, md5))
+            except (httplib2.HttpLib2Error, httplib.ResponseNotReady) as e:
+                logging.info("Request fail: GET %s => %s: %s" %
+                             (url, type(e), str(e)))
+        raise Exception("Not found: %s" % expect_hash)
+
+    def put(self, data, **kwargs):
+        if 'KEEP_LOCAL_STORE' in os.environ:
+            return KeepClient.local_store_put(data)
         m = hashlib.new('md5')
-        m.update(stdoutdata)
-        try:
-            if locator.index(m.hexdigest()) == 0:
-                return stdoutdata
-        except ValueError:
-            pass
-        raise Exception("md5 checksum mismatch: md5(get(%s)) == %s" % (locator, m.hexdigest()))
+        m.update(data)
+        data_hash = m.hexdigest()
+        have_copies = 0
+        want_copies = kwargs.get('copies', 2)
+        for service_root in self.shuffled_service_roots(data_hash):
+            h = httplib2.Http()
+            url = service_root + data_hash
+            api_token = os.environ['ARVADOS_API_TOKEN']
+            headers = {'Authorization': "OAuth2 %s" % api_token}
+            try:
+                resp, content = h.request(url, 'PUT',
+                                          headers=headers,
+                                          body=data)
+                if (resp['status'] == '401' and
+                    re.match(r'Timestamp verification failed', content)):
+                    body = self.sign_for_old_server(data_hash, data)
+                    h = httplib2.Http()
+                    resp, content = h.request(url, 'PUT',
+                                              headers=headers,
+                                              body=body)
+                if re.match(r'^2\d\d$', resp['status']):
+                    have_copies += 1
+                    if have_copies == want_copies:
+                        return data_hash + '+' + str(len(data))
+                else:
+                    logging.warning("Request fail: PUT %s => %s %s" %
+                                    (url, resp['status'], content))
+            except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
+                logging.warning("Request fail: PUT %s => %s: %s" %
+                                (url, type(e), str(e)))
+        raise Exception("Write fail for %s: wanted %d but wrote %d" %
+                        (data_hash, want_copies, have_copies))
+
+    def sign_for_old_server(self, data_hash, data):
+        return (("-----BEGIN PGP SIGNED MESSAGE-----\n\n\n%d %s\n-----BEGIN PGP SIGNATURE-----\n\n-----END PGP SIGNATURE-----\n" % (int(time.time()), data_hash)) + data)
+
+
     @staticmethod
     def local_store_put(data):
         m = hashlib.new('md5')
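Usage sketch (illustrative, not part of the patch above): the Keep hunks replace the external whput/whget subprocess calls with an in-process KeepClient reached through the Keep.put/Keep.get static methods. A minimal round trip might look like the sketch below; it assumes the patched module imports cleanly as `arvados` (its import-time API discovery needs ARVADOS_API_HOST and ARVADOS_API_TOKEN pointing at a reachable API server), and it uses the KEEP_LOCAL_STORE switch shown in the code to keep block reads and writes on the local filesystem instead of contacting Keep servers. The '/tmp/keep' directory is a hypothetical scratch location, not anything mandated by the SDK.

    # Minimal sketch, assuming a configured Arvados environment (Python 2).
    import os
    if not os.path.isdir('/tmp/keep'):
        os.mkdir('/tmp/keep')                    # hypothetical local block store
    os.environ['KEEP_LOCAL_STORE'] = '/tmp/keep'  # route Keep I/O to local files

    import arvados

    data = 'hello keep\n'
    locator = arvados.Keep.put(data)   # content-addressed (md5-based) locator
    assert arvados.Keep.get(locator) == data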