import zlib
import fcntl
import time
+import threading
from apiclient import errors
from apiclient.discovery import build
if 'ARVADOS_DEBUG' in os.environ:
logging.basicConfig(level=logging.DEBUG)
-class CredentialsFromEnv:
+# SDK-specific exception types, referenced throughout this change as
+# errors.<Name> (e.g. errors.CommandFailedError in util.run_command).
+# NOTE(review): SyntaxError, AssertionError and NotImplementedError shadow
+# builtin names; they are distinct classes, so "except SyntaxError" on the
+# builtin will NOT catch errors.SyntaxError -- confirm that is intended.
+class errors:
+ class SyntaxError(Exception):
+ pass
+ class AssertionError(Exception):
+ pass
+ class NotFoundError(Exception):
+ pass
+ class CommandFailedError(Exception):
+ pass
+ class KeepWriteError(Exception):
+ pass
+ class NotImplementedError(Exception):
+ pass
+
+class CredentialsFromEnv(object):
@staticmethod
def http_request(self, uri, **kwargs):
from httplib import BadStatusLine
+# Record s as the current task's output and mark the task finished
+# (success=True, progress=1.0). The update call now passes a plain dict as
+# 'body' and lets the API client library serialize it, instead of a
+# pre-serialized job_task=json.dumps(...) parameter.
def task_set_output(self,s):
 service.job_tasks().update(uuid=self['uuid'],
- job_task=json.dumps({
- 'output':s,
- 'success':True,
- 'progress':1.0
- })).execute()
+ body={
+ 'output':s,
+ 'success':True,
+ 'progress':1.0
+ }).execute()
_current_task = None
def current_task():
_current_job = t
return t
+# Look up one of the current job's script_parameters. Extra args are
+# forwarded to dict.get(), so a default may be supplied as the second
+# positional argument: getjobparam('name', default).
+def getjobparam(*args):
+ return current_job()['script_parameters'].get(*args)
+
+# Return the module-level API client object ('service') -- presumably
+# initialized elsewhere in this file; TODO(review) confirm it is built
+# before first use.
def api():
 return service
-class JobTask:
+class JobTask(object):
def __init__(self, parameters=dict(), runtime_constraints=dict()):
print "init jobtask %s %s" % (parameters, runtime_constraints)
'input':task_input
}
}
- service.job_tasks().create(job_task=json.dumps(new_task_attrs)).execute()
+ service.job_tasks().create(body=new_task_attrs).execute()
if and_end_task:
service.job_tasks().update(uuid=current_task()['uuid'],
- job_task=json.dumps({'success':True})
+ body={'success':True}
).execute()
exit(0)
'input':task_input
}
}
- service.job_tasks().create(job_task=json.dumps(new_task_attrs)).execute()
+ service.job_tasks().create(body=new_task_attrs).execute()
if and_end_task:
service.job_tasks().update(uuid=current_task()['uuid'],
- job_task=json.dumps({'success':True})
+ body={'success':True}
).execute()
exit(0)
class util:
+ # Recreates the directory empty: 'rm -rf' via subprocess, then mkdir.
+ # NOTE(review): stderr is not piped (no stderr=subprocess.PIPE), so the
+ # 'stderr' local is always None and the failure message interpolates
+ # None. Also raises bare Exception while sibling helpers in this change
+ # raise errors.CommandFailedError -- confirm both are intended.
+ @staticmethod
+ def clear_tmpdir(path=None):
+ """
+ Ensure the given directory (or TASK_TMPDIR if none given)
+ exists and is empty.
+ """
+ if path == None:
+ path = current_task().tmpdir
+ if os.path.exists(path):
+ p = subprocess.Popen(['rm', '-rf', path])
+ stdout, stderr = p.communicate(None)
+ if p.returncode != 0:
+ raise Exception('rm -rf %s: %s' % (path, stderr))
+ os.mkdir(path)
+
+ # Run execargs through subprocess.Popen and return (stdoutdata,
+ # stderrdata); raises errors.CommandFailedError on nonzero exit.
+ # NOTE(review): the stderr default changed from subprocess.PIPE to
+ # sys.stderr, so stderrdata is now None unless the caller passes
+ # stderr=subprocess.PIPE -- the failure message then interpolates None.
+ # Confirm callers relying on captured stderr are updated.
@staticmethod
 def run_command(execargs, **kwargs):
- if 'stdin' not in kwargs:
- kwargs['stdin'] = subprocess.PIPE
- if 'stdout' not in kwargs:
- kwargs['stdout'] = subprocess.PIPE
- if 'stderr' not in kwargs:
- kwargs['stderr'] = subprocess.PIPE
- p = subprocess.Popen(execargs, close_fds=True, shell=False,
- **kwargs)
+ kwargs.setdefault('stdin', subprocess.PIPE)
+ kwargs.setdefault('stdout', subprocess.PIPE)
+ kwargs.setdefault('stderr', sys.stderr)
+ kwargs.setdefault('close_fds', True)
+ kwargs.setdefault('shell', False)
+ p = subprocess.Popen(execargs, **kwargs)
 stdoutdata, stderrdata = p.communicate(None)
 if p.returncode != 0:
- raise Exception("run_command %s exit %d:\n%s" %
- (execargs, p.returncode, stderrdata))
+ raise errors.CommandFailedError(
+ "run_command %s exit %d:\n%s" %
+ (execargs, p.returncode, stderrdata))
 return stdoutdata, stderrdata
@staticmethod
elif re.search('\.tar$', f.name()):
p = util.tar_extractor(path, '')
else:
- raise Exception("tarball_extract cannot handle filename %s"
- % f.name())
+ raise errors.AssertionError(
+ "tarball_extract cannot handle filename %s" % f.name())
while True:
buf = f.read(2**20)
if len(buf) == 0:
p.wait()
if p.returncode != 0:
lockfile.close()
- raise Exception("tar exited %d" % p.returncode)
+ raise errors.CommandFailedError(
+ "tar exited %d" % p.returncode)
os.symlink(tarball, os.path.join(path, '.locator'))
tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
lockfile.close()
for f in CollectionReader(zipball).all_files():
if not re.search('\.zip$', f.name()):
- raise Exception("zipball_extract cannot handle filename %s"
- % f.name())
+ raise errors.NotImplementedError(
+ "zipball_extract cannot handle filename %s" % f.name())
zip_filename = os.path.join(path, os.path.basename(f.name()))
zip_file = open(zip_filename, 'wb')
while True:
p.wait()
if p.returncode != 0:
lockfile.close()
- raise Exception("unzip exited %d" % p.returncode)
+ raise errors.CommandFailedError(
+ "unzip exited %d" % p.returncode)
os.unlink(zip_filename)
os.symlink(zipball, os.path.join(path, '.locator'))
tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
collection -- collection locator
path -- where to extract: absolute, or relative to job tmp
"""
+ matches = re.search(r'^([0-9a-f]+)(\+[\w@]+)*$', collection)
+ if matches:
+ collection_hash = matches.group(1)
+ else:
+ collection_hash = hashlib.md5(collection).hexdigest()
if not re.search('^/', path):
path = os.path.join(current_job().tmpdir, path)
lockfile = open(path + '.lock', 'w')
os.mkdir(path)
already_have_it = False
try:
- if os.readlink(os.path.join(path, '.locator')) == collection:
+ if os.readlink(os.path.join(path, '.locator')) == collection_hash:
already_have_it = True
except OSError:
pass
outfile.write(buf)
outfile.close()
if len(files_got) < len(files):
- raise Exception("Wanted files %s but only got %s from %s" % (files, files_got, map(lambda z: z.name(), list(CollectionReader(collection).all_files()))))
- os.symlink(collection, os.path.join(path, '.locator'))
+ raise errors.AssertionError(
+ "Wanted files %s but only got %s from %s" %
+ (files, files_got,
+ [z.name() for z in CollectionReader(collection).all_files()]))
+ os.symlink(collection_hash, os.path.join(path, '.locator'))
lockfile.close()
return path
outfile.write(buf)
outfile.close()
if len(files_got) < len(files):
- raise Exception("Wanted files %s but only got %s from %s" %
- (files, files_got, map(lambda z: z.name(),
- list(stream.all_files()))))
+ raise errors.AssertionError(
+ "Wanted files %s but only got %s from %s" %
+ (files, files_got, [z.name() for z in stream.all_files()]))
lockfile.close()
return path
allfiles += [ent_base]
return allfiles
-class DataReader:
- def __init__(self, data_locator):
- self.data_locator = data_locator
- self.p = subprocess.Popen(["whget", "-r", self.data_locator, "-"],
- stdout=subprocess.PIPE,
- stdin=None, stderr=subprocess.PIPE,
- shell=False, close_fds=True)
- def __enter__(self):
- pass
- def __exit__(self):
- self.close()
- def read(self, size, **kwargs):
- return self.p.stdout.read(size, **kwargs)
- def close(self):
- self.p.stdout.close()
- if not self.p.stderr.closed:
- for err in self.p.stderr:
- print >> sys.stderr, err
- self.p.stderr.close()
- self.p.wait()
- if self.p.returncode != 0:
- raise Exception("whget subprocess exited %d" % self.p.returncode)
-
-class StreamFileReader:
+class StreamFileReader(object):
def __init__(self, stream, pos, size, name):
self._stream = stream
self._pos = pos
return string.join(self._stream.tokens_for_range(self._pos, self._size),
" ") + "\n"
-class StreamReader:
+class StreamReader(object):
def __init__(self, tokens):
self._tokens = tokens
self._current_datablock_data = None
pos, size, name = tok.split(':',2)
self.files += [[int(pos), int(size), name]]
else:
- raise Exception("Invalid manifest format")
+ raise errors.SyntaxError("Invalid manifest format")
def tokens(self):
return self._tokens
self._pos += len(data)
return data
-class CollectionReader:
+class CollectionReader(object):
def __init__(self, manifest_locator_or_text):
if re.search(r'^\S+( [a-f0-9]{32,}(\+\S+)*)+( \d+:\d+:\S+)+\n', manifest_locator_or_text):
self._manifest_text = manifest_locator_or_text
self._populate()
return self._manifest_text
-class CollectionWriter:
+class CollectionWriter(object):
KEEP_BLOCK_SIZE = 2**26
def __init__(self):
self._data_buffer = []
self.start_new_stream(stream_name)
todo = []
if max_manifest_depth == 0:
- dirents = util.listdir_recursive(path)
+ dirents = sorted(util.listdir_recursive(path))
else:
dirents = sorted(os.listdir(path))
for dirent in dirents:
map(lambda x: self.write_directory_tree(*x), todo)
+ # write() now also accepts an iterable of chunks, recursing once per
+ # chunk before falling through to the buffering path for scalars.
+ # NOTE(review): under Python 2 (this file uses print statements) a plain
+ # str has no __iter__ attribute, so strings still take the buffering
+ # path below -- confirm no callers pass unicode objects, which do not
+ # define __iter__ either in py2 but should be checked.
 def write(self, newdata):
+ if hasattr(newdata, '__iter__'):
+ for s in newdata:
+ self.write(s)
+ return
 self._data_buffer += [newdata]
 self._data_buffer_len += len(newdata)
 self._current_stream_length += len(newdata)
+# Name the file currently being written. Spaces are escaped to the
+# manifest's \040 form first; any remaining whitespace (tab/newline) is
+# rejected -- the error now uses errors.AssertionError and includes the
+# offending name for easier debugging.
 def set_current_file_name(self, newfilename):
 newfilename = re.sub(r' ', '\\\\040', newfilename)
 if re.search(r'[ \t\n]', newfilename):
- raise AssertionError("Manifest filenames cannot contain whitespace")
+ raise errors.AssertionError(
+ "Manifest filenames cannot contain whitespace: %s" %
+ newfilename)
 self._current_file_name = newfilename
def current_file_name(self):
return self._current_file_name
if self._current_file_name == None:
if self._current_file_pos == self._current_stream_length:
return
- raise Exception("Cannot finish an unnamed file (%d bytes at offset %d in '%s' stream)" % (self._current_stream_length - self._current_file_pos, self._current_file_pos, self._current_stream_name))
+ raise errors.AssertionError(
+ "Cannot finish an unnamed file " +
+ "(%d bytes at offset %d in '%s' stream)" %
+ (self._current_stream_length - self._current_file_pos,
+ self._current_file_pos,
+ self._current_stream_name))
self._current_stream_files += [[self._current_file_pos,
self._current_stream_length - self._current_file_pos,
self._current_file_name]]
self.set_current_stream_name(newstreamname)
+# Set the stream name for subsequent files. Whitespace is rejected via
+# errors.AssertionError. Behavior change: an empty name is now normalized
+# to '.' -- presumably the manifest convention for the root stream; verify
+# against the manifest format spec.
 def set_current_stream_name(self, newstreamname):
 if re.search(r'[ \t\n]', newstreamname):
- raise AssertionError("Manifest stream names cannot contain whitespace")
- self._current_stream_name = newstreamname
+ raise errors.AssertionError(
+ "Manifest stream names cannot contain whitespace")
+ self._current_stream_name = '.' if newstreamname=='' else newstreamname
+# Accessor for the name set by set_current_stream_name ('' maps to '.').
 def current_stream_name(self):
 return self._current_stream_name
def finish_current_stream(self):
if len(self._current_stream_files) == 0:
pass
elif self._current_stream_name == None:
- raise Exception("Cannot finish an unnamed stream (%d bytes in %d files)" % (self._current_stream_length, len(self._current_stream_files)))
+ raise errors.AssertionError(
+ "Cannot finish an unnamed stream (%d bytes in %d files)" %
+ (self._current_stream_length, len(self._current_stream_files)))
else:
self._finished_streams += [[self._current_stream_name,
self._current_stream_locators,
manifest += " %d:%d:%s" % (sfile[0], sfile[1], sfile[2])
manifest += "\n"
return manifest
+ # Flattened list of Keep block locators across all finished streams.
+ # NOTE(review): data in the stream currently being written (not yet
+ # finished) is not included -- callers should finish streams first.
+ def data_locators(self):
+ ret = []
+ for name, locators, files in self._finished_streams:
+ ret += locators
+ return ret
global_client_object = None
return global_client_object
+ # Both wrappers now forward arbitrary keyword options to the shared
+ # client singleton (e.g. put(data, copies=N) -- see KeepClient.put's
+ # want_copies handling).
@staticmethod
- def get(locator):
- return Keep.global_client_object().get(locator)
+ def get(locator, **kwargs):
+ return Keep.global_client_object().get(locator, **kwargs)
 @staticmethod
- def put(data):
- return Keep.global_client_object().put(data)
+ def put(data, **kwargs):
+ return Keep.global_client_object().put(data, **kwargs)
+
+class KeepClient(object):
+
+ class ThreadLimiter(object):
+ """
+ Limit the number of threads running at a given time to
+ {desired successes} minus {successes reported}. When successes
+ reported == desired, wake up the remaining threads and tell
+ them to quit.
+
+ Should be used in a "with" block.
+ """
+ def __init__(self, todo):
+ # todo = number of successes desired; the semaphore also caps
+ # concurrent writers at the same number.
+ # NOTE(review): threading.Semaphore(0) would block every
+ # __enter__ forever; safe here only because put() returns early
+ # when want_copies <= 0 -- keep that guard.
+ self._todo = todo
+ self._done = 0
+ self._todo_lock = threading.Semaphore(todo)
+ self._done_lock = threading.Lock()
+ def __enter__(self):
+ self._todo_lock.acquire()
+ return self
+ def __exit__(self, type, value, traceback):
+ self._todo_lock.release()
+ def shall_i_proceed(self):
+ """
+ Return true if the current thread should do stuff. Return
+ false if the current thread should just stop.
+ """
+ with self._done_lock:
+ return (self._done < self._todo)
+ def increment_done(self):
+ """
+ Report that the current thread was successful.
+ """
+ with self._done_lock:
+ self._done += 1
+ def done(self):
+ """
+ Return how many successes were reported.
+ """
+ with self._done_lock:
+ return self._done
+
+
+ class KeepWriterThread(threading.Thread):
+ """
+ Write a blob of data to the given Keep server. Call
+ increment_done() of the given ThreadLimiter if the write
+ succeeds.
+ """
+ def __init__(self, **kwargs):
+ # Expected kwargs: data, data_hash, service_root, thread_limiter
+ # (see KeepClient.put for construction).
+ super(KeepClient.KeepWriterThread, self).__init__()
+ self.args = kwargs
+ def run(self):
+ with self.args['thread_limiter'] as limiter:
+ if not limiter.shall_i_proceed():
+ # My turn arrived, but the job has been done without
+ # me.
+ return
+ logging.debug("KeepWriterThread %s proceeding %s %s" %
+ (str(threading.current_thread()),
+ self.args['data_hash'],
+ self.args['service_root']))
+ # One HTTP PUT per attempt, authenticated with the OAuth2
+ # token from the environment.
+ h = httplib2.Http()
+ url = self.args['service_root'] + self.args['data_hash']
+ api_token = os.environ['ARVADOS_API_TOKEN']
+ headers = {'Authorization': "OAuth2 %s" % api_token}
+ try:
+ resp, content = h.request(url.encode('utf-8'), 'PUT',
+ headers=headers,
+ body=self.args['data'])
+ # Compatibility path: old Keep servers reject a bare body
+ # with 401 "Timestamp verification failed"; retry once with
+ # the PGP-style signed envelope from sign_for_old_server().
+ if (resp['status'] == '401' and
+ re.match(r'Timestamp verification failed', content)):
+ body = KeepClient.sign_for_old_server(
+ self.args['data_hash'],
+ self.args['data'])
+ h = httplib2.Http()
+ resp, content = h.request(url.encode('utf-8'), 'PUT',
+ headers=headers,
+ body=body)
+ if re.match(r'^2\d\d$', resp['status']):
+ logging.debug("KeepWriterThread %s succeeded %s %s" %
+ (str(threading.current_thread()),
+ self.args['data_hash'],
+ self.args['service_root']))
+ # Any 2xx counts as one successful copy.
+ return limiter.increment_done()
+ logging.warning("Request fail: PUT %s => %s %s" %
+ (url, resp['status'], content))
+ except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
+ # Failures are logged, not raised; put() checks the final
+ # success count via the limiter.
+ logging.warning("Request fail: PUT %s => %s: %s" %
+ (url, type(e), str(e)))
-class KeepClient:
 def __init__(self):
+ # Guards lazy discovery of service_roots (see
+ # shuffled_service_roots). NOTE(review): that method tests
+ # self.service_roots == None BEFORE acquiring this lock
+ # (check-then-act), so two threads may both run the discovery; the
+ # lock is also not released if the API call raises -- consider
+ # acquire-then-check with try/finally.
+ self.lock = threading.Lock()
 self.service_roots = None
def shuffled_service_roots(self, hash):
if self.service_roots == None:
+ self.lock.acquire()
keep_disks = api().keep_disks().list().execute()['items']
roots = (("http%s://%s:%d/" %
('s' if f['service_ssl_flag'] else '',
for f in keep_disks)
self.service_roots = sorted(set(roots))
logging.debug(str(self.service_roots))
+ self.lock.release()
seed = hash
pool = self.service_roots[:]
pseq = []
headers = {'Authorization': "OAuth2 %s" % api_token,
'Accept': 'application/octet-stream'}
try:
- resp, content = h.request(url, 'GET', headers=headers)
+ resp, content = h.request(url.encode('utf-8'), 'GET',
+ headers=headers)
if re.match(r'^2\d\d$', resp['status']):
m = hashlib.new('md5')
m.update(content)
except (httplib2.HttpLib2Error, httplib.ResponseNotReady) as e:
logging.info("Request fail: GET %s => %s: %s" %
(url, type(e), str(e)))
- raise Exception("Not found: %s" % expect_hash)
+ raise errors.NotFoundError("Block not found: %s" % expect_hash)
def put(self, data, **kwargs):
if 'KEEP_LOCAL_STORE' in os.environ:
data_hash = m.hexdigest()
have_copies = 0
want_copies = kwargs.get('copies', 2)
+ if not (want_copies > 0):
+ return data_hash
+ threads = []
+ thread_limiter = KeepClient.ThreadLimiter(want_copies)
for service_root in self.shuffled_service_roots(data_hash):
- h = httplib2.Http()
- url = service_root + data_hash
- api_token = os.environ['ARVADOS_API_TOKEN']
- headers = {'Authorization': "OAuth2 %s" % api_token}
- try:
- resp, content = h.request(url, 'PUT',
- headers=headers,
- body=data)
- if (resp['status'] == '401' and
- re.match(r'Timestamp verification failed', content)):
- body = self.sign_for_old_server(data_hash, data)
- h = httplib2.Http()
- resp, content = h.request(url, 'PUT',
- headers=headers,
- body=body)
- if re.match(r'^2\d\d$', resp['status']):
- have_copies += 1
- if have_copies == want_copies:
- return data_hash + '+' + str(len(data))
- else:
- logging.warning("Request fail: PUT %s => %s %s" %
- (url, resp['status'], content))
- except (httplib2.HttpLib2Error, httplib.HTTPException) as e:
- logging.warning("Request fail: PUT %s => %s: %s" %
- (url, type(e), str(e)))
- raise Exception("Write fail for %s: wanted %d but wrote %d" %
- (data_hash, want_copies, have_copies))
+ t = KeepClient.KeepWriterThread(data=data,
+ data_hash=data_hash,
+ service_root=service_root,
+ thread_limiter=thread_limiter)
+ t.start()
+ threads += [t]
+ for t in threads:
+ t.join()
+ have_copies = thread_limiter.done()
+ if have_copies == want_copies:
+ return (data_hash + '+' + str(len(data)))
+ raise errors.KeepWriteError(
+ "Write fail for %s: wanted %d but wrote %d" %
+ (data_hash, want_copies, have_copies))
- def sign_for_old_server(self, data_hash, data):
+ # Wrap data in a dummy clear-signed-PGP-style envelope (current epoch
+ # time + hash in the message header) accepted by old Keep servers.
+ # Made static so KeepWriterThread.run can call
+ # KeepClient.sign_for_old_server without holding a client instance.
+ @staticmethod
+ def sign_for_old_server(data_hash, data):
 return (("-----BEGIN PGP SIGNED MESSAGE-----\n\n\n%d %s\n-----BEGIN PGP SIGNATURE-----\n\n-----END PGP SIGNATURE-----\n" % (int(time.time()), data_hash)) + data)
def local_store_get(locator):
r = re.search('^([0-9a-f]{32,})', locator)
if not r:
- raise Exception("Keep.get: invalid data locator '%s'" % locator)
+ raise errors.NotFoundError(
+ "Invalid data locator: '%s'" % locator)
if r.group(0) == 'd41d8cd98f00b204e9800998ecf8427e':
return ''
with open(os.path.join(os.environ['KEEP_LOCAL_STORE'], r.group(0)), 'r') as f: