import hashlib
import string
import bz2
+import zlib
+import fcntl
from apiclient import errors
from apiclient.discovery import build
t = service.job_tasks().get(uuid=os.environ['TASK_UUID']).execute()
t = UserDict.UserDict(t)
t.set_output = types.MethodType(task_set_output, t)
+ t.tmpdir = os.environ['TASK_WORK']
_current_task = t
return t
if _current_job:
return _current_job
t = service.jobs().get(uuid=os.environ['JOB_UUID']).execute()
+ t = UserDict.UserDict(t)
+ t.tmpdir = os.environ['JOB_WORK']
_current_job = t
return t
task_input = f.as_manifest()
new_task_attrs = {
'job_uuid': current_job()['uuid'],
- 'created_by_job_task': current_task()['uuid'],
+ 'created_by_job_task_uuid': current_task()['uuid'],
'sequence': if_sequence + 1,
'parameters': {
'input':task_input
).execute()
exit(0)
+ @staticmethod
+ def one_task_per_input_stream(if_sequence=0, and_end_task=True):
+ if if_sequence != current_task()['sequence']:
+ return
+ job_input = current_job()['script_parameters']['input']
+ cr = CollectionReader(job_input)
+ for s in cr.all_streams():
+ task_input = s.tokens()
+ new_task_attrs = {
+ 'job_uuid': current_job()['uuid'],
+ 'created_by_job_task_uuid': current_task()['uuid'],
+ 'sequence': if_sequence + 1,
+ 'parameters': {
+ 'input': task_input
+ }
+ }
+ service.job_tasks().create(job_task=json.dumps(new_task_attrs)).execute()
+ if and_end_task:
+ service.job_tasks().update(uuid=current_task()['uuid'],
+ job_task=json.dumps({'success':True})
+ ).execute()
+ exit(0)
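+
+ # Minimal usage sketch for a crunch script (hedged: the enclosing class
+ # of these helpers is outside this hunk; qualify the call accordingly):
+ #
+ #   one_task_per_input_stream(if_sequence=0, and_end_task=True)
+ #   # ...later, in a queued task with sequence 1:
+ #   stream_tokens = current_task()['parameters']['input']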
+
+class util:
+ @staticmethod
+ def run_command(execargs, **kwargs):
+ if 'stdin' not in kwargs:
+ kwargs['stdin'] = subprocess.PIPE
+ if 'stdout' not in kwargs:
+ kwargs['stdout'] = subprocess.PIPE
+ if 'stderr' not in kwargs:
+ kwargs['stderr'] = subprocess.PIPE
+ p = subprocess.Popen(execargs, close_fds=True, shell=False,
+ **kwargs)
+ stdoutdata, stderrdata = p.communicate(None)
+ if p.returncode != 0:
+ raise Exception("run_command %s exit %d:\n%s" %
+ (execargs, p.returncode, stderrdata))
+ return stdoutdata, stderrdata
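+
+ # Usage sketch: run a child process and capture its output; run_command
+ # raises if the child exits nonzero (the arguments here are illustrative):
+ #
+ #   out, err = util.run_command(['ls', '-l'], cwd='/tmp')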
+
+ @staticmethod
+ def git_checkout(url, version, path):
+ if not re.search('^/', path):
+ path = os.path.join(current_job().tmpdir, path)
+ if not os.path.exists(path):
+ util.run_command(["git", "clone", url, path],
+ cwd=os.path.dirname(path))
+ util.run_command(["git", "checkout", version],
+ cwd=path)
+ return path
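+
+ # Usage sketch (repository URL and version are hypothetical):
+ #
+ #   src = util.git_checkout('https://example.com/repo.git', 'master', 'src')
+ #   # src is an absolute path under the job's tmpdir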
+
+ @staticmethod
+ def tar_extractor(path, decompress_flag):
+ return subprocess.Popen(["tar",
+ "-C", path,
+ ("-x%sf" % decompress_flag),
+ "-"],
+ stdout=None,
+ stdin=subprocess.PIPE, stderr=sys.stderr,
+ shell=False, close_fds=True)
+
+ @staticmethod
+ def tarball_extract(tarball, path):
+ """Retrieve a tarball from Keep and extract it to a local
+ directory. Return the absolute path where the tarball was
+ extracted. If the top level of the tarball contained just one
+ file or directory, return the absolute path of that single
+ item.
+
+ tarball -- collection locator
+ path -- where to extract the tarball: absolute, or relative to job tmp
+ """
+ if not re.search('^/', path):
+ path = os.path.join(current_job().tmpdir, path)
+ lockfile = open(path + '.lock', 'w')
+ fcntl.flock(lockfile, fcntl.LOCK_EX)
+ try:
+ os.stat(path)
+ except OSError:
+ os.mkdir(path)
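+ # A '.locator' symlink in the extract directory records which
+ # collection was last extracted there, so a repeat extraction of the
+ # same tarball can be skipped.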
+ already_have_it = False
+ try:
+ if os.readlink(os.path.join(path, '.locator')) == tarball:
+ already_have_it = True
+ except OSError:
+ pass
+ if not already_have_it:
+
+ # emulate "rm -f" (i.e., if the file does not exist, we win)
+ try:
+ os.unlink(os.path.join(path, '.locator'))
+ except OSError:
+ if os.path.exists(os.path.join(path, '.locator')):
+ raise
+
+ for f in CollectionReader(tarball).all_files():
+ if re.search(r'\.(tbz|tar\.bz2)$', f.name()):
+ p = util.tar_extractor(path, 'j')
+ elif re.search(r'\.(tgz|tar\.gz)$', f.name()):
+ p = util.tar_extractor(path, 'z')
+ elif re.search(r'\.tar$', f.name()):
+ p = util.tar_extractor(path, '')
+ else:
+ raise Exception("tarball_extract cannot handle filename %s"
+ % f.name())
+ while True:
+ buf = f.read(2**20)
+ if len(buf) == 0:
+ break
+ p.stdin.write(buf)
+ p.stdin.close()
+ p.wait()
+ if p.returncode != 0:
+ lockfile.close()
+ raise Exception("tar exited %d" % p.returncode)
+ os.symlink(tarball, os.path.join(path, '.locator'))
+ tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
+ lockfile.close()
+ if len(tld_extracts) == 1:
+ return os.path.join(path, tld_extracts[0])
+ return path
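+
+ # Usage sketch (the collection locator below is hypothetical):
+ #
+ #   ref_dir = util.tarball_extract('a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6+512', 'ref')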
+
+ @staticmethod
+ def zipball_extract(zipball, path):
+ """Retrieve a zip archive from Keep and extract it to a local
+ directory. Return the absolute path where the archive was
+ extracted. If the top level of the archive contained just one
+ file or directory, return the absolute path of that single
+ item.
+
+ zipball -- collection locator
+ path -- where to extract the archive: absolute, or relative to job tmp
+ """
+ if not re.search('^/', path):
+ path = os.path.join(current_job().tmpdir, path)
+ lockfile = open(path + '.lock', 'w')
+ fcntl.flock(lockfile, fcntl.LOCK_EX)
+ try:
+ os.stat(path)
+ except OSError:
+ os.mkdir(path)
+ already_have_it = False
+ try:
+ if os.readlink(os.path.join(path, '.locator')) == zipball:
+ already_have_it = True
+ except OSError:
+ pass
+ if not already_have_it:
+
+ # emulate "rm -f" (i.e., if the file does not exist, we win)
+ try:
+ os.unlink(os.path.join(path, '.locator'))
+ except OSError:
+ if os.path.exists(os.path.join(path, '.locator')):
+ raise
+
+ for f in CollectionReader(zipball).all_files():
+ if not re.search('\.zip$', f.name()):
+ raise Exception("zipball_extract cannot handle filename %s"
+ % f.name())
+ zip_filename = os.path.join(path, os.path.basename(f.name()))
+ zip_file = open(zip_filename, 'wb')
+ while True:
+ buf = f.read(2**20)
+ if len(buf) == 0:
+ break
+ zip_file.write(buf)
+ zip_file.close()
+
+ p = subprocess.Popen(["unzip",
+ "-q", "-o",
+ "-d", path,
+ zip_filename],
+ stdout=None,
+ stdin=None, stderr=sys.stderr,
+ shell=False, close_fds=True)
+ p.wait()
+ if p.returncode != 0:
+ lockfile.close()
+ raise Exception("unzip exited %d" % p.returncode)
+ os.unlink(zip_filename)
+ os.symlink(zipball, os.path.join(path, '.locator'))
+ tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
+ lockfile.close()
+ if len(tld_extracts) == 1:
+ return os.path.join(path, tld_extracts[0])
+ return path
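+
+ # Usage sketch (the collection locator below is hypothetical):
+ #
+ #   tool_dir = util.zipball_extract('0f1e2d3c4b5a6978695a4b3c2d1e0f9a+128', 'tools')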
+
+ @staticmethod
+ def collection_extract(collection, path, files=[], decompress=True):
+ """Retrieve a collection from Keep and extract it to a local
+ directory. Return the absolute path where the collection was
+ extracted.
+
+ collection -- collection locator
+ path -- where to extract: absolute, or relative to job tmp
+ """
+ if not re.search('^/', path):
+ path = os.path.join(current_job().tmpdir, path)
+ lockfile = open(path + '.lock', 'w')
+ fcntl.flock(lockfile, fcntl.LOCK_EX)
+ try:
+ os.stat(path)
+ except OSError:
+ os.mkdir(path)
+ already_have_it = False
+ try:
+ if os.readlink(os.path.join(path, '.locator')) == collection:
+ already_have_it = True
+ except OSError:
+ pass
+
+ # emulate "rm -f" (i.e., if the file does not exist, we win)
+ try:
+ os.unlink(os.path.join(path, '.locator'))
+ except OSError:
+ if os.path.exists(os.path.join(path, '.locator')):
+ raise
+
+ files_got = []
+ for f in CollectionReader(collection).all_files():
+ if (files == [] or
+ ((f.name() not in files_got) and
+ (f.name() in files or
+ (decompress and f.decompressed_name() in files)))):
+ outname = f.decompressed_name() if decompress else f.name()
+ files_got += [outname]
+ if os.path.exists(os.path.join(path, outname)):
+ continue
+ util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+ outfile = open(os.path.join(path, outname), 'wb')
+ for buf in (f.readall_decompressed() if decompress
+ else f.readall()):
+ outfile.write(buf)
+ outfile.close()
+ if len(files_got) < len(files):
+ raise Exception("Wanted files %s but only got %s from %s" % (files, files_got, map(lambda z: z.name(), list(CollectionReader(collection).all_files()))))
+ os.symlink(collection, os.path.join(path, '.locator'))
+
+ lockfile.close()
+ return path
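+
+ # Usage sketch: fetch two named files from a collection, decompressing
+ # '.gz'/'.bz2' members (locator and file names are hypothetical):
+ #
+ #   dir = util.collection_extract('9a8b7c6d5e4f3a2b1c0d9e8f7a6b5c4d+256', 'refs',
+ #                                 files=['chr1.fa', 'chr2.fa'])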
+
+ @staticmethod
+ def mkdir_dash_p(path):
+ if not os.path.exists(path):
+ util.mkdir_dash_p(os.path.dirname(path))
+ try:
+ os.mkdir(path)
+ except OSError:
+ if not os.path.exists(path):
+ raise
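+
+ # Sketch: behaves like "mkdir -p", tolerating a concurrent creator:
+ #
+ #   util.mkdir_dash_p(os.path.join(current_job().tmpdir, 'out/sub/dir'))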
+
+ @staticmethod
+ def stream_extract(stream, path, files=[], decompress=True):
+ """Retrieve a stream from Keep and extract it to a local
+ directory. Return the absolute path where the stream was
+ extracted.
+
+ stream -- StreamReader object
+ path -- where to extract: absolute, or relative to job tmp
+ """
+ if not re.search('^/', path):
+ path = os.path.join(current_job().tmpdir, path)
+ lockfile = open(path + '.lock', 'w')
+ fcntl.flock(lockfile, fcntl.LOCK_EX)
+ try:
+ os.stat(path)
+ except OSError:
+ os.mkdir(path)
+
+ files_got = []
+ for f in stream.all_files():
+ if (files == [] or
+ ((f.name() not in files_got) and
+ (f.name() in files or
+ (decompress and f.decompressed_name() in files)))):
+ outname = f.decompressed_name() if decompress else f.name()
+ files_got += [outname]
+ if os.path.exists(os.path.join(path, outname)):
+ continue
+ util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+ outfile = open(os.path.join(path, outname), 'wb')
+ for buf in (f.readall_decompressed() if decompress
+ else f.readall()):
+ outfile.write(buf)
+ outfile.close()
+ if len(files_got) < len(files):
+ raise Exception("Wanted files %s but only got %s from %s" %
+ (files, files_got, map(lambda z: z.name(),
+ list(stream.all_files()))))
+ lockfile.close()
+ return path
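+
+ # Usage sketch (hedged: assumes 'cr' is a CollectionReader over a job
+ # input; the file name is hypothetical):
+ #
+ #   for s in cr.all_streams():
+ #       util.stream_extract(s, 'inputs', files=['reads.fastq'])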
+
class DataReader:
def __init__(self, data_locator):
self.data_locator = data_locator
self._filepos = 0
def name(self):
return self._name
+ def decompressed_name(self):
+ return re.sub('\.(bz2|gz)$', '', self._name)
def size(self):
return self._size
def stream_name(self):
data = self._stream.read(min(size, self._size - self._filepos))
self._filepos += len(data)
return data
- def readall(self, size, **kwargs):
+ def readall(self, size=2**20, **kwargs):
while True:
data = self.read(size, **kwargs)
if data == '':
data = decompressor.decompress(chunk)
if data and data != '':
yield data
- def readlines(self, decompress=True):
+ def gunzip(self, size):
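+ # windowBits = 16+MAX_WBITS tells zlib to expect and strip a gzip
+ # header and trailer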
+ decompressor = zlib.decompressobj(16+zlib.MAX_WBITS)
+ for chunk in self.readall(size):
+ data = decompressor.decompress(decompressor.unconsumed_tail + chunk)
+ if data and data != '':
+ yield data
+ def readall_decompressed(self, size=2**20):
self._stream.seek(self._pos + self._filepos)
- if decompress and re.search('\.bz2$', self._name):
- datasource = self.bunzip2(2**10)
+ if re.search('\.bz2$', self._name):
+ return self.bunzip2(size)
+ elif re.search('\.gz$', self._name):
+ return self.gunzip(size)
+ else:
+ return self.readall(size)
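+ # Usage sketch: stream a file's contents, transparently decompressing
+ # '.bz2'/'.gz' names (hedged: 'f' is a file object from all_files()):
+ #
+ #   for chunk in f.readall_decompressed():
+ #       sys.stdout.write(chunk)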
+ def readlines(self, decompress=True):
+ if decompress:
+ datasource = self.readall_decompressed()
else:
- datasource = self.readall(2**10)
+ self._stream.seek(self._pos + self._filepos)
+ datasource = self.readall()
data = ''
for newdata in datasource:
data += newdata
self.files += [[int(pos), int(size), name]]
else:
raise Exception("Invalid manifest format")
+
+ def tokens(self):
+ return self._tokens
def tokens_for_range(self, range_start, range_size):
resp = [self._stream_name]
return_all_tokens = False
for s in self.all_streams():
for f in s.all_files():
yield f
+ def manifest_text(self):
+ self._populate()
+ return self._manifest_text
class CollectionWriter:
KEEP_BLOCK_SIZE = 2**26
def __init__(self):
- self._data_buffer = ''
+ self._data_buffer = []
+ self._data_buffer_len = 0
self._current_stream_files = []
self._current_stream_length = 0
self._current_stream_locators = []
- def __exit__(self):
+ def __exit__(self, exc_type, exc_value, traceback):
self.finish()
def write(self, newdata):
- self._data_buffer += newdata
+ self._data_buffer += [newdata]
+ self._data_buffer_len += len(newdata)
self._current_stream_length += len(newdata)
- while len(self._data_buffer) >= self.KEEP_BLOCK_SIZE:
+ while self._data_buffer_len >= self.KEEP_BLOCK_SIZE:
self.flush_data()
def flush_data(self):
- if self._data_buffer != '':
- self._current_stream_locators += [Keep.put(self._data_buffer[0:self.KEEP_BLOCK_SIZE])]
- self._data_buffer = self._data_buffer[self.KEEP_BLOCK_SIZE:]
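+ # Join the buffered chunks once per flush; accumulating a list in
+ # write() and joining here avoids quadratic string concatenation.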
+ data_buffer = ''.join(self._data_buffer)
+ if data_buffer != '':
+ self._current_stream_locators += [Keep.put(data_buffer[0:self.KEEP_BLOCK_SIZE])]
+ self._data_buffer = [data_buffer[self.KEEP_BLOCK_SIZE:]]
+ self._data_buffer_len = len(self._data_buffer[0])
def start_new_file(self, newfilename=None):
self.finish_current_file()
self.set_current_file_name(newfilename)
self._current_stream_length - self._current_file_pos,
self._current_file_name]]
self._current_file_pos = self._current_stream_length
- def start_new_stream(self, newstreamname=None):
+ def start_new_stream(self, newstreamname='.'):
self.finish_current_stream()
self.set_current_stream_name(newstreamname)
def set_current_stream_name(self, newstreamname):
self.finish_current_stream()
manifest = ''
for stream in self._finished_streams:
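+ # manifest stream names must be '.' or begin with './'; add the
+ # prefix when it is missing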
+ if not re.search(r'^\.(/.*)?$', stream[0]):
+ manifest += './'
manifest += stream[0]
if len(stream[1]) == 0:
manifest += " d41d8cd98f00b204e9800998ecf8427e+0"