import string
import bz2
import zlib
+import fcntl
from apiclient import errors
from apiclient.discovery import build
t = service.job_tasks().get(uuid=os.environ['TASK_UUID']).execute()
t = UserDict.UserDict(t)
t.set_output = types.MethodType(task_set_output, t)
- t.tmpdir = os.environ['TASK_TMPDIR']
+ t.tmpdir = os.environ['TASK_WORK']
_current_task = t
return t
return _current_job
t = service.jobs().get(uuid=os.environ['JOB_UUID']).execute()
t = UserDict.UserDict(t)
- t.tmpdir = os.environ['CRUNCH_WORK']
+ t.tmpdir = os.environ['JOB_WORK']
_current_job = t
return t
task_input = f.as_manifest()
new_task_attrs = {
'job_uuid': current_job()['uuid'],
- 'created_by_job_task': current_task()['uuid'],
+ 'created_by_job_task_uuid': current_task()['uuid'],
'sequence': if_sequence + 1,
'parameters': {
'input':task_input
class util:
@staticmethod
def run_command(execargs, **kwargs):
+ if 'stdin' not in kwargs:
+ kwargs['stdin'] = subprocess.PIPE
+ if 'stdout' not in kwargs:
+ kwargs['stdout'] = subprocess.PIPE
+ if 'stderr' not in kwargs:
+ kwargs['stderr'] = subprocess.PIPE
p = subprocess.Popen(execargs, close_fds=True, shell=False,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
**kwargs)
stdoutdata, stderrdata = p.communicate(None)
if p.returncode != 0:
path = os.path.join(current_job().tmpdir, path)
if not os.path.exists(path):
util.run_command(["git", "clone", url, path],
- cwd=os.path.dirname(parser_path))
+ cwd=os.path.dirname(path))
util.run_command(["git", "checkout", version],
- cwd=parser_path)
+ cwd=path)
+ return path
+
+ @staticmethod
+ def tar_extractor(path, decompress_flag):
+ return subprocess.Popen(["tar",
+ "-C", path,
+ ("-x%sf" % decompress_flag),
+ "-"],
+ stdout=None,
+ stdin=subprocess.PIPE, stderr=sys.stderr,
+ shell=False, close_fds=True)
+
+ @staticmethod
+ def tarball_extract(tarball, path):
+ """Retrieve a tarball from Keep and extract it to a local
+ directory. Return the absolute path where the tarball was
+ extracted. If the top level of the tarball contained just one
+ file or directory, return the absolute path of that single
+ item.
+
+ tarball -- collection locator
+ path -- where to extract the tarball: absolute, or relative to job tmp
+ """
+ if not re.search('^/', path):
+ path = os.path.join(current_job().tmpdir, path)
+ lockfile = open(path + '.lock', 'w')
+ fcntl.flock(lockfile, fcntl.LOCK_EX)
+ try:
+ os.stat(path)
+ except OSError:
+ os.mkdir(path)
+ already_have_it = False
+ try:
+ if os.readlink(os.path.join(path, '.locator')) == tarball:
+ already_have_it = True
+ except OSError:
+ pass
+ if not already_have_it:
+
+ # emulate "rm -f" (i.e., if the file does not exist, we win)
+ try:
+ os.unlink(os.path.join(path, '.locator'))
+ except OSError:
+ if os.path.exists(os.path.join(path, '.locator')):
+ os.unlink(os.path.join(path, '.locator'))
+
+ for f in CollectionReader(tarball).all_files():
+ if re.search('\.(tbz|tar.bz2)$', f.name()):
+ p = tar_extractor(path, 'j')
+ elif re.search('\.(tgz|tar.gz)$', f.name()):
+ p = tar_extractor(path, 'z')
+ elif re.search('\.tar$', f.name()):
+ p = tar_extractor(path, '')
+ else:
+ raise Exception("tarball_extract cannot handle filename %s"
+ % f.name())
+ while True:
+ buf = f.read(2**20)
+ if len(buf) == 0:
+ break
+ p.stdin.write(buf)
+ p.stdin.close()
+ p.wait()
+ if p.returncode != 0:
+ lockfile.close()
+ raise Exception("tar exited %d" % p.returncode)
+ os.symlink(tarball, os.path.join(path, '.locator'))
+ tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
+ lockfile.close()
+ if len(tld_extracts) == 1:
+ return os.path.join(path, tld_extracts[0])
+ return path
+
    @staticmethod
    def zipball_extract(zipball, path):
        """Retrieve a zip archive from Keep and extract it to a local
        directory.  Return the absolute path where the archive was
        extracted.  If the top level of the archive contained just one
        file or directory, return the absolute path of that single
        item.

        zipball -- collection locator
        path -- where to extract the archive: absolute, or relative to job tmp
        """
        if not re.search('^/', path):
            path = os.path.join(current_job().tmpdir, path)
        # Serialize concurrent extractions into the same target directory.
        lockfile = open(path + '.lock', 'w')
        fcntl.flock(lockfile, fcntl.LOCK_EX)
        try:
            os.stat(path)
        except OSError:
            os.mkdir(path)
        # A '.locator' symlink records which collection is already extracted
        # here, so a repeated call with the same zipball skips the work.
        already_have_it = False
        try:
            if os.readlink(os.path.join(path, '.locator')) == zipball:
                already_have_it = True
        except OSError:
            pass
        if not already_have_it:

            # emulate "rm -f" (i.e., if the file does not exist, we win)
            try:
                os.unlink(os.path.join(path, '.locator'))
            except OSError:
                if os.path.exists(os.path.join(path, '.locator')):
                    os.unlink(os.path.join(path, '.locator'))

            for f in CollectionReader(zipball).all_files():
                if not re.search('\.zip$', f.name()):
                    raise Exception("zipball_extract cannot handle filename %s"
                                    % f.name())
                # Spool the archive from Keep to a local file first; unzip is
                # then run against that file (unlike the tar path, which can
                # stream through a pipe).
                zip_filename = os.path.join(path, os.path.basename(f.name()))
                zip_file = open(zip_filename, 'wb')
                while True:
                    buf = f.read(2**20)
                    if len(buf) == 0:
                        break
                    zip_file.write(buf)
                zip_file.close()

                # -q quiet, -o overwrite existing files, -d target directory.
                p = subprocess.Popen(["unzip",
                                      "-q", "-o",
                                      "-d", path,
                                      zip_filename],
                                     stdout=None,
                                     stdin=None, stderr=sys.stderr,
                                     shell=False, close_fds=True)
                p.wait()
                if p.returncode != 0:
                    lockfile.close()
                    raise Exception("unzip exited %d" % p.returncode)
                # Remove the spooled archive once extracted.
                os.unlink(zip_filename)
            os.symlink(zipball, os.path.join(path, '.locator'))
        # NOTE(review): filter() returns a list under Python 2 (this file's
        # era, cf. UserDict usage elsewhere); len() on it would break under
        # Python 3.
        tld_extracts = filter(lambda f: f != '.locator', os.listdir(path))
        lockfile.close()
        if len(tld_extracts) == 1:
            return os.path.join(path, tld_extracts[0])
        return path
+
    @staticmethod
    def collection_extract(collection, path, files=[], decompress=True):
        """Retrieve a collection from Keep and extract it to a local
        directory.  Return the absolute path where the collection was
        extracted.

        collection -- collection locator
        path -- where to extract: absolute, or relative to job tmp
        files -- optional list of file names to extract; empty means all.
            (NOTE(review): mutable default argument; safe here only because
            it is never mutated.)
        decompress -- if True, .gz/.bz2 members are decompressed on the way
            out and matched/written under their decompressed names.
        """
        if not re.search('^/', path):
            path = os.path.join(current_job().tmpdir, path)
        # Serialize concurrent extractions into the same target directory.
        lockfile = open(path + '.lock', 'w')
        fcntl.flock(lockfile, fcntl.LOCK_EX)
        try:
            os.stat(path)
        except OSError:
            os.mkdir(path)
        # NOTE(review): unlike tarball_extract/zipball_extract,
        # already_have_it is computed here but never consulted; the code
        # below relies on the per-file os.path.exists() check instead.
        already_have_it = False
        try:
            if os.readlink(os.path.join(path, '.locator')) == collection:
                already_have_it = True
        except OSError:
            pass

        # emulate "rm -f" (i.e., if the file does not exist, we win)
        files_got = []
        try:
            os.unlink(os.path.join(path, '.locator'))
        except OSError:
            if os.path.exists(os.path.join(path, '.locator')):
                os.unlink(os.path.join(path, '.locator'))

        for f in CollectionReader(collection).all_files():
            # Take the file if no filter was given, or if it matches the
            # requested names (under its stored or decompressed name) and
            # has not been extracted already in this pass.
            if (files == [] or
                ((f.name() not in files_got) and
                 (f.name() in files or
                  (decompress and f.decompressed_name() in files)))):
                outname = f.decompressed_name() if decompress else f.name()
                files_got += [outname]
                # Skip files already present on disk from a prior run.
                if os.path.exists(os.path.join(path, outname)):
                    continue
                outfile = open(os.path.join(path, outname), 'w')
                for buf in (f.readall_decompressed() if decompress
                            else f.readall()):
                    outfile.write(buf)
                outfile.close()
        # If a filter was given, every requested file must have been found.
        if len(files_got) < len(files):
            raise Exception("Wanted files %s but only got %s from %s" % (files, files_got, map(lambda z: z.name(), list(CollectionReader(collection).all_files()))))
        os.symlink(collection, os.path.join(path, '.locator'))

        lockfile.close()
        return path
class DataReader:
data = self._stream.read(min(size, self._size - self._filepos))
self._filepos += len(data)
return data
- def readall(self, size, **kwargs):
+ def readall(self, size=2**20, **kwargs):
while True:
data = self.read(size, **kwargs)
if data == '':
data = decompressor.decompress(decompressor.unconsumed_tail + chunk)
if data and data != '':
yield data
- def readlines(self, decompress=True):
+ def readall_decompressed(self, size=2**20):
self._stream.seek(self._pos + self._filepos)
- if decompress and re.search('\.bz2$', self._name):
- datasource = self.bunzip2(2**10)
- elif decompress and re.search('\.gz$', self._name):
- datasource = self.gunzip(2**10)
+ if re.search('\.bz2$', self._name):
+ return self.bunzip2(size)
+ elif re.search('\.gz$', self._name):
+ return self.gunzip(size)
+ else:
+ return self.readall(size)
+ def readlines(self, decompress=True):
+ if decompress:
+ datasource = self.readall_decompressed()
else:
- datasource = self.readall(2**10)
+ self._stream.seek(self._pos + self._filepos)
+ datasource = self.readall()
data = ''
for newdata in datasource:
data += newdata
for s in self.all_streams():
for f in s.all_files():
yield f
    def manifest_text(self):
        # Return this collection's manifest text, triggering a fetch/parse
        # via _populate() if it has not been loaded yet.
        self._populate()
        return self._manifest_text
class CollectionWriter:
KEEP_BLOCK_SIZE = 2**26
if data_buffer != '':
self._current_stream_locators += [Keep.put(data_buffer[0:self.KEEP_BLOCK_SIZE])]
self._data_buffer = [data_buffer[self.KEEP_BLOCK_SIZE:]]
+ self._data_buffer_len = len(self._data_buffer[0])
def start_new_file(self, newfilename=None):
self.finish_current_file()
self.set_current_file_name(newfilename)