import os
import re
import subprocess
+import errno
+import sys
+from arvados.collection import *
def clear_tmpdir(path=None):
"""
exists and is empty.
"""
if path == None:
- path = current_task().tmpdir
+ path = arvados.current_task().tmpdir
if os.path.exists(path):
p = subprocess.Popen(['rm', '-rf', path])
stdout, stderr = p.communicate(None)
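# Example (editorial, not part of this patch): inside a crunch task,
# clear_tmpdir() with no argument resolves to the task's own tmpdir
# and leaves it existing but empty:
#
#   clear_tmpdir()                   # uses arvados.current_task().tmpdir
#   clear_tmpdir('/tmp/scratch')     # or an explicit absolute path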
def git_checkout(url, version, path):
if not re.search('^/', path):
- path = os.path.join(current_job().tmpdir, path)
+ path = os.path.join(arvados.current_job().tmpdir, path)
if not os.path.exists(path):
- util.run_command(["git", "clone", url, path],
- cwd=os.path.dirname(path))
- util.run_command(["git", "checkout", version],
- cwd=path)
+ run_command(["git", "clone", url, path],
+ cwd=os.path.dirname(path))
+ run_command(["git", "checkout", version],
+ cwd=path)
return path
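# Example (editorial, not part of this patch; URL and ref are
# placeholders): clone a repository into the job tmpdir (skipping the
# clone if the directory already exists) and check out a committish:
#
#   src_dir = git_checkout('https://example.com/some/repo.git',
#                          'v1.0', 'repo-src')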
def tar_extractor(path, decompress_flag):
path -- where to extract the tarball: absolute, or relative to job tmp
"""
if not re.search('^/', path):
- path = os.path.join(current_job().tmpdir, path)
+ path = os.path.join(arvados.current_job().tmpdir, path)
lockfile = open(path + '.lock', 'w')
fcntl.flock(lockfile, fcntl.LOCK_EX)
try:
for f in CollectionReader(tarball).all_files():
if re.search('\.(tbz|tar.bz2)$', f.name()):
- p = util.tar_extractor(path, 'j')
+ p = tar_extractor(path, 'j')
elif re.search('\.(tgz|tar.gz)$', f.name()):
- p = util.tar_extractor(path, 'z')
+ p = tar_extractor(path, 'z')
elif re.search('\.tar$', f.name()):
- p = util.tar_extractor(path, '')
+ p = tar_extractor(path, '')
else:
raise errors.AssertionError(
"tarball_extract cannot handle filename %s" % f.name())
path -- where to extract the archive: absolute, or relative to job tmp
"""
if not re.search('^/', path):
- path = os.path.join(current_job().tmpdir, path)
+ path = os.path.join(arvados.current_job().tmpdir, path)
lockfile = open(path + '.lock', 'w')
fcntl.flock(lockfile, fcntl.LOCK_EX)
try:
else:
collection_hash = hashlib.md5(collection).hexdigest()
if not re.search('^/', path):
- path = os.path.join(current_job().tmpdir, path)
+ path = os.path.join(arvados.current_job().tmpdir, path)
lockfile = open(path + '.lock', 'w')
fcntl.flock(lockfile, fcntl.LOCK_EX)
try:
files_got += [outname]
if os.path.exists(os.path.join(path, stream_name, outname)):
continue
- util.mkdir_dash_p(os.path.dirname(os.path.join(path, stream_name, outname)))
+ mkdir_dash_p(os.path.dirname(os.path.join(path, stream_name, outname)))
outfile = open(os.path.join(path, stream_name, outname), 'wb')
for buf in (f.readall_decompressed() if decompress
else f.readall()):
return path
def mkdir_dash_p(path):
- if not os.path.exists(path):
- util.mkdir_dash_p(os.path.dirname(path))
+ if not os.path.isdir(path):
try:
- os.mkdir(path)
- except OSError:
- if not os.path.exists(path):
- os.mkdir(path)
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno == errno.EEXIST and os.path.isdir(path):
+ # It is not an error if someone else creates the
+ # directory between our isdir() and makedirs() calls.
+ pass
+ else:
+ raise
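# Editorial note: the rewritten mkdir_dash_p() is a race-tolerant
# `mkdir -p`.  On Python 3.2+ an equivalent would be
# os.makedirs(path, exist_ok=True); the explicit errno.EEXIST check
# keeps the helper working on Python 2 as well.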
def stream_extract(stream, path, files=[], decompress=True):
"""Retrieve a stream from Keep and extract it to a local
path -- where to extract: absolute, or relative to job tmp
"""
if not re.search('^/', path):
- path = os.path.join(current_job().tmpdir, path)
+ path = os.path.join(arvados.current_job().tmpdir, path)
lockfile = open(path + '.lock', 'w')
fcntl.flock(lockfile, fcntl.LOCK_EX)
try:
files_got += [outname]
if os.path.exists(os.path.join(path, outname)):
os.unlink(os.path.join(path, outname))
- util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+ mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
outfile = open(os.path.join(path, outname), 'wb')
for buf in (f.readall_decompressed() if decompress
else f.readall()):
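# Example (editorial, not part of this patch; file names are
# placeholders): extract selected files from a Keep stream into a
# directory under the job tmpdir:
#
#   stream_extract(stream, 'inputs', files=['a.txt', 'b.txt'])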
ent_path = os.path.join(dirname, ent)
ent_base = os.path.join(base, ent) if base else ent
if os.path.isdir(ent_path):
- allfiles += util.listdir_recursive(ent_path, ent_base)
+ allfiles += listdir_recursive(ent_path, ent_base)
else:
allfiles += [ent_base]
return allfiles
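# Example (editorial, not part of this patch; assumes the signature is
# listdir_recursive(dirname, base=None), as the recursive call above
# suggests): list every file under a tree as paths relative to it:
#
#   for relpath in listdir_recursive('/tmp/extract-here'):
#       print(relpath)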