2755: add support for signed locators in the Python SDK.
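
For context: this patch to sdk/python/arvados/util.py in arvados.git is part of the signed-locator work named in the subject. A plain Keep locator has the form <md5 hash>+<size>; a signed locator carries an extra +A<signature>@<expiry> permission hint that Keep verifies before serving the block. The sketch below only illustrates that shape for readers following the change; the regex and helper name are not part of this patch or of the SDK's API.

    import re

    # Illustrative only: rough shape of a signed Keep locator such as
    # "acbd18db4cc2f85cedef654fccc4a4d8+3+A<hex signature>@<hex expiry>".
    SIGNED_LOCATOR = re.compile(
        r'^[0-9a-f]{32}\+\d+(\+\S+)*\+A[0-9a-f]+@[0-9a-f]+(\+\S+)*$')

    def looks_signed(locator):
        return SIGNED_LOCATOR.match(locator) is not None
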
diff --git a/sdk/python/arvados/util.py b/sdk/python/arvados/util.py
index 4e0d6f05dcd9f78bcd239e65f4a3b7863e406509..7148b9295b354d37cc941beb92a570e68720117e 100644
--- a/sdk/python/arvados/util.py
+++ b/sdk/python/arvados/util.py
@@ -3,6 +3,9 @@ import hashlib
 import os
 import re
 import subprocess
+import errno
+import sys
+from arvados.collection import *
 
 def clear_tmpdir(path=None):
     """
@@ -10,7 +13,7 @@ def clear_tmpdir(path=None):
     exists and is empty.
     """
     if path == None:
-        path = current_task().tmpdir
+        path = arvados.current_task().tmpdir
     if os.path.exists(path):
         p = subprocess.Popen(['rm', '-rf', path])
         stdout, stderr = p.communicate(None)
@@ -34,12 +37,12 @@ def run_command(execargs, **kwargs):
 
 def git_checkout(url, version, path):
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     if not os.path.exists(path):
-        util.run_command(["git", "clone", url, path],
-                         cwd=os.path.dirname(path))
-    util.run_command(["git", "checkout", version],
-                     cwd=path)
+        run_command(["git", "clone", url, path],
+                    cwd=os.path.dirname(path))
+    run_command(["git", "checkout", version],
+                cwd=path)
     return path
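
git_checkout() clones the repository only when the target directory does not already exist, then checks out the requested version and returns the checkout path; a relative path is created under the job's temporary directory. A hypothetical call (URL, version and path are placeholders, not taken from this patch):

    import arvados.util

    checkout_dir = arvados.util.git_checkout(
        url='https://example.com/repo.git',
        version='master',
        path='src')   # relative, so it ends up under current_job().tmpdir
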
 
 def tar_extractor(path, decompress_flag):
@@ -62,7 +65,7 @@ def tarball_extract(tarball, path):
     path -- where to extract the tarball: absolute, or relative to job tmp
     """
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -86,11 +89,11 @@ def tarball_extract(tarball, path):
 
         for f in CollectionReader(tarball).all_files():
             if re.search('\.(tbz|tar.bz2)$', f.name()):
-                p = util.tar_extractor(path, 'j')
+                p = tar_extractor(path, 'j')
             elif re.search('\.(tgz|tar.gz)$', f.name()):
-                p = util.tar_extractor(path, 'z')
+                p = tar_extractor(path, 'z')
             elif re.search('\.tar$', f.name()):
-                p = util.tar_extractor(path, '')
+                p = tar_extractor(path, '')
             else:
                 raise errors.AssertionError(
                     "tarball_extract cannot handle filename %s" % f.name())
@@ -123,7 +126,7 @@ def zipball_extract(zipball, path):
     path -- where to extract the archive: absolute, or relative to job tmp
     """
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -192,7 +195,7 @@ def collection_extract(collection, path, files=[], decompress=True):
     else:
         collection_hash = hashlib.md5(collection).hexdigest()
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -242,8 +245,16 @@ def collection_extract(collection, path, files=[], decompress=True):
     return path
 
 def mkdir_dash_p(path):
-    if not os.path.exists(path):
-        os.makedirs(path)
+    if not os.path.isdir(path):
+        try:
+            os.makedirs(path)
+        except OSError as e:
+            if e.errno == errno.EEXIST and os.path.isdir(path):
+                # It is not an error if someone else creates the
+                # directory between our isdir() and makedirs() calls.
+                pass
+            else:
+                raise
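
The rewritten mkdir_dash_p() tolerates the race in which another process creates the directory between the isdir() check and the makedirs() call, while still raising if the path exists as something other than a directory. For comparison only, not a change to this patch: Python 3.2+ offers the same effect directly, though the explicit errno check stays necessary for as long as the SDK supports Python 2.

    import os

    def mkdir_dash_p_alternative(path):
        # Python 3.2+: succeeds silently if path already exists as a
        # directory, and still raises if it exists as something else.
        os.makedirs(path, exist_ok=True)
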
 
 def stream_extract(stream, path, files=[], decompress=True):
     """Retrieve a stream from Keep and extract it to a local
@@ -254,7 +265,7 @@ def stream_extract(stream, path, files=[], decompress=True):
     path -- where to extract: absolute, or relative to job tmp
     """
     if not re.search('^/', path):
-        path = os.path.join(current_job().tmpdir, path)
+        path = os.path.join(arvados.current_job().tmpdir, path)
     lockfile = open(path + '.lock', 'w')
     fcntl.flock(lockfile, fcntl.LOCK_EX)
     try:
@@ -272,7 +283,7 @@ def stream_extract(stream, path, files=[], decompress=True):
             files_got += [outname]
             if os.path.exists(os.path.join(path, outname)):
                 os.unlink(os.path.join(path, outname))
-            util.mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
+            mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
             outfile = open(os.path.join(path, outname), 'wb')
             for buf in (f.readall_decompressed() if decompress
                         else f.readall()):
@@ -291,7 +302,7 @@ def listdir_recursive(dirname, base=None):
         ent_path = os.path.join(dirname, ent)
         ent_base = os.path.join(base, ent) if base else ent
         if os.path.isdir(ent_path):
-            allfiles += util.listdir_recursive(ent_path, ent_base)
+            allfiles += listdir_recursive(ent_path, ent_base)
         else:
             allfiles += [ent_base]
     return allfiles
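
listdir_recursive() walks a directory tree and returns a flat list of file paths relative to its dirname argument; the base parameter only threads the relative prefix through the recursion. A small usage sketch with a made-up directory:

    from arvados.util import listdir_recursive

    # Prints one path per regular file, relative to the top of the tree,
    # e.g. "docs/README" and "src/main.py".
    for rel_path in listdir_recursive('/tmp/example-checkout'):
        print(rel_path)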