import config
import errors
import util
+import cache
_logger = logging.getLogger('arvados.api')
util.mkdir_dash_p(path)
except OSError:
path = None
- return path
+ return cache.SafeHTTPCache(path)
def api(version=None, cache=True, host=None, token=None, insecure=False, **kwargs):
"""Return an apiclient Resources object for an Arvados instance.
--- /dev/null
+import errno
+import md5
+import os
+import tempfile
+
+class SafeHTTPCache(object):
+    """Thread-safe, multiprocess-safe on-disk HTTP cache.
+
+    Implements the httplib2 cache interface (get/set/delete) with one
+    file per URL.  Writes land in a mkstemp() temporary file that is
+    atomically rename()d into place, so concurrent readers never see a
+    partially written entry.
+    """
+
+    def __init__(self, path=None):
+        # Directory where cache entry files live.
+        self._dir = path
+
+    def __str__(self):
+        return self._dir
+
+    def _filename(self, url):
+        # One file per URL, named by the MD5 hexdigest of the URL.
+        return os.path.join(self._dir, md5.new(url).hexdigest())
+
+    def get(self, url):
+        """Return cached content for url, or None if absent/unreadable."""
+        filename = self._filename(url)
+        try:
+            with open(filename, 'rb') as f:
+                return f.read()
+        # BUGFIX: was `except IOError, OSError:` which (in Python 2)
+        # catches only IOError and binds it to the name OSError,
+        # shadowing the builtin and letting real OSErrors escape.
+        except (IOError, OSError):
+            return None
+
+    def set(self, url, content):
+        """Store content for url.  Cache writes are best-effort: any
+        filesystem failure is swallowed and the entry simply not cached."""
+        try:
+            fd, tempname = tempfile.mkstemp(dir=self._dir)
+        # Narrowed from a bare `except:` -- only filesystem errors
+        # should be silently absorbed here, not e.g. KeyboardInterrupt.
+        except (IOError, OSError):
+            return None
+        try:
+            try:
+                f = os.fdopen(fd, 'w')
+            except:
+                # fdopen failed; we still own the raw descriptor.
+                os.close(fd)
+                raise
+            try:
+                f.write(content)
+            finally:
+                f.close()
+            # Atomic rename publishes the complete entry all at once.
+            os.rename(tempname, self._filename(url))
+            tempname = None
+        finally:
+            # Clean up the temp file on any failure before the rename.
+            if tempname:
+                os.unlink(tempname)
+
+    def delete(self, url):
+        """Remove the cache entry for url; missing entries are a no-op."""
+        try:
+            os.unlink(self._filename(url))
+        except OSError as err:
+            if err.errno != errno.ENOENT:
+                raise
# This will clear cached docs that belong to other processes (like
# concurrent test suites) even if they're still running. They should
# be able to tolerate that.
- for fn in glob.glob(os.path.join(arvados.http_cache('discovery'),
- '*,arvados,v1,rest,*')):
+ for fn in glob.glob(os.path.join(
+ str(arvados.http_cache('discovery')),
+ '*,arvados,v1,rest,*')):
os.unlink(fn)
pid_file = _pidfile('api')
--- /dev/null
+import md5
+import shutil
+import random
+import tempfile
+import threading
+import unittest
+
+import arvados.cache
+
+
+def _random(n):
+    """Return *n* random bytes as a bytearray (test fixture data)."""
+    return bytearray(random.getrandbits(8) for _ in xrange(n))
+
+
+class CacheTestThread(threading.Thread):
+    """Hammer one SafeHTTPCache directory with interleaved set/get calls.
+
+    Each stored payload is self-verifying (its MD5 hexdigest prefixes
+    the content), so a reader can validate whichever version it happens
+    to observe, even if another thread replaced the entry in between.
+    After join(), check the `ok` attribute for the verdict.
+    """
+
+    def __init__(self, dir):
+        super(CacheTestThread, self).__init__()
+        self._dir = dir
+        # Start optimistic; set False on the first failed round.
+        # BUGFIX: previously `ok` was overwritten every iteration, so
+        # only the final round's digest check counted and earlier
+        # corruption was masked; an exception in run() also left `ok`
+        # unset, crashing the joining test with AttributeError.
+        self.ok = True
+
+    def run(self):
+        c = arvados.cache.SafeHTTPCache(self._dir)
+        url = 'http://example.com/foo'
+        for x in range(16):
+            try:
+                data_in = _random(128)
+                data_in = md5.new(data_in).hexdigest() + "\n" + str(data_in)
+                c.set(url, data_in)
+                data_out = c.get(url)
+                digest, content = data_out.split("\n", 1)
+                if digest != md5.new(content).hexdigest():
+                    self.ok = False
+            except Exception:
+                # Any exception (e.g. get() returned None) is a failure,
+                # recorded rather than lost in the worker thread.
+                self.ok = False
+
+
+class CacheTest(unittest.TestCase):
+    """Unit tests for arvados.cache.SafeHTTPCache."""
+
+    def setUp(self):
+        # Fresh scratch directory per test case.
+        self._dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        shutil.rmtree(self._dir)
+
+    def test_cache_crud(self):
+        cache = arvados.cache.SafeHTTPCache(self._dir)
+        url = 'https://example.com/foo?bar=baz'
+        content_a = _random(256)
+        content_b = _random(128)
+        # A missing entry reads as None, and deleting it is a no-op.
+        self.assertEqual(None, cache.get(url))
+        cache.delete(url)
+        # A stored entry reads back verbatim and can be deleted.
+        cache.set(url, content_a)
+        self.assertEqual(content_a, cache.get(url))
+        cache.delete(url)
+        self.assertEqual(None, cache.get(url))
+        # A second set() for the same URL overwrites the first.
+        cache.set(url, content_a)
+        cache.set(url, content_b)
+        self.assertEqual(content_b, cache.get(url))
+
+    def test_cache_threads(self):
+        # 64 workers pound the same cache directory concurrently; each
+        # one reports whether every round it ran verified cleanly.
+        workers = []
+        for _ in range(64):
+            worker = CacheTestThread(dir=self._dir)
+            worker.start()
+            workers.append(worker)
+        for worker in workers:
+            worker.join()
+            self.assertTrue(worker.ok)