+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
import fnmatch
import os
import errno
-import urlparse
+import urllib.parse
import re
import logging
import threading
logger = logging.getLogger('arvados.cwl-runner')
+pdh_size = re.compile(r'([0-9a-f]{32})\+(\d+)(\+\S+)*')
+
class CollectionCache(object):
+    # Bounded, size-budgeted cache of CollectionReader objects keyed by
+    # portable data hash.  Entry sizes are estimated as manifest length * 128
+    # (see get()); `cap` is the byte budget and `min_entries` the count below
+    # which eviction stops.
+    # NOTE(review): this diff hunk elides context lines, so the tail of the
+    # __init__ signature (min_entries default, closing paren) and part of its
+    # body are not visible here.
    def __init__(self, api_client, keep_client, num_retries,
                 cap=256*1024*1024,
        self.cap = cap
        self.min_entries = min_entries
- def cap_cache(self):
- if self.total > self.cap:
- # ordered list iterates from oldest to newest
- for pdh, v in self.collections.items():
- if self.total < self.cap or len(self.collections) < self.min_entries:
- break
- # cut it loose
- logger.debug("Evicting collection reader %s from cache", pdh)
- del self.collections[pdh]
- self.total -= v[1]
+    def set_cap(self, cap):
+        # Adjust the cache's byte budget after construction; the new value is
+        # consulted on the next cap_cache() call.
+        self.cap = cap
+
+    def cap_cache(self, required):
+        # Evict least-recently-used entries until at least `required` bytes of
+        # headroom (self.cap - self.total) are free, stopping early once fewer
+        # than self.min_entries collections remain cached.
+        # ordered dict iterates from oldest to newest
+        for pdh, v in list(self.collections.items()):
+            available = self.cap - self.total
+            if available >= required or len(self.collections) < self.min_entries:
+                return
+            # cut it loose
+            logger.debug("Evicting collection reader %s from cache (cap %s total %s required %s)", pdh, self.cap, self.total, required)
+            del self.collections[pdh]
+            self.total -= v[1]
    def get(self, pdh):
+        # Return a CollectionReader for `pdh`, creating and caching one on a
+        # miss.  Eviction now happens *before* the new reader is built, sized
+        # from the byte-count hint embedded in the pdh locator (pdh_size group
+        # 2) scaled by the same *128 heuristic used for cached entry sizes.
+        # NOTE(review): diff context is elided mid-CollectionReader(...) call
+        # and after the LRU "bump" below; this hunk is not the whole method.
        with self.lock:
            if pdh not in self.collections:
+                m = pdh_size.match(pdh)
+                if m:
+                    self.cap_cache(int(m.group(2)) * 128)
                logger.debug("Creating collection reader for %s", pdh)
                cr = arvados.collection.CollectionReader(pdh, api_client=self.api_client,
                                                         keep_client=self.keep_client,
                sz = len(cr.manifest_text()) * 128
                self.collections[pdh] = (cr, sz)
                self.total += sz
-                self.cap_cache()
            else:
                cr, sz = self.collections[pdh]
                # bump it to the back
p = sp[0]
if p.startswith("keep:") and arvados.util.keep_locator_pattern.match(p[5:]):
pdh = p[5:]
- return (self.collection_cache.get(pdh), urlparse.unquote(sp[1]) if len(sp) == 2 else None)
+ return (self.collection_cache.get(pdh), urllib.parse.unquote(sp[1]) if len(sp) == 2 else None)
else:
return (None, path)
raise IOError(errno.ENOENT, "Directory '%s' in '%s' not found" % (rest, collection.portable_data_hash()))
if not isinstance(dir, arvados.collection.RichCollectionBase):
raise IOError(errno.ENOENT, "Path '%s' in '%s' is not a Directory" % (rest, collection.portable_data_hash()))
- return [abspath(l, fn) for l in dir.keys()]
+ return [abspath(l, fn) for l in list(dir.keys())]
else:
return super(CollectionFsAccess, self).listdir(fn)
if not url:
return base_url
- urlsp = urlparse.urlsplit(url)
+ urlsp = urllib.parse.urlsplit(url)
if urlsp.scheme or not base_url:
return url
- basesp = urlparse.urlsplit(base_url)
+ basesp = urllib.parse.urlsplit(base_url)
if basesp.scheme in ("keep", "arvwf"):
if not basesp.path:
raise IOError(errno.EINVAL, "Invalid Keep locator", base_url)
baseparts.pop()
path = "/".join([pdh] + baseparts + urlparts)
- return urlparse.urlunsplit((basesp.scheme, "", path, "", urlsp.fragment))
+ return urllib.parse.urlunsplit((basesp.scheme, "", path, "", urlsp.fragment))
return super(CollectionFetcher, self).urljoin(base_url, url)
+    # URI schemes this fetcher accepts, including the Arvados-specific
+    # "keep" and "arvwf" schemes.
+    schemes = [u"file", u"http", u"https", u"mailto", u"keep", u"arvwf"]
+
+    def supported_schemes(self): # type: () -> List[Text]
+        # Report the accepted URI schemes to the caller (cwltool framework).
+        return self.schemes
+
+
workflow_uuid_pattern = re.compile(r'[a-z0-9]{5}-7fd4e-[a-z0-9]{15}')
pipeline_template_uuid_pattern = re.compile(r'[a-z0-9]{5}-p5p6p-[a-z0-9]{15}')
if pipeline_template_uuid_pattern.match(uri):
pt = api_client.pipeline_templates().get(uuid=uri).execute(num_retries=num_retries)
- return "keep:" + pt["components"].values()[0]["script_parameters"]["cwl:tool"]
+ return "keep:" + list(pt["components"].values())[0]["script_parameters"]["cwl:tool"]
p = uri.split("/")
if arvados.util.keep_locator_pattern.match(p[0]):