X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/c0e203e7f3e9e40736eac63cbe440d5e46e379c0..af6447c82f24b283b6b5f32b2b66dc6a47e24487:/sdk/cwl/arvados_cwl/pathmapper.py

diff --git a/sdk/cwl/arvados_cwl/pathmapper.py b/sdk/cwl/arvados_cwl/pathmapper.py
index 5a2d814f5d..e0445febdc 100644
--- a/sdk/cwl/arvados_cwl/pathmapper.py
+++ b/sdk/cwl/arvados_cwl/pathmapper.py
@@ -2,20 +2,30 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.builtins import basestring
+from future.utils import viewitems
+
 import re
 import logging
 import uuid
 import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
+import arvados_cwl.util
 import arvados.commands.run
 import arvados.collection
 
 from schema_salad.sourceline import SourceLine
+from arvados.errors import ApiError
 
 from cwltool.pathmapper import PathMapper, MapperEnt, abspath, adjustFileObjs, adjustDirObjs
 from cwltool.workflow import WorkflowException
 
+from .http import http_to_keep
+
 logger = logging.getLogger('arvados.cwl-runner')
 
 def trim_listing(obj):
@@ -40,7 +50,7 @@ class ArvPathMapper(PathMapper):
     pdh_dirpath = re.compile(r'^keep:[0-9a-f]{32}\+\d+(/.*)?$')
 
     def __init__(self, arvrunner, referenced_files, input_basedir,
-                 collection_pattern, file_pattern, name=None, single_collection=False, **kwargs):
+                 collection_pattern, file_pattern, name=None, single_collection=False):
         self.arvrunner = arvrunner
         self.input_basedir = input_basedir
         self.collection_pattern = collection_pattern
@@ -56,7 +66,9 @@ class ArvPathMapper(PathMapper):
             src = src[:src.index("#")]
 
         if isinstance(src, basestring) and ArvPathMapper.pdh_dirpath.match(src):
-            self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.unquote(src[5:]), srcobj["class"], True)
+            self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.parse.unquote(src[5:]), srcobj["class"], True)
+
+        debug = logger.isEnabledFor(logging.DEBUG)
 
         if src not in self._pathmap:
             if src.startswith("file:"):
@@ -67,11 +79,11 @@ class ArvPathMapper(PathMapper):
                                                    fnPattern="keep:%s/%s",
                                                    dirPattern="keep:%s/%s",
                                                    raiseOSError=True)
-                with SourceLine(srcobj, "location", WorkflowException):
+                with SourceLine(srcobj, "location", WorkflowException, debug):
                     if isinstance(st, arvados.commands.run.UploadFile):
                         uploadfiles.add((src, ab, st))
                     elif isinstance(st, arvados.commands.run.ArvFile):
-                        self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.unquote(st.fn[5:]), "File", True)
+                        self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.parse.unquote(st.fn[5:]), "File", True)
                     else:
                         raise WorkflowException("Input file path '%s' is invalid" % st)
             elif src.startswith("_:"):
@@ -79,34 +91,73 @@ class ArvPathMapper(PathMapper):
                     raise WorkflowException("File literal '%s' is missing `contents`" % src)
                 if srcobj["class"] == "Directory" and "listing" not in srcobj:
                     raise WorkflowException("Directory literal '%s' is missing `listing`" % src)
+            elif src.startswith("http:") or src.startswith("https:"):
+                keepref = http_to_keep(self.arvrunner.api, self.arvrunner.project_uuid, src)
+                logger.info("%s is %s", src, keepref)
+                self._pathmap[src] = MapperEnt(keepref, keepref, srcobj["class"], True)
             else:
                 self._pathmap[src] = MapperEnt(src, src, srcobj["class"], True)
 
-            with SourceLine(srcobj, "secondaryFiles", WorkflowException):
+            with SourceLine(srcobj, "secondaryFiles", WorkflowException, debug):
                 for l in srcobj.get("secondaryFiles", []):
                     self.visit(l, uploadfiles)
-            with SourceLine(srcobj, "listing", WorkflowException):
+            with SourceLine(srcobj, "listing", WorkflowException, debug):
                 for l in srcobj.get("listing", []):
                     self.visit(l, uploadfiles)
 
-    def addentry(self, obj, c, path, subdirs):
+    def addentry(self, obj, c, path, remap):
         if obj["location"] in self._pathmap:
             src, srcpath = self.arvrunner.fs_access.get_collection(self._pathmap[obj["location"]].resolved)
             if srcpath == "":
                 srcpath = "."
             c.copy(srcpath, path + "/" + obj["basename"], source_collection=src, overwrite=True)
+            remap.append((obj["location"], path + "/" + obj["basename"]))
             for l in obj.get("secondaryFiles", []):
-                self.addentry(l, c, path, subdirs)
+                self.addentry(l, c, path, remap)
         elif obj["class"] == "Directory":
             for l in obj.get("listing", []):
-                self.addentry(l, c, path + "/" + obj["basename"], subdirs)
-            subdirs.append((obj["location"], path + "/" + obj["basename"]))
+                self.addentry(l, c, path + "/" + obj["basename"], remap)
+            remap.append((obj["location"], path + "/" + obj["basename"]))
         elif obj["location"].startswith("_:") and "contents" in obj:
             with c.open(path + "/" + obj["basename"], "w") as f:
-                f.write(obj["contents"].encode("utf-8"))
+                f.write(obj["contents"])
+            remap.append((obj["location"], path + "/" + obj["basename"]))
         else:
             raise SourceLine(obj, "location", WorkflowException).makeError("Don't know what to do with '%s'" % obj["location"])
 
+    def needs_new_collection(self, srcobj, prefix=""):
+        """Check if files need to be staged into a new collection.
+
+        If all the files are in the same collection and in the same
+        paths they would be staged to, return False.  Otherwise, a new
+        collection is needed with files copied/created in the
+        appropriate places.
+        """
+
+        loc = srcobj["location"]
+        if loc.startswith("_:"):
+            return True
+        if prefix:
+            if loc != prefix+srcobj["basename"]:
+                return True
+        else:
+            i = loc.rfind("/")
+            if i > -1:
+                prefix = loc[:i+1]
+            else:
+                prefix = loc+"/"
+        if srcobj["class"] == "File" and loc not in self._pathmap:
+            return True
+        for s in srcobj.get("secondaryFiles", []):
+            if self.needs_new_collection(s, prefix):
+                return True
+        if srcobj.get("listing"):
+            prefix = "%s%s/" % (prefix, srcobj["basename"])
+            for l in srcobj["listing"]:
+                if self.needs_new_collection(l, prefix):
+                    return True
+        return False
+
     def setup(self, referenced_files, basedir):
         # type: (List[Any], unicode) -> None
         uploadfiles = set()
@@ -117,19 +168,6 @@ class ArvPathMapper(PathMapper):
                                                        keep_client=self.arvrunner.keep_client,
                                                        num_retries=self.arvrunner.num_retries)
 
-        already_uploaded = self.arvrunner.get_uploaded()
-        copied_files = set()
-        for k in referenced_files:
-            loc = k["location"]
-            if loc in already_uploaded:
-                v = already_uploaded[loc]
-                self._pathmap[loc] = MapperEnt(v.resolved, self.collection_pattern % urllib.unquote(v.resolved[5:]), v.type, True)
-                if self.single_collection:
-                    basename = k["basename"]
-                    if basename not in collection:
-                        self.addentry({"location": loc, "class": v.type, "basename": basename}, collection, ".", [])
-                    copied_files.add((loc, basename, v.type))
-
         for srcobj in referenced_files:
             self.visit(srcobj, uploadfiles)
 
@@ -140,67 +178,88 @@ class ArvPathMapper(PathMapper):
                                              fnPattern="keep:%s/%s",
                                              name=self.name,
                                              project=self.arvrunner.project_uuid,
-                                             collection=collection)
+                                             collection=collection,
+                                             packed=False)
 
         for src, ab, st in uploadfiles:
-            self._pathmap[src] = MapperEnt(urllib.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
+            self._pathmap[src] = MapperEnt(urllib.parse.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
                                            "Directory" if os.path.isdir(ab) else "File", True)
-            self.arvrunner.add_uploaded(src, self._pathmap[src])
-
-        for loc, basename, cls in copied_files:
-            fn = "keep:%s/%s" % (collection.portable_data_hash(), basename)
-            self._pathmap[loc] = MapperEnt(urllib.quote(fn, "/:+@"), self.collection_pattern % fn[5:], cls, True)
 
         for srcobj in referenced_files:
-            subdirs = []
+            remap = []
             if srcobj["class"] == "Directory" and srcobj["location"] not in self._pathmap:
                 c = arvados.collection.Collection(api_client=self.arvrunner.api,
                                                   keep_client=self.arvrunner.keep_client,
                                                   num_retries=self.arvrunner.num_retries)
                 for l in srcobj.get("listing", []):
-                    self.addentry(l, c, ".", subdirs)
+                    self.addentry(l, c, ".", remap)
+
+                container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
+                info = arvados_cwl.util.get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
 
-                check = self.arvrunner.api.collections().list(filters=[["portable_data_hash", "=", c.portable_data_hash()]], limit=1).execute(num_retries=self.arvrunner.num_retries)
-                if not check["items"]:
-                    c.save_new(owner_uuid=self.arvrunner.project_uuid)
+                c.save_new(name=info["name"],
+                           owner_uuid=self.arvrunner.project_uuid,
+                           ensure_unique_name=True,
+                           trash_at=info["trash_at"],
+                           properties=info["properties"])
 
                 ab = self.collection_pattern % c.portable_data_hash()
                 self._pathmap[srcobj["location"]] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
             elif srcobj["class"] == "File" and (srcobj.get("secondaryFiles") or
                 (srcobj["location"].startswith("_:") and "contents" in srcobj)):
 
+                # If all secondary files/directories are located in
+                # the same collection as the primary file and the
+                # paths and names that are consistent with staging,
+                # don't create a new collection.
+                if not self.needs_new_collection(srcobj):
+                    continue
+
                 c = arvados.collection.Collection(api_client=self.arvrunner.api,
                                                   keep_client=self.arvrunner.keep_client,
                                                   num_retries=self.arvrunner.num_retries)
-                self.addentry(srcobj, c, ".", subdirs)
+                self.addentry(srcobj, c, ".", remap)
+
+                container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
+                info = arvados_cwl.util.get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
 
-                check = self.arvrunner.api.collections().list(filters=[["portable_data_hash", "=", c.portable_data_hash()]], limit=1).execute(num_retries=self.arvrunner.num_retries)
-                if not check["items"]:
-                    c.save_new(owner_uuid=self.arvrunner.project_uuid)
+                c.save_new(name=info["name"],
+                           owner_uuid=self.arvrunner.project_uuid,
+                           ensure_unique_name=True,
+                           trash_at=info["trash_at"],
+                           properties=info["properties"])
 
                 ab = self.file_pattern % (c.portable_data_hash(), srcobj["basename"])
                 self._pathmap[srcobj["location"]] = MapperEnt("keep:%s/%s" % (c.portable_data_hash(), srcobj["basename"]),
                                                               ab, "File", True)
                 if srcobj.get("secondaryFiles"):
                     ab = self.collection_pattern % c.portable_data_hash()
-                    self._pathmap["_:" + unicode(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
+                    self._pathmap["_:" + str(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
 
-            if subdirs:
-                for loc, sub in subdirs:
-                    # subdirs will all start with "./", strip it off
-                    ab = self.file_pattern % (c.portable_data_hash(), sub[2:])
+            if remap:
+                for loc, sub in remap:
+                    # subdirs start with "./", strip it off
+                    if sub.startswith("./"):
+                        ab = self.file_pattern % (c.portable_data_hash(), sub[2:])
+                    else:
+                        ab = self.file_pattern % (c.portable_data_hash(), sub)
                     self._pathmap[loc] = MapperEnt("keep:%s/%s" % (c.portable_data_hash(), sub[2:]),
                                                    ab, "Directory", True)
 
         self.keepdir = None
 
     def reversemap(self, target):
-        if target.startswith("keep:"):
+        p = super(ArvPathMapper, self).reversemap(target)
+        if p:
+            return p
+        elif target.startswith("keep:"):
             return (target, target)
         elif self.keepdir and target.startswith(self.keepdir):
-            return (target, "keep:" + target[len(self.keepdir)+1:])
+            kp = "keep:" + target[len(self.keepdir)+1:]
+            return (kp, kp)
         else:
-            return super(ArvPathMapper, self).reversemap(target)
+            return None
+
 
 class StagingPathMapper(PathMapper):
     _follow_dirs = True
@@ -215,12 +274,16 @@ class StagingPathMapper(PathMapper):
         tgt = os.path.join(stagedir, obj["basename"])
         basetgt, baseext = os.path.splitext(tgt)
         n = 1
-        while tgt in self.targets:
-            n += 1
-            tgt = "%s_%i%s" % (basetgt, n, baseext)
+        if tgt in self.targets and (self.reversemap(tgt)[0] != loc):
+            while tgt in self.targets:
+                n += 1
+                tgt = "%s_%i%s" % (basetgt, n, baseext)
         self.targets.add(tgt)
         if obj["class"] == "Directory":
-            self._pathmap[loc] = MapperEnt(loc, tgt, "Directory", staged)
+            if obj.get("writable"):
+                self._pathmap[loc] = MapperEnt(loc, tgt, "WritableDirectory", staged)
+            else:
+                self._pathmap[loc] = MapperEnt(loc, tgt, "Directory", staged)
             if loc.startswith("_:") or self._follow_dirs:
                 self.visitlisting(obj.get("listing", []), tgt, basedir)
         elif obj["class"] == "File":
@@ -229,7 +292,7 @@ class StagingPathMapper(PathMapper):
             if "contents" in obj and loc.startswith("_:"):
                 self._pathmap[loc] = MapperEnt(obj["contents"], tgt, "CreateFile", staged)
             else:
-                if copy:
+                if copy or obj.get("writable"):
                     self._pathmap[loc] = MapperEnt(loc, tgt, "WritableFile", staged)
                 else:
                     self._pathmap[loc] = MapperEnt(loc, tgt, "File", staged)
@@ -244,7 +307,7 @@ class VwdPathMapper(StagingPathMapper):
         # with any secondary files.
        self.visitlisting(referenced_files, self.stagedir, basedir)
 
-        for path, (ab, tgt, type, staged) in self._pathmap.items():
+        for path, (ab, tgt, type, staged) in viewitems(self._pathmap):
            if type in ("File", "Directory") and ab.startswith("keep:"):
                 self._pathmap[path] = MapperEnt("$(task.keep)/%s" % ab[5:], tgt, type, staged)
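
Illustrative sketch (not part of the commit above): the new needs_new_collection() method decides whether a File and its secondaryFiles can be left where they already are, or must be copied into a freshly created intermediate collection. The same check is rendered below as a standalone function, with a plain set of already-mapped locations standing in for ArvPathMapper._pathmap; every name and keep reference here is made up for the example.

def needs_new_collection(srcobj, mapped, prefix=""):
    """Return True if srcobj (a CWL File/Directory dict) cannot be staged
    in place, i.e. its files are not already laid out together in one
    collection under the paths staging would use."""
    loc = srcobj["location"]
    if loc.startswith("_:"):                    # literal: must be written out
        return True
    if prefix:
        if loc != prefix + srcobj["basename"]:  # not where staging expects it
            return True
    else:
        i = loc.rfind("/")
        prefix = loc[:i+1] if i > -1 else loc + "/"
    if srcobj["class"] == "File" and loc not in mapped:
        return True
    for s in srcobj.get("secondaryFiles", []):
        if needs_new_collection(s, mapped, prefix):
            return True
    if srcobj.get("listing"):
        prefix = "%s%s/" % (prefix, srcobj["basename"])
        for l in srcobj["listing"]:
            if needs_new_collection(l, mapped, prefix):
                return True
    return False

pdh = "keep:99999999999999999999999999999999+99"      # made-up collection reference
primary = {
    "class": "File",
    "location": pdh + "/sample.bam",
    "basename": "sample.bam",
    "secondaryFiles": [
        {"class": "File", "location": pdh + "/sample.bam.bai", "basename": "sample.bam.bai"},
    ],
}
mapped = {pdh + "/sample.bam", pdh + "/sample.bam.bai"}
print(needs_new_collection(primary, mapped))           # False: already staged together

primary["secondaryFiles"].append(
    {"class": "File", "location": "_:literal-1", "basename": "sample.bam.md5", "contents": "..."})
print(needs_new_collection(primary, mapped))           # True: a literal forces a new collection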
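
Illustrative sketch (not part of the commit above): throughout ArvPathMapper, container paths are produced by %-substituting keep: references into the collection_pattern and file_pattern strings supplied by the caller. The pattern values below are assumptions chosen for the example; only the substitution mechanics mirror the code.

import urllib.parse

collection_pattern = "/keep/%s"     # hypothetical mount pattern for a whole collection path
file_pattern = "/keep/%s/%s"        # hypothetical pattern for a file inside a saved collection

src = "keep:99999999999999999999999999999999+99/dir/My%20File.txt"
# visit() strips the "keep:" prefix, unquotes, and applies the pattern:
print(collection_pattern % urllib.parse.unquote(src[5:]))
# -> /keep/99999999999999999999999999999999+99/dir/My File.txt

# file_pattern is used when a file has been staged into a freshly saved
# collection, keyed by that collection's portable data hash:
pdh = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa+123"           # made-up portable data hash
print(file_pattern % (pdh, "sample.bam"))              # -> /keep/aaaa...+123/sample.bam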
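
Illustrative sketch (not part of the commit above): StagingPathMapper.visit() de-duplicates colliding target basenames by inserting a numeric suffix before the extension; the hunk above only changes when that renaming triggers (it is now skipped if the colliding target already maps back to the same source location). The suffix scheme itself, rendered standalone with an assumed staging directory:

import os

def dedup_target(stagedir, basename, targets):
    # Mirror the "%s_%i%s" renaming loop used by StagingPathMapper.visit().
    tgt = os.path.join(stagedir, basename)
    basetgt, baseext = os.path.splitext(tgt)
    n = 1
    while tgt in targets:
        n += 1
        tgt = "%s_%i%s" % (basetgt, n, baseext)
    targets.add(tgt)
    return tgt

targets = set()
print(dedup_target("/var/spool/cwl", "input.txt", targets))   # /var/spool/cwl/input.txt
print(dedup_target("/var/spool/cwl", "input.txt", targets))   # /var/spool/cwl/input_2.txt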