X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/5c6b040cabe60623364f19dc35920a084d6c6fc6..215d15bc03a38be1965a5d9df21417a3f7eae032:/sdk/cwl/arvados_cwl/pathmapper.py

diff --git a/sdk/cwl/arvados_cwl/pathmapper.py b/sdk/cwl/arvados_cwl/pathmapper.py
index bb95ba9ee4..38135899dc 100644
--- a/sdk/cwl/arvados_cwl/pathmapper.py
+++ b/sdk/cwl/arvados_cwl/pathmapper.py
@@ -2,20 +2,30 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.builtins import basestring
+from future.utils import viewitems
+
 import re
 import logging
 import uuid
 import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
 
+import arvados_cwl.util
 import arvados.commands.run
 import arvados.collection
 
 from schema_salad.sourceline import SourceLine
 
+from arvados.errors import ApiError
 from cwltool.pathmapper import PathMapper, MapperEnt, abspath, adjustFileObjs, adjustDirObjs
 from cwltool.workflow import WorkflowException
 
+from .http import http_to_keep
+
 logger = logging.getLogger('arvados.cwl-runner')
 
 def trim_listing(obj):
@@ -40,7 +50,7 @@ class ArvPathMapper(PathMapper):
     pdh_dirpath = re.compile(r'^keep:[0-9a-f]{32}\+\d+(/.*)?$')
 
     def __init__(self, arvrunner, referenced_files, input_basedir,
-                 collection_pattern, file_pattern, name=None, single_collection=False, **kwargs):
+                 collection_pattern, file_pattern, name=None, single_collection=False):
         self.arvrunner = arvrunner
         self.input_basedir = input_basedir
         self.collection_pattern = collection_pattern
@@ -48,6 +58,7 @@ class ArvPathMapper(PathMapper):
         self.name = name
         self.referenced_files = [r["location"] for r in referenced_files]
         self.single_collection = single_collection
+        self.pdh_to_uuid = {}
         super(ArvPathMapper, self).__init__(referenced_files, input_basedir, None)
 
     def visit(self, srcobj, uploadfiles):
@@ -56,7 +67,9 @@ class ArvPathMapper(PathMapper):
             src = src[:src.index("#")]
 
         if isinstance(src, basestring) and ArvPathMapper.pdh_dirpath.match(src):
-            self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.unquote(src[5:]), srcobj["class"], True)
+            self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.parse.unquote(src[5:]), srcobj["class"], True)
+            if arvados_cwl.util.collectionUUID in srcobj:
+                self.pdh_to_uuid[src.split("/", 1)[0][5:]] = srcobj[arvados_cwl.util.collectionUUID]
 
         debug = logger.isEnabledFor(logging.DEBUG)
 
@@ -73,7 +86,7 @@ class ArvPathMapper(PathMapper):
                     if isinstance(st, arvados.commands.run.UploadFile):
                         uploadfiles.add((src, ab, st))
                     elif isinstance(st, arvados.commands.run.ArvFile):
-                        self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.unquote(st.fn[5:]), "File", True)
+                        self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.parse.unquote(st.fn[5:]), "File", True)
                     else:
                         raise WorkflowException("Input file path '%s' is invalid" % st)
             elif src.startswith("_:"):
@@ -81,6 +94,10 @@ class ArvPathMapper(PathMapper):
                     raise WorkflowException("File literal '%s' is missing `contents`" % src)
                 if srcobj["class"] == "Directory" and "listing" not in srcobj:
                     raise WorkflowException("Directory literal '%s' is missing `listing`" % src)
+            elif src.startswith("http:") or src.startswith("https:"):
+                keepref = http_to_keep(self.arvrunner.api, self.arvrunner.project_uuid, src)
+                logger.info("%s is %s", src, keepref)
+                self._pathmap[src] = MapperEnt(keepref, keepref, srcobj["class"], True)
             else:
                 self._pathmap[src] = MapperEnt(src, src, srcobj["class"], True)
 
@@ -106,10 +123,44 @@ class ArvPathMapper(PathMapper):
                 remap.append((obj["location"], path + "/" + obj["basename"]))
         elif obj["location"].startswith("_:") and "contents" in obj:
             with c.open(path + "/" + obj["basename"], "w") as f:
-                f.write(obj["contents"].encode("utf-8"))
+                f.write(obj["contents"])
+            remap.append((obj["location"], path + "/" + obj["basename"]))
         else:
             raise SourceLine(obj, "location", WorkflowException).makeError("Don't know what to do with '%s'" % obj["location"])
 
+    def needs_new_collection(self, srcobj, prefix=""):
+        """Check if files need to be staged into a new collection.
+
+        If all the files are in the same collection and in the same
+        paths they would be staged to, return False.  Otherwise, a new
+        collection is needed with files copied/created in the
+        appropriate places.
+        """
+
+        loc = srcobj["location"]
+        if loc.startswith("_:"):
+            return True
+        if prefix:
+            if loc != prefix+srcobj["basename"]:
+                return True
+        else:
+            i = loc.rfind("/")
+            if i > -1:
+                prefix = loc[:i+1]
+            else:
+                prefix = loc+"/"
+        if srcobj["class"] == "File" and loc not in self._pathmap:
+            return True
+        for s in srcobj.get("secondaryFiles", []):
+            if self.needs_new_collection(s, prefix):
+                return True
+        if srcobj.get("listing"):
+            prefix = "%s%s/" % (prefix, srcobj["basename"])
+            for l in srcobj["listing"]:
+                if self.needs_new_collection(l, prefix):
+                    return True
+        return False
+
     def setup(self, referenced_files, basedir):
         # type: (List[Any], unicode) -> None
         uploadfiles = set()
@@ -120,19 +171,6 @@ class ArvPathMapper(PathMapper):
                                                        keep_client=self.arvrunner.keep_client,
                                                        num_retries=self.arvrunner.num_retries)
 
-        already_uploaded = self.arvrunner.get_uploaded()
-        copied_files = set()
-        for k in referenced_files:
-            loc = k["location"]
-            if loc in already_uploaded:
-                v = already_uploaded[loc]
-                self._pathmap[loc] = MapperEnt(v.resolved, self.collection_pattern % urllib.unquote(v.resolved[5:]), v.type, True)
-                if self.single_collection:
-                    basename = k["basename"]
-                    if basename not in collection:
-                        self.addentry({"location": loc, "class": v.type, "basename": basename}, collection, ".", [])
-                    copied_files.add((loc, basename, v.type))
-
         for srcobj in referenced_files:
             self.visit(srcobj, uploadfiles)
 
@@ -143,16 +181,12 @@ class ArvPathMapper(PathMapper):
                                              fnPattern="keep:%s/%s",
                                              name=self.name,
                                              project=self.arvrunner.project_uuid,
-                                             collection=collection)
+                                             collection=collection,
+                                             packed=False)
 
         for src, ab, st in uploadfiles:
-            self._pathmap[src] = MapperEnt(urllib.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
+            self._pathmap[src] = MapperEnt(urllib.parse.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
                                            "Directory" if os.path.isdir(ab) else "File", True)
-            self.arvrunner.add_uploaded(src, self._pathmap[src])
-
-        for loc, basename, cls in copied_files:
-            fn = "keep:%s/%s" % (collection.portable_data_hash(), basename)
-            self._pathmap[loc] = MapperEnt(urllib.quote(fn, "/:+@"), self.collection_pattern % fn[5:], cls, True)
 
         for srcobj in referenced_files:
             remap = []
@@ -163,30 +197,47 @@ class ArvPathMapper(PathMapper):
                 for l in srcobj.get("listing", []):
                     self.addentry(l, c, ".", remap)
 
-                check = self.arvrunner.api.collections().list(filters=[["portable_data_hash", "=", c.portable_data_hash()]], limit=1).execute(num_retries=self.arvrunner.num_retries)
-                if not check["items"]:
-                    c.save_new(owner_uuid=self.arvrunner.project_uuid)
+                container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
+                info = arvados_cwl.util.get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
+
+                c.save_new(name=info["name"],
+                           owner_uuid=self.arvrunner.project_uuid,
+                           ensure_unique_name=True,
+                           trash_at=info["trash_at"],
+                           properties=info["properties"])
 
                 ab = self.collection_pattern % c.portable_data_hash()
                 self._pathmap[srcobj["location"]] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
             elif srcobj["class"] == "File" and (srcobj.get("secondaryFiles") or
                 (srcobj["location"].startswith("_:") and "contents" in srcobj)):
+                # If all secondary files/directories are located in
+                # the same collection as the primary file and the
+                # paths and names that are consistent with staging,
+                # don't create a new collection.
+                if not self.needs_new_collection(srcobj):
+                    continue
+
                 c = arvados.collection.Collection(api_client=self.arvrunner.api,
                                                   keep_client=self.arvrunner.keep_client,
                                                   num_retries=self.arvrunner.num_retries)
                 self.addentry(srcobj, c, ".", remap)
 
-                check = self.arvrunner.api.collections().list(filters=[["portable_data_hash", "=", c.portable_data_hash()]], limit=1).execute(num_retries=self.arvrunner.num_retries)
-                if not check["items"]:
-                    c.save_new(owner_uuid=self.arvrunner.project_uuid)
+                container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
+                info = arvados_cwl.util.get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
+
+                c.save_new(name=info["name"],
+                           owner_uuid=self.arvrunner.project_uuid,
+                           ensure_unique_name=True,
+                           trash_at=info["trash_at"],
+                           properties=info["properties"])
 
                 ab = self.file_pattern % (c.portable_data_hash(), srcobj["basename"])
                 self._pathmap[srcobj["location"]] = MapperEnt("keep:%s/%s" % (c.portable_data_hash(), srcobj["basename"]),
                                                               ab, "File", True)
 
                 if srcobj.get("secondaryFiles"):
                     ab = self.collection_pattern % c.portable_data_hash()
-                    self._pathmap["_:" + unicode(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
+                    self._pathmap["_:" + str(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
 
             if remap:
                 for loc, sub in remap:
@@ -212,6 +263,7 @@ class ArvPathMapper(PathMapper):
         else:
             return None
 
+
 class StagingPathMapper(PathMapper):
     _follow_dirs = True
 
@@ -225,9 +277,10 @@ class StagingPathMapper(PathMapper):
         tgt = os.path.join(stagedir, obj["basename"])
         basetgt, baseext = os.path.splitext(tgt)
         n = 1
-        while tgt in self.targets:
-            n += 1
-            tgt = "%s_%i%s" % (basetgt, n, baseext)
+        if tgt in self.targets and (self.reversemap(tgt)[0] != loc):
+            while tgt in self.targets:
+                n += 1
+                tgt = "%s_%i%s" % (basetgt, n, baseext)
         self.targets.add(tgt)
         if obj["class"] == "Directory":
             if obj.get("writable"):
@@ -257,7 +310,7 @@ class VwdPathMapper(StagingPathMapper):
         # with any secondary files.
         self.visitlisting(referenced_files, self.stagedir, basedir)
 
-        for path, (ab, tgt, type, staged) in self._pathmap.items():
+        for path, (ab, tgt, type, staged) in viewitems(self._pathmap):
            if type in ("File", "Directory") and ab.startswith("keep:"):
                 self._pathmap[path] = MapperEnt("$(task.keep)/%s" % ab[5:], tgt, type, staged)
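The most substantive behavioral change in this diff is the new needs_new_collection() check: when a File's secondaryFiles already sit next to it in the same collection, under the same names and paths they would be staged to, setup() now reuses the existing collection instead of copying everything into a fresh one. Below is a minimal standalone sketch of that check; the portable data hashes and the pathmap contents are hypothetical illustrations, and in the real code this is a bound method of ArvPathMapper that consults self._pathmap as populated by visit().

def needs_new_collection(srcobj, pathmap, prefix=""):
    """Return True when srcobj (including its secondaryFiles/listing) cannot be
    served from its original collection and must be copied into a new one."""
    loc = srcobj["location"]
    if loc.startswith("_:"):
        # Literal with no backing file: always needs to be materialized.
        return True
    if prefix:
        if loc != prefix + srcobj["basename"]:
            # Sits outside the directory the primary file establishes.
            return True
    else:
        i = loc.rfind("/")
        prefix = loc[:i+1] if i > -1 else loc + "/"
    if srcobj["class"] == "File" and loc not in pathmap:
        return True
    for s in srcobj.get("secondaryFiles", []):
        if needs_new_collection(s, pathmap, prefix):
            return True
    if srcobj.get("listing"):
        prefix = "%s%s/" % (prefix, srcobj["basename"])
        for l in srcobj["listing"]:
            if needs_new_collection(l, pathmap, prefix):
                return True
    return False

# Primary file and its index live side by side in the same (hypothetical)
# collection, so no new collection is needed.
pdh = "99999999999999999999999999999999+99"     # hypothetical portable data hash
pathmap = {
    "keep:%s/reads.bam" % pdh: None,
    "keep:%s/reads.bam.bai" % pdh: None,
}
srcobj = {
    "class": "File",
    "location": "keep:%s/reads.bam" % pdh,
    "basename": "reads.bam",
    "secondaryFiles": [
        {"class": "File",
         "location": "keep:%s/reads.bam.bai" % pdh,
         "basename": "reads.bam.bai"},
    ],
}
print(needs_new_collection(srcobj, pathmap))    # False: staging layout already matches

# Same primary file, but the index lives in a different (hypothetical)
# collection, so the layout no longer matches and a copy is required.
other = dict(srcobj)
other["secondaryFiles"] = [
    {"class": "File",
     "location": "keep:88888888888888888888888888888888+88/reads.bam.bai",
     "basename": "reads.bam.bai"},
]
print(needs_new_collection(other, pathmap))     # True

In the second case setup() falls through to building a new intermediate collection, which this diff now saves with the name, trash_at, and properties returned by arvados_cwl.util.get_intermediate_collection_info() instead of the old portable-data-hash existence check.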