#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.builtins import basestring
+from future.utils import viewitems
+
import re
import logging
import uuid
import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
-from arvados_cwl.util import get_current_container, get_intermediate_collection_info
+import arvados_cwl.util
import arvados.commands.run
import arvados.collection
if obj.get("location", "").startswith("keep:") and "listing" in obj:
del obj["listing"]
+collection_pdh_path = re.compile(r'^keep:[0-9a-f]{32}\+\d+/.+$')
+collection_pdh_pattern = re.compile(r'^keep:([0-9a-f]{32}\+\d+)(/.*)?')
+collection_uuid_pattern = re.compile(r'^keep:([a-z0-9]{5}-4zz18-[a-z0-9]{15})(/.*)?$')
class ArvPathMapper(PathMapper):
"""Convert container-local paths to and from Keep collection ids."""
- pdh_path = re.compile(r'^keep:[0-9a-f]{32}\+\d+/.+$')
- pdh_dirpath = re.compile(r'^keep:[0-9a-f]{32}\+\d+(/.*)?$')
-
def __init__(self, arvrunner, referenced_files, input_basedir,
collection_pattern, file_pattern, name=None, single_collection=False):
self.arvrunner = arvrunner
self.name = name
self.referenced_files = [r["location"] for r in referenced_files]
self.single_collection = single_collection
+ self.pdh_to_uuid = {}
super(ArvPathMapper, self).__init__(referenced_files, input_basedir, None)
def visit(self, srcobj, uploadfiles):
if "#" in src:
src = src[:src.index("#")]
- if isinstance(src, basestring) and ArvPathMapper.pdh_dirpath.match(src):
- self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.unquote(src[5:]), srcobj["class"], True)
-
debug = logger.isEnabledFor(logging.DEBUG)
+ if isinstance(src, basestring) and src.startswith("keep:"):
+ if collection_pdh_pattern.match(src):
+ self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.parse.unquote(src[5:]), srcobj["class"], True)
+ if arvados_cwl.util.collectionUUID in srcobj:
+ self.pdh_to_uuid[src.split("/", 1)[0][5:]] = srcobj[arvados_cwl.util.collectionUUID]
+ elif not collection_uuid_pattern.match(src):
+ with SourceLine(srcobj, "location", WorkflowException, debug):
+ raise WorkflowException("Invalid keep reference '%s'" % src)
+
if src not in self._pathmap:
if src.startswith("file:"):
# Local FS ref, may need to be uploaded or may be on keep
if isinstance(st, arvados.commands.run.UploadFile):
uploadfiles.add((src, ab, st))
elif isinstance(st, arvados.commands.run.ArvFile):
- self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.unquote(st.fn[5:]), "File", True)
+ self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.parse.unquote(st.fn[5:]), "File", True)
else:
raise WorkflowException("Input file path '%s' is invalid" % st)
elif src.startswith("_:"):
remap.append((obj["location"], path + "/" + obj["basename"]))
elif obj["location"].startswith("_:") and "contents" in obj:
with c.open(path + "/" + obj["basename"], "w") as f:
- f.write(obj["contents"].encode("utf-8"))
+ f.write(obj["contents"])
remap.append((obj["location"], path + "/" + obj["basename"]))
else:
raise SourceLine(obj, "location", WorkflowException).makeError("Don't know what to do with '%s'" % obj["location"])
+    def needs_new_collection(self, srcobj, prefix=""):
+        """Check if files need to be staged into a new collection.
+
+        If all the files are in the same collection and in the same
+        paths they would be staged to, return False. Otherwise, a new
+        collection is needed with files copied/created in the
+        appropriate places.
+        """
+
+        loc = srcobj["location"]
+        # Literals ("_:" locations) have no backing collection, so they
+        # always force creation of a new collection.
+        if loc.startswith("_:"):
+            return True
+        if prefix:
+            # Recursive call: the object must already sit at exactly the
+            # path it would be staged to, otherwise it has to be copied.
+            if loc != prefix+srcobj["basename"]:
+                return True
+        else:
+            # Top-level call: derive the prefix from the primary object's
+            # own directory so secondaries/listings are checked against it.
+            i = loc.rfind("/")
+            if i > -1:
+                prefix = loc[:i+1]
+            else:
+                prefix = loc+"/"
+        # A File that was never entered into the path map cannot be
+        # referenced in place.
+        if srcobj["class"] == "File" and loc not in self._pathmap:
+            return True
+        # Recurse into secondary files, then into directory listings
+        # (listings extend the prefix by this directory's basename).
+        for s in srcobj.get("secondaryFiles", []):
+            if self.needs_new_collection(s, prefix):
+                return True
+        if srcobj.get("listing"):
+            prefix = "%s%s/" % (prefix, srcobj["basename"])
+            for l in srcobj["listing"]:
+                if self.needs_new_collection(l, prefix):
+                    return True
+        return False
+
def setup(self, referenced_files, basedir):
# type: (List[Any], unicode) -> None
uploadfiles = set()
packed=False)
for src, ab, st in uploadfiles:
- self._pathmap[src] = MapperEnt(urllib.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
+ self._pathmap[src] = MapperEnt(urllib.parse.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
"Directory" if os.path.isdir(ab) else "File", True)
for srcobj in referenced_files:
for l in srcobj.get("listing", []):
self.addentry(l, c, ".", remap)
- container = get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
- info = get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
+ container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
+ info = arvados_cwl.util.get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
c.save_new(name=info["name"],
owner_uuid=self.arvrunner.project_uuid,
elif srcobj["class"] == "File" and (srcobj.get("secondaryFiles") or
(srcobj["location"].startswith("_:") and "contents" in srcobj)):
+                # If all secondary files/directories are located in
+                # the same collection as the primary file, and the
+                # paths and names are consistent with staging, don't
+                # create a new collection.
+ if not self.needs_new_collection(srcobj):
+ continue
+
c = arvados.collection.Collection(api_client=self.arvrunner.api,
keep_client=self.arvrunner.keep_client,
num_retries=self.arvrunner.num_retries )
self.addentry(srcobj, c, ".", remap)
- container = get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
- info = get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
+ container = arvados_cwl.util.get_current_container(self.arvrunner.api, self.arvrunner.num_retries, logger)
+ info = arvados_cwl.util.get_intermediate_collection_info(None, container, self.arvrunner.intermediate_output_ttl)
c.save_new(name=info["name"],
owner_uuid=self.arvrunner.project_uuid,
ab, "File", True)
if srcobj.get("secondaryFiles"):
ab = self.collection_pattern % c.portable_data_hash()
- self._pathmap["_:" + unicode(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
+ self._pathmap["_:" + str(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
if remap:
for loc, sub in remap:
class StagingPathMapper(PathMapper):
+ # Note that StagingPathMapper internally maps files from target to source.
+ # Specifically, the 'self._pathmap' dict keys are the target location and the
+ # values are 'MapperEnt' named tuples from which we use the 'resolved' attribute
+ # as the file identifier. This makes it possible to map an input file to multiple
+ # target directories. The exception is for file literals, which store the contents of
+ # the file in 'MapperEnt.resolved' and are therefore still mapped from source to target.
+
_follow_dirs = True
def __init__(self, referenced_files, basedir, stagedir, separateDirs=True):
loc = obj["location"]
tgt = os.path.join(stagedir, obj["basename"])
basetgt, baseext = os.path.splitext(tgt)
+
+ def targetExists():
+ return tgt in self.targets and ("contents" not in obj) and (self._pathmap[tgt].resolved != loc)
+ def literalTargetExists():
+ return tgt in self.targets and "contents" in obj
+
n = 1
- if tgt in self.targets and (self.reversemap(tgt)[0] != loc):
+ if targetExists() or literalTargetExists():
while tgt in self.targets:
n += 1
tgt = "%s_%i%s" % (basetgt, n, baseext)
self.targets.add(tgt)
if obj["class"] == "Directory":
if obj.get("writable"):
- self._pathmap[loc] = MapperEnt(loc, tgt, "WritableDirectory", staged)
+ self._pathmap[tgt] = MapperEnt(loc, tgt, "WritableDirectory", staged)
else:
- self._pathmap[loc] = MapperEnt(loc, tgt, "Directory", staged)
+ self._pathmap[tgt] = MapperEnt(loc, tgt, "Directory", staged)
if loc.startswith("_:") or self._follow_dirs:
self.visitlisting(obj.get("listing", []), tgt, basedir)
elif obj["class"] == "File":
- if loc in self._pathmap:
+ if tgt in self._pathmap:
return
if "contents" in obj and loc.startswith("_:"):
self._pathmap[loc] = MapperEnt(obj["contents"], tgt, "CreateFile", staged)
else:
if copy or obj.get("writable"):
- self._pathmap[loc] = MapperEnt(loc, tgt, "WritableFile", staged)
+ self._pathmap[tgt] = MapperEnt(loc, tgt, "WritableFile", staged)
else:
- self._pathmap[loc] = MapperEnt(loc, tgt, "File", staged)
+ self._pathmap[tgt] = MapperEnt(loc, tgt, "File", staged)
self.visitlisting(obj.get("secondaryFiles", []), stagedir, basedir)
+    def mapper(self, src):  # type: (Text) -> MapperEnt.
+        # Overridden to maintain the use case of mapping by source (identifier) to
+        # target regardless of how the map is structured internally.
+        def getMapperEnt(src):
+            # File literals (CreateFile) are keyed by source location;
+            # everything else is keyed by target, so match those on the
+            # 'resolved' field instead of the key.
+            for k,v in viewitems(self._pathmap):
+                if (v.type != "CreateFile" and v.resolved == src) or (v.type == "CreateFile" and k == src):
+                    return v
+
+        if u"#" in src:
+            # Entries are stored with fragments stripped (see visit()),
+            # so look up the path portion only, then re-append the
+            # fragment to the mapped target.
+            i = src.index(u"#")
+            v = getMapperEnt(src[:i])
+            return MapperEnt(v.resolved, v.target + src[i:], v.type, v.staged)
+        return getMapperEnt(src)
+
class VwdPathMapper(StagingPathMapper):
def setup(self, referenced_files, basedir):
# with any secondary files.
self.visitlisting(referenced_files, self.stagedir, basedir)
- for path, (ab, tgt, type, staged) in self._pathmap.items():
+ for path, (ab, tgt, type, staged) in viewitems(self._pathmap):
if type in ("File", "Directory") and ab.startswith("keep:"):
self._pathmap[path] = MapperEnt("$(task.keep)/%s" % ab[5:], tgt, type, staged)