X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/504e09d413026fcac8ac94530134da2fce4dc0f2..dddfa30b07b2584353df378528f84945faa3ad7f:/sdk/cwl/arvados_cwl/runner.py

diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 2239e0f9df..71e499ebca 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -42,6 +42,7 @@ import schema_salad.validate as validate
 import arvados.collection
 from .util import collectionUUID
 import ruamel.yaml as yaml
+from ruamel.yaml.comments import CommentedMap, CommentedSeq
 
 import arvados_cwl.arvdocker
 from .pathmapper import ArvPathMapper, trim_listing, collection_pdh_pattern, collection_uuid_pattern
@@ -168,21 +169,47 @@ def set_secondary(fsaccess, builder, inputschema, secondaryspec, primary, discov
         #
         # Found a file, check for secondaryFiles
         #
-        primary["secondaryFiles"] = []
+        specs = []
+        primary["secondaryFiles"] = secondaryspec
         for i, sf in enumerate(aslist(secondaryspec)):
             pattern = builder.do_eval(sf["pattern"], context=primary)
             if pattern is None:
                 continue
+            if isinstance(pattern, list):
+                specs.extend(pattern)
+            elif isinstance(pattern, dict):
+                specs.append(pattern)
+            elif isinstance(pattern, str):
+                specs.append({"pattern": pattern})
+            else:
+                raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
+                    "Expression must return list, object, string or null")
+
+        found = []
+        for i, sf in enumerate(specs):
+            if isinstance(sf, dict):
+                if sf.get("class") == "File":
+                    pattern = sf["basename"]
+                else:
+                    pattern = sf["pattern"]
+                required = sf.get("required")
+            elif isinstance(sf, str):
+                pattern = sf
+                required = True
+            else:
+                raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
+                    "Expression must return list, object, string or null")
+
             sfpath = substitute(primary["location"], pattern)
-            required = builder.do_eval(sf.get("required"), context=primary)
+            required = builder.do_eval(required, context=primary)
 
             if fsaccess.exists(sfpath):
-                primary["secondaryFiles"].append({"location": sfpath, "class": "File"})
+                found.append({"location": sfpath, "class": "File"})
             elif required:
                 raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
                     "Required secondary file '%s' does not exist" % sfpath)
 
-        primary["secondaryFiles"] = cmap(primary["secondaryFiles"])
+        primary["secondaryFiles"] = cmap(found)
         if discovered is not None:
             discovered[primary["location"]] = primary["secondaryFiles"]
     elif inputschema["type"] not in primitive_types_set:
@@ -392,7 +419,7 @@ def upload_dependencies(arvrunner, name, document_loader,
             discovered_secondaryfiles[mapper.mapper(d).resolved] = discovered[d]
 
     if "$schemas" in workflowobj:
-        sch = []
+        sch = CommentedSeq()
         for s in workflowobj["$schemas"]:
             sch.append(mapper.mapper(s).resolved)
         workflowobj["$schemas"] = sch
@@ -424,17 +451,22 @@ def packed_workflow(arvrunner, tool, merged_map):
     A "packed" workflow is one where all the components have been combined into a single document."""
 
     rewrites = {}
-    packed = pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
-                  tool.tool["id"], tool.metadata, rewrite_out=rewrites)
+    packed = pack(arvrunner.loadingContext, tool.tool["id"],
+                  rewrite_out=rewrites,
+                  loader=tool.doc_loader)
 
     rewrite_to_orig = {v: k for k,v in viewitems(rewrites)}
 
     def visit(v, cur_id):
         if isinstance(v, dict):
             if v.get("class") in ("CommandLineTool", "Workflow"):
-                if "id" not in v:
-                    raise SourceLine(v, None, Exception).makeError("Embedded process object is missing required 'id' field")
-                cur_id = rewrite_to_orig.get(v["id"], v["id"])
+                if tool.metadata["cwlVersion"] == "v1.0" and "id" not in v:
+                    raise SourceLine(v, None, Exception).makeError("Embedded process object is missing required 'id' field, add an 'id' or use cwlVersion: v1.1")
+                if "id" in v:
+                    cur_id = rewrite_to_orig.get(v["id"], v["id"])
+            if "path" in v and "location" not in v:
+                v["location"] = v["path"]
+                del v["path"]
             if "location" in v and not v["location"].startswith("keep:"):
                 v["location"] = merged_map[cur_id].resolved[v["location"]]
             if "location" in v and v["location"] in merged_map[cur_id].secondaryFiles:
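
For readers following the set_secondary() hunk above: a secondaryFiles pattern expression may now return a string, an object, or a list, and the results are first normalized into spec dictionaries before the existence checks run. The standalone Python sketch below illustrates that normalization and the caret-based extension substitution; the function names, the plain ValueError, and the keep: locator in the example are illustrative stand-ins, not code from this diff.

# Illustrative sketch (not part of the diff): normalize evaluated secondaryFiles
# patterns into spec dicts, then resolve each spec against the primary location.
def normalize_secondary_specs(evaluated_patterns):
    """Flatten expression results (list, dict, str, or None) into spec dicts."""
    specs = []
    for pattern in evaluated_patterns:
        if pattern is None:
            continue
        if isinstance(pattern, list):
            specs.extend(pattern)
        elif isinstance(pattern, dict):
            specs.append(pattern)
        elif isinstance(pattern, str):
            specs.append({"pattern": pattern})
        else:
            raise ValueError("Expression must return list, object, string or null")
    return specs

def resolve_spec(spec, primary_location):
    """Return (candidate path, required flag) for one normalized spec."""
    if isinstance(spec, dict):
        pattern = spec["basename"] if spec.get("class") == "File" else spec["pattern"]
        required = spec.get("required")
    else:
        pattern, required = spec, True
    # Each leading "^" strips one extension from the primary path, mirroring the
    # behaviour of cwltool's substitute() used by the real code.
    base = primary_location
    while pattern.startswith("^") and "." in base:
        base, pattern = base[:base.rindex(".")], pattern[1:]
    return base + pattern, required

specs = normalize_secondary_specs([".bai", {"pattern": "^.dict", "required": False}])
print([resolve_spec(s, "keep:0123456789abcdef0123456789abcdef+120/reads.bam") for s in specs])
# [('keep:0123456789abcdef0123456789abcdef+120/reads.bam.bai', None),
#  ('keep:0123456789abcdef0123456789abcdef+120/reads.dict', False)]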
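
The upload_dependencies() hunk rebuilds workflowobj["$schemas"] as a ruamel.yaml CommentedSeq rather than a plain Python list. A minimal sketch of the difference follows; the schema URL is invented, and the rationale given in the comment is an inference from the import added at the top of the diff, not a statement taken from the diff itself.

# Minimal sketch (not part of the diff): CommentedSeq behaves like a list but
# also carries a line/column metadata slot (.lc), presumably why the rebuilt
# "$schemas" value uses it instead of [].
from ruamel.yaml.comments import CommentedSeq

sch = CommentedSeq()
sch.append("http://example.com/ontology.rdf")  # hypothetical schema URL

sch.lc.line, sch.lc.col = 3, 0                 # position info a plain list cannot hold
print(isinstance(sch, list), sch.lc.line, sch.lc.col)  # True 3 0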