X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/29a8298caf922a2651fbd51baf7b97b58ae25840..ae92d144610446849eb568247a44f02ae985c281:/sdk/cwl/arvados_cwl/runner.py

diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 7664abef7c..7d4310b0e0 100644
--- a/sdk/cwl/arvados_cwl/runner.py
+++ b/sdk/cwl/arvados_cwl/runner.py
@@ -40,7 +40,7 @@ import schema_salad.validate as validate
 
 import arvados.collection
 from .util import collectionUUID
-import ruamel.yaml as yaml
+from ruamel.yaml import YAML
 from ruamel.yaml.comments import CommentedMap, CommentedSeq
 
 import arvados_cwl.arvdocker
@@ -105,7 +105,8 @@ def make_builder(joborder, hints, requirements, runtimeContext, metadata):
                    outdir="",              # type: Text
                    tmpdir="",              # type: Text
                    stagedir="",            # type: Text
-                   cwlVersion=metadata.get("http://commonwl.org/cwltool#original_cwlVersion") or metadata.get("cwlVersion")
+                   cwlVersion=metadata.get("http://commonwl.org/cwltool#original_cwlVersion") or metadata.get("cwlVersion"),
+                   container_engine="docker"
                   )
 
 def search_schemadef(name, reqs):
@@ -183,7 +184,10 @@ def set_secondary(fsaccess, builder, inputschema, secondaryspec, primary, discov
                 elif isinstance(pattern, dict):
                     specs.append(pattern)
                 elif isinstance(pattern, str):
-                    specs.append({"pattern": pattern})
+                    if builder.cwlVersion == "v1.0":
+                        specs.append({"pattern": pattern, "required": True})
+                    else:
+                        specs.append({"pattern": pattern, "required": sf.get("required")})
                 else:
                     raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
                         "Expression must return list, object, string or null")
@@ -192,7 +196,12 @@
             for i, sf in enumerate(specs):
                 if isinstance(sf, dict):
                     if sf.get("class") == "File":
-                        pattern = sf["basename"]
+                        pattern = None
+                        if sf.get("location") is None:
+                            raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
+                                "File object is missing 'location': %s" % sf)
+                        sfpath = sf["location"]
+                        required = True
                     else:
                         pattern = sf["pattern"]
                         required = sf.get("required")
@@ -203,11 +212,16 @@
                     raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
                         "Expression must return list, object, string or null")
 
-                sfpath = substitute(primary["location"], pattern)
+                if pattern is not None:
+                    sfpath = substitute(primary["location"], pattern)
+
                 required = builder.do_eval(required, context=primary)
 
                 if fsaccess.exists(sfpath):
-                    found.append({"location": sfpath, "class": "File"})
+                    if pattern is not None:
+                        found.append({"location": sfpath, "class": "File"})
+                    else:
+                        found.append(sf)
                 elif required:
                     raise SourceLine(primary["secondaryFiles"], i, validate.ValidationException).makeError(
                         "Required secondary file '%s' does not exist" % sfpath)
@@ -251,7 +265,8 @@ def upload_dependencies(arvrunner, name, document_loader,
                 textIO = StringIO(text.decode('utf-8'))
             else:
                 textIO = StringIO(text)
-            return yaml.safe_load(textIO)
+            yamlloader = YAML(typ='safe', pure=True)
+            return yamlloader.load(textIO)
         else:
             return {}
 
@@ -269,9 +284,18 @@ def upload_dependencies(arvrunner, name, document_loader,
         metadata = scanobj
 
     sc_result = scandeps(uri, scanobj,
-                  loadref_fields,
-                  set(("$include", "$schemas", "location")),
-                  loadref, urljoin=document_loader.fetcher.urljoin)
+                  loadref_fields,
+                  set(("$include", "location")),
+                  loadref, urljoin=document_loader.fetcher.urljoin,
+                  nestdirs=False)
+
+    optional_deps = scandeps(uri, scanobj,
+                  loadref_fields,
+                  set(("$schemas",)),
+                  loadref, urljoin=document_loader.fetcher.urljoin,
+                  nestdirs=False)
+
+    sc_result.extend(optional_deps)
 
     sc = []
     uuids = {}
@@ -329,24 +353,14 @@ def upload_dependencies(arvrunner, name, document_loader,
     if include_primary and "id" in workflowobj:
         sc.append({"class": "File", "location": workflowobj["id"]})
 
-    if "$schemas" in workflowobj:
-        for s in workflowobj["$schemas"]:
-            sc.append({"class": "File", "location": s})
-
     def visit_default(obj):
-        remove = [False]
-        def ensure_default_location(f):
+        def defaults_are_optional(f):
             if "location" not in f and "path" in f:
                 f["location"] = f["path"]
                 del f["path"]
-            if "location" in f and not arvrunner.fs_access.exists(f["location"]):
-                # Doesn't exist, remove from list of dependencies to upload
-                sc[:] = [x for x in sc if x["location"] != f["location"]]
-                # Delete "default" from workflowobj
-                remove[0] = True
-        visit_class(obj["default"], ("File", "Directory"), ensure_default_location)
-        if remove[0]:
-            del obj["default"]
+            normalizeFilesDirs(f)
+            optional_deps.append(f)
+        visit_class(obj["default"], ("File", "Directory"), defaults_are_optional)
 
     find_defaults(workflowobj, visit_default)
 
@@ -382,7 +396,8 @@ def upload_dependencies(arvrunner, name, document_loader,
                            "keep:%s",
                            "keep:%s/%s",
                            name=name,
-                           single_collection=True)
+                           single_collection=True,
+                           optional_deps=optional_deps)
 
     def setloc(p):
         loc = p.get("location")
@@ -446,12 +461,14 @@ def upload_docker(arvrunner, tool):
                     "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
             arvados_cwl.arvdocker.arv_docker_get_image(arvrunner.api, docker_req, True, arvrunner.project_uuid,
                                                        arvrunner.runtimeContext.force_docker_pull,
-                                                       arvrunner.runtimeContext.tmp_outdir_prefix)
+                                                       arvrunner.runtimeContext.tmp_outdir_prefix,
+                                                       arvrunner.runtimeContext.match_local_docker)
         else:
             arvados_cwl.arvdocker.arv_docker_get_image(arvrunner.api, {"dockerPull": "arvados/jobs:"+__version__},
                                                        True, arvrunner.project_uuid,
                                                        arvrunner.runtimeContext.force_docker_pull,
-                                                       arvrunner.runtimeContext.tmp_outdir_prefix)
+                                                       arvrunner.runtimeContext.tmp_outdir_prefix,
+                                                       arvrunner.runtimeContext.match_local_docker)
     elif isinstance(tool, cwltool.workflow.Workflow):
         for s in tool.steps:
             upload_docker(arvrunner, s.embedded_tool)
@@ -471,7 +488,7 @@ def packed_workflow(arvrunner, tool, merged_map):
 
     def visit(v, cur_id):
         if isinstance(v, dict):
-            if v.get("class") in ("CommandLineTool", "Workflow"):
+            if v.get("class") in ("CommandLineTool", "Workflow", "ExpressionTool"):
                 if tool.metadata["cwlVersion"] == "v1.0" and "id" not in v:
                     raise SourceLine(v, None, Exception).makeError("Embedded process object is missing required 'id' field, add an 'id' or use to cwlVersion: v1.1")
                 if "id" in v:
@@ -479,15 +496,17 @@ def packed_workflow(arvrunner, tool, merged_map):
             if "path" in v and "location" not in v:
                 v["location"] = v["path"]
                 del v["path"]
-            if "location" in v and not v["location"].startswith("keep:"):
-                v["location"] = merged_map[cur_id].resolved[v["location"]]
-            if "location" in v and v["location"] in merged_map[cur_id].secondaryFiles:
-                v["secondaryFiles"] = merged_map[cur_id].secondaryFiles[v["location"]]
+            if "location" in v and cur_id in merged_map:
+                if v["location"] in merged_map[cur_id].resolved:
+                    v["location"] = merged_map[cur_id].resolved[v["location"]]
+                if v["location"] in merged_map[cur_id].secondaryFiles:
+                    v["secondaryFiles"] = merged_map[cur_id].secondaryFiles[v["location"]]
             if v.get("class") == "DockerRequirement":
                 v["http://arvados.org/cwl#dockerCollectionPDH"] = arvados_cwl.arvdocker.arv_docker_get_image(arvrunner.api, v, True, arvrunner.project_uuid,
                                                                                                               arvrunner.runtimeContext.force_docker_pull,
-                                                                                                              arvrunner.runtimeContext.tmp_outdir_prefix)
+                                                                                                              arvrunner.runtimeContext.tmp_outdir_prefix,
+                                                                                                              arvrunner.runtimeContext.match_local_docker)
             for l in v:
                 visit(v[l], cur_id)
         if isinstance(v, list):
             for l in v:
@@ -594,7 +613,8 @@ def arvados_jobs_image(arvrunner, img):
     try:
         return arvados_cwl.arvdocker.arv_docker_get_image(arvrunner.api, {"dockerPull": img}, True, arvrunner.project_uuid,
                                                           arvrunner.runtimeContext.force_docker_pull,
-                                                          arvrunner.runtimeContext.tmp_outdir_prefix)
+                                                          arvrunner.runtimeContext.tmp_outdir_prefix,
+                                                          arvrunner.runtimeContext.match_local_docker)
     except Exception as e:
         raise Exception("Docker image %s is not available\n%s" % (img, e) )