+                textIO = StringIO(text)
+            return yaml.safe_load(textIO)
+        else:
+            return {}
+
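+    # "$import" references are always followed; "run" references (workflow
+    # steps) are followed only when the caller sets loadref_run.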
+    if loadref_run:
+        loadref_fields = set(("$import", "run"))
+    else:
+        loadref_fields = set(("$import",))
+
+    scanobj = workflowobj
+    if "id" in workflowobj:
+        # Need raw file content (before preprocessing) to ensure
+        # that external references in $include and $mixin are captured.
+        scanobj = loadref("", workflowobj["id"])
+
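+    # Scan the document for everything that has to be uploaded: imported and
+    # included documents, schemas, and File/Directory entries with a "location".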
+    sc = scandeps(uri, scanobj,
+                  loadref_fields,
+                  set(("$include", "$schemas", "location")),
+                  loadref, urljoin=document_loader.fetcher.urljoin)
+
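+    # Normalize the discovered File/Directory entries (e.g. converting "path"
+    # to "location") so the rest of the upload logic sees a consistent shape.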
+    normalizeFilesDirs(sc)
+
+    if include_primary and "id" in workflowobj:
+        sc.append({"class": "File", "location": workflowobj["id"]})
+
+ if "$schemas" in workflowobj:
+ for s in workflowobj["$schemas"]:
+ sc.append({"class": "File", "location": s})
+
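+    # Default values may point at local files that don't exist; rather than
+    # failing the upload, drop both the dependency and the default itself.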
+    def visit_default(obj):
+        remove = [False]
+        def ensure_default_location(f):
+            if "location" not in f and "path" in f:
+                f["location"] = f["path"]
+                del f["path"]
+            if "location" in f and not arvrunner.fs_access.exists(f["location"]):
+                # Doesn't exist, remove from list of dependencies to upload
+                sc[:] = [x for x in sc if x["location"] != f["location"]]
+                # Delete "default" from workflowobj
+                remove[0] = True
+        visit_class(obj["default"], ("File", "Directory"), ensure_default_location)
+        if remove[0]:
+            del obj["default"]
+
+    find_defaults(workflowobj, visit_default)
+
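+    # Secondary files referenced by input defaults must be uploaded too;
+    # collect them into "discovered".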
+    discovered = {}
+    def discover_default_secondary_files(obj):
+        discover_secondary_files(obj["inputs"],
+                                 {shortname(t["id"]): t["default"] for t in obj["inputs"] if "default" in t},
+                                 discovered)
+
+ visit_class(workflowobj, ("CommandLineTool", "Workflow"), discover_default_secondary_files)
+
+    for d in discovered:
+        sc.extend(discovered[d])
+
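+    # Upload everything in "sc" to Keep as a single collection and get back a
+    # mapper from the original locations to "keep:" references.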
+    mapper = ArvPathMapper(arvrunner, sc, "",
+                           "keep:%s",
+                           "keep:%s/%s",
+                           name=name,
+                           single_collection=True)
+
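+    # Rewrite locations in place, skipping anonymous literals ("_:...") and
+    # references that already point into Keep.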
+    def setloc(p):
+        if "location" in p and (not p["location"].startswith("_:")) and (not p["location"].startswith("keep:")):
+            p["location"] = mapper.mapper(p["location"]).resolved
+
+ visit_class(workflowobj, ("File", "Directory"), setloc)
+ visit_class(discovered, ("File", "Directory"), setloc)
+
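+    # Report the discovered secondary files back to the caller, keyed by the
+    # mapped location of the file they belong to.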
+    if discovered_secondaryfiles is not None:
+        for d in discovered:
+            discovered_secondaryfiles[mapper.mapper(d).resolved] = discovered[d]
+
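+    # Rewrite $schemas to point at the uploaded copies.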
+ if "$schemas" in workflowobj:
+ sch = []
+ for s in workflowobj["$schemas"]:
+ sch.append(mapper.mapper(s).resolved)
+ workflowobj["$schemas"] = sch
+
+    return mapper
+
+
+def upload_docker(arvrunner, tool):
+ """Uploads Docker images used in CommandLineTool objects."""
+
+    if isinstance(tool, CommandLineTool):
+        (docker_req, docker_is_req) = get_feature(tool, "DockerRequirement")
+        if docker_req:
+            if docker_req.get("dockerOutputDirectory") and arvrunner.work_api != "containers":
+                # TODO: can be supported by containers API, but not jobs API.
+                raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
+                    "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
+            arv_docker_get_image(arvrunner.api, docker_req, True, arvrunner.project_uuid)
+        else:
+            arv_docker_get_image(arvrunner.api, {"dockerPull": "arvados/jobs"}, True, arvrunner.project_uuid)
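+    # A Workflow doesn't reference an image directly; check each step's
+    # embedded tool instead.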
+    elif isinstance(tool, cwltool.workflow.Workflow):
+        for s in tool.steps:
+            upload_docker(arvrunner, s.embedded_tool)
+
+
+def packed_workflow(arvrunner, tool, merged_map):
+ """Create a packed workflow.
+
+ A "packed" workflow is one where all the components have been combined into a single document."""
+
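+    # pack() records the identifiers it rewrites in this dict
+    # (original id -> packed id).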
+    rewrites = {}
+    packed = pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
+                  tool.tool["id"], tool.metadata, rewrite_out=rewrites)