X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/42fff42165a0fa1602758a078746f8697f265f83..62e7af59cbad5577423b844213be7b2f59709602:/sdk/cwl/arvados_cwl/arvworkflow.py

diff --git a/sdk/cwl/arvados_cwl/arvworkflow.py b/sdk/cwl/arvados_cwl/arvworkflow.py
index 4fe82a6fe1..f66e50dca9 100644
--- a/sdk/cwl/arvados_cwl/arvworkflow.py
+++ b/sdk/cwl/arvados_cwl/arvworkflow.py
@@ -9,10 +9,20 @@ import os
 import json
 import copy
 import logging
+import urllib.parse
+from io import StringIO
+import sys
+
+from typing import (MutableSequence, MutableMapping)
+
+from ruamel.yaml import YAML
+from ruamel.yaml.comments import CommentedMap, CommentedSeq

 from schema_salad.sourceline import SourceLine, cmap
 import schema_salad.ref_resolver

+import arvados.collection
+
 from cwltool.pack import pack
 from cwltool.load_tool import fetch_document, resolve_and_validate_document
 from cwltool.process import shortname
@@ -20,6 +30,8 @@ from cwltool.workflow import Workflow, WorkflowException, WorkflowStep
 from cwltool.utils import adjustFileObjs, adjustDirObjs, visit_class, normalizeFilesDirs
 from cwltool.context import LoadingContext

+from schema_salad.ref_resolver import file_uri, uri_file_path
+
 import ruamel.yaml as yaml

 from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
@@ -37,11 +49,218 @@ metrics = logging.getLogger('arvados.cwl-runner.metrics')
 max_res_pars = ("coresMin", "coresMax", "ramMin", "ramMax", "tmpdirMin", "tmpdirMax")
 sum_res_pars = ("outdirMin", "outdirMax")

-def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
+def make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool):
+    col = arvados.collection.Collection(api_client=arvRunner.api,
+                                        keep_client=arvRunner.keep_client)
+
+    with col.open("workflow.json", "wt") as f:
+        json.dump(packed, f, sort_keys=True, indent=4, separators=(',',': '))
+
+    pdh = col.portable_data_hash()
+
+    toolname = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
+    if git_info and git_info.get("http://arvados.org/cwl#gitDescribe"):
+        toolname = "%s (%s)" % (toolname, git_info.get("http://arvados.org/cwl#gitDescribe"))
+
+    existing = arvRunner.api.collections().list(filters=[["portable_data_hash", "=", pdh], ["owner_uuid", "=", project_uuid]]).execute(num_retries=arvRunner.num_retries)
+    if len(existing["items"]) == 0:
+        col.save_new(name=toolname, owner_uuid=project_uuid, ensure_unique_name=True)
+
+    # now construct the wrapper
+
+    step = {
+        "id": "#main/" + toolname,
+        "in": [],
+        "out": [],
+        "run": "keep:%s/workflow.json#main" % pdh,
+        "label": name
+    }
+
+    newinputs = []
+    for i in main["inputs"]:
+        inp = {}
+        # Make sure to only copy known fields that are meaningful at
+        # the workflow level. In practice this ensures that if we're
+        # wrapping a CommandLineTool we don't grab inputBinding.
+        # Right now this also excludes extension fields, which is fine;
+        # Arvados doesn't currently look for any extension fields on
+        # input parameters.
+        for f in ("type", "label", "secondaryFiles", "streamable",
+                  "doc", "id", "format", "loadContents",
+                  "loadListing", "default"):
+            if f in i:
+                inp[f] = i[f]
+        newinputs.append(inp)
+
+    wrapper = {
+        "class": "Workflow",
+        "id": "#main",
+        "inputs": newinputs,
+        "outputs": [],
+        "steps": [step]
+    }
+
+    for i in main["inputs"]:
+        step["in"].append({
+            "id": "#main/step/%s" % shortname(i["id"]),
+            "source": i["id"]
+        })
+
+    for i in main["outputs"]:
+        step["out"].append({"id": "#main/step/%s" % shortname(i["id"])})
+        wrapper["outputs"].append({"outputSource": "#main/step/%s" % shortname(i["id"]),
+                                   "type": i["type"],
+                                   "id": i["id"]})
+
+    wrapper["requirements"] = [{"class": "SubworkflowFeatureRequirement"}]
+
+    if main.get("requirements"):
+        wrapper["requirements"].extend(main["requirements"])
+    if main.get("hints"):
+        wrapper["hints"] = main["hints"]
+
+    doc = {"cwlVersion": "v1.2", "$graph": [wrapper]}
+
+    if git_info:
+        for g in git_info:
+            doc[g] = git_info[g]
+
+    return json.dumps(doc, sort_keys=True, indent=4, separators=(',',': '))
+
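For a workflow with a single File input, the wrapper document returned above comes out roughly as follows (an illustrative sketch; the step name and portable data hash are invented):

    {
        "cwlVersion": "v1.2",
        "$graph": [{
            "class": "Workflow",
            "id": "#main",
            "inputs": [{"id": "#main/in1", "type": "File"}],
            "outputs": [],
            "requirements": [{"class": "SubworkflowFeatureRequirement"}],
            "steps": [{
                "id": "#main/my-tool",
                "in": [{"id": "#main/step/in1", "source": "#main/in1"}],
                "out": [],
                "label": "my-tool",
                "run": "keep:0123456789abcdef0123456789abcdef+119/workflow.json#main"
            }]
        }]
    }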
+ for f in ("type", "label", "secondaryFiles", "streamable", + "doc", "id", "format", "loadContents", + "loadListing", "default"): + if f in i: + inp[f] = i[f] + newinputs.append(inp) + + wrapper = { + "class": "Workflow", + "id": "#main", + "inputs": newinputs, + "outputs": [], + "steps": [step] + } + + for i in main["inputs"]: + step["in"].append({ + "id": "#main/step/%s" % shortname(i["id"]), + "source": i["id"] + }) + + for i in main["outputs"]: + step["out"].append({"id": "#main/step/%s" % shortname(i["id"])}) + wrapper["outputs"].append({"outputSource": "#main/step/%s" % shortname(i["id"]), + "type": i["type"], + "id": i["id"]}) + + wrapper["requirements"] = [{"class": "SubworkflowFeatureRequirement"}] + + if main.get("requirements"): + wrapper["requirements"].extend(main["requirements"]) + if main.get("hints"): + wrapper["hints"] = main["hints"] + + doc = {"cwlVersion": "v1.2", "$graph": [wrapper]} + + if git_info: + for g in git_info: + doc[g] = git_info[g] + + return json.dumps(doc, sort_keys=True, indent=4, separators=(',',': ')) + +def rel_ref(s, baseuri, urlexpander, merged_map): + uri = urlexpander(s, baseuri) + if baseuri in merged_map: + replacements = merged_map[baseuri].resolved + if uri in replacements: + return replacements[uri] + + p1 = os.path.dirname(uri_file_path(baseuri)) + p2 = os.path.dirname(uri_file_path(uri)) + p3 = os.path.basename(uri_file_path(uri)) + r = os.path.relpath(p2, p1) + if r == ".": + r = "" + print("AAA", uri, s) + print("BBBB", p1, p2, p3, r) + return os.path.join(r, p3) + + +def update_refs(d, baseuri, urlexpander, merged_map, set_block_style): + if isinstance(d, CommentedSeq): + if set_block_style: + d.fa.set_block_style() + for s in d: + update_refs(s, baseuri, urlexpander, merged_map, set_block_style) + elif isinstance(d, CommentedMap): + if set_block_style: + d.fa.set_block_style() + + if "id" in d: + baseuri = urlexpander(d["id"], baseuri, scoped_id=True) + + for s in d: + for field in ("$include", "$import", "location", "run"): + if field in d and isinstance(d[field], str): + d[field] = rel_ref(d[field], baseuri, urlexpander, merged_map) + + if "$schemas" in d: + for n, s in enumerate(d["$schemas"]): + d["$schemas"][n] = rel_ref(d["$schemas"][n], baseuri, urlexpander, merged_map) + + update_refs(d[s], baseuri, urlexpander, merged_map, set_block_style) + +def new_upload_workflow(arvRunner, tool, job_order, project_uuid, + runtimeContext, uuid=None, + submit_runner_ram=0, name=None, merged_map=None, + submit_runner_image=None, + git_info=None): + + firstfile = None + workflow_files = set() + import_files = set() + include_files = set() + + for w in tool.doc_loader.idx: + if w.startswith("file://"): + workflow_files.add(urllib.parse.urldefrag(w)[0]) + if firstfile is None: + firstfile = urllib.parse.urldefrag(w)[0] + if w.startswith("import:file://"): + import_files.add(urllib.parse.urldefrag(w[7:])[0]) + if w.startswith("include:file://"): + include_files.add(urllib.parse.urldefrag(w[8:])[0]) + + all_files = workflow_files | import_files | include_files + + n = 7 + allmatch = True + while allmatch: + n += 1 + for f in all_files: + if len(f)-1 < n: + n -= 1 + allmatch = False + break + if f[n] != firstfile[n]: + allmatch = False + break + + while firstfile[n] != "/": + n -= 1 + + prefix = firstfile[:n+1] + + col = arvados.collection.Collection() + + #print(merged_map.keys()) + + for w in workflow_files | import_files: + # 1. 
+def new_upload_workflow(arvRunner, tool, job_order, project_uuid,
+                        runtimeContext, uuid=None,
+                        submit_runner_ram=0, name=None, merged_map=None,
+                        submit_runner_image=None,
+                        git_info=None):
+
+    firstfile = None
+    workflow_files = set()
+    import_files = set()
+    include_files = set()
+
+    for w in tool.doc_loader.idx:
+        if w.startswith("file://"):
+            workflow_files.add(urllib.parse.urldefrag(w)[0])
+            if firstfile is None:
+                firstfile = urllib.parse.urldefrag(w)[0]
+        if w.startswith("import:file://"):
+            import_files.add(urllib.parse.urldefrag(w[7:])[0])
+        if w.startswith("include:file://"):
+            include_files.add(urllib.parse.urldefrag(w[8:])[0])
+
+    all_files = workflow_files | import_files | include_files
+
+    n = 7
+    allmatch = True
+    while allmatch:
+        n += 1
+        for f in all_files:
+            if len(f)-1 < n:
+                n -= 1
+                allmatch = False
+                break
+            if f[n] != firstfile[n]:
+                allmatch = False
+                break
+
+    while firstfile[n] != "/":
+        n -= 1
+
+    prefix = firstfile[:n+1]
+
+    col = arvados.collection.Collection()
+
+    for w in workflow_files | import_files:
+        # 1. load YAML
+
+        text = tool.doc_loader.fetch_text(w)
+        if isinstance(text, bytes):
+            textIO = StringIO(text.decode('utf-8'))
+        else:
+            textIO = StringIO(text)
+
+        yamlloader = schema_salad.utils.yaml_no_ts()
+        result = yamlloader.load(textIO)
+
+        set_block_style = False
+        if result.fa.flow_style():
+            set_block_style = True
+
+        # 2. find $import, $include, $schemas, run, location
+        # 3. update field value
+        update_refs(result, w, tool.doc_loader.expand_url, merged_map, set_block_style)
+
+        with col.open(w[n+1:], "wt") as f:
+            yamlloader.dump(result, stream=f)
+
+    for w in include_files:
+        with col.open(w[n+1:], "wb") as f1:
+            with open(uri_file_path(w), "rb") as f2:
+                dat = f2.read(65536)
+                while dat:
+                    f1.write(dat)
+                    dat = f2.read(65536)
+
+    toolname = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
+    if git_info and git_info.get("http://arvados.org/cwl#gitDescribe"):
+        toolname = "%s (%s)" % (toolname, git_info.get("http://arvados.org/cwl#gitDescribe"))
+
+    col.save_new(name=toolname, owner_uuid=arvRunner.project_uuid, ensure_unique_name=True)
+
+    return col.manifest_locator()
+
+
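The character-matching loop in new_upload_workflow amounts to taking the longest common prefix of all the file URIs and cutting it back to the last "/", so every file keeps its relative layout inside the collection. An equivalent sketch using os.path.commonprefix, with hypothetical paths:

    import os.path

    files = ["/repo/wf/main.cwl", "/repo/wf/tools/clip.cwl", "/repo/wf/blub.txt"]
    prefix = os.path.commonprefix(files)
    prefix = prefix[:prefix.rindex("/")+1]     # cut back to the last "/": "/repo/wf/"
    print([f[len(prefix):] for f in files])    # ['main.cwl', 'tools/clip.cwl', 'blub.txt']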
+def upload_workflow(arvRunner, tool, job_order, project_uuid,
+                    runtimeContext, uuid=None,
                     submit_runner_ram=0, name=None, merged_map=None,
-                    submit_runner_image=None):
+                    submit_runner_image=None,
+                    git_info=None):

-    packed = packed_workflow(arvRunner, tool, merged_map)
+    packed = packed_workflow(arvRunner, tool, merged_map, runtimeContext, git_info)

     adjustDirObjs(job_order, trim_listing)
     adjustFileObjs(job_order, trim_anonymous_location)
@@ -57,7 +276,8 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
         name = tool.tool.get("label", os.path.basename(tool.tool["id"]))

     upload_dependencies(arvRunner, name, tool.doc_loader,
-                        packed, tool.tool["id"], False)
+                        packed, tool.tool["id"],
+                        runtimeContext)

     wf_runner_resources = None

@@ -72,18 +292,22 @@ def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
         wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
         hints.append(wf_runner_resources)

-    wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner, submit_runner_image or "arvados/jobs:"+__version__)
+    wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner,
+                                                                  submit_runner_image or "arvados/jobs:"+__version__,
+                                                                  runtimeContext)

     if submit_runner_ram:
         wf_runner_resources["ramMin"] = submit_runner_ram

     main["hints"] = hints

+    wrapper = make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool)
+
     body = {
         "workflow": {
             "name": name,
             "description": tool.tool.get("doc", ""),
-            "definition":json.dumps(packed, sort_keys=True, indent=4, separators=(',',': '))
+            "definition": wrapper
         }}
     if project_uuid:
         body["workflow"]["owner_uuid"] = project_uuid
@@ -142,8 +366,13 @@ class ArvadosWorkflowStep(WorkflowStep):
                  **argv
                 ):  # type: (...) -> None
-        super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
-        self.tool["class"] = "WorkflowStep"
+        if arvrunner.fast_submit:
+            self.tool = toolpath_object
+            self.tool["inputs"] = []
+            self.tool["outputs"] = []
+        else:
+            super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
+            self.tool["class"] = "WorkflowStep"
         self.arvrunner = arvrunner

     def job(self, joborder, output_callback, runtimeContext):
@@ -194,7 +423,7 @@ class ArvadosWorkflow(Workflow):
                             self.doc_loader,
                             joborder,
                             joborder.get("id", "#"),
-                            False)
+                            runtimeContext)

         if self.wf_pdh is None:
             packed = pack(self.loadingContext, self.tool["id"], loader=self.doc_loader)
@@ -237,7 +466,7 @@ class ArvadosWorkflow(Workflow):
                             self.doc_loader,
                             packed,
                             self.tool["id"],
-                            False)
+                            runtimeContext)

         # Discover files/directories referenced by the
         # workflow (mainly "default" values)
@@ -301,7 +530,7 @@ class ArvadosWorkflow(Workflow):
         if self.wf_pdh is None:
             adjustFileObjs(packed, keepmount)
             adjustDirObjs(packed, keepmount)
-            self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed)
+            self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed, runtimeContext)

         self.loadingContext = self.loadingContext.copy()
         self.loadingContext.metadata = self.loadingContext.metadata.copy()
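Downstream of these hunks (in code this diff does not touch), the body assembled in upload_workflow is handed to the workflows API. A sketch of what that registration call typically looks like, assuming the usual create-or-update pattern in this file:

    # Hypothetical sketch; the actual call site is outside this diff.
    if uuid:
        call = arvRunner.api.workflows().update(uuid=uuid, body=body)
    else:
        call = arvRunner.api.workflows().create(body=body)
    return call.execute(num_retries=arvRunner.num_retries)["uuid"]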