# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
+from past.builtins import basestring
+from future.utils import viewitems
+
import os
import json
import copy
import logging
from schema_salad.sourceline import SourceLine, cmap
+import schema_salad.ref_resolver
+
+import arvados.collection
from cwltool.pack import pack
-from cwltool.load_tool import fetch_document
+from cwltool.load_tool import fetch_document, resolve_and_validate_document
from cwltool.process import shortname
from cwltool.workflow import Workflow, WorkflowException, WorkflowStep
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
+from cwltool.utils import adjustFileObjs, adjustDirObjs, visit_class, normalizeFilesDirs
from cwltool.context import LoadingContext
import ruamel.yaml as yaml
from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
- trim_anonymous_location, remove_redundant_fields, discover_secondary_files)
+ trim_anonymous_location, remove_redundant_fields, discover_secondary_files,
+ make_builder, arvados_jobs_image)
from .pathmapper import ArvPathMapper, trim_listing
-from .arvtool import ArvadosCommandTool, set_cluster_target, make_builder
+from .arvtool import ArvadosCommandTool, set_cluster_target
+from ._version import __version__
+
from .perf import Perf
logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
max_res_pars = ("coresMin", "coresMax", "ramMin", "ramMax", "tmpdirMin", "tmpdirMax")
sum_res_pars = ("outdirMin", "outdirMax")
-def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
- submit_runner_ram=0, name=None, merged_map=None):
+def make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool):
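+    """Store the packed workflow in a collection and return the JSON text
+    of a wrapper workflow that runs the stored copy as a single
+    subworkflow step (keep:<pdh>/workflow.json#main)."""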
+ col = arvados.collection.Collection(api_client=arvRunner.api,
+ keep_client=arvRunner.keep_client)
+
+ with col.open("workflow.json", "wt") as f:
+ json.dump(packed, f, sort_keys=True, indent=4, separators=(',',': '))
+
+ pdh = col.portable_data_hash()
+
+ toolname = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
+ if git_info and git_info.get("http://arvados.org/cwl#gitDescribe"):
+ toolname = "%s (%s)" % (toolname, git_info.get("http://arvados.org/cwl#gitDescribe"))
+
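+    # If a collection with this portable data hash already exists in the
+    # target project, reuse it rather than saving a duplicate.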
+ existing = arvRunner.api.collections().list(filters=[["portable_data_hash", "=", pdh], ["owner_uuid", "=", project_uuid]]).execute(num_retries=arvRunner.num_retries)
+ if len(existing["items"]) == 0:
+ col.save_new(name=toolname, owner_uuid=project_uuid, ensure_unique_name=True)
+
+ # now construct the wrapper
+
+ step = {
+ "id": "#main/" + toolname,
+ "in": [],
+ "out": [],
+ "run": "keep:%s/workflow.json#main" % pdh,
+ "label": name
+ }
+
+ newinputs = []
+ for i in main["inputs"]:
+ inp = {}
+ # Make sure to only copy known fields that are meaningful at
+ # the workflow level. In practice this ensures that if we're
+ # wrapping a CommandLineTool we don't grab inputBinding.
+        # Right now this also excludes extension fields, which is fine;
+        # Arvados doesn't currently look for any extension fields on
+        # input parameters.
+ for f in ("type", "label", "secondaryFiles", "streamable",
+ "doc", "id", "format", "loadContents",
+ "loadListing", "default"):
+ if f in i:
+ inp[f] = i[f]
+ newinputs.append(inp)
+
+ wrapper = {
+ "class": "Workflow",
+ "id": "#main",
+ "inputs": newinputs,
+ "outputs": [],
+ "steps": [step]
+ }
+
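+    # Connect each of the wrapped workflow's inputs and outputs through
+    # the single wrapper step.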
+ for i in main["inputs"]:
+ step["in"].append({
+ "id": "#main/step/%s" % shortname(i["id"]),
+ "source": i["id"]
+ })
+
+ for i in main["outputs"]:
+ step["out"].append({"id": "#main/step/%s" % shortname(i["id"])})
+ wrapper["outputs"].append({"outputSource": "#main/step/%s" % shortname(i["id"]),
+ "type": i["type"],
+ "id": i["id"]})
+
+ wrapper["requirements"] = [{"class": "SubworkflowFeatureRequirement"}]
+
+ if main.get("requirements"):
+ wrapper["requirements"].extend(main["requirements"])
+ if main.get("hints"):
+ wrapper["hints"] = main["hints"]
+
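+    # The wrapper document itself is CWL v1.2, independent of the
+    # version of the workflow stored in the collection.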
+ doc = {"cwlVersion": "v1.2", "$graph": [wrapper]}
+
+ if git_info:
+ for g in git_info:
+ doc[g] = git_info[g]
+
+ return json.dumps(doc, sort_keys=True, indent=4, separators=(',',': '))
+
+def upload_workflow(arvRunner, tool, job_order, project_uuid,
+ runtimeContext, uuid=None,
+ submit_runner_ram=0, name=None, merged_map=None,
+ submit_runner_image=None,
+ git_info=None):
+
-    packed = packed_workflow(arvRunner, tool, merged_map)
+    packed = packed_workflow(arvRunner, tool, merged_map, runtimeContext, git_info)
    adjustDirObjs(job_order, trim_listing)
    adjustFileObjs(job_order, trim_anonymous_location)
    main = [p for p in packed["$graph"] if p["id"] == "#main"][0]
    name = tool.tool.get("label", os.path.basename(tool.tool["id"]))
    upload_dependencies(arvRunner, name, tool.doc_loader,
- packed, tool.tool["id"], False)
+ packed, tool.tool["id"], False,
+ runtimeContext)
+
+ wf_runner_resources = None
+
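+    # Find an existing WorkflowRunnerResources hint or add one; the
+    # runner image (and optional RAM override) are recorded on it below.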
+ hints = main.get("hints", [])
+ found = False
+ for h in hints:
+ if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
+ wf_runner_resources = h
+ found = True
+ break
+ if not found:
+ wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
+ hints.append(wf_runner_resources)
+
+ wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner,
+ submit_runner_image or "arvados/jobs:"+__version__,
+ runtimeContext)
    if submit_runner_ram:
- hints = main.get("hints", [])
- found = False
- for h in hints:
- if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
- h["ramMin"] = submit_runner_ram
- found = True
- break
- if not found:
- hints.append({"class": "http://arvados.org/cwl#WorkflowRunnerResources",
- "ramMin": submit_runner_ram})
- main["hints"] = hints
+ wf_runner_resources["ramMin"] = submit_runner_ram
+
+ main["hints"] = hints
+
+ wrapper = make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool)
    body = {
        "workflow": {
            "name": name,
            "description": tool.tool.get("doc", ""),
- "definition":json.dumps(packed, sort_keys=True, indent=4, separators=(',',': '))
+ "definition": wrapper
        }}
    if project_uuid:
        body["workflow"]["owner_uuid"] = project_uuid
                 **argv
                ):  # type: (...) -> None
- super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
- self.tool["class"] = "WorkflowStep"
+ if arvrunner.fast_submit:
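+            # In fast-submit mode, skip the normal WorkflowStep
+            # initialization and use the raw tool document directly.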
+ self.tool = toolpath_object
+ self.tool["inputs"] = []
+ self.tool["outputs"] = []
+ else:
+ super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
+ self.tool["class"] = "WorkflowStep"
        self.arvrunner = arvrunner
    def job(self, joborder, output_callback, runtimeContext):
        runtimeContext = runtimeContext.copy()
        runtimeContext.toplevel = True  # Preserve behavior for #13365
- builder = make_builder({shortname(k): v for k,v in joborder.items()}, self.hints, self.requirements, runtimeContext)
+ builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements,
+ runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
        return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)
    def job(self, joborder, output_callback, runtimeContext):
- builder = make_builder(joborder, self.hints, self.requirements, runtimeContext)
+ builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
        runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
        req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
        with SourceLine(self.tool, None, WorkflowException, logger.isEnabledFor(logging.DEBUG)):
            if "id" not in self.tool:
                raise WorkflowException("%s object must have 'id'" % (self.tool["class"]))
- document_loader, workflowobj, uri = (self.doc_loader, self.doc_loader.fetch(self.tool["id"]), self.tool["id"])
- discover_secondary_files(self.tool["inputs"], joborder)
+ discover_secondary_files(self.arvrunner.fs_access, builder,
+ self.tool["inputs"], joborder)
+ normalizeFilesDirs(joborder)
        with Perf(metrics, "subworkflow upload_deps"):
            upload_dependencies(self.arvrunner,
                                os.path.basename(joborder.get("id", "#")),
- document_loader,
+ self.doc_loader,
                                joborder,
                                joborder.get("id", "#"),
- False)
+ False,
+ runtimeContext)
            if self.wf_pdh is None:
- workflowobj["requirements"] = dedup_reqs(self.requirements)
- workflowobj["hints"] = dedup_reqs(self.hints)
-                packed = pack(document_loader, workflowobj, uri, self.metadata)
+                packed = pack(self.loadingContext, self.tool["id"], loader=self.doc_loader)
+ for p in packed["$graph"]:
+ if p["id"] == "#main":
+ p["requirements"] = dedup_reqs(self.requirements)
+ p["hints"] = dedup_reqs(self.hints)
                def visit(item):
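+                    # Strip DockerRequirement from the packed steps and
+                    # collect static ResourceRequirements; expressions are
+                    # only allowed in the top-level workflow.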
+ if "requirements" in item:
+ item["requirements"] = [i for i in item["requirements"] if i["class"] != "DockerRequirement"]
                    for t in ("hints", "requirements"):
                        if t not in item:
                            continue
raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
if not dyn:
self.static_resource_req.append(req)
- if req["class"] == "DockerRequirement":
- if "http://arvados.org/cwl#dockerCollectionPDH" in req:
- del req["http://arvados.org/cwl#dockerCollectionPDH"]
                visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)
                upload_dependencies(self.arvrunner,
                                    runtimeContext.name,
- document_loader,
+ self.doc_loader,
                                    packed,
- uri,
- False)
+ self.tool["id"],
+ False,
+ runtimeContext)
            # Discover files/directories referenced by the
            # workflow (mainly "default" values)
            if self.wf_pdh is None:
                adjustFileObjs(packed, keepmount)
                adjustDirObjs(packed, keepmount)
- self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed)
+ self.wf_pdh = upload_workflow_collection(self.arvrunner, shortname(self.tool["id"]), packed, runtimeContext)
+
+ self.loadingContext = self.loadingContext.copy()
+ self.loadingContext.metadata = self.loadingContext.metadata.copy()
+ self.loadingContext.metadata["http://commonwl.org/cwltool#original_cwlVersion"] = "v1.0"
+
+ if len(job_res_reqs) == 1:
+ # RAM request needs to be at least 128 MiB or the workflow
+ # runner itself won't run reliably.
+ if job_res_reqs[0].get("ramMin", 1024) < 128:
+ job_res_reqs[0]["ramMin"] = 128
+
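+        # Command line for running the embedded workflow (workflow.cwl
+        # with cwl.input.yml) inside the single container; propagate
+        # --debug when requested.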
+ arguments = ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl", "cwl.input.yml"]
+ if runtimeContext.debug:
+ arguments.insert(0, '--debug')
        wf_runner = cmap({
            "class": "CommandLineTool",
                    }]
            }],
            "hints": self.hints,
- "arguments": ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl#main", "cwl.input.yml"],
+ "arguments": arguments,
"id": "#"
})
return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext)