from cwltool.load_tool import fetch_document, resolve_and_validate_document
from cwltool.process import shortname
from cwltool.workflow import Workflow, WorkflowException, WorkflowStep
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
+from cwltool.utils import adjustFileObjs, adjustDirObjs, visit_class
from cwltool.context import LoadingContext
import ruamel.yaml as yaml
from .runner import (upload_dependencies, packed_workflow, upload_workflow_collection,
trim_anonymous_location, remove_redundant_fields, discover_secondary_files,
- make_builder)
+ make_builder, arvados_jobs_image)
from .pathmapper import ArvPathMapper, trim_listing
from .arvtool import ArvadosCommandTool, set_cluster_target
+from ._version import __version__
from .perf import Perf
sum_res_pars = ("outdirMin", "outdirMax")
def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
- submit_runner_ram=0, name=None, merged_map=None):
+ submit_runner_ram=0, name=None, merged_map=None,
+ submit_runner_image=None):
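+ # submit_runner_image optionally overrides the default arvados/jobs image
+ # used for the workflow runner container (recorded as acrContainerImage below).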
packed = packed_workflow(arvRunner, tool, merged_map)
upload_dependencies(arvRunner, name, tool.doc_loader,
packed, tool.tool["id"], False)
+ wf_runner_resources = None
+
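+ # Reuse an existing WorkflowRunnerResources hint on the main workflow if
+ # present, otherwise create one and append it to the hints list.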
+ hints = main.get("hints", [])
+ found = False
+ for h in hints:
+ if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
+ wf_runner_resources = h
+ found = True
+ break
+ if not found:
+ wf_runner_resources = {"class": "http://arvados.org/cwl#WorkflowRunnerResources"}
+ hints.append(wf_runner_resources)
+
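+ # Record the container image the workflow runner should use, defaulting to
+ # the arvados/jobs image that matches this package version.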
+ wf_runner_resources["acrContainerImage"] = arvados_jobs_image(arvRunner, submit_runner_image or "arvados/jobs:"+__version__)
+
if submit_runner_ram:
- hints = main.get("hints", [])
- found = False
- for h in hints:
- if h["class"] == "http://arvados.org/cwl#WorkflowRunnerResources":
- h["ramMin"] = submit_runner_ram
- found = True
- break
- if not found:
- hints.append({"class": "http://arvados.org/cwl#WorkflowRunnerResources",
- "ramMin": submit_runner_ram})
- main["hints"] = hints
+ wf_runner_resources["ramMin"] = submit_runner_ram
+
+ main["hints"] = hints
body = {
"workflow": {
runtimeContext = runtimeContext.copy()
runtimeContext.toplevel = True # Preserve behavior for #13365
- builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements, runtimeContext)
+ builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements,
+ runtimeContext, self.metadata)
runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)
def job(self, joborder, output_callback, runtimeContext):
- builder = make_builder(joborder, self.hints, self.requirements, runtimeContext)
+ builder = make_builder(joborder, self.hints, self.requirements, runtimeContext, self.metadata)
runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
req, _ = self.get_requirement("http://arvados.org/cwl#RunInSingleContainer")
False)
if self.wf_pdh is None:
- ### We have to reload the document to get the original version.
- #
- # The workflow document we have in memory right now was
- # updated to the internal CWL version. We need to reload the
- # document to go back to its original version, because the
- # version of cwltool installed in the user's container is
- # likely to reject our internal version, and we don't want to
- # break existing workflows.
- #
- # What's going on here is that the updater replaces the
- # documents/fragments in the index with updated ones, the
- # index is also used as a cache, so we need to go through the
- # loading process with an empty index and updating turned off
- # so we have the original un-updated documents.
- #
- loadingContext = self.loadingContext.copy()
- loadingContext.do_update = False
- document_loader = schema_salad.ref_resolver.SubLoader(loadingContext.loader)
- loadingContext.loader = document_loader
- loadingContext.loader.idx = {}
- uri = self.tool["id"]
- loadingContext, workflowobj, uri = fetch_document(uri, loadingContext)
- loadingContext, uri = resolve_and_validate_document(
- loadingContext, workflowobj, uri
- )
- workflowobj, metadata = loadingContext.loader.resolve_ref(uri)
- ###
-
- workflowobj["requirements"] = dedup_reqs(self.requirements)
- workflowobj["hints"] = dedup_reqs(self.hints)
-
- packed = pack(document_loader, workflowobj, uri, metadata)
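+ # Pack the subworkflow into a single self-contained document using the
+ # current loading context and document loader.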
+ packed = pack(self.loadingContext, self.tool["id"], loader=self.doc_loader)
+
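+ # Apply this workflow's deduplicated requirements and hints to #main in
+ # the packed document.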
+ for p in packed["$graph"]:
+ if p["id"] == "#main":
+ p["requirements"] = dedup_reqs(self.requirements)
+ p["hints"] = dedup_reqs(self.hints)
def visit(item):
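+ # Drop DockerRequirement from the packed steps; they run with
+ # --no-container inside the single workflow-runner container.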
+ if "requirements" in item:
+ item["requirements"] = [i for i in item["requirements"] if i["class"] != "DockerRequirement"]
for t in ("hints", "requirements"):
if t not in item:
continue
raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
if not dyn:
self.static_resource_req.append(req)
- if req["class"] == "DockerRequirement":
- if "http://arvados.org/cwl#dockerCollectionPDH" in req:
- del req["http://arvados.org/cwl#dockerCollectionPDH"]
visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)
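+ # Upload file dependencies of the packed subworkflow to Keep so they can
+ # be resolved from inside the container.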
upload_dependencies(self.arvrunner,
runtimeContext.name,
- document_loader,
+ self.doc_loader,
packed,
- uri,
+ self.tool["id"],
False)
# Discover files/directories referenced by the
if job_res_reqs[0].get("ramMin", 1024) < 128:
job_res_reqs[0]["ramMin"] = 128
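+ # Command line for running the packed workflow with cwltool inside the
+ # container; prepend --debug when debug output was requested.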
+ arguments = ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl", "cwl.input.yml"]
+ if runtimeContext.debug:
+ arguments.insert(0, "--debug")
+
wf_runner = cmap({
"class": "CommandLineTool",
"baseCommand": "cwltool",
}]
}],
"hints": self.hints,
- "arguments": ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl#main", "cwl.input.yml"],
+ "arguments": arguments,
"id": "#"
})
return ArvadosCommandTool(self.arvrunner, wf_runner, self.loadingContext).job(joborder_resolved, output_callback, runtimeContext)