X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/dd58470216d6a416e4d831a3b9c25bfdaa255fff..a08e3bb86caa758df7d33a3df3f6b8c333e47838:/sdk/cwl/arvados_cwl/__init__.py

diff --git a/sdk/cwl/arvados_cwl/__init__.py b/sdk/cwl/arvados_cwl/__init__.py
index 94d602de1d..5756789cb1 100644
--- a/sdk/cwl/arvados_cwl/__init__.py
+++ b/sdk/cwl/arvados_cwl/__init__.py
@@ -1,4 +1,7 @@
 #!/usr/bin/env python
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
 
 # Implement cwl-runner interface for submitting and running work on Arvados, using
 # either the Crunch jobs API or Crunch containers API.
@@ -76,6 +79,7 @@ class ArvCwlRunner(object):
         self.project_uuid = None
         self.intermediate_output_ttl = 0
         self.intermediate_output_collections = []
+        self.trash_intermediate = False
 
         if keep_client is not None:
             self.keep_client = keep_client
@@ -107,7 +111,9 @@ class ArvCwlRunner(object):
         kwargs["fetcher_constructor"] = partial(CollectionFetcher,
                                                 api_client=self.api,
                                                 fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
-                                                num_retries=self.num_retries)
+                                                num_retries=self.num_retries,
+                                                overrides=kwargs.get("override_tools"))
+        kwargs["resolver"] = partial(collectionResolver, self.api, num_retries=self.num_retries)
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
             return ArvadosCommandTool(self, toolpath_object, **kwargs)
         elif "class" in toolpath_object and toolpath_object["class"] == "Workflow":
@@ -212,7 +218,7 @@ class ArvCwlRunner(object):
         logger.info("Cleaning up intermediate output collections")
         for i in self.intermediate_output_collections:
             try:
-                self.api_client.collections().delete(uuid=i).execute(num_retries=self.num_retries)
+                self.api.collections().delete(uuid=i).execute(num_retries=self.num_retries)
             except:
                 logger.warn("Failed to delete intermediate output: %s", sys.exc_info()[1], exc_info=(sys.exc_info()[1] if self.debug else False))
             if sys.exc_info()[0] is KeyboardInterrupt:
@@ -231,7 +237,7 @@ class ArvCwlRunner(object):
                     self.check_features(v)
         elif isinstance(obj, list):
             for i,v in enumerate(obj):
-                with SourceLine(obj, i, UnsupportedRequirement):
+                with SourceLine(obj, i, UnsupportedRequirement, logger.isEnabledFor(logging.DEBUG)):
                     self.check_features(v)
 
     def make_output_collection(self, name, tagsString, outputObj):
@@ -275,7 +281,7 @@ class ArvCwlRunner(object):
 
         def rewrite(fileobj):
             fileobj["location"] = generatemapper.mapper(fileobj["location"]).target
-            for k in ("basename", "listing", "contents"):
+            for k in ("basename", "listing", "contents", "nameext", "nameroot", "dirname"):
                 if k in fileobj:
                     del fileobj[k]
 
@@ -345,16 +351,24 @@ class ArvCwlRunner(object):
                                                                  collection_cache=self.collection_cache)
         self.fs_access = make_fs_access(kwargs["basedir"])
 
+
+        self.trash_intermediate = kwargs["trash_intermediate"]
+        if self.trash_intermediate and self.work_api != "containers":
+            raise Exception("--trash-intermediate is only supported with --api=containers.")
+
         self.intermediate_output_ttl = kwargs["intermediate_output_ttl"]
         if self.intermediate_output_ttl and self.work_api != "containers":
-            raise Exception("--intermediate-output-ttl is only supported when using the containers api.")
+            raise Exception("--intermediate-output-ttl is only supported with --api=containers.")
+        if self.intermediate_output_ttl < 0:
+            raise Exception("Invalid value %d for --intermediate-output-ttl, cannot be less than zero" % self.intermediate_output_ttl)
 
         if not kwargs.get("name"):
             kwargs["name"] = self.name = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
 
         # Upload direct dependencies of workflow steps, get back mapping of files to keep references.
         # Also uploads docker images.
-        upload_workflow_deps(self, tool)
+        override_tools = {}
+        upload_workflow_deps(self, tool, override_tools)
 
         # Reload tool object which may have been updated by
         # upload_workflow_deps
@@ -362,7 +376,8 @@ class ArvCwlRunner(object):
                                   makeTool=self.arv_make_tool,
                                   loader=tool.doc_loader,
                                   avsc_names=tool.doc_schema,
-                                  metadata=tool.metadata)
+                                  metadata=tool.metadata,
+                                  override_tools=override_tools)
 
         # Upload local file references in the job order.
         job_order = upload_job_order(self, "%s input" % kwargs["name"],
@@ -410,14 +425,8 @@ class ArvCwlRunner(object):
         if kwargs.get("submit"):
             # Submit a runner job to run the workflow for us.
             if self.work_api == "containers":
-                if tool.tool["class"] == "CommandLineTool":
+                if tool.tool["class"] == "CommandLineTool" and kwargs.get("wait"):
                     kwargs["runnerjob"] = tool.tool["id"]
-                    upload_dependencies(self,
-                                        kwargs["name"],
-                                        tool.doc_loader,
-                                        tool.tool,
-                                        tool.tool["id"],
-                                        False)
                     runnerjob = tool.job(job_order,
                                          self.output_callback,
                                          **kwargs).next()
@@ -438,8 +447,7 @@ class ArvCwlRunner(object):
                                                 name=kwargs.get("name"),
                                                 on_error=kwargs.get("on_error"),
                                                 submit_runner_image=kwargs.get("submit_runner_image"))
-
-        if not kwargs.get("submit") and "cwl_runner_job" not in kwargs and self.work_api == "jobs":
+        elif "cwl_runner_job" not in kwargs and self.work_api == "jobs":
             # Create pipeline for local run
             self.pipeline = self.api.pipeline_instances().create(
                 body={
@@ -533,7 +541,7 @@ class ArvCwlRunner(object):
             adjustDirObjs(self.final_output, partial(get_listing, self.fs_access))
             adjustFileObjs(self.final_output, partial(compute_checksums, self.fs_access))
 
-        if self.intermediate_output_ttl and self.final_status == "success":
+        if self.trash_intermediate and self.final_status == "success":
             self.trash_intermediate_output()
 
         return (self.final_output, self.final_status)
@@ -582,10 +590,10 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
     exgroup = parser.add_mutually_exclusive_group()
     exgroup.add_argument("--enable-reuse", action="store_true",
                         default=True, dest="enable_reuse",
-                        help="")
+                        help="Enable job or container reuse (default)")
    exgroup.add_argument("--disable-reuse", action="store_false",
                        default=True, dest="enable_reuse",
-                        help="")
+                        help="Disable job or container reuse")
 
     parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
     parser.add_argument("--output-name", type=str, help="Name to use for collection that stores the final output.", default=None)
@@ -618,7 +626,8 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
 
     parser.add_argument("--api", type=str,
                         default=None, dest="work_api",
-                        help="Select work submission API, one of 'jobs' or 'containers'. Default is 'jobs' if that API is available, otherwise 'containers'.")
+                        choices=("jobs", "containers"),
+                        help="Select work submission API. Default is 'jobs' if that API is available, otherwise 'containers'.")
 
     parser.add_argument("--compute-checksum", action="store_true", default=False,
                         help="Compute checksum of contents while collecting outputs",
@@ -643,10 +652,19 @@ def arg_parser():  # type: () -> argparse.ArgumentParser
     parser.add_argument("--enable-dev", action="store_true",
                         help="Enable loading and running development versions "
                              "of CWL spec.", default=False)
+
     parser.add_argument("--intermediate-output-ttl", type=int, metavar="N",
-                        help="If N > 0, intermediate output collections will be trashed N seconds after creation, or on successful completion of workflow (whichever comes first).",
+                        help="If N > 0, intermediate output collections will be trashed N seconds after creation. Default is 0 (don't trash).",
                         default=0)
 
+    exgroup = parser.add_mutually_exclusive_group()
+    exgroup.add_argument("--trash-intermediate", action="store_true",
+                        default=False, dest="trash_intermediate",
+                        help="Immediately trash intermediate outputs on workflow success.")
+    exgroup.add_argument("--no-trash-intermediate", action="store_false",
+                        default=False, dest="trash_intermediate",
+                        help="Do not trash intermediate outputs (default).")
+
     parser.add_argument("workflow", type=str, nargs="?", default=None, help="The workflow to execute")
     parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")
 
@@ -664,7 +682,9 @@ def add_arv_hints():
                          "http://arvados.org/cwl#RuntimeConstraints",
                          "http://arvados.org/cwl#PartitionRequirement",
                          "http://arvados.org/cwl#APIRequirement",
-                         "http://commonwl.org/cwltool#LoadListingRequirement"
+                         "http://commonwl.org/cwltool#LoadListingRequirement",
+                         "http://arvados.org/cwl#IntermediateOutput",
+                         "http://arvados.org/cwl#ReuseRequirement"
                      ])
 
 def main(args, stdout, stderr, api_client=None, keep_client=None):
@@ -731,6 +751,7 @@ def main(args, stdout, stderr, api_client=None, keep_client=None):
         arvargs.use_container = True
         arvargs.relax_path_checks = True
         arvargs.validate = None
+        arvargs.print_supported_versions = False
 
     make_fs_access = partial(CollectionFsAccess,
                              collection_cache=runner.collection_cache)
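
Note (illustration, not part of the diff): the hunks above add --trash-intermediate, --intermediate-output-ttl, and fixed choices for --api to the argument parser, while main() keeps the (args, stdout, stderr, api_client, keep_client) signature shown in the last hunk. A minimal sketch of driving the runner with the new options through main(), assuming the arvados_cwl package from this tree is installed; the workflow and input file names are placeholders:

    import sys
    from arvados_cwl import main

    # Submit on the containers API; intermediate collections are trashed 3600
    # seconds after creation (--intermediate-output-ttl) and also immediately
    # once the workflow succeeds (--trash-intermediate).
    exit_code = main(
        ["--api=containers",
         "--intermediate-output-ttl", "3600",
         "--trash-intermediate",
         "my_workflow.cwl", "my_inputs.yml"],  # placeholder file names
        sys.stdout, sys.stderr)
    sys.exit(exit_code)

Per the validation added in arv_executor, both options raise an exception unless the containers API is selected.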