#!/usr/bin/env python
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
# Implement cwl-runner interface for submitting and running work on Arvados, using
# either the Crunch jobs API or Crunch containers API.
from. runner import Runner, upload_docker, upload_job_order, upload_workflow_deps, upload_dependencies
from .arvtool import ArvadosCommandTool
from .arvworkflow import ArvadosWorkflow, upload_workflow
-from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver
+from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache
from .perf import Perf
from .pathmapper import NoFollowPathMapper
from ._version import __version__
from cwltool.pack import pack
-from cwltool.process import shortname, UnsupportedRequirement, getListing
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
+from cwltool.process import shortname, UnsupportedRequirement, use_custom_schema
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, get_listing
from cwltool.draft2tool import compute_checksums
from arvados.api import OrderedJsonModel
self.output_name = output_name
self.output_tags = output_tags
self.project_uuid = None
+ self.intermediate_output_ttl = 0
+ self.intermediate_output_collections = []
+ self.trash_intermediate = False
if keep_client is not None:
self.keep_client = keep_client
else:
self.keep_client = arvados.keep.KeepClient(api_client=self.api, num_retries=self.num_retries)
+ self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
+
self.work_api = None
expected_api = ["jobs", "containers"]
for api in expected_api:
kwargs["work_api"] = self.work_api
kwargs["fetcher_constructor"] = partial(CollectionFetcher,
api_client=self.api,
- keep_client=self.keep_client)
+ fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
+ num_retries=self.num_retries,
+ overrides=kwargs.get("override_tools"))
+ kwargs["resolver"] = partial(collectionResolver, self.api, num_retries=self.num_retries)
if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
return ArvadosCommandTool(self, toolpath_object, **kwargs)
elif "class" in toolpath_object and toolpath_object["class"] == "Workflow":
def add_uploaded(self, src, pair):
    # Remember the mapping for a source path that has already been
    # uploaded, so later lookups can reuse it instead of re-uploading.
    uploads = self.uploaded
    uploads[src] = pair
def add_intermediate_output(self, uuid):
    # Record an intermediate output collection UUID for later cleanup;
    # falsy values (None, "") are silently ignored.
    if not uuid:
        return
    self.intermediate_output_collections.append(uuid)
+
def trash_intermediate_output(self):
    """Delete every intermediate output collection recorded so far.

    Best-effort cleanup: a failure to delete one collection is logged
    and the remaining collections are still attempted.  A
    KeyboardInterrupt is logged and aborts the loop immediately.
    """
    logger.info("Cleaning up intermediate output collections")
    for uuid in self.intermediate_output_collections:
        try:
            self.api.collections().delete(uuid=uuid).execute(num_retries=self.num_retries)
        except KeyboardInterrupt as e:
            # User asked to stop: log and abort the cleanup loop
            # (matches the original break-on-interrupt behavior).
            logger.warning("Failed to delete intermediate output: %s", e,
                           exc_info=(e if self.debug else False))
            break
        except Exception as e:
            # Was a bare `except:`, which also swallowed SystemExit;
            # narrowed to Exception while keeping best-effort semantics.
            # Full traceback only when --debug is in effect.
            logger.warning("Failed to delete intermediate output: %s", e,
                           exc_info=(e if self.debug else False))
+
def check_features(self, obj):
if isinstance(obj, dict):
if obj.get("writable"):
self.check_features(v)
elif isinstance(obj, list):
for i,v in enumerate(obj):
- with SourceLine(obj, i, UnsupportedRequirement):
+ with SourceLine(obj, i, UnsupportedRequirement, logger.isEnabledFor(logging.DEBUG)):
self.check_features(v)
def make_output_collection(self, name, tagsString, outputObj):
keep_client=self.keep_client,
num_retries=self.num_retries)
- srccollections = {}
for k,v in generatemapper.items():
if k.startswith("_:"):
if v.type == "Directory":
raise Exception("Output source is not in keep or a literal")
sp = k.split("/")
srccollection = sp[0][5:]
- if srccollection not in srccollections:
- try:
- srccollections[srccollection] = arvados.collection.CollectionReader(
- srccollection,
- api_client=self.api,
- keep_client=self.keep_client,
- num_retries=self.num_retries)
- except arvados.errors.ArgumentError as e:
- logger.error("Creating CollectionReader for '%s' '%s': %s", k, v, e)
- raise
- reader = srccollections[srccollection]
try:
+ reader = self.collection_cache.get(srccollection)
srcpath = "/".join(sp[1:]) if len(sp) > 1 else "."
final.copy(srcpath, v.target, source_collection=reader, overwrite=False)
+ except arvados.errors.ArgumentError as e:
+ logger.error("Creating CollectionReader for '%s' '%s': %s", k, v, e)
+ raise
except IOError as e:
logger.warn("While preparing output collection: %s", e)
def rewrite(fileobj):
fileobj["location"] = generatemapper.mapper(fileobj["location"]).target
- for k in ("basename", "listing", "contents"):
+ for k in ("basename", "listing", "contents", "nameext", "nameroot", "dirname"):
if k in fileobj:
del fileobj[k]
self.project_uuid = kwargs.get("project_uuid")
self.pipeline = None
make_fs_access = kwargs.get("make_fs_access") or partial(CollectionFsAccess,
- api_client=self.api,
- keep_client=self.keep_client)
+ collection_cache=self.collection_cache)
self.fs_access = make_fs_access(kwargs["basedir"])
+
+ self.trash_intermediate = kwargs["trash_intermediate"]
+ if self.trash_intermediate and self.work_api != "containers":
+ raise Exception("--trash-intermediate is only supported with --api=containers.")
+
+ self.intermediate_output_ttl = kwargs["intermediate_output_ttl"]
+ if self.intermediate_output_ttl and self.work_api != "containers":
+ raise Exception("--intermediate-output-ttl is only supported with --api=containers.")
+ if self.intermediate_output_ttl < 0:
+ raise Exception("Invalid value %d for --intermediate-output-ttl, cannot be less than zero" % self.intermediate_output_ttl)
+
if not kwargs.get("name"):
kwargs["name"] = self.name = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
# Upload direct dependencies of workflow steps, get back mapping of files to keep references.
# Also uploads docker images.
- upload_workflow_deps(self, tool)
+ override_tools = {}
+ upload_workflow_deps(self, tool, override_tools)
# Reload tool object which may have been updated by
# upload_workflow_deps
makeTool=self.arv_make_tool,
loader=tool.doc_loader,
avsc_names=tool.doc_schema,
- metadata=tool.metadata)
+ metadata=tool.metadata,
+ override_tools=override_tools)
# Upload local file references in the job order.
job_order = upload_job_order(self, "%s input" % kwargs["name"],
if kwargs.get("submit"):
# Submit a runner job to run the workflow for us.
if self.work_api == "containers":
- if tool.tool["class"] == "CommandLineTool":
+ if tool.tool["class"] == "CommandLineTool" and kwargs.get("wait"):
kwargs["runnerjob"] = tool.tool["id"]
- upload_dependencies(self,
- kwargs["name"],
- tool.doc_loader,
- tool.tool,
- tool.tool["id"],
- False)
runnerjob = tool.job(job_order,
self.output_callback,
**kwargs).next()
submit_runner_ram=kwargs.get("submit_runner_ram"),
name=kwargs.get("name"),
on_error=kwargs.get("on_error"),
- submit_runner_image=kwargs.get("submit_runner_image"))
+ submit_runner_image=kwargs.get("submit_runner_image"),
+ intermediate_output_ttl=kwargs.get("intermediate_output_ttl"))
elif self.work_api == "jobs":
runnerjob = RunnerJob(self, tool, job_order, kwargs.get("enable_reuse"),
self.output_name,
name=kwargs.get("name"),
on_error=kwargs.get("on_error"),
submit_runner_image=kwargs.get("submit_runner_image"))
-
- if not kwargs.get("submit") and "cwl_runner_job" not in kwargs and self.work_api == "jobs":
+ elif "cwl_runner_job" not in kwargs and self.work_api == "jobs":
# Create pipeline for local run
self.pipeline = self.api.pipeline_instances().create(
body={
self.set_crunch_output()
if kwargs.get("compute_checksum"):
- adjustDirObjs(self.final_output, partial(getListing, self.fs_access))
+ adjustDirObjs(self.final_output, partial(get_listing, self.fs_access))
adjustFileObjs(self.final_output, partial(compute_checksums, self.fs_access))
+ if self.trash_intermediate and self.final_status == "success":
+ self.trash_intermediate_output()
+
return (self.final_output, self.final_status)
help="Time to wait for a Javascript expression to evaluate before giving an error, default 20s.",
type=float,
default=20)
- parser.add_argument("--version", action="store_true", help="Print version and exit")
+
+ exgroup = parser.add_mutually_exclusive_group()
+ exgroup.add_argument("--print-dot", action="store_true",
+ help="Print workflow visualization in graphviz format and exit")
+ exgroup.add_argument("--version", action="store_true", help="Print version and exit")
+ exgroup.add_argument("--validate", action="store_true", help="Validate CWL document only.")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--verbose", action="store_true", help="Default logging")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--enable-reuse", action="store_true",
default=True, dest="enable_reuse",
- help="")
+ help="Enable job or container reuse (default)")
exgroup.add_argument("--disable-reuse", action="store_false",
default=True, dest="enable_reuse",
- help="")
+ help="Disable job or container reuse")
parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
parser.add_argument("--output-name", type=str, help="Name to use for collection that stores the final output.", default=None)
parser.add_argument("--api", type=str,
default=None, dest="work_api",
- help="Select work submission API, one of 'jobs' or 'containers'. Default is 'jobs' if that API is available, otherwise 'containers'.")
+ choices=("jobs", "containers"),
+ help="Select work submission API. Default is 'jobs' if that API is available, otherwise 'containers'.")
parser.add_argument("--compute-checksum", action="store_true", default=False,
help="Compute checksum of contents while collecting outputs",
help="Desired workflow behavior when a step fails. One of 'stop' or 'continue'. "
"Default is 'continue'.", default="continue", choices=("stop", "continue"))
+ parser.add_argument("--enable-dev", action="store_true",
+ help="Enable loading and running development versions "
+ "of CWL spec.", default=False)
+
+ parser.add_argument("--intermediate-output-ttl", type=int, metavar="N",
+ help="If N > 0, intermediate output collections will be trashed N seconds after creation. Default is 0 (don't trash).",
+ default=0)
+
+ exgroup = parser.add_mutually_exclusive_group()
+ exgroup.add_argument("--trash-intermediate", action="store_true",
+ default=False, dest="trash_intermediate",
+ help="Immediately trash intermediate outputs on workflow success.")
+ exgroup.add_argument("--no-trash-intermediate", action="store_false",
+ default=False, dest="trash_intermediate",
+ help="Do not trash intermediate outputs (default).")
+
parser.add_argument("workflow", type=str, nargs="?", default=None, help="The workflow to execute")
parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")
return parser
def add_arv_hints():
    """Register Arvados CWL extensions with cwltool.

    Relaxes cwltool's Docker image name whitelist, installs the Arvados
    extension schema for CWL v1.0 documents, and adds the Arvados
    requirement URIs to cwltool's list of supported process requirements.
    """
    # Accept any Docker image name (Arvados does its own image handling).
    cwltool.draft2tool.ACCEPTLIST_EN_RELAXED_RE = re.compile(r".*")
    cwltool.draft2tool.ACCEPTLIST_RE = cwltool.draft2tool.ACCEPTLIST_EN_RELAXED_RE
    res = pkg_resources.resource_stream(__name__, 'arv-cwl-schema.yml')
    try:
        use_custom_schema("v1.0", "http://arvados.org/cwl", res.read())
    finally:
        # Close the resource stream even if schema registration raises
        # (previously leaked on error).
        res.close()
    cwltool.process.supportedProcessRequirements.extend([
        "http://arvados.org/cwl#RunInSingleContainer",
        "http://arvados.org/cwl#OutputDirType",
        "http://arvados.org/cwl#RuntimeConstraints",
        "http://arvados.org/cwl#PartitionRequirement",
        "http://arvados.org/cwl#APIRequirement",
        "http://commonwl.org/cwltool#LoadListingRequirement",
        "http://arvados.org/cwl#IntermediateOutput",
        "http://arvados.org/cwl#ReuseRequirement"
    ])
def main(args, stdout, stderr, api_client=None, keep_client=None):
parser = arg_parser()
arvargs.use_container = True
arvargs.relax_path_checks = True
arvargs.validate = None
+ arvargs.print_supported_versions = False
+
+ make_fs_access = partial(CollectionFsAccess,
+ collection_cache=runner.collection_cache)
return cwltool.main.main(args=arvargs,
stdout=stdout,
makeTool=runner.arv_make_tool,
versionfunc=versionstring,
job_order_object=job_order_object,
- make_fs_access=partial(CollectionFsAccess,
- api_client=api_client,
- keep_client=keep_client),
+ make_fs_access=make_fs_access,
fetcher_constructor=partial(CollectionFetcher,
api_client=api_client,
- keep_client=keep_client,
+ fs_access=make_fs_access(""),
num_retries=runner.num_retries),
resolver=partial(collectionResolver, api_client, num_retries=runner.num_retries),
- logger_handler=arvados.log_handler)
+ logger_handler=arvados.log_handler,
+ custom_schema_callback=add_arv_hints)