# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

# Implement cwl-runner interface for submitting and running work on Arvados, using
# the Crunch containers API.

from future.utils import viewitems
from builtins import str

import argparse
import logging
import os
import re
import sys

import pkg_resources  # part of setuptools

from schema_salad.sourceline import SourceLine
import schema_salad.validate as validate

import cwltool.main
import cwltool.command_line_tool
import cwltool.workflow
import cwltool.process
import cwltool.argparser
from cwltool.errors import WorkflowException
from cwltool.process import shortname, UnsupportedRequirement, use_custom_schema
from cwltool.utils import adjustFileObjs, adjustDirObjs, get_listing

import arvados
import arvados.logging
import arvados.safeapi
from arvados.keep import KeepClient
from arvados.errors import ApiError
import arvados.commands._util as arv_cmd

from .perf import Perf
from ._version import __version__
from .executor import ArvCwlExecutor
from .fsaccess import workflow_uuid_pattern

# These aren't used directly in this file but
# other code expects to import them from here
from .arvcontainer import ArvadosContainer
from .arvtool import ArvadosCommandTool
from .fsaccess import CollectionFsAccess, CollectionCache, CollectionFetcher
from .util import get_current_container
from .executor import RuntimeStatusLoggingHandler, DEFAULT_PRIORITY
from .arvworkflow import ArvadosWorkflow

logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
logger.setLevel(logging.INFO)

arvados.log_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s',
        '%Y-%m-%d %H:%M:%S'))


def versionstring():
    """Return a version string for key packages, for provenance and debugging."""

    arvcwlpkg = pkg_resources.require("arvados-cwl-runner")
    arvpkg = pkg_resources.require("arvados-python-client")
    cwlpkg = pkg_resources.require("cwltool")

    return "%s %s, %s %s, %s %s" % (sys.argv[0], arvcwlpkg[0].version,
                                    "arvados-python-client", arvpkg[0].version,
                                    "cwltool", cwlpkg[0].version)


def arg_parser():  # type: () -> argparse.ArgumentParser
    parser = argparse.ArgumentParser(
        description='Arvados executor for Common Workflow Language',
        parents=[arv_cmd.retry_opt],
    )
76 parser.add_argument("--basedir",
77 help="Base directory used to resolve relative references in the input, default to directory of input object file or current directory (if inputs piped/provided on command line).")
78 parser.add_argument("--outdir", default=os.path.abspath('.'),
79 help="Output directory, default current directory")
81 parser.add_argument("--eval-timeout",
82 help="Time to wait for a Javascript expression to evaluate before giving an error, default 20s.",

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--print-dot", action="store_true",
                         help="Print workflow visualization in graphviz format and exit")
    exgroup.add_argument("--version", action="version", help="Print version and exit", version=versionstring())
    exgroup.add_argument("--validate", action="store_true", help="Validate CWL document only.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--verbose", action="store_true", help="Default logging")
    exgroup.add_argument("--quiet", action="store_true", help="Only print warnings and errors.")
    exgroup.add_argument("--debug", action="store_true", help="Print even more logging")

    parser.add_argument("--metrics", action="store_true", help="Print timing metrics")

    parser.add_argument("--tool-help", action="store_true", help="Print command line help for tool")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--enable-reuse", action="store_true",
                         default=True, dest="enable_reuse",
                         help="Enable container reuse (default)")
    exgroup.add_argument("--disable-reuse", action="store_false",
                         default=True, dest="enable_reuse",
                         help="Disable container reuse")

    parser.add_argument("--project-uuid", metavar="UUID", help="Project that will own the workflow containers. If not provided, the workflow runs in the user's home project.")
    parser.add_argument("--output-name", help="Name to use for collection that stores the final output.", default=None)
    parser.add_argument("--output-tags", help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
    parser.add_argument("--ignore-docker-for-reuse", action="store_true",
                        help="Ignore Docker image version when deciding whether to reuse past containers.",
                        default=False)

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--submit", action="store_true", help="Submit workflow to run on Arvados.",
                         default=True, dest="submit")
    exgroup.add_argument("--local", action="store_false", help="Run workflow on local host (submits containers to Arvados).",
                         default=True, dest="submit")
    exgroup.add_argument("--create-template", action="store_true", help="(Deprecated) synonym for --create-workflow.",
                         dest="create_workflow")
    exgroup.add_argument("--create-workflow", action="store_true", help="Register an Arvados workflow that can be run from Workbench")
    exgroup.add_argument("--update-workflow", metavar="UUID", help="Update an existing Arvados workflow with the given UUID.")

    exgroup.add_argument("--print-keep-deps", action="store_true", help="To assist copying, print a list of Keep collections that this workflow depends on.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--wait", action="store_true", help="After submitting workflow runner, wait for completion.",
                         default=True, dest="wait")
    exgroup.add_argument("--no-wait", action="store_false", help="Submit workflow runner and exit.",
                         default=True, dest="wait")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--log-timestamps", action="store_true", help="Prefix logging lines with timestamp",
                         default=True, dest="log_timestamps")
    exgroup.add_argument("--no-log-timestamps", action="store_false", help="No timestamp on logging lines",
                         default=True, dest="log_timestamps")

    parser.add_argument("--api",
                        default=None, dest="work_api",
                        choices=("containers",),
                        help="Select work submission API. Only supports 'containers'")
145 parser.add_argument("--compute-checksum", action="store_true", default=False,
146 help="Compute checksum of contents while collecting outputs",
147 dest="compute_checksum")
149 parser.add_argument("--submit-runner-ram", type=int,
150 help="RAM (in MiB) required for the workflow runner job (default 1024)",
153 parser.add_argument("--submit-runner-image",
154 help="Docker image for workflow runner job, default arvados/jobs:%s" % __version__,
157 parser.add_argument("--always-submit-runner", action="store_true",
158 help="When invoked with --submit --wait, always submit a runner to manage the workflow, even when only running a single CommandLineTool",
161 parser.add_argument("--match-submitter-images", action="store_true",
162 default=False, dest="match_local_docker",
163 help="Where Arvados has more than one Docker image of the same name, use image from the Docker instance on the submitting node.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--submit-request-uuid",
                         default=None,
                         help="Update and commit to supplied container request instead of creating a new one.",
                         metavar="UUID")
    exgroup.add_argument("--submit-runner-cluster",
                         help="Submit workflow runner to a remote cluster",
                         default=None,
                         metavar="CLUSTER_ID")

    parser.add_argument("--collection-cache-size", type=int,
                        default=None,
                        help="Collection cache size (in MiB, default 256).")

    parser.add_argument("--name",
                        help="Name to use for workflow execution instance.",
                        default=None)
183 parser.add_argument("--on-error",
184 help="Desired workflow behavior when a step fails. One of 'stop' (do not submit any more steps) or "
185 "'continue' (may submit other steps that are not downstream from the error). Default is 'continue'.",
186 default="continue", choices=("stop", "continue"))
188 parser.add_argument("--enable-dev", action="store_true",
189 help="Enable loading and running development versions "
190 "of the CWL standards.", default=False)
191 parser.add_argument('--storage-classes', default="default",
192 help="Specify comma separated list of storage classes to be used when saving final workflow output to Keep.")
193 parser.add_argument('--intermediate-storage-classes', default="default",
194 help="Specify comma separated list of storage classes to be used when saving intermediate workflow output to Keep.")
196 parser.add_argument("--intermediate-output-ttl", type=int, metavar="N",
197 help="If N > 0, intermediate output collections will be trashed N seconds after creation. Default is 0 (don't trash).",
200 parser.add_argument("--priority", type=int,
201 help="Workflow priority (range 1..1000, higher has precedence over lower)",
202 default=DEFAULT_PRIORITY)
204 parser.add_argument("--disable-validate", dest="do_validate",
205 action="store_false", default=True,
206 help=argparse.SUPPRESS)
208 parser.add_argument("--disable-git", dest="git_info",
209 action="store_false", default=True,
210 help=argparse.SUPPRESS)
212 parser.add_argument("--disable-color", dest="enable_color",
213 action="store_false", default=True,
214 help=argparse.SUPPRESS)
216 parser.add_argument("--disable-js-validation",
217 action="store_true", default=False,
218 help=argparse.SUPPRESS)
220 parser.add_argument("--fast-parser", dest="fast_parser",
221 action="store_true", default=False,
222 help=argparse.SUPPRESS)
224 parser.add_argument("--thread-count", type=int,
225 default=0, help="Number of threads to use for job submit and output collection.")
227 parser.add_argument("--http-timeout", type=int,
228 default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")
230 parser.add_argument("--defer-downloads", action="store_true", default=False,
231 help="When submitting a workflow, defer downloading HTTP URLs to workflow launch instead of downloading to Keep before submit.")
233 parser.add_argument("--varying-url-params", type=str, default="",
234 help="A comma separated list of URL query parameters that should be ignored when storing HTTP URLs in Keep.")
236 parser.add_argument("--prefer-cached-downloads", action="store_true", default=False,
237 help="If a HTTP URL is found in Keep, skip upstream URL freshness check (will not notice if the upstream has changed, but also not error if upstream is unavailable).")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--enable-preemptible", dest="enable_preemptible", default=None, action="store_true", help="Use preemptible instances. Control individual steps with arv:UsePreemptible hint.")
    exgroup.add_argument("--disable-preemptible", dest="enable_preemptible", default=None, action="store_false", help="Don't use preemptible instances.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--copy-deps", dest="copy_deps", default=None, action="store_true", help="Copy dependencies into the destination project.")
    exgroup.add_argument("--no-copy-deps", dest="copy_deps", default=None, action="store_false", help="Leave dependencies where they are.")
250 help="Skip loading of schemas",

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--trash-intermediate", action="store_true",
                         default=False, dest="trash_intermediate",
                         help="Immediately trash intermediate outputs on workflow success.")
    exgroup.add_argument("--no-trash-intermediate", action="store_false",
                         default=False, dest="trash_intermediate",
                         help="Do not trash intermediate outputs (default).")

    parser.add_argument("workflow", default=None, help="The workflow to execute")
    parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")

    return parser
def add_arv_hints():
    cwltool.command_line_tool.ACCEPTLIST_EN_RELAXED_RE = re.compile(r".*")
    cwltool.command_line_tool.ACCEPTLIST_RE = cwltool.command_line_tool.ACCEPTLIST_EN_RELAXED_RE
    supported_versions = ["v1.0", "v1.1", "v1.2"]
    for s in supported_versions:
        res = pkg_resources.resource_stream(__name__, 'arv-cwl-schema-%s.yml' % s)
        customschema = res.read().decode('utf-8')
        use_custom_schema(s, "http://arvados.org/cwl", customschema)
        res.close()
    cwltool.process.supportedProcessRequirements.extend([
        "http://arvados.org/cwl#RunInSingleContainer",
        "http://arvados.org/cwl#OutputDirType",
        "http://arvados.org/cwl#RuntimeConstraints",
        "http://arvados.org/cwl#PartitionRequirement",
        "http://arvados.org/cwl#APIRequirement",
        "http://commonwl.org/cwltool#LoadListingRequirement",
        "http://arvados.org/cwl#IntermediateOutput",
        "http://arvados.org/cwl#ReuseRequirement",
        "http://arvados.org/cwl#ClusterTarget",
        "http://arvados.org/cwl#OutputStorageClass",
        "http://arvados.org/cwl#ProcessProperties",
        "http://commonwl.org/cwltool#CUDARequirement",
        "http://arvados.org/cwl#UsePreemptible",
        "http://arvados.org/cwl#OutputCollectionProperties",
        "http://arvados.org/cwl#KeepCacheTypeRequirement",
        "http://arvados.org/cwl#OutOfMemoryRetry",
    ])


def exit_signal_handler(sigcode, frame):
    logger.error(str(u"Caught signal {}, exiting.").format(sigcode))
    sys.exit(-sigcode)


def main(args=sys.argv[1:],
         stdout=sys.stdout,
         stderr=sys.stderr,
         api_client=None,
         keep_client=None,
         install_sig_handlers=True):
    parser = arg_parser()

    job_order_object = None
    arvargs = parser.parse_args(args)
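
    # Settings that always apply when running under Arvados: execute tools in
    # containers, relax cwltool's file path checks, and don't print the list of
    # supported CWL versions.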
    arvargs.use_container = True
    arvargs.relax_path_checks = True
    arvargs.print_supported_versions = False

    if install_sig_handlers:
        arv_cmd.install_signal_handlers()

    if arvargs.update_workflow:
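        # Arvados UUIDs embed the object type after the cluster prefix; the
        # '-7fd4e-' infix at position 5 identifies a workflow object, which is
        # managed through the containers API.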
        if arvargs.update_workflow.find('-7fd4e-') == 5:
            want_api = 'containers'
        else:
            want_api = None
        if want_api and arvargs.work_api and want_api != arvargs.work_api:
            logger.error(str(u'--update-workflow arg {!r} uses {!r} API, but --api={!r} specified').format(
                arvargs.update_workflow, want_api, arvargs.work_api))
            return 1
        arvargs.work_api = want_api

    workflow_op = arvargs.create_workflow or arvargs.update_workflow or arvargs.print_keep_deps

    if workflow_op and not arvargs.job_order:
        job_order_object = ({}, "")
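
    # cwltool defines many more command line options than arg_parser() exposes;
    # back-fill any attributes our parser doesn't define with cwltool's own
    # defaults, since arvargs is later handed to cwltool.main.main().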
    for key, val in viewitems(cwltool.argparser.get_default_args()):
        if not hasattr(arvargs, key):
            setattr(arvargs, key, val)
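
    # Construct the API client, Keep client, and executor up front so
    # configuration problems are reported before any workflow processing starts.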
    try:
        if api_client is None:
            api_client = arvados.safeapi.ThreadSafeApiCache(
                api_params={
                    'num_retries': arvargs.retries,
                    'timeout': arvargs.http_timeout,
                },
                keep_params={
                    'num_retries': arvargs.retries,
                },
                version='v1',
            )
            keep_client = api_client.keep
            # Make an API request now so connection or credential errors are reported early.
            api_client.users().current().execute()
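        # If the caller didn't supply a Keep client, build one that shares the
        # API client and uses a disk-backed block cache.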
        if keep_client is None:
            block_cache = arvados.keep.KeepBlockCache(disk_cache=True)
            keep_client = arvados.keep.KeepClient(
                api_client=api_client,
                block_cache=block_cache,
                num_retries=arvargs.retries,
            )
        executor = ArvCwlExecutor(
            api_client,
            arvargs,
            keep_client=keep_client,
            num_retries=arvargs.retries,
            stdout=stdout,
        )
    except WorkflowException as e:
        logger.error(e, exc_info=(sys.exc_info()[1] if arvargs.debug else False))
        return 1
    except Exception:
        logger.exception("Error creating the Arvados CWL Executor")
        return 1

    # Note that unless in debug mode, some stack traces related to user
    # workflow errors may be suppressed.

    # Set logging on most modules to INFO (instead of the default, which is WARNING).
    logger.setLevel(logging.INFO)
    logging.getLogger('arvados').setLevel(logging.INFO)
    logging.getLogger('arvados.keep').setLevel(logging.WARNING)
    # API retries are filtered to the INFO level and can be noisy, but as long
    # as they succeed we don't need to see warnings about them.
    googleapiclient_http_logger = logging.getLogger('googleapiclient.http')
    googleapiclient_http_logger.addFilter(arvados.logging.GoogleHTTPClientFilter())
    googleapiclient_http_logger.setLevel(logging.WARNING)

    if arvargs.debug:
        logger.setLevel(logging.DEBUG)
        logging.getLogger('arvados').setLevel(logging.DEBUG)
        # In debug mode show logs about retries, but we aren't
        # debugging the google client so we don't need to see
        # everything.
        googleapiclient_http_logger.setLevel(logging.NOTSET)
        logging.getLogger('googleapiclient').setLevel(logging.INFO)

    if arvargs.quiet:
        logger.setLevel(logging.WARN)
        logging.getLogger('arvados').setLevel(logging.WARN)
        logging.getLogger('arvados.arv-run').setLevel(logging.WARN)

    if arvargs.metrics:
        metrics.setLevel(logging.DEBUG)
        logging.getLogger("cwltool.metrics").setLevel(logging.DEBUG)

    if arvargs.log_timestamps:
        arvados.log_handler.setFormatter(logging.Formatter(
            '%(asctime)s %(name)s %(levelname)s: %(message)s',
            '%Y-%m-%d %H:%M:%S'))
    else:
        arvados.log_handler.setFormatter(logging.Formatter('%(name)s %(levelname)s: %(message)s'))

    if stdout is sys.stdout:
        # cwltool.main has code to work around encoding issues with
        # sys.stdout and unix pipes (they default to ASCII encoding,
        # we want utf-8), so when stdout is sys.stdout set it to None
        # to take advantage of that.  Don't override it for all cases
        # since we still want to be able to capture stdout for the
        # unit tests.
        stdout = None

    executor.loadingContext.default_docker_image = arvargs.submit_runner_image or "arvados/jobs:"+__version__
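
    # For workflows already stored in Arvados (an arvwf: reference, a workflow
    # UUID, or a keep: path), skip client-side validation and, when submitting,
    # take the fast submit path.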
    if arvargs.workflow.startswith("arvwf:") or workflow_uuid_pattern.match(arvargs.workflow) or arvargs.workflow.startswith("keep:"):
        executor.loadingContext.do_validate = False
        if arvargs.submit and not workflow_op:
            executor.fast_submit = True
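
    # Hand off to cwltool's main loop, which loads the CWL document and then
    # calls back into executor.arv_executor to run it on Arvados.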
    return cwltool.main.main(args=arvargs,
                             stdout=stdout,
                             stderr=stderr,
                             executor=executor.arv_executor,
                             versionfunc=versionstring,
                             job_order_object=job_order_object,
                             logger_handler=arvados.log_handler,
                             custom_schema_callback=add_arv_hints,
                             loadingContext=executor.loadingContext,
                             runtimeContext=executor.toplevel_runtimeContext,
                             input_required=not workflow_op)
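

# Example (illustrative): running a workflow programmatically, assuming this
# package is importable as arvados_cwl and ARVADOS_API_HOST / ARVADOS_API_TOKEN
# are set in the environment.  The workflow and input file names below are
# placeholders.
#
#     import arvados_cwl
#     exit_code = arvados_cwl.main(["--submit", "--no-wait",
#                                   "workflow.cwl", "inputs.yml"])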