# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

# Implement cwl-runner interface for submitting and running work on Arvados, using
# the Crunch containers API.

import argparse
import importlib.metadata
import importlib.resources
import logging
import os
import re
import sys

from schema_salad.sourceline import SourceLine
import schema_salad.validate as validate

import cwltool.main
import cwltool.workflow
import cwltool.process
import cwltool.argparser
from cwltool.errors import WorkflowException
from cwltool.process import shortname, UnsupportedRequirement, use_custom_schema
from cwltool.utils import adjustFileObjs, adjustDirObjs, get_listing

import arvados
import arvados.logging
import arvados.safeapi
from arvados.keep import KeepClient
from arvados.errors import ApiError
import arvados.commands._util as arv_cmd

from .perf import Perf
from ._version import __version__
from .executor import ArvCwlExecutor
from .fsaccess import workflow_uuid_pattern

# These aren't used directly in this file but
# other code expects to import them from here
from .arvcontainer import ArvadosContainer
from .arvtool import ArvadosCommandTool
from .fsaccess import CollectionFsAccess, CollectionCache, CollectionFetcher
from .util import get_current_container
from .executor import RuntimeStatusLoggingHandler, DEFAULT_PRIORITY
from .arvworkflow import ArvadosWorkflow

logger = logging.getLogger('arvados.cwl-runner')
metrics = logging.getLogger('arvados.cwl-runner.metrics')
logger.setLevel(logging.INFO)

arvados.log_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s: %(message)s',
        '%Y-%m-%d %H:%M:%S'))

def versionstring():
    """Return a version string for key packages, for provenance and debugging."""
    return "{} {}, arvados-python-client {}, cwltool {}".format(
        sys.argv[0],
        importlib.metadata.version('arvados-cwl-runner'),
        importlib.metadata.version('arvados-python-client'),
        importlib.metadata.version('cwltool'),
    )

def arg_parser():  # type: () -> argparse.ArgumentParser
    parser = argparse.ArgumentParser(
        description='Arvados executor for Common Workflow Language',
        parents=[arv_cmd.retry_opt],
    )
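    # arv_cmd.retry_opt is a shared parent parser from the Arvados SDK; it
    # contributes the --retries option that is read later as arvargs.retries.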

    parser.add_argument("--basedir",
                        help="Base directory used to resolve relative references in the input, default to directory of input object file or current directory (if inputs piped/provided on command line).")
    parser.add_argument("--outdir", default=os.path.abspath('.'),
                        help="Output directory, default current directory")

    parser.add_argument("--eval-timeout",
                        help="Time to wait for a Javascript expression to evaluate before giving an error, default 20s.",
                        type=float,
                        default=20)

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--print-dot", action="store_true",
                         help="Print workflow visualization in graphviz format and exit")
    exgroup.add_argument("--version", action="version", help="Print version and exit", version=versionstring())
    exgroup.add_argument("--validate", action="store_true", help="Validate CWL document only.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--verbose", action="store_true", help="Default logging")
    exgroup.add_argument("--quiet", action="store_true", help="Only print warnings and errors.")
    exgroup.add_argument("--debug", action="store_true", help="Print even more logging")

    parser.add_argument("--metrics", action="store_true", help="Print timing metrics")

    parser.add_argument("--tool-help", action="store_true", help="Print command line help for tool")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--enable-reuse", action="store_true",
                         default=True, dest="enable_reuse",
                         help="Enable container reuse (default)")
    exgroup.add_argument("--disable-reuse", action="store_false",
                         default=True, dest="enable_reuse",
                         help="Disable container reuse")

    parser.add_argument("--project-uuid", metavar="UUID", help="Project that will own the workflow containers, if not provided, will go to home project.")
    parser.add_argument("--output-name", help="Name to use for collection that stores the final output.", default=None)
    parser.add_argument("--output-tags", help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
    parser.add_argument("--ignore-docker-for-reuse", action="store_true",
                        help="Ignore Docker image version when deciding whether to reuse past containers.",
                        default=False)

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--submit", action="store_true", help="Submit workflow to run on Arvados.",
                         default=True, dest="submit")
    exgroup.add_argument("--local", action="store_false", help="Run workflow on local host (submits containers to Arvados).",
                         default=True, dest="submit")
    exgroup.add_argument("--create-template", action="store_true", help="(Deprecated) synonym for --create-workflow.",
                         dest="create_workflow")
    exgroup.add_argument("--create-workflow", action="store_true", help="Register an Arvados workflow that can be run from Workbench")
    exgroup.add_argument("--update-workflow", metavar="UUID", help="Update an existing Arvados workflow with the given UUID.")

    exgroup.add_argument("--print-keep-deps", action="store_true", help="To assist copying, print a list of Keep collections that this workflow depends on.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--wait", action="store_true", help="After submitting workflow runner, wait for completion.",
                         default=True, dest="wait")
    exgroup.add_argument("--no-wait", action="store_false", help="Submit workflow runner and exit.",
                         default=True, dest="wait")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--log-timestamps", action="store_true", help="Prefix logging lines with timestamp",
                         default=True, dest="log_timestamps")
    exgroup.add_argument("--no-log-timestamps", action="store_false", help="No timestamp on logging lines",
                         default=True, dest="log_timestamps")

    parser.add_argument("--api",
                        default=None, dest="work_api",
                        choices=("containers",),
                        help="Select work submission API. Only supports 'containers'")

    parser.add_argument("--compute-checksum", action="store_true", default=False,
                        help="Compute checksum of contents while collecting outputs",
                        dest="compute_checksum")

    parser.add_argument("--submit-runner-ram", type=int,
                        help="RAM (in MiB) required for the workflow runner job (default 1024)",
                        default=None)

    parser.add_argument("--submit-runner-image",
                        help="Docker image for workflow runner job, default arvados/jobs:%s" % __version__,
                        default=None)

    parser.add_argument("--always-submit-runner", action="store_true",
                        help="When invoked with --submit --wait, always submit a runner to manage the workflow, even when only running a single CommandLineTool",
                        default=False)

    parser.add_argument("--match-submitter-images", action="store_true",
                        default=False, dest="match_local_docker",
                        help="Where Arvados has more than one Docker image of the same name, use image from the Docker instance on the submitting node.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--submit-request-uuid",
                         default=None,
                         help="Update and commit to supplied container request instead of creating a new one.",
                         metavar="UUID")
    exgroup.add_argument("--submit-runner-cluster",
                         help="Submit workflow runner to a remote cluster",
                         default=None,
                         metavar="CLUSTER_ID")

    parser.add_argument("--collection-cache-size", type=int,
                        default=None,
                        help="Collection cache size (in MiB, default 256).")

    parser.add_argument("--name",
                        help="Name to use for workflow execution instance.",
                        default=None)

    parser.add_argument("--on-error",
                        help="Desired workflow behavior when a step fails. One of 'stop' (do not submit any more steps) or "
                        "'continue' (may submit other steps that are not downstream from the error). Default is 'continue'.",
                        default="continue", choices=("stop", "continue"))

    parser.add_argument("--enable-dev", action="store_true",
                        help="Enable loading and running development versions "
                             "of the CWL standards.", default=False)
    parser.add_argument('--storage-classes', default="default",
                        help="Specify comma separated list of storage classes to be used when saving final workflow output to Keep.")
    parser.add_argument('--intermediate-storage-classes', default="default",
                        help="Specify comma separated list of storage classes to be used when saving intermediate workflow output to Keep.")

    parser.add_argument("--intermediate-output-ttl", type=int, metavar="N",
                        help="If N > 0, intermediate output collections will be trashed N seconds after creation. Default is 0 (don't trash).",
                        default=0)

    parser.add_argument("--priority", type=int,
                        help="Workflow priority (range 1..1000, higher has precedence over lower)",
                        default=DEFAULT_PRIORITY)

    parser.add_argument("--disable-validate", dest="do_validate",
                        action="store_false", default=True,
                        help=argparse.SUPPRESS)

    parser.add_argument("--disable-git", dest="git_info",
                        action="store_false", default=True,
                        help=argparse.SUPPRESS)

    parser.add_argument("--disable-color", dest="enable_color",
                        action="store_false", default=True,
                        help=argparse.SUPPRESS)

    parser.add_argument("--disable-js-validation",
                        action="store_true", default=False,
                        help=argparse.SUPPRESS)

    parser.add_argument("--fast-parser", dest="fast_parser",
                        action="store_true", default=False,
                        help=argparse.SUPPRESS)

    parser.add_argument("--thread-count", type=int,
                        default=0, help="Number of threads to use for job submit and output collection.")

    parser.add_argument("--http-timeout", type=int,
                        default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")

    parser.add_argument("--defer-downloads", action="store_true", default=False,
                        help="When submitting a workflow, defer downloading HTTP URLs to workflow launch instead of downloading to Keep before submit.")

    parser.add_argument("--varying-url-params", type=str, default="",
                        help="A comma separated list of URL query parameters that should be ignored when storing HTTP URLs in Keep.")

    parser.add_argument("--prefer-cached-downloads", action="store_true", default=False,
                        help="If a HTTP URL is found in Keep, skip upstream URL freshness check (will not notice if the upstream has changed, but also not error if upstream is unavailable).")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--enable-preemptible", dest="enable_preemptible", default=None, action="store_true", help="Use preemptible instances. Control individual steps with arv:UsePreemptible hint.")
    exgroup.add_argument("--disable-preemptible", dest="enable_preemptible", default=None, action="store_false", help="Don't use preemptible instances.")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--copy-deps", dest="copy_deps", default=None, action="store_true", help="Copy dependencies into the destination project.")
    exgroup.add_argument("--no-copy-deps", dest="copy_deps", default=None, action="store_false", help="Leave dependencies where they are.")

    parser.add_argument("--skip-schemas", action="store_true",
                        help="Skip loading of schemas",
                        default=False, dest="skip_schemas")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--trash-intermediate", action="store_true",
                         default=False, dest="trash_intermediate",
                         help="Immediately trash intermediate outputs on workflow success.")
    exgroup.add_argument("--no-trash-intermediate", action="store_false",
                         default=False, dest="trash_intermediate",
                         help="Do not trash intermediate outputs (default).")

    exgroup = parser.add_mutually_exclusive_group()
    exgroup.add_argument("--enable-usage-report", dest="enable_usage_report", default=None, action="store_true", help="Create usage_report.html with a summary of each step's resource usage.")
    exgroup.add_argument("--disable-usage-report", dest="enable_usage_report", default=None, action="store_false", help="Disable usage report.")

    parser.add_argument("workflow", default=None, help="The workflow to execute")
    parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")

    return parser

def add_arv_hints():
    cwltool.command_line_tool.ACCEPTLIST_EN_RELAXED_RE = re.compile(r".*")
    cwltool.command_line_tool.ACCEPTLIST_RE = cwltool.command_line_tool.ACCEPTLIST_EN_RELAXED_RE
    supported_versions = ["v1.0", "v1.1", "v1.2"]
    for s in supported_versions:
        customschema = importlib.resources.read_text(__name__, f'arv-cwl-schema-{s}.yml', 'utf-8')
        use_custom_schema(s, "http://arvados.org/cwl", customschema)
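
    # Register the Arvados and cwltool extension requirements below so that
    # cwltool's validator accepts workflow documents that use them.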
    cwltool.process.supportedProcessRequirements.extend([
        "http://arvados.org/cwl#RunInSingleContainer",
        "http://arvados.org/cwl#OutputDirType",
        "http://arvados.org/cwl#RuntimeConstraints",
        "http://arvados.org/cwl#PartitionRequirement",
        "http://arvados.org/cwl#APIRequirement",
        "http://commonwl.org/cwltool#LoadListingRequirement",
        "http://arvados.org/cwl#IntermediateOutput",
        "http://arvados.org/cwl#ReuseRequirement",
        "http://arvados.org/cwl#ClusterTarget",
        "http://arvados.org/cwl#OutputStorageClass",
        "http://arvados.org/cwl#ProcessProperties",
        "http://commonwl.org/cwltool#CUDARequirement",
        "http://arvados.org/cwl#UsePreemptible",
        "http://arvados.org/cwl#OutputCollectionProperties",
        "http://arvados.org/cwl#KeepCacheTypeRequirement",
        "http://arvados.org/cwl#OutOfMemoryRetry",
        "http://arvados.org/cwl#SpotInstanceRetry",
    ])

def exit_signal_handler(sigcode, frame):
    logger.error("Caught signal {}, exiting.".format(sigcode))
    sys.exit(-sigcode)

def main(args=sys.argv[1:],
         stdout=sys.stdout,
         stderr=sys.stderr,
         api_client=None,
         keep_client=None,
         install_sig_handlers=True):
    parser = arg_parser()

    job_order_object = None
    arvargs = parser.parse_args(args)
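
    # These cwltool options have no arv-cwl-runner command line equivalent;
    # hard-code the values used when work runs on Arvados.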
    arvargs.use_container = True
    arvargs.relax_path_checks = True
    arvargs.print_supported_versions = False

    if install_sig_handlers:
        arv_cmd.install_signal_handlers()

    if arvargs.update_workflow:
        # '-7fd4e-' is the UUID type infix for Arvados workflow objects,
        # which are managed through the containers API.
        if arvargs.update_workflow.find('-7fd4e-') == 5:
            want_api = 'containers'
        else:
            want_api = None
        if want_api and arvargs.work_api and want_api != arvargs.work_api:
            logger.error("--update-workflow arg {!r} uses {!r} API, but --api={!r} specified".format(
                arvargs.update_workflow, want_api, arvargs.work_api))
            return 1
        arvargs.work_api = want_api

    workflow_op = arvargs.create_workflow or arvargs.update_workflow or arvargs.print_keep_deps

    if workflow_op and not arvargs.job_order:
        job_order_object = ({}, "")

    add_arv_hints()
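
    # Backfill every cwltool option that arv-cwl-runner does not define itself,
    # so downstream cwltool code can rely on the attribute being present.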
    for key, val in cwltool.argparser.get_default_args().items():
        if not hasattr(arvargs, key):
            setattr(arvargs, key, val)

    try:
        if api_client is None:
            api_client = arvados.safeapi.ThreadSafeApiCache(
                api_params={
                    'num_retries': arvargs.retries,
                    'timeout': arvargs.http_timeout,
                },
                keep_params={
                    'num_retries': arvargs.retries,
                },
                version='v1',
            )
            keep_client = api_client.keep
            # Make an API object now so errors are reported early.
            api_client.users().current().execute()
        if keep_client is None:
            block_cache = arvados.keep.KeepBlockCache(disk_cache=True)
            keep_client = arvados.keep.KeepClient(
                api_client=api_client,
                block_cache=block_cache,
                num_retries=arvargs.retries,
            )
        executor = ArvCwlExecutor(
            api_client,
            arvargs,
            keep_client=keep_client,
            num_retries=arvargs.retries,
            stdout=stdout,
        )
    except WorkflowException as e:
        logger.error(e, exc_info=(sys.exc_info()[1] if arvargs.debug else False))
        return 1
    except Exception:
        logger.exception("Error creating the Arvados CWL Executor")
        return 1

    # Note that unless in debug mode, some stack traces related to user
    # workflow errors may be suppressed.

    # Set logging on most modules to INFO (instead of the default, WARNING)
    logger.setLevel(logging.INFO)
    logging.getLogger('arvados').setLevel(logging.INFO)
    logging.getLogger('arvados.keep').setLevel(logging.WARNING)
    # API retries are filtered to the INFO level and can be noisy, but as long as
    # they succeed we don't need to see warnings about it.
    googleapiclient_http_logger = logging.getLogger('googleapiclient.http')
    googleapiclient_http_logger.addFilter(arvados.logging.GoogleHTTPClientFilter())
    googleapiclient_http_logger.setLevel(logging.WARNING)

    if arvargs.debug:
        logger.setLevel(logging.DEBUG)
        logging.getLogger('arvados').setLevel(logging.DEBUG)
        # In debug mode show logs about retries, but we aren't
        # debugging the google client so we don't need to see
        # every underlying HTTP request.
        googleapiclient_http_logger.setLevel(logging.NOTSET)
        logging.getLogger('googleapiclient').setLevel(logging.INFO)

    if arvargs.quiet:
        logger.setLevel(logging.WARN)
        logging.getLogger('arvados').setLevel(logging.WARN)
        logging.getLogger('arvados.arv-run').setLevel(logging.WARN)

    if arvargs.metrics:
        metrics.setLevel(logging.DEBUG)
        logging.getLogger("cwltool.metrics").setLevel(logging.DEBUG)

    if arvargs.log_timestamps:
        arvados.log_handler.setFormatter(logging.Formatter(
            '%(asctime)s %(name)s %(levelname)s: %(message)s',
            '%Y-%m-%d %H:%M:%S'))
    else:
        arvados.log_handler.setFormatter(logging.Formatter('%(name)s %(levelname)s: %(message)s'))

    if stdout is sys.stdout:
        # cwltool.main has code to work around encoding issues with
        # sys.stdout and unix pipes (they default to ASCII encoding,
        # we want utf-8), so when stdout is sys.stdout set it to None
        # to take advantage of that. Don't override it for all cases
        # since we still want to be able to capture stdout for the
        # unit tests.
        stdout = None

    executor.loadingContext.default_docker_image = arvargs.submit_runner_image or "arvados/jobs:"+__version__
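
    # Workflows referenced by UUID, arvwf:, or keep: have already been
    # registered or stored in Arvados, so skip revalidation and let plain
    # submissions take the faster fast_submit path.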
    if arvargs.workflow.startswith("arvwf:") or workflow_uuid_pattern.match(arvargs.workflow) or arvargs.workflow.startswith("keep:"):
        executor.loadingContext.do_validate = False
        if arvargs.submit and not workflow_op:
            executor.fast_submit = True

    return cwltool.main.main(args=arvargs,
                             stdout=stdout,
                             stderr=stderr,
                             executor=executor.arv_executor,
                             versionfunc=versionstring,
                             job_order_object=job_order_object,
                             logger_handler=arvados.log_handler,
                             custom_schema_callback=add_arv_hints,
                             loadingContext=executor.loadingContext,
                             runtimeContext=executor.toplevel_runtimeContext,
                             input_required=not workflow_op)
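
# Typical invocation goes through the arvados-cwl-runner command line tool,
# which calls main() above. The flags shown are defined in arg_parser(); the
# project UUID below is a placeholder, not a real project:
#
#   arvados-cwl-runner --project-uuid zzzzz-j7d0g-xxxxxxxxxxxxxxx \
#       --submit --wait my-workflow.cwl my-inputs.yml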