import arvados
import arvados.config
+import arvados.logging
from arvados.keep import KeepClient
from arvados.errors import ApiError
import arvados.commands._util as arv_cmd
-from arvados.api import OrderedJsonModel
from .perf import Perf
from ._version import __version__
def arg_parser(): # type: () -> argparse.ArgumentParser
- parser = argparse.ArgumentParser(description='Arvados executor for Common Workflow Language')
+ parser = argparse.ArgumentParser(
+ description='Arvados executor for Common Workflow Language',
+ parents=[arv_cmd.retry_opt],
+ )
parser.add_argument("--basedir",
help="Base directory used to resolve relative references in the input, default to directory of input object file or current directory (if inputs piped/provided on command line).")
exgroup.add_argument("--create-workflow", action="store_true", help="Register an Arvados workflow that can be run from Workbench")
exgroup.add_argument("--update-workflow", metavar="UUID", help="Update an existing Arvados workflow with the given UUID.")
+ exgroup.add_argument("--print-keep-deps", action="store_true", help="To assist copying, print a list of Keep collections that this workflow depends on.")
+
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--wait", action="store_true", help="After submitting workflow runner, wait for completion.",
default=True, dest="wait")
"http://arvados.org/cwl#UsePreemptible",
"http://arvados.org/cwl#OutputCollectionProperties",
"http://arvados.org/cwl#KeepCacheTypeRequirement",
+ "http://arvados.org/cwl#OutOfMemoryRetry",
])
def exit_signal_handler(sigcode, frame):
return 1
arvargs.work_api = want_api
- if (arvargs.create_workflow or arvargs.update_workflow) and not arvargs.job_order:
+ workflow_op = arvargs.create_workflow or arvargs.update_workflow or arvargs.print_keep_deps
+
+ if workflow_op and not arvargs.job_order:
job_order_object = ({}, "")
add_arv_hints()
try:
if api_client is None:
api_client = arvados.safeapi.ThreadSafeApiCache(
- api_params={"model": OrderedJsonModel(), "timeout": arvargs.http_timeout},
- keep_params={"num_retries": 4},
+ api_params={
+ 'num_retries': arvargs.retries,
+ 'timeout': arvargs.http_timeout,
+ },
+ keep_params={
+ 'num_retries': arvargs.retries,
+ },
version='v1',
)
keep_client = api_client.keep
api_client.users().current().execute()
if keep_client is None:
block_cache = arvados.keep.KeepBlockCache(disk_cache=True)
- keep_client = arvados.keep.KeepClient(api_client=api_client, num_retries=4, block_cache=block_cache)
- executor = ArvCwlExecutor(api_client, arvargs, keep_client=keep_client, num_retries=4, stdout=stdout)
+ keep_client = arvados.keep.KeepClient(
+ api_client=api_client,
+ block_cache=block_cache,
+ num_retries=arvargs.retries,
+ )
+ executor = ArvCwlExecutor(
+ api_client,
+ arvargs,
+ keep_client=keep_client,
+ num_retries=arvargs.retries,
+ stdout=stdout,
+ )
except WorkflowException as e:
logger.error(e, exc_info=(sys.exc_info()[1] if arvargs.debug else False))
return 1
# Note that unless in debug mode, some stack traces related to user
# workflow errors may be suppressed.
+
+    # Set logging on most modules to INFO (instead of the default, which is WARNING)
+ logger.setLevel(logging.INFO)
+ logging.getLogger('arvados').setLevel(logging.INFO)
+ logging.getLogger('arvados.keep').setLevel(logging.WARNING)
+    # API retry messages are logged at the INFO level and can be noisy, but as
+    # long as the retries succeed we don't need to see them.
+ googleapiclient_http_logger = logging.getLogger('googleapiclient.http')
+ googleapiclient_http_logger.addFilter(arvados.logging.GoogleHTTPClientFilter())
+ googleapiclient_http_logger.setLevel(logging.WARNING)
+
if arvargs.debug:
logger.setLevel(logging.DEBUG)
logging.getLogger('arvados').setLevel(logging.DEBUG)
+            # In debug mode, show logs about retries, but we aren't
+            # debugging the Google client itself, so we don't need to
+            # see everything.
+ googleapiclient_http_logger.setLevel(logging.NOTSET)
+ logging.getLogger('googleapiclient').setLevel(logging.INFO)
if arvargs.quiet:
logger.setLevel(logging.WARN)
# unit tests.
stdout = None
+ executor.loadingContext.default_docker_image = arvargs.submit_runner_image or "arvados/jobs:"+__version__
+
if arvargs.workflow.startswith("arvwf:") or workflow_uuid_pattern.match(arvargs.workflow) or arvargs.workflow.startswith("keep:"):
executor.loadingContext.do_validate = False
- if arvargs.submit:
+ if arvargs.submit and not workflow_op:
executor.fast_submit = True
return cwltool.main.main(args=arvargs,
custom_schema_callback=add_arv_hints,
loadingContext=executor.loadingContext,
runtimeContext=executor.toplevel_runtimeContext,
- input_required=not (arvargs.create_workflow or arvargs.update_workflow))
+ input_required=not workflow_op)