# NOTE(review): this span is a unified-diff fragment ("-"/"+" prefixed lines),
# not runnable Python. The patch's visible theme is dropping redundant
# `type=str` from add_argument() calls (str is already argparse's default type).
# Several hunks have had their context lines elided, leaving orphaned keyword
# arguments below — flagged inline. Verify each against the full source file.
def arg_parser(): # type: () -> argparse.ArgumentParser
parser = argparse.ArgumentParser(description='Arvados executor for Common Workflow Language')
- parser.add_argument("--basedir", type=str,
+ parser.add_argument("--basedir",
help="Base directory used to resolve relative references in the input, default to directory of input object file or current directory (if inputs piped/provided on command line).")
- parser.add_argument("--outdir", type=str, default=os.path.abspath('.'),
+ parser.add_argument("--outdir", default=os.path.abspath('.'),
help="Output directory, default current directory")
# NOTE(review): hunk fusion — the "--eval-timeout" header line is joined with
# kwargs (dest="enable_reuse", "Disable job or container reuse") that belong
# to a different option (presumably --disable-reuse/--enable-reuse); the
# intervening diff context was elided. TODO confirm against full file.
parser.add_argument("--eval-timeout",
default=True, dest="enable_reuse",
help="Disable job or container reuse")
- parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
- parser.add_argument("--output-name", type=str, help="Name to use for collection that stores the final output.", default=None)
- parser.add_argument("--output-tags", type=str, help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
+ parser.add_argument("--project-uuid", metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
+ parser.add_argument("--output-name", help="Name to use for collection that stores the final output.", default=None)
+ parser.add_argument("--output-tags", help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
parser.add_argument("--ignore-docker-for-reuse", action="store_true",
help="Ignore Docker image version when deciding whether to reuse past jobs.",
default=False)
# NOTE(review): `exgroup` is used here but its creation line was elided by the
# diff — a mutually-exclusive group is only visibly created further below.
exgroup.add_argument("--no-log-timestamps", action="store_false", help="No timestamp on logging lines",
default=True, dest="log_timestamps")
- parser.add_argument("--api", type=str,
+ parser.add_argument("--api",
default=None, dest="work_api",
choices=("jobs", "containers"),
help="Select work submission API. Default is 'jobs' if that API is available, otherwise 'containers'.")
# NOTE(review): orphaned kwargs — the add_argument header line was elided;
# from the help text this presumably belongs to a "--submit-runner-ram"
# (type=int) option. TODO confirm.
help="RAM (in MiB) required for the workflow runner job (default 1024)",
default=None)
- parser.add_argument("--submit-runner-image", type=str,
+ parser.add_argument("--submit-runner-image",
help="Docker image for workflow runner job, default arvados/jobs:%s" % __version__,
default=None)
# NOTE(review): stray `default=False)` from an elided add_argument call.
default=False)
exgroup = parser.add_mutually_exclusive_group()
- exgroup.add_argument("--submit-request-uuid", type=str,
+ exgroup.add_argument("--submit-request-uuid",
default=None,
help="Update and commit to supplied container request instead of creating a new one (containers API only).",
metavar="UUID")
- exgroup.add_argument("--submit-runner-cluster", type=str,
+ exgroup.add_argument("--submit-runner-cluster",
help="Submit workflow runner to a remote cluster (containers API only)",
default=None,
metavar="CLUSTER_ID")
# NOTE(review): orphaned kwargs — header elided; help text suggests a
# collection-cache-size option (in MiB). TODO confirm.
default=None,
help="Collection cache size (in MiB, default 256).")
- parser.add_argument("--name", type=str,
+ parser.add_argument("--name",
help="Name to use for workflow execution instance.",
default=None)
parser.add_argument("--enable-dev", action="store_true",
help="Enable loading and running development versions "
"of CWL spec.", default=False)
- parser.add_argument('--storage-classes', default="default", type=str,
+ parser.add_argument('--storage-classes', default="default",
help="Specify comma separated list of storage classes to be used when saving workflow output to Keep.")
# NOTE(review): hunk fusion — "--intermediate-output-ttl" header (type=int,
# metavar="N") is joined with kwargs (dest="trash_intermediate") that belong
# to a different flag; intervening lines elided. TODO confirm.
parser.add_argument("--intermediate-output-ttl", type=int, metavar="N",
default=False, dest="trash_intermediate",
help="Do not trash intermediate outputs (default).")
# NOTE(review): a positional "workflow" with default=None but no visible
# nargs would raise at parser build time unless nargs (e.g. "?") appears in
# the elided context — verify against the full file.
- parser.add_argument("workflow", type=str, default=None, help="The workflow to execute")
+ parser.add_argument("workflow", default=None, help="The workflow to execute")
parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")
return parser
# NOTE(review): diff fragment. Resolves an Arvados identifier to a canonical
# CWL reference URI: keep:/arvwf: URIs pass through; a workflow UUID becomes
# "arvwf:<uuid>#main"; a pipeline template UUID is resolved to the keep:
# locator stored in its "cwl:tool" script parameter; a bare Keep locator or
# collection UUID becomes a "keep:<pdh><path>" reference. The "+" lines add
# u"" prefixes — presumably python-future py2/py3 unicode compatibility,
# consistent with the viewvalues() usage here. TODO confirm.
def collectionResolver(api_client, document_loader, uri, num_retries=4):
if uri.startswith("keep:") or uri.startswith("arvwf:"):
- return uri
# NOTE(review): encode/decode round-trip presumably forces a native unicode
# str under py2 — confirm intent; a plain str()/text cast may be clearer.
+ return uri.encode("utf-8").decode()
if workflow_uuid_pattern.match(uri):
- return "arvwf:%s#main" % (uri)
+ return u"arvwf:%s#main" % (uri)
if pipeline_template_uuid_pattern.match(uri):
pt = api_client.pipeline_templates().get(uuid=uri).execute(num_retries=num_retries)
- return "keep:" + viewvalues(pt["components"])[0]["script_parameters"]["cwl:tool"]
+ return u"keep:" + viewvalues(pt["components"])[0]["script_parameters"]["cwl:tool"]
p = uri.split("/")
if arvados.util.keep_locator_pattern.match(p[0]):
- return "keep:%s" % (uri)
+ return u"keep:%s" % (uri)
if arvados.util.collection_uuid_pattern.match(p[0]):
- return "keep:%s%s" % (api_client.collections().
+ return u"keep:%s%s" % (api_client.collections().
# NOTE(review): unlike the pipeline_templates call above, this execute()
# omits num_retries — possible inconsistency worth confirming upstream.
get(uuid=p[0]).execute()["portable_data_hash"],
uri[len(p[0]):])
# NOTE(review): no fallback return is visible — the function likely
# continues past this chunk (view may be truncated mid-definition).