X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/b92203411f6f6adaef1c2af62495830f13f4fa14..f4c3100bad26dff3c99ff4bb9fa19b0d9f7995c7:/sdk/python/arvados/commands/run.py?ds=sidebyside

diff --git a/sdk/python/arvados/commands/run.py b/sdk/python/arvados/commands/run.py
index 4cbda4ae7d..8815565460 100644
--- a/sdk/python/arvados/commands/run.py
+++ b/sdk/python/arvados/commands/run.py
@@ -19,7 +19,7 @@ logger.setLevel(logging.INFO)
 arvrun_parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
 arvrun_parser.add_argument('--dry-run', action="store_true", help="Print out the pipeline that would be submitted and exit")
 arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-run-pipeline-instance")
-arvrun_parser.add_argument('--docker-image', type=str, default="arvados/jobs", help="Docker image to use, default arvados/jobs")
+arvrun_parser.add_argument('--docker-image', type=str, help="Docker image to use, otherwise use instance default.")
 arvrun_parser.add_argument('--ignore-rcode', action="store_true", help="Commands that return non-zero return codes should not be considered failed.")
 arvrun_parser.add_argument('--no-reuse', action="store_true", help="Do not reuse past jobs.")
 arvrun_parser.add_argument('--no-wait', action="store_true", help="Do not wait and display logs after submitting command, just exit.")
@@ -52,7 +52,7 @@ def is_in_collection(root, branch):
         else:
             sp = os.path.split(root)
             return is_in_collection(sp[0], os.path.join(sp[1], branch))
-    except IOError, OSError:
+    except (IOError, OSError):
         return (None, None)

 # Determine the project to place the output of this command by searching upward
@@ -73,7 +73,7 @@ def determine_project(root, current_user):
         else:
             sp = os.path.split(root)
             return determine_project(sp[0], current_user)
-    except IOError, OSError:
+    except (IOError, OSError):
         return current_user

 # Determine if string corresponds to a file, and if that file is part of a
@@ -101,7 +101,7 @@ def statfile(prefix, fn):

     return prefix+fn

-def uploadfiles(files, api, dry_run=False, num_retries=0, project=None):
+def uploadfiles(files, api, dry_run=False, num_retries=0, project=None, fnPattern="$(file %s/%s)"):
     # Find the smallest path prefix that includes all the files that need to be uploaded.
     # This starts at the root and iteratively removes common parent directory prefixes
     # until all file pathes no longer have a common parent.
@@ -153,7 +153,7 @@ def uploadfiles(files, api, dry_run=False, num_retries=0, project=None):
             logger.info("Uploaded to %s", item["uuid"])

     for c in files:
-        c.fn = "$(file %s/%s)" % (pdh, c.fn)
+        c.fn = fnPattern % (pdh, c.fn)

     os.chdir(orgdir)

@@ -249,11 +249,12 @@ def main(arguments=None):
         "repository": args.repository,
         "script_parameters": {
         },
-        "runtime_constraints": {
-            "docker_image": args.docker_image
-        }
+        "runtime_constraints": {}
     }

+    if args.docker_image:
+        component["runtime_constraints"]["docker_image"] = args.docker_image
+
     task_foreach = []
     group_parser = argparse.ArgumentParser()
     group_parser.add_argument('-b', '--batch-size', type=int)
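
Note on the except clauses changed in is_in_collection() and determine_project(): in Python 2, "except IOError, OSError:" does not catch both exception types; it catches only IOError and binds the caught instance to the name OSError. The parenthesized tuple form catches both. A minimal standalone Python 2 sketch of the difference (not part of run.py):

    def old_style():
        try:
            raise IOError("disk error")
        except IOError, OSError:           # catches only IOError; binds the instance to the name OSError
            print "caught:", OSError       # prints the IOError instance, shadowing the builtin OSError

    def new_style():
        try:
            raise OSError("stat failed")
        except (IOError, OSError) as e:    # catches either exception type
            print "caught:", e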
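The new fnPattern keyword lets callers of uploadfiles() control how each uploaded file reference is rewritten; the default "$(file %s/%s)" keeps the old behavior, with the two placeholders filled by the collection's portable data hash and the file's path within it. A small illustration of the substitution only (the portable data hash, file names, and the alternative "/keep/%s/%s" pattern below are made-up examples, not taken from this diff):

    # Illustrates the rewrite uploadfiles() performs with fnPattern; values are placeholders.
    pdh = "99999999999999999999999999999999+99"
    default_pattern = "$(file %s/%s)"
    keep_mount_pattern = "/keep/%s/%s"

    for fn in ["data/sample1.fastq", "data/sample2.fastq"]:
        print default_pattern % (pdh, fn)      # $(file 9999...+99/data/sample1.fastq)  -- old behavior
        print keep_mount_pattern % (pdh, fn)   # /keep/9999...+99/data/sample1.fastq    -- caller-supplied pattern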
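With the runtime_constraints change in main(), the "docker_image" key is only set when --docker-image is given on the command line; otherwise the cluster's configured default image applies, matching the new argparse help text. A hedged Python 2 sketch of the two resulting component shapes (the build_component() helper and the "script"/"repository" values are placeholders for illustration, not code from this diff):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--docker-image', type=str, help="Docker image to use, otherwise use instance default.")

    def build_component(args):
        component = {
            "script": "run-command",
            "repository": "arvados",
            "script_parameters": {},
            "runtime_constraints": {}
        }
        # Only pin an image when the user asked for one; otherwise leave
        # runtime_constraints empty so the instance default is used.
        if args.docker_image:
            component["runtime_constraints"]["docker_image"] = args.docker_image
        return component

    print build_component(parser.parse_args([]))                                   # no docker_image constraint
    print build_component(parser.parse_args(['--docker-image', 'arvados/jobs']))   # image pinned explicitly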