arvrun_parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
arvrun_parser.add_argument('--dry-run', action="store_true", help="Print out the pipeline that would be submitted and exit")
arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-run-pipeline-instance")
-arvrun_parser.add_argument('--docker-image', type=str, default="arvados/jobs", help="Docker image to use, default arvados/jobs")
+arvrun_parser.add_argument('--docker-image', type=str, help="Docker image to use, otherwise use instance default.")
arvrun_parser.add_argument('--ignore-rcode', action="store_true", help="Commands that return non-zero return codes should not be considered failed.")
arvrun_parser.add_argument('--no-reuse', action="store_true", help="Do not reuse past jobs.")
arvrun_parser.add_argument('--no-wait', action="store_true", help="Do not wait and display logs after submitting command, just exit.")
else:
sp = os.path.split(root)
return is_in_collection(sp[0], os.path.join(sp[1], branch))
- except IOError, OSError:
+ except (IOError, OSError):
return (None, None)
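# Sketch (not part of this patch; illustrative only): in Python 2 the old form
# "except IOError, OSError:" binds the caught IOError instance to the name
# OSError and never catches OSError itself, so the tuple form above is a real
# bug fix, not a style change. A minimal, hypothetical example of the corrected
# pattern:
import errno

def read_first_line(path):
    # Hypothetical helper, not from arv-run; shows the tuple except form.
    try:
        with open(path) as f:
            return f.readline()
    except (IOError, OSError) as e:  # catches either exception class
        if e.errno == errno.ENOENT:
            return None
        raise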
# Determine the project to place the output of this command by searching upward through the directory tree
else:
sp = os.path.split(root)
return determine_project(sp[0], current_user)
- except IOError, OSError:
+ except (IOError, OSError):
return current_user
# Determine if a string corresponds to a file, and if that file is part of an Arvados collection.
return prefix+fn
-def uploadfiles(files):
+def uploadfiles(files, api, dry_run=False, num_retries=0, project=None, fnPattern="$(file %s/%s)"):
# Find the smallest path prefix that includes all the files that need to be uploaded.
# This starts at the root and iteratively removes common parent directory prefixes
# until all file paths no longer have a common parent.
+ n = True
+ pathprefix = "/"
while n:
pathstep = None
for c in files:
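# Sketch (not part of this patch; assumes plain "/"-separated relative paths):
# the prefix search described in the comment above, written as a standalone
# helper. It peels off one shared leading directory per iteration and stops as
# soon as some file has no parent directory left or two files disagree.
def common_path_prefix(paths):
    if not paths:
        return ""
    prefix = ""
    rest = list(paths)
    while True:
        step = None
        for p in rest:
            parts = p.split("/")
            if len(parts) < 2:
                return prefix              # this file has no parent dir left
            if step is None:
                step = parts[0] + "/"      # candidate next shared directory
            elif not p.startswith(step):
                return prefix              # files diverge at this level
        prefix += step
        rest = [p[len(step):] for p in rest]

# common_path_prefix(["data/run1/a.fastq", "data/run1/b.fastq"]) -> "data/run1/"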
logger.info("Upload local files: \"%s\"", '" "'.join([c.fn for c in files]))
- if args.dry_run:
+ if dry_run:
logger.info("$(input) is %s", pathprefix.rstrip('/'))
pdh = "$(input)"
else:
files = sorted(files, key=lambda x: x.fn)
- collection = arvados.CollectionWriter(api, num_retries=args.retries)
+ collection = arvados.CollectionWriter(api, num_retries=num_retries)
stream = None
for f in files:
sp = os.path.split(f.fn)
logger.info("Uploaded to %s", item["uuid"])
for c in files:
- c.fn = "$(file %s/%s)" % (pdh, c.fn)
+ c.fn = fnPattern % (pdh, c.fn)
os.chdir(orgdir)
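# Sketch (not part of this patch): what the new fnPattern parameter does. With
# the default "$(file %s/%s)", each uploaded file is rewritten as an arv-run
# $(file ...) reference built from the collection's portable data hash and the
# file's path; a caller can pass a different pattern for a different syntax.
# The hash below is a made-up placeholder.
pdh = "d41d8cd98f00b204e9800998ecf8427e+0"   # hypothetical portable data hash
fnPattern = "$(file %s/%s)"
print(fnPattern % (pdh, "input/a.txt"))
# -> $(file d41d8cd98f00b204e9800998ecf8427e+0/input/a.txt)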
command[i] = statfile(m.group(1), m.group(2))
break
- n = True
- pathprefix = "/"
files = [c for command in slots[1:] for c in command if isinstance(c, UploadFile)]
- if len(files) > 0:
- uploadfiles(files)
+ if files:
+ uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
for i in xrange(1, len(slots)):
slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
"repository": args.repository,
"script_parameters": {
},
- "runtime_constraints": {
- "docker_image": args.docker_image
- }
+ "runtime_constraints": {}
}
+ if args.docker_image:
+ component["runtime_constraints"]["docker_image"] = args.docker_image
+
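# Sketch (not part of this patch): the effect of the conditional above. When
# --docker-image is given, the constraint is set explicitly; otherwise
# runtime_constraints stays empty and the instance default image is used.
# The values below are made-up placeholders.
component = {"script_parameters": {}, "runtime_constraints": {}}
docker_image = None                      # e.g. args.docker_image
if docker_image:
    component["runtime_constraints"]["docker_image"] = docker_image
# component["runtime_constraints"] == {}  -> instance default image applies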
task_foreach = []
group_parser = argparse.ArgumentParser()
group_parser.add_argument('-b', '--batch-size', type=int)