# Imports needed by the code below.
import argparse
import json
import logging
import os
import re
import stat
import subprocess

import arvados
import arvados.commands.put as put
#import arvados.commands.ws as ws

logger = logging.getLogger('arvados.arv-run')

arvrun_parser = argparse.ArgumentParser()
arvrun_parser.add_argument('--dry-run', action="store_true", help="Print out the pipeline that would be submitted and exit")
arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-crunch-job")
arvrun_parser.add_argument('--docker-image', type=str, default="arvados/jobs", help="Docker image to use, default arvados/jobs")
arvrun_parser.add_argument('--git-dir', type=str, default="", help="Git repository passed to arv-crunch-job when using --local")
arvrun_parser.add_argument('--repository', type=str, default="arvados", help="repository field of component, default 'arvados'")
arvrun_parser.add_argument('--script-version', type=str, default="master", help="script_version field of component, default 'master'")
arvrun_parser.add_argument('args', nargs=argparse.REMAINDER)
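
# Command line arguments that refer to files are represented by ArvFile
# (already available in Keep, expressed as a run-command $(file ...)
# substitution) or UploadFile (a local file that must be uploaded first).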
class ArvFile(object):
    def __init__(self, prefix, fn):
        self.prefix = prefix
        self.fn = fn

class UploadFile(ArvFile):
    pass
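
# Climb up from 'root' looking for a ".arvados#collection" stub file (exposed
# by arv-mount inside mounted Keep collections).  Returns the collection's
# portable data hash and the path of 'branch' relative to the collection
# root, or (None, None) if the path is not inside a mounted collection.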
def is_in_collection(root, branch):
    if root == "/":
        # reached the filesystem root without finding a collection
        return (None, None)
    fn = os.path.join(root, ".arvados#collection")
    if os.path.exists(fn):
        with open(fn, 'r') as f:
            c = json.load(f)
        return (c["portable_data_hash"], branch)
    else:
        sp = os.path.split(root)
        return is_in_collection(sp[0], os.path.join(sp[1], branch))
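
# Classify a candidate file argument: return an ArvFile if the path is inside
# a mounted Keep collection, an UploadFile if it is a regular local file that
# still needs uploading, or the unchanged string if it is not a local file.
# 'prefix' is any option text (e.g. "--input=") that preceded the path.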
def statfile(prefix, fn):
    absfn = os.path.abspath(fn)
    if os.path.exists(absfn):
        st = os.stat(absfn)
        if stat.S_ISREG(st.st_mode):
            sp = os.path.split(absfn)
            (pdh, branch) = is_in_collection(sp[0], sp[1])
            if pdh:
                # the file is already in Keep, so reference it directly
                return ArvFile(prefix, "$(file %s/%s)" % (pdh, branch))
            else:
                # trim leading '/' for path prefix test later
                return UploadFile(prefix, absfn[1:])
    # not a local regular file; leave the argument untouched
    return prefix + fn

def main(arguments=None):
    args = arvrun_parser.parse_args(arguments)
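
    # Split the arguments into "slots": words after '>' go to slot 0 (stdout
    # redirection), words after '<' go to slot 1 (stdin inputs), and everything
    # else goes to slots 2 and up, with '|' starting a new command slot.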
    reading_into = 2
    slots = [[], [], []]
    for c in args.args:
        if c.startswith('>'):
            reading_into = 0
            if len(c) > 1:
                slots[reading_into].append(c[1:])
        elif c.startswith('<'):
            reading_into = 1
            if len(c) > 1:
                slots[reading_into].append(c[1:])
        elif c == '|':
            reading_into = len(slots)
            slots.append([])
        else:
            slots[reading_into].append(c)

    if slots[0] and len(slots[0]) > 1:
        logger.error("Can only specify a single stdout file (run-command substitutions are permitted)")
        return
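
    # Regexes that split option-style arguments such as "--input=FILE",
    # "-i=FILE", or "-iFILE" into an option prefix and a value, so the value
    # can be checked below to see whether it names a local file.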
    patterns = [re.compile("(--[^=]+=)(.*)"),
                re.compile("(-[^=]+=)(.*)"),
                re.compile("(-.)(.+)")]

    for command in slots[1:]:
        for i in xrange(0, len(command)):
            a = command[i]
            if a and a[0] == '-':
                # parameter starts with '-' so it might be a command line
                # parameter with a file name, do some pattern matching
                for p in patterns:
                    m = p.match(a)
                    if m:
                        command[i] = statfile(m.group(1), m.group(2))
                        break
            else:
                # parameter might be a file, so test it
                command[i] = statfile('', a)
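
    # Any UploadFile arguments refer to local files that are not already in
    # Keep.  Upload them as a single collection and rewrite the arguments as
    # run-command $(file ...) substitutions.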
    files = [c for command in slots[1:] for c in command if isinstance(c, UploadFile)]
    if files:
        # Find the smallest path prefix that includes all the files that need to be uploaded.
        # This starts at the root and iteratively removes common parent directory prefixes
        # until all file paths no longer have a common parent.
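        # For example, /home/user/data/a.txt and /home/user/other/b.txt share
        # the prefix /home/user/, so arv-put runs from /home/user/ with the
        # relative paths data/a.txt and other/b.txt.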
        n = True
        pathprefix = "/"
        while n:
            pathstep = None
            for c in files:
                if pathstep is None:
                    sp = c.fn.split('/')
                    if len(sp) < 2:
                        # no parent directories left
                        n = False
                        break
                    # path step takes next directory
                    pathstep = sp[0] + "/"
                else:
                    # check if pathstep is common prefix for all files
                    if not c.fn.startswith(pathstep):
                        n = False
                        break
            if n:
                # pathstep is common parent directory for all files, so remove
                # the prefix from each file path
                pathprefix += pathstep
                for c in files:
                    c.fn = c.fn[len(pathstep):]
134 print("cd %s" % pathprefix)
135 print("arv-put \"%s\"" % '" "'.join([c.fn for c in files]))
138 pdh = put.main(["--portable-data-hash"]+[c.fn for c in files])
141 c.fn = "$(file %s/%s)" % (pdh, c.fn)

    for i in xrange(1, len(slots)):
        slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
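
    # Build the run-command job component that will be submitted as part of a
    # pipeline instance (or run directly with arv-crunch-job under --local).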
147 "script": "run-command",
148 "script_version": args.script_version,
149 "repository": args.repository,
150 "script_parameters": {
152 "runtime_constraints": {
153 "docker_image": args.docker_image
    task_foreach = []
    group_parser = argparse.ArgumentParser()
    group_parser.add_argument('--batch-size', type=int)
    group_parser.add_argument('args', nargs=argparse.REMAINDER)

    for s in xrange(2, len(slots)):
        for i in xrange(0, len(slots[s])):
            if slots[s][i] == '--':
                inp = "input%i" % (s-2)
                groupargs = group_parser.parse_args(slots[s][i+1:])
                if groupargs.batch_size:
                    # split the grouped arguments into batches of --batch-size items
                    component["script_parameters"][inp] = []
                    for j in xrange(0, len(groupargs.args), groupargs.batch_size):
                        component["script_parameters"][inp].append(groupargs.args[j:j+groupargs.batch_size])
                    slots[s] = slots[s][0:i] + [{"foreach": inp, "command": "$(%s)" % inp}]
                else:
                    component["script_parameters"][inp] = groupargs.args
                    slots[s] = slots[s][0:i] + ["$(%s)" % inp]
                task_foreach.append(inp)
                break
            if slots[s][i] == '\--':
                # '\--' is an escaped literal '--': unescape it and keep scanning
                slots[s][i] = '--'

    if slots[0]:
        component["script_parameters"]["task.stdout"] = slots[0][0]
    if slots[1]:
        task_foreach.append("stdin")
        component["script_parameters"]["stdin"] = slots[1]
        component["script_parameters"]["task.stdin"] = "$(stdin)"

    if task_foreach:
        component["script_parameters"]["task.foreach"] = task_foreach

    component["script_parameters"]["command"] = slots[2:]
197 "state":"RunningOnServer"
    if args.dry_run:
        print(json.dumps(pipeline, indent=4))
    elif args.local:
        subprocess.call(["arv-crunch-job", "--job", json.dumps(component), "--git-dir", args.git_dir])
    else:
        api = arvados.api('v1')
        pi = api.pipeline_instances().create(body=pipeline).execute()
        print("Running pipeline %s" % pi["uuid"])
        #ws.main(["--pipeline", pi["uuid"]])

if __name__ == '__main__':
    main()