Merge branch 'master' into 8654-arv-jobs-cwl-runner
diff --git a/sdk/python/arvados/commands/run.py b/sdk/python/arvados/commands/run.py
index e118a9e41a7bf253e6174f28cd68ba24cac6759d..5d29c45117acd71e924838bb9b758af77d8e9b91 100644
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 import arvados
+import arvados.commands.ws as ws
 import argparse
 import json
 import re
@@ -8,16 +9,21 @@ import os
 import stat
 import put
 import time
-#import arvados.command.ws as ws
 import subprocess
 import logging
+import arvados.commands._util as arv_cmd
 
 logger = logging.getLogger('arvados.arv-run')
+logger.setLevel(logging.INFO)
 
-arvrun_parser = argparse.ArgumentParser()
+arvrun_parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
 arvrun_parser.add_argument('--dry-run', action="store_true", help="Print out the pipeline that would be submitted and exit")
-arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-crunch-job")
-arvrun_parser.add_argument('--docker-image', type=str, default="arvados/jobs", help="Docker image to use, default arvados/jobs")
+arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-run-pipeline-instance")
+arvrun_parser.add_argument('--docker-image', type=str, help="Docker image to use, otherwise use instance default.")
+arvrun_parser.add_argument('--ignore-rcode', action="store_true", help="Commands that return non-zero exit codes should not be considered failed.")
+arvrun_parser.add_argument('--no-reuse', action="store_true", help="Do not reuse past jobs.")
+arvrun_parser.add_argument('--no-wait', action="store_true", help="Do not wait and display logs after submitting command, just exit.")
+arvrun_parser.add_argument('--project-uuid', type=str, help="Parent project of the pipeline")
 arvrun_parser.add_argument('--git-dir', type=str, default="", help="Git repository passed to arv-crunch-job when using --local")
 arvrun_parser.add_argument('--repository', type=str, default="arvados", help="repository field of component, default 'arvados'")
 arvrun_parser.add_argument('--script-version', type=str, default="master", help="script_version field of component, default 'master'")
@@ -28,22 +34,60 @@ class ArvFile(object):
         self.prefix = prefix
         self.fn = fn
 
+    def __hash__(self):
+        return (self.prefix+self.fn).__hash__()
+
+    def __eq__(self, other):
+        return (self.prefix == other.prefix) and (self.fn == other.fn)
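+
+    # (__hash__ and __eq__ make ArvFile hashable, so duplicate references to
+    # the same file collapse to a single entry in sets and dicts.)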
+
 class UploadFile(ArvFile):
     pass
 
+# Determine if a file is in a collection, and return a tuple consisting of the
+# portable data hash and the path relative to the root of the collection.
+# Return None if the path isn't within an arv-mount collection or there was an error.
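+#
+# A hedged example (the mount point and paths are hypothetical): for a file
+# visible at /mnt/keep/abc123/subdir/data.txt, where /mnt/keep/abc123 is the
+# root of an arv-mount collection containing the ".arvados#collection"
+# pseudofile,
+#   is_in_collection("/mnt/keep/abc123/subdir", "data.txt")
+# would return (<portable data hash from the pseudofile>, "subdir/data.txt").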
 def is_in_collection(root, branch):
-    if root == "/":
+    try:
+        if root == "/":
+            return (None, None)
+        fn = os.path.join(root, ".arvados#collection")
+        if os.path.exists(fn):
+            with file(fn, 'r') as f:
+                c = json.load(f)
+            return (c["portable_data_hash"], branch)
+        else:
+            sp = os.path.split(root)
+            return is_in_collection(sp[0], os.path.join(sp[1], branch))
+    except (IOError, OSError):
         return (None, None)
-    fn = os.path.join(root, ".arvados#collection")
-    if os.path.exists(fn):
-        with file(fn, 'r') as f:
-            c = json.load(f)
-        return (c["portable_data_hash"], branch)
-    else:
-        sp = os.path.split(root)
-        return is_in_collection(sp[0], os.path.join(sp[1], branch))
 
-def statfile(prefix, fn):
+# Determine the project in which to place the output of this command by
+# searching upward for an arv-mount pseudofile indicating the project.  If the
+# cwd isn't within an arv-mount project or there is an error, return
+# current_user.
+def determine_project(root, current_user):
+    try:
+        if root == "/":
+            return current_user
+        fn = os.path.join(root, ".arvados#project")
+        if os.path.exists(fn):
+            with file(fn, 'r') as f:
+                c = json.load(f)
+            if 'writable_by' in c and current_user in c['writable_by']:
+                return c["uuid"]
+            else:
+                return current_user
+        else:
+            sp = os.path.split(root)
+            return determine_project(sp[0], current_user)
+    except (IOError, OSError):
+        return current_user
+
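+# A minimal usage sketch (mirrors the call in main() below):
+#   me = api.users().current().execute()["uuid"]
+#   project = determine_project(os.getcwd(), me)
+# If the cwd is inside an arv-mount project directory writable by the current
+# user, this yields the project uuid; otherwise it falls back to the user uuid.
+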
+# Determine if a string corresponds to a file, and if that file is part of an
+# arv-mounted collection or only local to the machine.  Returns one of
+# ArvFile() (file already exists in a collection), UploadFile() (file needs to
+# be uploaded to a collection), or simply returns prefix+fn (which yields the
+# original parameter string).
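+#
+# A sketch of the three outcomes (paths are hypothetical):
+#   statfile("-i", "/mnt/keep/abc123/x.txt") -> ArvFile("-i", "$(file <pdh>/x.txt)")
+#   statfile("-i", "/tmp/local.txt")         -> UploadFile("-i", "tmp/local.txt")
+#   statfile("-i", "no-such-file.txt")       -> "-ino-such-file.txt"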
+def statfile(prefix, fn, fnPattern="$(file %s/%s)", dirPattern="$(dir %s/%s/)"):
     absfn = os.path.abspath(fn)
     if os.path.exists(absfn):
         st = os.stat(absfn)
@@ -51,23 +95,119 @@ def statfile(prefix, fn):
             sp = os.path.split(absfn)
             (pdh, branch) = is_in_collection(sp[0], sp[1])
             if pdh:
-                return ArvFile(prefix, "$(file %s/%s)" % (pdh, branch))
+                return ArvFile(prefix, fnPattern % (pdh, branch))
             else:
                 # trim leading '/' for path prefix test later
                 return UploadFile(prefix, absfn[1:])
+        if stat.S_ISDIR(st.st_mode):
+            sp = os.path.split(absfn)
+            (pdh, branch) = is_in_collection(sp[0], sp[1])
+            if pdh:
+                return ArvFile(prefix, dirPattern % (pdh, branch))
+
     return prefix+fn
 
+def uploadfiles(files, api, dry_run=False, num_retries=0, project=None, fnPattern="$(file %s/%s)", name=None):
+    # Find the smallest path prefix that includes all the files that need to be uploaded.
+    # This starts at the root and iteratively removes common parent directory prefixes
+    # until all file paths no longer have a common parent.
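+    # For example (hypothetical paths): given fn values "a/b/c.txt" and
+    # "a/d/e.txt", the first pass strips the shared "a/" step, leaving
+    # pathprefix "/a/" and fn values "b/c.txt" and "d/e.txt"; the next pass
+    # stops because "b/" is not a prefix of "d/e.txt".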
+    n = True
+    pathprefix = "/"
+    while n:
+        pathstep = None
+        for c in files:
+            if pathstep is None:
+                sp = c.fn.split('/')
+                if len(sp) < 2:
+                    # no parent directories left
+                    n = False
+                    break
+                # path step takes next directory
+                pathstep = sp[0] + "/"
+            else:
+                # check if pathstep is common prefix for all files
+                if not c.fn.startswith(pathstep):
+                    n = False
+                    break
+        if n:
+            # pathstep is common parent directory for all files, so remove the prefix
+            # from each path
+            pathprefix += pathstep
+            for c in files:
+                c.fn = c.fn[len(pathstep):]
+
+    orgdir = os.getcwd()
+    os.chdir(pathprefix)
+
+    logger.info("Upload local files: \"%s\"", '" "'.join([c.fn for c in files]))
+
+    if dry_run:
+        logger.info("$(input) is %s", pathprefix.rstrip('/'))
+        pdh = "$(input)"
+    else:
+        files = sorted(files, key=lambda x: x.fn)
+        collection = arvados.CollectionWriter(api, num_retries=num_retries)
+        stream = None
+        for f in files:
+            sp = os.path.split(f.fn)
+            if sp[0] != stream:
+                stream = sp[0]
+                collection.start_new_stream(stream)
+            collection.write_file(f.fn, sp[1])
+
+        exists = api.collections().list(filters=[["owner_uuid", "=", project],
+                                                 ["portable_data_hash", "=", collection.portable_data_hash()],
+                                                 ["name", "=", name]]).execute(num_retries=num_retries)
+        if exists["items"]:
+            item = exists["items"][0]
+            logger.info("Using collection %s", item["uuid"])
+        else:
+            body = {"owner_uuid": project, "manifest_text": collection.manifest_text()}
+            if name is not None:
+                body["name"] = name
+            item = api.collections().create(body=body, ensure_unique_name=True).execute(num_retries=num_retries)
+            logger.info("Uploaded to %s", item["uuid"])
+
+        pdh = item["portable_data_hash"]
+
+    for c in files:
+        c.fn = fnPattern % (pdh, c.fn)
+
+    os.chdir(orgdir)
+
+
 def main(arguments=None):
     args = arvrun_parser.parse_args(arguments)
 
+    if len(args.args) == 0:
+        arvrun_parser.print_help()
+        return
+
+    starting_args = args.args
+
     reading_into = 2
 
+    # Parse the command arguments into 'slots'.
+    # All words following '>' are output arguments and are collected into slots[0].
+    # All words following '<' are input arguments and are collected into slots[1].
+    # slots[2..] store the parameters of each command in the pipeline.
+    #
+    # e.g. arv-run foo arg1 arg2 '|' bar arg3 arg4 '<' input1 input2 input3 '>' output.txt
+    # will be parsed into:
+    #   [['output.txt'],
+    #    ['input1', 'input2', 'input3'],
+    #    ['foo', 'arg1', 'arg2'],
+    #    ['bar', 'arg3', 'arg4']]
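+    #
+    # A redirect token may also be attached to its file name: '>output.txt'
+    # is treated the same as '>' followed by 'output.txt' (likewise for '<').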
     slots = [[], [], []]
     for c in args.args:
-        if c == '>':
+        if c.startswith('>'):
             reading_into = 0
-        elif c == '<':
+            if len(c) > 1:
+                slots[reading_into].append(c[1:])
+        elif c.startswith('<'):
             reading_into = 1
+            if len(c) > 1:
+                slots[reading_into].append(c[1:])
         elif c == '|':
             reading_into = len(slots)
             slots.append([])
@@ -78,67 +218,45 @@ def main(arguments=None):
         logger.error("Can only specify a single stdout file (run-command substitutions are permitted)")
         return
 
-    patterns = [re.compile("(--[^=]+=)(.*)"),
-                re.compile("(-[^=]+=)(.*)"),
-                re.compile("(-.)(.+)")]
-
-    for command in slots[1:]:
-        for i in xrange(0, len(command)):
-            a = command[i]
-            if a[0] == '-':
-                # parameter starts with '-' so it might be a command line
-                # parameter with a file name, do some pattern matching
-                matched = False
-                for p in patterns:
-                    m = p.match(a)
-                    if m:
-                        command[i] = statfile(m.group(1), m.group(2))
-                        break
+    # Resolve the API client and target project up front.  --dry-run makes no
+    # API calls, but 'api' and 'project' must still be defined because they
+    # are passed to uploadfiles() below.
+    api = None
+    project = args.project_uuid
+    if not args.dry_run:
+        api = arvados.api('v1')
+        if not project:
+            project = determine_project(os.getcwd(),
+                                        api.users().current().execute()["uuid"])
+
+    # Identify input files.  Look at each parameter and test to see if there is
+    # a file by that name.  This uses 'patterns' to look for file names
+    # embedded within command line arguments, such as --foo=file.txt or
+    # -lfile.txt.
+    patterns = [re.compile("([^=]+=)(.*)"),
+                re.compile("(-[A-Za-z])(.+)")]
+    for j, command in enumerate(slots[1:]):
+        for i, a in enumerate(command):
+            if j > 0 and i == 0:
+                # j == 0 is stdin, j > 0 is commands
+                # always skip program executable (i == 0) in commands
+                pass
+            elif a.startswith('\\'):
+                # if it starts with a \ then don't do any interpretation
+                command[i] = a[1:]
             else:
-                # parameter might be a file, so test it
+                # See if it looks like a file
                 command[i] = statfile('', a)
 
-    n = True
-    pathprefix = "/"
-    files = [c for command in slots[1:] for c in command if isinstance(c, UploadFile)]
-    if len(files) > 0:
-        # Find the smallest path prefix that includes all the files that need to be uploaded.
-        # This starts at the root and iteratively removes common parent directory prefixes
-        # until all file pathes no longer have a common parent.
-        while n:
-            pathstep = None
-            for c in files:
-                if pathstep is None:
-                    sp = c.fn.split('/')
-                    if len(sp) < 2:
-                        # no parent directories left
-                        n = False
-                        break
-                    # path step takes next directory
-                    pathstep = sp[0] + "/"
-                else:
-                    # check if pathstep is common prefix for all files
-                    if not c.fn.startswith(pathstep):
-                        n = False
-                        break
-            if n:
-                # pathstep is common parent directory for all files, so remove the prefix
-                # from each path
-                pathprefix += pathstep
-                for c in files:
-                    c.fn = c.fn[len(pathstep):]
-
-        os.chdir(pathprefix)
-
-        if args.dry_run:
-            print("cd %s" % pathprefix)
-            print("arv-put \"%s\"" % '" "'.join([c.fn for c in files]))
-            pdh = "$(input)"
-        else:
-            pdh = put.main(["--portable-data-hash"]+[c.fn for c in files])
+                # If a file named command[i] was found, it would now be an
+                # ArvFile or UploadFile.  If command[i] is a basestring, that
+                # means it doesn't correspond exactly to a file, so do some
+                # pattern matching.
+                if isinstance(command[i], basestring):
+                    for p in patterns:
+                        m = p.match(a)
+                        if m:
+                            command[i] = statfile(m.group(1), m.group(2))
+                            break
 
-        for c in files:
-            c.fn = "$(file %s/%s)" % (pdh, c.fn)
+    files = [c for command in slots[1:] for c in command if isinstance(c, UploadFile)]
+    if files:
+        uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
 
     for i in xrange(1, len(slots)):
         slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
@@ -149,14 +267,15 @@ def main(arguments=None):
         "repository": args.repository,
         "script_parameters": {
         },
-        "runtime_constraints": {
-            "docker_image": args.docker_image
-        }
+        "runtime_constraints": {}
     }
 
+    if args.docker_image:
+        component["runtime_constraints"]["docker_image"] = args.docker_image
+
     task_foreach = []
     group_parser = argparse.ArgumentParser()
-    group_parser.add_argument('--batch-size', type=int)
+    group_parser.add_argument('-b', '--batch-size', type=int)
     group_parser.add_argument('args', nargs=argparse.REMAINDER)
 
     for s in xrange(2, len(slots)):
@@ -165,9 +284,7 @@ def main(arguments=None):
                 inp = "input%i" % (s-2)
                 groupargs = group_parser.parse_args(slots[2][i+1:])
                 if groupargs.batch_size:
-                    component["script_parameters"][inp] = []
-                    for j in xrange(0, len(groupargs.args), groupargs.batch_size):
-                        component["script_parameters"][inp].append(groupargs.args[j:j+groupargs.batch_size])
+                    component["script_parameters"][inp] = {"value": {"batch":groupargs.args, "size":groupargs.batch_size}}
                     slots[s] = slots[s][0:i] + [{"foreach": inp, "command": "$(%s)" % inp}]
                 else:
                     component["script_parameters"][inp] = groupargs.args
@@ -182,30 +299,42 @@ def main(arguments=None):
     if slots[1]:
         task_foreach.append("stdin")
         component["script_parameters"]["stdin"] = slots[1]
-        component["script_parameters"]["task.stdin"] = "$(stdin)"\
+        component["script_parameters"]["task.stdin"] = "$(stdin)"
 
     if task_foreach:
         component["script_parameters"]["task.foreach"] = task_foreach
 
     component["script_parameters"]["command"] = slots[2:]
+    if args.ignore_rcode:
+        component["script_parameters"]["task.ignore_rcode"] = args.ignore_rcode
 
     pipeline = {
-        "name": "",
+        "name": "arv-run " + " | ".join([s[0] for s in slots[2:]]),
+        "description": "@" + " ".join(starting_args) + "@",
         "components": {
             "command": component
         },
-        "state":"RunningOnServer"
+        "state": "RunningOnClient" if args.local else "RunningOnServer"
     }
 
     if args.dry_run:
         print(json.dumps(pipeline, indent=4))
-    elif args.local:
-        subprocess.call(["arv-crunch-job", "--job", json.dumps(component), "--git-dir", args.git_dir])
     else:
-        api = arvados.api('v1')
-        pi = api.pipeline_instances().create(body=pipeline).execute()
-        print "Running pipeline %s" % pi["uuid"]
-        #ws.main(["--pipeline", pi["uuid"]])
+        pipeline["owner_uuid"] = project
+        pi = api.pipeline_instances().create(body=pipeline, ensure_unique_name=True).execute()
+        logger.info("Running pipeline %s", pi["uuid"])
+
+        if args.local:
+            subprocess.call(["arv-run-pipeline-instance", "--instance", pi["uuid"], "--run-jobs-here"] + (["--no-reuse"] if args.no_reuse else []))
+        elif not args.no_wait:
+            ws.main(["--pipeline", pi["uuid"]])
+
+        pi = api.pipeline_instances().get(uuid=pi["uuid"]).execute()
+        logger.info("Pipeline is %s", pi["state"])
+        if "output_uuid" in pi["components"]["command"]:
+            logger.info("Output is %s", pi["components"]["command"]["output_uuid"])
+        else:
+            logger.info("No output")
 
 if __name__ == '__main__':
     main()