Merge branch '11305-migrate-docker19-doc'
[arvados.git] sdk/python/arvados/commands/run.py
index f2bf0f353bbd146ece4775da7b6094d654ed0d04..46a56ca5589cf07a28054114cb0ccd75e580f44a 100644
@@ -11,21 +11,40 @@ import put
 import time
 import subprocess
 import logging
+import sys
+import errno
 import arvados.commands._util as arv_cmd
+import arvados.collection
+
+from arvados._version import __version__
 
 logger = logging.getLogger('arvados.arv-run')
+logger.setLevel(logging.INFO)
 
 arvrun_parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
-arvrun_parser.add_argument('--dry-run', action="store_true", help="Print out the pipeline that would be submitted and exit")
-arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-run-pipeline-instance")
-arvrun_parser.add_argument('--docker-image', type=str, default="arvados/jobs", help="Docker image to use, default arvados/jobs")
-arvrun_parser.add_argument('--ignore-rcode', action="store_true", help="Commands that return non-zero return codes should not be considered failed.")
-arvrun_parser.add_argument('--no-reuse', action="store_true", help="Do not reuse past jobs.")
-arvrun_parser.add_argument('--no-wait', action="store_true", help="Do not wait and display logs after submitting command, just exit.")
-arvrun_parser.add_argument('--project-uuid', type=str, help="Parent project of the pipeline")
-arvrun_parser.add_argument('--git-dir', type=str, default="", help="Git repository passed to arv-crunch-job when using --local")
-arvrun_parser.add_argument('--repository', type=str, default="arvados", help="repository field of component, default 'arvados'")
-arvrun_parser.add_argument('--script-version', type=str, default="master", help="script_version field of component, default 'master'")
+arvrun_parser.add_argument('--dry-run', action="store_true",
+                           help="Print out the pipeline that would be submitted and exit")
+arvrun_parser.add_argument('--local', action="store_true",
+                           help="Run locally using arv-run-pipeline-instance")
+arvrun_parser.add_argument('--docker-image', type=str,
+                           help="Docker image to use, otherwise use instance default.")
+arvrun_parser.add_argument('--ignore-rcode', action="store_true",
+                           help="Commands that return non-zero return codes should not be considered failed.")
+arvrun_parser.add_argument('--no-reuse', action="store_true",
+                           help="Do not reuse past jobs.")
+arvrun_parser.add_argument('--no-wait', action="store_true",
+                           help="Do not wait and display logs after submitting command, just exit.")
+arvrun_parser.add_argument('--project-uuid', type=str,
+                           help="Parent project of the pipeline")
+arvrun_parser.add_argument('--git-dir', type=str, default="",
+                           help="Git repository passed to arv-crunch-job when using --local")
+arvrun_parser.add_argument('--repository', type=str, default="arvados",
+                           help="repository field of component, default 'arvados'")
+arvrun_parser.add_argument('--script-version', type=str, default="master",
+                           help="script_version field of component, default 'master'")
+arvrun_parser.add_argument('--version', action='version',
+                           version="%s %s" % (sys.argv[0], __version__),
+                           help='Print version and exit.')
 arvrun_parser.add_argument('args', nargs=argparse.REMAINDER)
 
 class ArvFile(object):
@@ -33,6 +52,12 @@ class ArvFile(object):
         self.prefix = prefix
         self.fn = fn
 
+    def __hash__(self):
+        return (self.prefix+self.fn).__hash__()
+
+    def __eq__(self, other):
+        return (self.prefix == other.prefix) and (self.fn == other.fn)
+
 class UploadFile(ArvFile):
     pass
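
A minimal sketch (editor's illustration, not part of the commit) of what the new __hash__/__eq__ pair buys: two ArvFile placeholders describing the same prefix and path now compare equal, so they deduplicate in sets and can serve as dict keys.

    a = ArvFile("-i ", "tmp/data.txt")
    b = ArvFile("-i ", "tmp/data.txt")
    assert a == b              # __eq__ compares prefix and fn
    assert len({a, b}) == 1    # __hash__ lets sets/dicts collapse duplicates
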
 
@@ -51,7 +76,7 @@ def is_in_collection(root, branch):
         else:
             sp = os.path.split(root)
             return is_in_collection(sp[0], os.path.join(sp[1], branch))
-    except IOError, OSError:
+    except (IOError, OSError):
         return (None, None)
 
 # Determine the project to place the output of this command by searching upward
@@ -72,7 +97,7 @@ def determine_project(root, current_user):
         else:
             sp = os.path.split(root)
             return determine_project(sp[0], current_user)
-    except IOError, OSError:
+    except (IOError, OSError):
         return current_user
 
 # Determine if string corresponds to a file, and if that file is part of a
@@ -80,26 +105,118 @@ def determine_project(root, current_user):
 # ArvFile() (file already exists in a collection), UploadFile() (file needs to
 # be uploaded to a collection), or simply returns prefix+fn (which yields the
 # original parameter string).
-def statfile(prefix, fn):
+def statfile(prefix, fn, fnPattern="$(file %s/%s)", dirPattern="$(dir %s/%s/)"):
     absfn = os.path.abspath(fn)
-    if os.path.exists(absfn):
+    try:
         st = os.stat(absfn)
-        if stat.S_ISREG(st.st_mode):
-            sp = os.path.split(absfn)
-            (pdh, branch) = is_in_collection(sp[0], sp[1])
-            if pdh:
-                return ArvFile(prefix, "$(file %s/%s)" % (pdh, branch))
+        sp = os.path.split(absfn)
+        (pdh, branch) = is_in_collection(sp[0], sp[1])
+        if pdh:
+            if stat.S_ISREG(st.st_mode):
+                return ArvFile(prefix, fnPattern % (pdh, branch))
+            elif stat.S_ISDIR(st.st_mode):
+                return ArvFile(prefix, dirPattern % (pdh, branch))
             else:
-                # trim leading '/' for path prefix test later
-                return UploadFile(prefix, absfn[1:])
-        if stat.S_ISDIR(st.st_mode):
-            sp = os.path.split(absfn)
-            (pdh, branch) = is_in_collection(sp[0], sp[1])
-            if pdh:
-                return ArvFile(prefix, "$(dir %s/%s/)" % (pdh, branch))
+                raise Exception("%s is not a regular file or directory" % absfn)
+        else:
+            # trim leading '/' for path prefix test later
+            return UploadFile(prefix, absfn[1:])
+    except OSError as e:
+        if e.errno == errno.ENOENT:
+            pass
+        else:
+            raise
 
     return prefix+fn
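
A hedged usage sketch for the reworked statfile(): the new fnPattern/dirPattern parameters let callers substitute their own reference syntax. The "/keep/..." patterns, the "-i " prefix, and the path below are illustrative values, not defaults from this commit.

    result = statfile("-i ", "reads/sample1.fastq",
                      fnPattern="/keep/%s/%s",
                      dirPattern="/keep/%s/%s/")
    # result is an ArvFile when the path already lives in a Keep collection,
    # an UploadFile when it is a local file or directory still to be uploaded,
    # or the original string "-i reads/sample1.fastq" when the path does not exist.
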
 
+def write_file(collection, pathprefix, fn):
+    with open(os.path.join(pathprefix, fn)) as src:
+        dst = collection.open(fn, "w")
+        r = src.read(1024*128)
+        while r:
+            dst.write(r)
+            r = src.read(1024*128)
+        dst.close(flush=False)
+
+def uploadfiles(files, api, dry_run=False, num_retries=0,
+                project=None,
+                fnPattern="$(file %s/%s)",
+                name=None):
+    # Find the smallest path prefix that includes all the files that need to be uploaded.
+    # This starts at the root and iteratively removes common parent directory prefixes
+    # until all file paths no longer have a common parent.
+    n = True
+    pathprefix = "/"
+    while n:
+        pathstep = None
+        for c in files:
+            if pathstep is None:
+                sp = c.fn.split('/')
+                if len(sp) < 2:
+                    # no parent directories left
+                    n = False
+                    break
+                # path step takes next directory
+                pathstep = sp[0] + "/"
+            else:
+                # check if pathstep is common prefix for all files
+                if not c.fn.startswith(pathstep):
+                    n = False
+                    break
+        if n:
+            # pathstep is common parent directory for all files, so remove the prefix
+            # from each path
+            pathprefix += pathstep
+            for c in files:
+                c.fn = c.fn[len(pathstep):]
+
+    logger.info("Upload local files: \"%s\"", '" "'.join([c.fn for c in files]))
+
+    if dry_run:
+        logger.info("$(input) is %s", pathprefix.rstrip('/'))
+        pdh = "$(input)"
+    else:
+        files = sorted(files, key=lambda x: x.fn)
+        collection = arvados.collection.Collection(api_client=api, num_retries=num_retries)
+        prev = ""
+        for f in files:
+            localpath = os.path.join(pathprefix, f.fn)
+            if prev and localpath.startswith(prev+"/"):
+                # If this path is inside an already uploaded subdirectory,
+                # don't redundantly re-upload it.
+                # e.g. we uploaded /tmp/foo and the next file is /tmp/foo/bar
+                # skip it because it starts with "/tmp/foo/"
+                continue
+            prev = localpath
+            if os.path.isfile(localpath):
+                write_file(collection, pathprefix, f.fn)
+            elif os.path.isdir(localpath):
+                for root, dirs, iterfiles in os.walk(localpath):
+                    root = root[len(pathprefix):]
+                    for src in iterfiles:
+                        write_file(collection, pathprefix, os.path.join(root, src))
+
+        filters=[["portable_data_hash", "=", collection.portable_data_hash()]]
+        if name:
+            filters.append(["name", "like", name+"%"])
+        if project:
+            filters.append(["owner_uuid", "=", project])
+
+        exists = api.collections().list(filters=filters, limit=1).execute(num_retries=num_retries)
+
+        if exists["items"]:
+            item = exists["items"][0]
+            pdh = item["portable_data_hash"]
+            logger.info("Using collection %s (%s)", pdh, item["uuid"])
+        else:
+            collection.save_new(name=name, owner_uuid=project, ensure_unique_name=True)
+            pdh = collection.portable_data_hash()
+            logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
+
+    for c in files:
+        c.keepref = "%s/%s" % (pdh, c.fn)
+        c.fn = fnPattern % (pdh, c.fn)
+
+
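
A short sketch of the intended call pattern for uploadfiles(), mirroring the call added to main() further down; api, project_uuid, and command are assumed to exist in the caller, and the collection name is illustrative.

    # editor's sketch: collect UploadFile placeholders found by statfile(),
    # upload them into one collection, and let uploadfiles() rewrite each .fn
    # in place into a "$(file <pdh>/<path>)" reference.
    local_files = [c for c in command if isinstance(c, UploadFile)]
    if local_files:
        uploadfiles(local_files, api, dry_run=False, num_retries=3,
                    project=project_uuid, name="arv-run uploads")
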
 def main(arguments=None):
     args = arvrun_parser.parse_args(arguments)
 
@@ -178,62 +295,9 @@ def main(arguments=None):
                             command[i] = statfile(m.group(1), m.group(2))
                             break
 
-    n = True
-    pathprefix = "/"
     files = [c for command in slots[1:] for c in command if isinstance(c, UploadFile)]
-    if len(files) > 0:
-        # Find the smallest path prefix that includes all the files that need to be uploaded.
-        # This starts at the root and iteratively removes common parent directory prefixes
-        # until all file pathes no longer have a common parent.
-        while n:
-            pathstep = None
-            for c in files:
-                if pathstep is None:
-                    sp = c.fn.split('/')
-                    if len(sp) < 2:
-                        # no parent directories left
-                        n = False
-                        break
-                    # path step takes next directory
-                    pathstep = sp[0] + "/"
-                else:
-                    # check if pathstep is common prefix for all files
-                    if not c.fn.startswith(pathstep):
-                        n = False
-                        break
-            if n:
-                # pathstep is common parent directory for all files, so remove the prefix
-                # from each path
-                pathprefix += pathstep
-                for c in files:
-                    c.fn = c.fn[len(pathstep):]
-
-        orgdir = os.getcwd()
-        os.chdir(pathprefix)
-
-        print("Upload local files: \"%s\"" % '" "'.join([c.fn for c in files]))
-
-        if args.dry_run:
-            print("$(input) is %s" % pathprefix.rstrip('/'))
-            pdh = "$(input)"
-        else:
-            files = sorted(files, key=lambda x: x.fn)
-            collection = arvados.CollectionWriter(api, num_retries=args.retries)
-            stream = None
-            for f in files:
-                sp = os.path.split(f.fn)
-                if sp[0] != stream:
-                    stream = sp[0]
-                    collection.start_new_stream(stream)
-                collection.write_file(f.fn, sp[1])
-            item = api.collections().create(body={"owner_uuid": project, "manifest_text": collection.manifest_text()}).execute()
-            pdh = item["portable_data_hash"]
-            print "Uploaded to %s" % item["uuid"]
-
-        for c in files:
-            c.fn = "$(file %s/%s)" % (pdh, c.fn)
-
-        os.chdir(orgdir)
+    if files:
+        uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
 
     for i in xrange(1, len(slots)):
         slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
@@ -244,11 +308,12 @@ def main(arguments=None):
         "repository": args.repository,
         "script_parameters": {
         },
-        "runtime_constraints": {
-            "docker_image": args.docker_image
-        }
+        "runtime_constraints": {}
     }
 
+    if args.docker_image:
+        component["runtime_constraints"]["docker_image"] = args.docker_image
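
For illustration only (argument values are examples, not defaults): --docker-image is now optional, so the runtime constraint is set only when the flag is given; otherwise runtime_constraints stays empty and image selection is left to the instance default.

    args = arvrun_parser.parse_args(["--docker-image", "arvados/jobs",
                                     "echo", "hello"])
    runtime_constraints = {}
    if args.docker_image:
        runtime_constraints["docker_image"] = args.docker_image
    # runtime_constraints == {"docker_image": "arvados/jobs"};
    # omitting the flag leaves it as {}
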
+
     task_foreach = []
     group_parser = argparse.ArgumentParser()
     group_parser.add_argument('-b', '--batch-size', type=int)
@@ -298,7 +363,7 @@ def main(arguments=None):
     else:
         pipeline["owner_uuid"] = project
         pi = api.pipeline_instances().create(body=pipeline, ensure_unique_name=True).execute()
-        print "Running pipeline %s" % pi["uuid"]
+        logger.info("Running pipeline %s", pi["uuid"])
 
         if args.local:
             subprocess.call(["arv-run-pipeline-instance", "--instance", pi["uuid"], "--run-jobs-here"] + (["--no-reuse"] if args.no_reuse else []))
@@ -306,11 +371,11 @@ def main(arguments=None):
             ws.main(["--pipeline", pi["uuid"]])
 
         pi = api.pipeline_instances().get(uuid=pi["uuid"]).execute()
-        print "Pipeline is %s" % pi["state"]
+        logger.info("Pipeline is %s", pi["state"])
         if "output_uuid" in pi["components"]["command"]:
-            print "Output is %s" % pi["components"]["command"]["output_uuid"]
+            logger.info("Output is %s", pi["components"]["command"]["output_uuid"])
         else:
-            print "No output"
+            logger.info("No output")
 
 if __name__ == '__main__':
     main()