-#!/usr/bin/env python
-
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import print_function
+from __future__ import absolute_import
+from builtins import range
+from past.builtins import basestring
+from builtins import object
import arvados
import arvados.commands.ws as ws
import argparse
import re
import os
import stat
-import put
+from . import put
import time
import subprocess
import logging
+import sys
+import errno
import arvados.commands._util as arv_cmd
+import arvados.collection
+
+from arvados._version import __version__
logger = logging.getLogger('arvados.arv-run')
logger.setLevel(logging.INFO)
arvrun_parser = argparse.ArgumentParser(parents=[arv_cmd.retry_opt])
-arvrun_parser.add_argument('--dry-run', action="store_true", help="Print out the pipeline that would be submitted and exit")
-arvrun_parser.add_argument('--local', action="store_true", help="Run locally using arv-run-pipeline-instance")
-arvrun_parser.add_argument('--docker-image', type=str, help="Docker image to use, otherwise use instance default.")
-arvrun_parser.add_argument('--ignore-rcode', action="store_true", help="Commands that return non-zero return codes should not be considered failed.")
-arvrun_parser.add_argument('--no-reuse', action="store_true", help="Do not reuse past jobs.")
-arvrun_parser.add_argument('--no-wait', action="store_true", help="Do not wait and display logs after submitting command, just exit.")
-arvrun_parser.add_argument('--project-uuid', type=str, help="Parent project of the pipeline")
-arvrun_parser.add_argument('--git-dir', type=str, default="", help="Git repository passed to arv-crunch-job when using --local")
-arvrun_parser.add_argument('--repository', type=str, default="arvados", help="repository field of component, default 'arvados'")
-arvrun_parser.add_argument('--script-version', type=str, default="master", help="script_version field of component, default 'master'")
+arvrun_parser.add_argument('--dry-run', action="store_true",
+                           help="Print out the pipeline that would be submitted and exit")
+arvrun_parser.add_argument('--local', action="store_true",
+                           help="Run locally using arv-run-pipeline-instance")
+arvrun_parser.add_argument('--docker-image', type=str,
+                           help="Docker image to use, otherwise use instance default.")
+arvrun_parser.add_argument('--ignore-rcode', action="store_true",
+                           help="Commands that return non-zero return codes should not be considered failed.")
+arvrun_parser.add_argument('--no-reuse', action="store_true",
+                           help="Do not reuse past jobs.")
+arvrun_parser.add_argument('--no-wait', action="store_true",
+                           help="Do not wait and display logs after submitting command, just exit.")
+arvrun_parser.add_argument('--project-uuid', type=str,
+                           help="Parent project of the pipeline")
+arvrun_parser.add_argument('--git-dir', type=str, default="",
+                           help="Git repository passed to arv-crunch-job when using --local")
+arvrun_parser.add_argument('--repository', type=str, default="arvados",
+                           help="repository field of component, default 'arvados'")
+arvrun_parser.add_argument('--script-version', type=str, default="master",
+                           help="script_version field of component, default 'master'")
+arvrun_parser.add_argument('--version', action='version',
+                           version="%s %s" % (sys.argv[0], __version__),
+                           help='Print version and exit.')
arvrun_parser.add_argument('args', nargs=argparse.REMAINDER)
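+# Illustrative invocation (a sketch; the project UUID and file names are
+# hypothetical, and "--" separates the command from the files it batches over):
+#
+#   $ arv-run --project-uuid zzzzz-j7d0g-xxxxxxxxxxxxxxx \
+#         grep -H -n GCTACCAAGTTT -- *.fastq
+#
+# Local files named on the command line are uploaded to Keep first, then the
+# command is submitted as a pipeline with $(file ...) references substituted.
+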
class ArvFile(object):
    def __init__(self, prefix, fn):
        self.prefix = prefix
        self.fn = fn

+    def __hash__(self):
+        return (self.prefix+self.fn).__hash__()
+
+    def __eq__(self, other):
+        return (self.prefix == other.prefix) and (self.fn == other.fn)
+
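+# Hashing and comparing by prefix+fn lets callers treat ArvFile entries as
+# set members or dict keys, so duplicate references to the same file collapse
+# before upload.
+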
class UploadFile(ArvFile):
    pass
# Determine if string corresponds to a file, and if that file is part of an
# arv-mounted collection or only local to the machine.  Returns one of
# ArvFile() (file already exists in a collection), UploadFile() (file needs to
# be uploaded to a collection), or simply returns prefix+fn (which yields the
# original parameter string).
-def statfile(prefix, fn):
+def statfile(prefix, fn, fnPattern="$(file %s/%s)", dirPattern="$(dir %s/%s/)", raiseOSError=False):
    absfn = os.path.abspath(fn)
-    if os.path.exists(absfn):
+    try:
        st = os.stat(absfn)
-        if stat.S_ISREG(st.st_mode):
-            sp = os.path.split(absfn)
-            (pdh, branch) = is_in_collection(sp[0], sp[1])
-            if pdh:
-                return ArvFile(prefix, "$(file %s/%s)" % (pdh, branch))
+        sp = os.path.split(absfn)
+        (pdh, branch) = is_in_collection(sp[0], sp[1])
+        if pdh:
+            if stat.S_ISREG(st.st_mode):
+                return ArvFile(prefix, fnPattern % (pdh, branch))
+            elif stat.S_ISDIR(st.st_mode):
+                return ArvFile(prefix, dirPattern % (pdh, branch))
            else:
-                # trim leading '/' for path prefix test later
-                return UploadFile(prefix, absfn[1:])
-        if stat.S_ISDIR(st.st_mode):
-            sp = os.path.split(absfn)
-            (pdh, branch) = is_in_collection(sp[0], sp[1])
-            if pdh:
-                return ArvFile(prefix, "$(dir %s/%s/)" % (pdh, branch))
+                raise Exception("%s is not a regular file or directory" % absfn)
+        else:
+            # trim leading '/' for path prefix test later
+            return UploadFile(prefix, absfn[1:])
+    except OSError as e:
+        if e.errno == errno.ENOENT and not raiseOSError:
+            pass
+        else:
+            raise

    return prefix+fn
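+# A sketch of statfile() outcomes (paths and hashes are hypothetical):
+#   statfile("-i ", "/keep/9f.../a.txt")  => ArvFile("-i ", "$(file 9f.../a.txt)")
+#   statfile("-i ", "/tmp/local.txt")     => UploadFile("-i ", "tmp/local.txt")
+#   statfile("-i ", "/tmp/missing.txt")   => "-i /tmp/missing.txt"
+#                                            (ENOENT passes unless raiseOSError=True)
+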
-def uploadfiles(files, api, dry_run=False, num_retries=0, project=None, fnPattern="$(file %s/%s)"):
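+# Copy one local file into the collection in 128 KiB chunks; closing with
+# flush=False defers committing the underlying block so several small files
+# can be packed together when the collection is saved.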
+def write_file(collection, pathprefix, fn):
+    with open(os.path.join(pathprefix, fn)) as src:
+        dst = collection.open(fn, "w")
+        r = src.read(1024*128)
+        while r:
+            dst.write(r)
+            r = src.read(1024*128)
+        dst.close(flush=False)
+
+def uploadfiles(files, api, dry_run=False, num_retries=0,
+                project=None,
+                fnPattern="$(file %s/%s)",
+                name=None,
+                collection=None):
    # Find the smallest path prefix that includes all the files that need to be uploaded.
    # This starts at the root and iteratively removes common parent directory prefixes
-    # until all file pathes no longer have a common parent.
-    n = True
-    pathprefix = "/"
-    while n:
-        pathstep = None
-        for c in files:
-            if pathstep is None:
-                sp = c.fn.split('/')
-                if len(sp) < 2:
-                    # no parent directories left
-                    n = False
-                    break
-                # path step takes next directory
-                pathstep = sp[0] + "/"
-            else:
-                # check if pathstep is common prefix for all files
-                if not c.fn.startswith(pathstep):
-                    n = False
-                    break
-        if n:
-            # pathstep is common parent directory for all files, so remove the prefix
-            # from each path
-            pathprefix += pathstep
+    # until all file paths no longer have a common parent.
+    if files:
+        n = True
+        pathprefix = "/"
+        while n:
+            pathstep = None
            for c in files:
-                c.fn = c.fn[len(pathstep):]
-
-    orgdir = os.getcwd()
-    os.chdir(pathprefix)
-
-    logger.info("Upload local files: \"%s\"", '" "'.join([c.fn for c in files]))
+                if pathstep is None:
+                    sp = c.fn.split('/')
+                    if len(sp) < 2:
+                        # no parent directories left
+                        n = False
+                        break
+                    # path step takes next directory
+                    pathstep = sp[0] + "/"
+                else:
+                    # check if pathstep is common prefix for all files
+                    if not c.fn.startswith(pathstep):
+                        n = False
+                        break
+            if n:
+                # pathstep is common parent directory for all files, so remove the prefix
+                # from each path
+                pathprefix += pathstep
+                for c in files:
+                    c.fn = c.fn[len(pathstep):]
+
+        logger.info("Upload local files: \"%s\"", '" "'.join([c.fn for c in files]))
    if dry_run:
        logger.info("$(input) is %s", pathprefix.rstrip('/'))
        pdh = "$(input)"
    else:
        files = sorted(files, key=lambda x: x.fn)
-        collection = arvados.CollectionWriter(api, num_retries=num_retries)
-        stream = None
+        if collection is None:
+            collection = arvados.collection.Collection(api_client=api, num_retries=num_retries)
+        prev = ""
        for f in files:
-            sp = os.path.split(f.fn)
-            if sp[0] != stream:
-                stream = sp[0]
-                collection.start_new_stream(stream)
-            collection.write_file(f.fn, sp[1])
-        item = api.collections().create(body={"owner_uuid": project, "manifest_text": collection.manifest_text()}).execute()
-        pdh = item["portable_data_hash"]
-        logger.info("Uploaded to %s", item["uuid"])
+            localpath = os.path.join(pathprefix, f.fn)
+            if prev and localpath.startswith(prev+"/"):
+                # If this path is inside an already uploaded subdirectory,
+                # don't redundantly re-upload it.
+                # e.g. we uploaded /tmp/foo and the next file is /tmp/foo/bar
+                # skip it because it starts with "/tmp/foo/"
+                continue
+            prev = localpath
+            if os.path.isfile(localpath):
+                write_file(collection, pathprefix, f.fn)
+            elif os.path.isdir(localpath):
+                for root, dirs, iterfiles in os.walk(localpath):
+                    root = root[len(pathprefix):]
+                    for src in iterfiles:
+                        write_file(collection, pathprefix, os.path.join(root, src))
+
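+        # A collection's portable data hash fingerprints its content, so an
+        # existing collection with the same hash (and a matching name and
+        # project, when given) can be reused instead of saving a new copy.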
+ filters=[["portable_data_hash", "=", collection.portable_data_hash()]]
+ if name:
+ filters.append(["name", "like", name+"%"])
+ if project:
+ filters.append(["owner_uuid", "=", project])
+
+ exists = api.collections().list(filters=filters, limit=1).execute(num_retries=num_retries)
+
+ if exists["items"]:
+ item = exists["items"][0]
+ pdh = item["portable_data_hash"]
+ logger.info("Using collection %s (%s)", pdh, item["uuid"])
+ elif len(collection) > 0:
+ collection.save_new(name=name, owner_uuid=project, ensure_unique_name=True)
+ pdh = collection.portable_data_hash()
+ logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
for c in files:
+ c.keepref = "%s/%s" % (pdh, c.fn)
c.fn = fnPattern % (pdh, c.fn)
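+    # Example of the final rewrite (hypothetical hash): with the default
+    # fnPattern, an entry with fn "a.txt" and pdh "9f0e...+250" becomes
+    # "$(file 9f0e...+250/a.txt)"; keepref records "9f0e...+250/a.txt".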
-    os.chdir(orgdir)
-
def main(arguments=None):
    args = arvrun_parser.parse_args(arguments)
    if files:
        uploadfiles(files, api, dry_run=args.dry_run, num_retries=args.retries, project=project)
-    for i in xrange(1, len(slots)):
+    for i in range(1, len(slots)):
        slots[i] = [("%s%s" % (c.prefix, c.fn)) if isinstance(c, ArvFile) else c for c in slots[i]]
    component = {
    group_parser.add_argument('-b', '--batch-size', type=int)
    group_parser.add_argument('args', nargs=argparse.REMAINDER)
-    for s in xrange(2, len(slots)):
-        for i in xrange(0, len(slots[s])):
+    for s in range(2, len(slots)):
+        for i in range(0, len(slots[s])):
            if slots[s][i] == '--':
                inp = "input%i" % (s-2)
                groupargs = group_parser.parse_args(slots[2][i+1:])