+ bytesum += os.path.getsize(path)
+ return bytesum
+
# Template for machine-readable progress lines, stamped once at import time
# with the invoking program's name and PID; the two remaining {} placeholders
# are filled in per call by machine_progress().
_machine_format = "%s %s: {} written {} total\n" % (sys.argv[0], os.getpid())

def machine_progress(bytes_written, bytes_expected):
    """Return one machine-parseable progress line.

    bytes_expected may be None (total size unknown), which is reported
    as -1 so consumers always see an integer.
    """
    expected = bytes_expected if bytes_expected is not None else -1
    return _machine_format.format(bytes_written, expected)
+
def human_progress(bytes_written, bytes_expected):
    """Return a human-readable progress string.

    The leading carriage return makes each report overwrite the previous
    one on an interactive terminal.  When the expected total is unknown
    (None) or zero, only the raw byte count is shown.
    """
    if not bytes_expected:
        return "\r{} ".format(bytes_written)
    done_mb = bytes_written >> 20
    total_mb = bytes_expected >> 20
    fraction = float(bytes_written) / bytes_expected
    return "\r{}M / {}M {:.1%} ".format(done_mb, total_mb, fraction)
+
def progress_writer(progress_func, outfile=sys.stderr):
    """Wrap *progress_func* so its return value is written to *outfile*.

    Returns a callable with the same (bytes_written, bytes_expected)
    signature as the reporter functions above.  Note that the default
    outfile binds sys.stderr at definition time, as in the original.
    """
    def report(bytes_written, bytes_expected):
        text = progress_func(bytes_written, bytes_expected)
        outfile.write(text)
    return report
+
def main(arguments=None):
    """Command-line entry point: parse args, set up resume state, and
    build the collection writer.

    NOTE(review): this chunk ends mid-function; the remainder of main()
    (presumably the upload loop and output) is outside the visible source.
    """
    ResumeCache.setup_user_cache()
    args = parse_arguments(arguments)

    # Choose a progress reporter: human-readable (--progress),
    # machine-readable (--batch-progress), or none.
    if args.progress:
        reporter = progress_writer(human_progress)
    elif args.batch_progress:
        reporter = progress_writer(machine_progress)
    else:
        reporter = None

    try:
        resume_cache = ResumeCache(ResumeCache.make_path(args))
        if not args.resume:
            # Not resuming: discard any previous partial-upload state.
            resume_cache.restart()
    except ResumeCacheConflict:
        # Another process holds the cache lock for the same upload.
        # NOTE(review): Python 2 print statement (file is Py2); also goes
        # to stdout — error text arguably belongs on stderr. TODO confirm.
        print "arv-put: Another process is already uploading this data."
        sys.exit(1)

    # Writer resumes from cached state if any; expected_bytes_for() sizes
    # the input paths up front so reporters can show a total.
    writer = ArvPutCollectionWriter.from_cache(
        resume_cache, reporter, expected_bytes_for(args.paths))