X-Git-Url: https://git.arvados.org/arvados.git/blobdiff_plain/d843787b4ece9952597d7814cbf10fb383c72625..864c3b0afd16c77e046f0072d8517d34c5a44792:/sdk/python/arvados/commands/arv_copy.py

diff --git a/sdk/python/arvados/commands/arv_copy.py b/sdk/python/arvados/commands/arv_copy.py
index 75f8ca97bd..93fd6b598a 100755
--- a/sdk/python/arvados/commands/arv_copy.py
+++ b/sdk/python/arvados/commands/arv_copy.py
@@ -1,13 +1,15 @@
-#! /usr/bin/env python
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
 
-# arv-copy [--recursive] [--no-recursive] object-uuid src dst
+# arv-copy [--recursive] [--no-recursive] object-uuid
 #
 # Copies an object from Arvados instance src to instance dst.
 #
 # By default, arv-copy recursively copies any dependent objects
 # necessary to make the object functional in the new instance
-# (e.g. for a pipeline instance, arv-copy copies the pipeline
-# template, input collection, docker images, git repositories). If
+# (e.g. for a workflow, arv-copy copies the workflow,
+# input collections, and docker images). If
 # --no-recursive is given, arv-copy copies only the single record
 # identified by object-uuid.
 #
@@ -16,7 +18,14 @@
 # instances src and dst.  If either of these files is not found,
 # arv-copy will issue an error.
 
+from __future__ import division
+from future import standard_library
+from future.utils import listvalues
+standard_library.install_aliases()
+from past.builtins import basestring
+from builtins import object
 import argparse
+import contextlib
 import getpass
 import os
 import re
@@ -24,6 +33,8 @@ import shutil
 import sys
 import logging
 import tempfile
+import urllib.parse
+import io
 
 import arvados
 import arvados.config
@@ -31,6 +42,12 @@ import arvados.keep
 import arvados.util
 import arvados.commands._util as arv_cmd
 import arvados.commands.keepdocker
+import ruamel.yaml as yaml
+
+from arvados.api import OrderedJsonModel
+from arvados._version import __version__
+
+COMMIT_HASH_RE = re.compile(r'^[0-9a-f]{1,40}$')
 
 logger = logging.getLogger('arvados.arv-copy')
 
@@ -46,12 +63,18 @@ local_repo_dir = {}
 # destination collection UUIDs.
 collections_copied = {}
 
+# Set of (repository, script_version) two-tuples of commits copied in git.
+scripts_copied = set()
+
 # The owner_uuid of the object being copied
 src_owner_uuid = None
 
 def main():
     copy_opts = argparse.ArgumentParser(add_help=False)
 
+    copy_opts.add_argument(
+        '--version', action='version', version="%s %s" % (sys.argv[0], __version__),
+        help='Print version and exit.')
     copy_opts.add_argument(
         '-v', '--verbose', dest='verbose', action='store_true',
         help='Verbose output.')
@@ -65,23 +88,21 @@ def main():
         '-f', '--force', dest='force', action='store_true',
         help='Perform copy even if the object appears to exist at the remote destination.')
     copy_opts.add_argument(
-        '--src', dest='source_arvados', required=True,
-        help='The name of the source Arvados instance (required). May be either a pathname to a config file, or the basename of a file in $HOME/.config/arvados/instance_name.conf.')
+        '--src', dest='source_arvados',
+        help='The name of the source Arvados instance. May be either a pathname to a config file, or (for example) "foo" as shorthand for $HOME/.config/arvados/foo.conf. If not given, the cluster prefix of the object UUID is used.')
     copy_opts.add_argument(
-        '--dst', dest='destination_arvados', required=True,
-        help='The name of the destination Arvados instance (required). May be either a pathname to a config file, or the basename of a file in $HOME/.config/arvados/instance_name.conf.')
+        '--dst', dest='destination_arvados',
+        help='The name of the destination Arvados instance. May be either a pathname to a config file, or (for example) "foo" as shorthand for $HOME/.config/arvados/foo.conf. If not given, the ARVADOS_API_HOST and ARVADOS_API_TOKEN environment variables are used.')
     copy_opts.add_argument(
         '--recursive', dest='recursive', action='store_true',
-        help='Recursively copy any dependencies for this object. (default)')
+        help='Recursively copy any dependencies for this object, and subprojects. (default)')
     copy_opts.add_argument(
         '--no-recursive', dest='recursive', action='store_false',
-        help='Do not copy any dependencies. NOTE: if this option is given, the copied object will need to be updated manually in order to be functional.')
-    copy_opts.add_argument(
-        '--dst-git-repo', dest='dst_git_repo',
-        help='The name of the destination git repository. Required when copying a pipeline recursively.')
+        help='Do not copy any dependencies or subprojects.')
     copy_opts.add_argument(
         '--project-uuid', dest='project_uuid',
-        help='The UUID of the project at the destination to which the pipeline should be copied.')
+        help='The UUID of the project at the destination to which the collection or workflow should be copied.')
+
     copy_opts.add_argument(
         'object_uuid',
         help='The UUID of the object to be copied.')
@@ -89,7 +110,7 @@ def main():
     copy_opts.set_defaults(recursive=True)
 
     parser = argparse.ArgumentParser(
-        description='Copy a pipeline instance, template or collection from one Arvados instance to another.',
+        description='Copy a workflow or collection from one Arvados instance to another.',
         parents=[copy_opts, arv_cmd.retry_opt])
     args = parser.parse_args()
 
@@ -98,6 +119,9 @@ def main():
     else:
         logger.setLevel(logging.INFO)
 
+    if not args.source_arvados:
+        args.source_arvados = args.object_uuid[:5]
+
     # Create API clients for the source and destination instances
     src_arv = api_for_instance(args.source_arvados)
     dst_arv = api_for_instance(args.destination_arvados)
@@ -112,20 +136,17 @@ def main():
         result = copy_collection(args.object_uuid,
                                  src_arv, dst_arv,
                                  args)
-    elif t == 'PipelineInstance':
-        set_src_owner_uuid(src_arv.pipeline_instances(), args.object_uuid, args)
-        result = copy_pipeline_instance(args.object_uuid,
-                                        src_arv, dst_arv,
-                                        args)
-    elif t == 'PipelineTemplate':
-        set_src_owner_uuid(src_arv.pipeline_templates(), args.object_uuid, args)
-        result = copy_pipeline_template(args.object_uuid,
-                                        src_arv, dst_arv, args)
+    elif t == 'Workflow':
+        set_src_owner_uuid(src_arv.workflows(), args.object_uuid, args)
+        result = copy_workflow(args.object_uuid, src_arv, dst_arv, args)
+    elif t == 'Group':
+        set_src_owner_uuid(src_arv.groups(), args.object_uuid, args)
+        result = copy_project(args.object_uuid, src_arv, dst_arv, args.project_uuid, args)
     else:
         abort("cannot copy object {} of type {}".format(args.object_uuid, t))
 
     # Clean up any outstanding temp git repositories.
- for d in local_repo_dir.values(): + for d in listvalues(local_repo_dir): shutil.rmtree(d, ignore_errors=True) # If no exception was thrown and the response does not have an @@ -139,6 +160,7 @@ def main(): exit(0) def set_src_owner_uuid(resource, uuid, args): + global src_owner_uuid c = resource.get(uuid=uuid).execute(num_retries=args.retries) src_owner_uuid = c.get("owner_uuid") @@ -155,6 +177,10 @@ def set_src_owner_uuid(resource, uuid, args): # $HOME/.config/arvados/instance_name.conf # def api_for_instance(instance_name): + if not instance_name: + # Use environment + return arvados.api('v1', model=OrderedJsonModel()) + if '/' in instance_name: config_file = instance_name else: @@ -176,104 +202,138 @@ def api_for_instance(instance_name): client = arvados.api('v1', host=cfg['ARVADOS_API_HOST'], token=cfg['ARVADOS_API_TOKEN'], - insecure=api_is_insecure) + insecure=api_is_insecure, + model=OrderedJsonModel()) else: abort('need ARVADOS_API_HOST and ARVADOS_API_TOKEN for {}'.format(instance_name)) return client -# copy_pipeline_instance(pi_uuid, src, dst, args) -# -# Copies a pipeline instance identified by pi_uuid from src to dst. -# -# If the args.recursive option is set: -# 1. Copies all input collections -# * For each component in the pipeline, include all collections -# listed as job dependencies for that component) -# 2. Copy docker images -# 3. Copy git repositories -# 4. Copy the pipeline template -# -# The only changes made to the copied pipeline instance are: -# 1. The original pipeline instance UUID is preserved in -# the 'properties' hash as 'copied_from_pipeline_instance_uuid'. -# 2. The pipeline_template_uuid is changed to the new template uuid. -# 3. The owner_uuid of the instance is changed to the user who -# copied it. -# -def copy_pipeline_instance(pi_uuid, src, dst, args): - # Fetch the pipeline instance record. - pi = src.pipeline_instances().get(uuid=pi_uuid).execute(num_retries=args.retries) +# Check if git is available +def check_git_availability(): + try: + arvados.util.run_command(['git', '--help']) + except Exception: + abort('git command is not available. Please ensure git is installed.') - if args.recursive: - if not args.dst_git_repo: - abort('--dst-git-repo is required when copying a pipeline recursively.') - # Copy the pipeline template and save the copied template. - if pi.get('pipeline_template_uuid', None): - pt = copy_pipeline_template(pi['pipeline_template_uuid'], - src, dst, args) - - # Copy input collections, docker images and git repos. - pi = copy_collections(pi, src, dst, args) - copy_git_repos(pi, src, dst, args.dst_git_repo, args) - copy_docker_images(pi, src, dst, args) - - # Update the fields of the pipeline instance with the copied - # pipeline template. - if pi.get('pipeline_template_uuid', None): - pi['pipeline_template_uuid'] = pt['uuid'] +def filter_iter(arg): + """Iterate a filter string-or-list. + + Pass in a filter field that can either be a string or list. + This will iterate elements as if the field had been written as a list. + """ + if isinstance(arg, basestring): + return iter((arg,)) + else: + return iter(arg) + +def migrate_repository_filter(repo_filter, src_repository, dst_repository): + """Update a single repository filter in-place for the destination. + + If the filter checks that the repository is src_repository, it is + updated to check that the repository is dst_repository. If it does + anything else, this function raises ValueError. 
+ """ + if src_repository is None: + raise ValueError("component does not specify a source repository") + elif dst_repository is None: + raise ValueError("no destination repository specified to update repository filter") + elif repo_filter[1:] == ['=', src_repository]: + repo_filter[2] = dst_repository + elif repo_filter[1:] == ['in', [src_repository]]: + repo_filter[2] = [dst_repository] else: - # not recursive - logger.info("Copying only pipeline instance %s.", pi_uuid) - logger.info("You are responsible for making sure all pipeline dependencies have been updated.") + raise ValueError("repository filter is not a simple source match") - # Update the pipeline instance properties, and create the new - # instance at dst. - pi['properties']['copied_from_pipeline_instance_uuid'] = pi_uuid - pi['description'] = "Pipeline copied from {}\n\n{}".format( - pi_uuid, - pi['description'] if pi.get('description', None) else '') +def migrate_script_version_filter(version_filter): + """Update a single script_version filter in-place for the destination. - pi['owner_uuid'] = args.project_uuid + Currently this function checks that all the filter operands are Git + commit hashes. If they're not, it raises ValueError to indicate that + the filter is not portable. It could be extended to make other + transformations in the future. + """ + if not all(COMMIT_HASH_RE.match(v) for v in filter_iter(version_filter[2])): + raise ValueError("script_version filter is not limited to commit hashes") - del pi['uuid'] +def attr_filtered(filter_, *attr_names): + """Return True if filter_ applies to any of attr_names, else False.""" + return any((name == 'any') or (name in attr_names) + for name in filter_iter(filter_[0])) - new_pi = dst.pipeline_instances().create(body=pi, ensure_unique_name=True).execute(num_retries=args.retries) - return new_pi +@contextlib.contextmanager +def exception_handler(handler, *exc_types): + """If any exc_types are raised in the block, call handler on the exception.""" + try: + yield + except exc_types as error: + handler(error) -# copy_pipeline_template(pt_uuid, src, dst, args) + +# copy_workflow(wf_uuid, src, dst, args) # -# Copies a pipeline template identified by pt_uuid from src to dst. +# Copies a workflow identified by wf_uuid from src to dst. # -# If args.recursive is True, also copy any collections, docker -# images and git repositories that this template references. +# If args.recursive is True, also copy any collections +# referenced in the workflow definition yaml. # -# The owner_uuid of the new template is changed to that of the user -# who copied the template. +# The owner_uuid of the new workflow is set to any given +# project_uuid or the user who copied the template. # -# Returns the copied pipeline template object. +# Returns the copied workflow object. # -def copy_pipeline_template(pt_uuid, src, dst, args): - # fetch the pipeline template from the source instance - pt = src.pipeline_templates().get(uuid=pt_uuid).execute(num_retries=args.retries) +def copy_workflow(wf_uuid, src, dst, args): + # fetch the workflow from the source instance + wf = src.workflows().get(uuid=wf_uuid).execute(num_retries=args.retries) + # copy collections and docker images if args.recursive: - if not args.dst_git_repo: - abort('--dst-git-repo is required when copying a pipeline recursively.') - # Copy input collections, docker images and git repos. 
- pt = copy_collections(pt, src, dst, args) - copy_git_repos(pt, src, dst, args.dst_git_repo, args) - copy_docker_images(pt, src, dst, args) + wf_def = yaml.safe_load(wf["definition"]) + if wf_def is not None: + locations = [] + docker_images = {} + graph = wf_def.get('$graph', None) + if graph is not None: + workflow_collections(graph, locations, docker_images) + else: + workflow_collections(wf_def, locations, docker_images) + + if locations: + copy_collections(locations, src, dst, args) + + for image in docker_images: + copy_docker_image(image, docker_images[image], src, dst, args) + + # copy the workflow itself + del wf['uuid'] + wf['owner_uuid'] = args.project_uuid + + existing = dst.workflows().list(filters=[["owner_uuid", "=", args.project_uuid], + ["name", "=", wf["name"]]]).execute() + if len(existing["items"]) == 0: + return dst.workflows().create(body=wf).execute(num_retries=args.retries) + else: + return dst.workflows().update(uuid=existing["items"][0]["uuid"], body=wf).execute(num_retries=args.retries) - pt['description'] = "Pipeline template copied from {}\n\n{}".format( - pt_uuid, - pt['description'] if pt.get('description', None) else '') - pt['name'] = "{} copied from {}".format(pt.get('name', ''), pt_uuid) - del pt['uuid'] - pt['owner_uuid'] = args.project_uuid +def workflow_collections(obj, locations, docker_images): + if isinstance(obj, dict): + loc = obj.get('location', None) + if loc is not None: + if loc.startswith("keep:"): + locations.append(loc[5:]) - return dst.pipeline_templates().create(body=pt, ensure_unique_name=True).execute(num_retries=args.retries) + docker_image = obj.get('dockerImageId', None) or obj.get('dockerPull', None) or obj.get('acrContainerImage', None) + if docker_image is not None: + ds = docker_image.split(":", 1) + tag = ds[1] if len(ds)==2 else 'latest' + docker_images[ds[0]] = tag + + for x in obj: + workflow_collections(obj[x], locations, docker_images) + elif isinstance(obj, list): + for x in obj: + workflow_collections(x, locations, docker_images) # copy_collections(obj, src, dst, args) # @@ -311,57 +371,13 @@ def copy_collections(obj, src, dst, args): obj = arvados.util.portable_data_hash_pattern.sub(copy_collection_fn, obj) obj = arvados.util.collection_uuid_pattern.sub(copy_collection_fn, obj) return obj - elif type(obj) == dict: - return {v: copy_collections(obj[v], src, dst, args) for v in obj} - elif type(obj) == list: - return [copy_collections(v, src, dst, args) for v in obj] + elif isinstance(obj, dict): + return type(obj)((v, copy_collections(obj[v], src, dst, args)) + for v in obj) + elif isinstance(obj, list): + return type(obj)(copy_collections(v, src, dst, args) for v in obj) return obj -# copy_git_repos(p, src, dst, dst_repo, args) -# -# Copies all git repositories referenced by pipeline instance or -# template 'p' from src to dst. -# -# For each component c in the pipeline: -# * Copy git repositories named in c['repository'] and c['job']['repository'] if present -# * Rename script versions: -# * c['script_version'] -# * c['job']['script_version'] -# * c['job']['supplied_script_version'] -# to the commit hashes they resolve to, since any symbolic -# names (tags, branches) are not preserved in the destination repo. -# -# The pipeline object is updated in place with the new repository -# names. The return value is undefined. 
-# -def copy_git_repos(p, src, dst, dst_repo, args): - copied = set() - for c in p['components']: - component = p['components'][c] - if 'repository' in component: - repo = component['repository'] - script_version = component.get('script_version', None) - if repo not in copied: - copy_git_repo(repo, src, dst, dst_repo, script_version, args) - copied.add(repo) - component['repository'] = dst_repo - if script_version: - repo_dir = local_repo_dir[repo] - component['script_version'] = git_rev_parse(script_version, repo_dir) - if 'job' in component: - j = component['job'] - if 'repository' in j: - repo = j['repository'] - script_version = j.get('script_version', None) - if repo not in copied: - copy_git_repo(repo, src, dst, dst_repo, script_version, args) - copied.add(repo) - j['repository'] = dst_repo - repo_dir = local_repo_dir[repo] - if script_version: - j['script_version'] = git_rev_parse(script_version, repo_dir) - if 'supplied_script_version' in j: - j['supplied_script_version'] = git_rev_parse(j['supplied_script_version'], repo_dir) def total_collection_size(manifest_text): """Return the total number of bytes in this collection (excluding @@ -387,29 +403,26 @@ def create_collection_from(c, src, dst, args): available.""" collection_uuid = c['uuid'] - del c['uuid'] - - if not c["name"]: - c['name'] = "copied from " + collection_uuid + body = {} + for d in ('description', 'manifest_text', 'name', 'portable_data_hash', 'properties'): + body[d] = c[d] - if 'properties' in c: - del c['properties'] + if not body["name"]: + body['name'] = "copied from " + collection_uuid - c['owner_uuid'] = args.project_uuid + body['owner_uuid'] = args.project_uuid - dst_collection = dst.collections().create(body=c, ensure_unique_name=True).execute(num_retries=args.retries) + dst_collection = dst.collections().create(body=body, ensure_unique_name=True).execute(num_retries=args.retries) # Create docker_image_repo+tag and docker_image_hash links # at the destination. for link_class in ("docker_image_repo+tag", "docker_image_hash"): docker_links = src.links().list(filters=[["head_uuid", "=", collection_uuid], ["link_class", "=", link_class]]).execute(num_retries=args.retries)['items'] - for d in docker_links: - body={ - 'head_uuid': dst_collection['uuid'], - 'link_class': link_class, - 'name': d['name'], - } + for src_link in docker_links: + body = {key: src_link[key] + for key in ['link_class', 'name', 'properties']} + body['head_uuid'] = dst_collection['uuid'] body['owner_uuid'] = args.project_uuid lk = dst.links().create(body=body).execute(num_retries=args.retries) @@ -441,10 +454,11 @@ def create_collection_from(c, src, dst, args): # def copy_collection(obj_uuid, src, dst, args): if arvados.util.keep_locator_pattern.match(obj_uuid): - # If the obj_uuid is a portable data hash, it might not be uniquely - # identified with a particular collection. As a result, it is - # ambigious as to what name to use for the copy. Apply some heuristics - # to pick which collection to get the name from. + # If the obj_uuid is a portable data hash, it might not be + # uniquely identified with a particular collection. As a + # result, it is ambiguous as to what name to use for the copy. + # Apply some heuristics to pick which collection to get the + # name from. 
srccol = src.collections().list( filters=[['portable_data_hash', '=', obj_uuid]], order="created_at asc" @@ -463,7 +477,7 @@ def copy_collection(obj_uuid, src, dst, args): c = items[0] if not c: # See if there is a collection that's in the same project - # as the root item (usually a pipeline) being copied. + # as the root item (usually a workflow) being copied. for i in items: if i.get("owner_uuid") == src_owner_uuid and i.get("name"): c = i @@ -520,7 +534,7 @@ def copy_collection(obj_uuid, src, dst, args): # a new manifest as we go. src_keep = arvados.keep.KeepClient(api_client=src, num_retries=args.retries) dst_keep = arvados.keep.KeepClient(api_client=dst, num_retries=args.retries) - dst_manifest = "" + dst_manifest = io.StringIO() dst_locators = {} bytes_written = 0 bytes_expected = total_collection_size(manifest) @@ -529,118 +543,90 @@ def copy_collection(obj_uuid, src, dst, args): else: progress_writer = None - for line in manifest.splitlines(True): + for line in manifest.splitlines(): words = line.split() - dst_manifest_line = words[0] + dst_manifest.write(words[0]) for word in words[1:]: try: loc = arvados.KeepLocator(word) - blockhash = loc.md5sum - # copy this block if we haven't seen it before - # (otherwise, just reuse the existing dst_locator) - if blockhash not in dst_locators: - logger.debug("Copying block %s (%s bytes)", blockhash, loc.size) - if progress_writer: - progress_writer.report(obj_uuid, bytes_written, bytes_expected) - data = src_keep.get(word) - dst_locator = dst_keep.put(data) - dst_locators[blockhash] = dst_locator - bytes_written += loc.size - dst_manifest_line += ' ' + dst_locators[blockhash] except ValueError: # If 'word' can't be parsed as a locator, # presume it's a filename. - dst_manifest_line += ' ' + word - dst_manifest += dst_manifest_line - if line.endswith("\n"): - dst_manifest += "\n" + dst_manifest.write(' ') + dst_manifest.write(word) + continue + blockhash = loc.md5sum + # copy this block if we haven't seen it before + # (otherwise, just reuse the existing dst_locator) + if blockhash not in dst_locators: + logger.debug("Copying block %s (%s bytes)", blockhash, loc.size) + if progress_writer: + progress_writer.report(obj_uuid, bytes_written, bytes_expected) + data = src_keep.get(word) + dst_locator = dst_keep.put(data) + dst_locators[blockhash] = dst_locator + bytes_written += loc.size + dst_manifest.write(' ') + dst_manifest.write(dst_locators[blockhash]) + dst_manifest.write("\n") if progress_writer: progress_writer.report(obj_uuid, bytes_written, bytes_expected) progress_writer.finish() # Copy the manifest and save the collection. - logger.debug('saving %s with manifest: <%s>', obj_uuid, dst_manifest) + logger.debug('saving %s with manifest: <%s>', obj_uuid, dst_manifest.getvalue()) - dst_keep.put(dst_manifest.encode('utf-8')) - c['manifest_text'] = dst_manifest + c['manifest_text'] = dst_manifest.getvalue() return create_collection_from(c, src, dst, args) -# copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args) -# -# Copies commits from git repository 'src_git_repo' on Arvados -# instance 'src' to 'dst_git_repo' on 'dst'. Both src_git_repo -# and dst_git_repo are repository names, not UUIDs (i.e. "arvados" -# or "jsmith") -# -# All commits will be copied to a destination branch named for the -# source repository URL. -# -# Because users cannot create their own repositories, the -# destination repository must already exist. -# -# The user running this command must be authenticated -# to both repositories. 
-# -def copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args): - # Identify the fetch and push URLs for the git repositories. - r = src.repositories().list( - filters=[['name', '=', src_git_repo]]).execute(num_retries=args.retries) +def select_git_url(api, repo_name, retries, allow_insecure_http, allow_insecure_http_opt): + r = api.repositories().list( + filters=[['name', '=', repo_name]]).execute(num_retries=retries) if r['items_available'] != 1: - raise Exception('cannot identify source repo {}; {} repos found' - .format(src_git_repo, r['items_available'])) - src_git_url = r['items'][0]['fetch_url'] - logger.debug('src_git_url: {}'.format(src_git_url)) + raise Exception('cannot identify repo {}; {} repos found' + .format(repo_name, r['items_available'])) + + https_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("https:")] + http_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("http:")] + other_url = [c for c in r['items'][0]["clone_urls"] if not c.startswith("http")] + + priority = https_url + other_url + http_url + + git_config = [] + git_url = None + for url in priority: + if url.startswith("http"): + u = urllib.parse.urlsplit(url) + baseurl = urllib.parse.urlunsplit((u.scheme, u.netloc, "", "", "")) + git_config = ["-c", "credential.%s/.username=none" % baseurl, + "-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl] + else: + git_config = [] + + try: + logger.debug("trying %s", url) + arvados.util.run_command(["git"] + git_config + ["ls-remote", url], + env={"HOME": os.environ["HOME"], + "ARVADOS_API_TOKEN": api.api_token, + "GIT_ASKPASS": "/bin/false"}) + except arvados.errors.CommandFailedError: + pass + else: + git_url = url + break - r = dst.repositories().list( - filters=[['name', '=', dst_git_repo]]).execute(num_retries=args.retries) - if r['items_available'] != 1: - raise Exception('cannot identify destination repo {}; {} repos found' - .format(dst_git_repo, r['items_available'])) - dst_git_push_url = r['items'][0]['push_url'] - logger.debug('dst_git_push_url: {}'.format(dst_git_push_url)) - - # script_version is the "script_version" parameter from the source - # component or job. It is used here to tie the destination branch - # to the commit that was used on the source. If no script_version - # was supplied in the component or job, it is a mistake in the pipeline, - # but for the purposes of copying the repository, default to "master". - # - if not script_version: - script_version = "master" - - dst_branch = re.sub(r'\W+', '_', "{}_{}".format(src_git_url, script_version)) - - # Copy git commits from src repo to dst repo (but only if - # we have not already copied this repo in this session). 
- # - if src_git_repo in local_repo_dir: - logger.debug('already copied src repo %s, skipping', src_git_repo) - else: - tmprepo = tempfile.mkdtemp() - local_repo_dir[src_git_repo] = tmprepo - arvados.util.run_command( - ["git", "clone", "--bare", src_git_url, tmprepo], - cwd=os.path.dirname(tmprepo)) - arvados.util.run_command( - ["git", "branch", dst_branch, script_version], - cwd=tmprepo) - arvados.util.run_command(["git", "remote", "add", "dst", dst_git_push_url], cwd=tmprepo) - arvados.util.run_command(["git", "push", "dst", dst_branch], cwd=tmprepo) - - -def copy_docker_images(pipeline, src, dst, args): - """Copy any docker images named in the pipeline components' - runtime_constraints field from src to dst.""" - - logger.debug('copy_docker_images: {}'.format(pipeline['uuid'])) - for c_name, c_info in pipeline['components'].iteritems(): - if ('runtime_constraints' in c_info and - 'docker_image' in c_info['runtime_constraints']): - copy_docker_image( - c_info['runtime_constraints']['docker_image'], - c_info['runtime_constraints'].get('docker_image_tag', 'latest'), - src, dst, args) + if not git_url: + raise Exception('Cannot access git repository, tried {}' + .format(priority)) + + if git_url.startswith("http:"): + if allow_insecure_http: + logger.warning("Using insecure git url %s but will allow this because %s", git_url, allow_insecure_http_opt) + else: + raise Exception("Refusing to use insecure git url %s, use %s if you really want this." % (git_url, allow_insecure_http_opt)) + + return (git_url, git_config) def copy_docker_image(docker_image, docker_image_tag, src, dst, args): @@ -666,6 +652,42 @@ def copy_docker_image(docker_image, docker_image_tag, src, dst, args): else: logger.warning('Could not find docker image {}:{}'.format(docker_image, docker_image_tag)) +def copy_project(obj_uuid, src, dst, owner_uuid, args): + + src_project_record = src.groups().get(uuid=obj_uuid).execute(num_retries=args.retries) + + # Create/update the destination project + existing = dst.groups().list(filters=[["owner_uuid", "=", owner_uuid], + ["name", "=", src_project_record["name"]]]).execute(num_retries=args.retries) + if len(existing["items"]) == 0: + project_record = dst.groups().create(body={"group": {"group_class": "project", + "owner_uuid": owner_uuid, + "name": src_project_record["name"]}}).execute(num_retries=args.retries) + else: + project_record = existing["items"][0] + + dst.groups().update(uuid=project_record["uuid"], + body={"group": { + "description": src_project_record["description"]}}).execute(num_retries=args.retries) + + args.project_uuid = project_record["uuid"] + + logger.debug('Copying %s to %s', obj_uuid, project_record["uuid"]) + + # Copy collections + copy_collections([col["uuid"] for col in arvados.util.list_all(src.collections().list, filters=[["owner_uuid", "=", obj_uuid]])], + src, dst, args) + + # Copy workflows + for w in arvados.util.list_all(src.workflows().list, filters=[["owner_uuid", "=", obj_uuid]]): + copy_workflow(w["uuid"], src, dst, args) + + if args.recursive: + for g in arvados.util.list_all(src.groups().list, filters=[["owner_uuid", "=", obj_uuid]]): + copy_project(g["uuid"], src, dst, project_record["uuid"], args) + + return project_record + # git_rev_parse(rev, repo) # # Returns the 40-character commit hash corresponding to 'rev' in @@ -683,12 +705,12 @@ def git_rev_parse(rev, repo): # the second field of the uuid. This function consults the api's # schema to identify the object class. 
# -# It returns a string such as 'Collection', 'PipelineInstance', etc. +# It returns a string such as 'Collection', 'Workflow', etc. # # Special case: if handed a Keep locator hash, return 'Collection'. # def uuid_type(api, object_uuid): - if re.match(r'^[a-f0-9]{32}\+[0-9]+(\+[A-Za-z0-9+-]+)?$', object_uuid): + if re.match(arvados.util.keep_locator_pattern, object_uuid): return 'Collection' p = object_uuid.split('-') if len(p) == 3:
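
A note on the manifest-rewriting loop added to copy_collection above: each manifest line is a stream name followed by block locators and file tokens. The loop re-uploads every previously unseen block to the destination Keep and substitutes the locator the destination returns, so the saved manifest refers only to destination blocks. The following standalone sketch of that technique assumes src_keep and dst_keep are arvados.keep.KeepClient instances for the source and destination clusters; the real command additionally reports progress and honors --retries.

import io
import arvados

def rewrite_manifest(manifest, src_keep, dst_keep):
    # Map source block md5 -> locator returned by the destination Keep,
    # so each distinct block is copied only once.
    dst_locators = {}
    dst_manifest = io.StringIO()
    for line in manifest.splitlines():
        words = line.split()
        dst_manifest.write(words[0])  # stream name, e.g. "."
        for word in words[1:]:
            try:
                loc = arvados.KeepLocator(word)
            except ValueError:
                # Not a block locator; presume it's a file token.
                dst_manifest.write(' ')
                dst_manifest.write(word)
                continue
            if loc.md5sum not in dst_locators:
                data = src_keep.get(word)
                dst_locators[loc.md5sum] = dst_keep.put(data)
            dst_manifest.write(' ')
            dst_manifest.write(dst_locators[loc.md5sum])
        dst_manifest.write("\n")
    return dst_manifest.getvalue()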
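
The workflow_collections helper added above drives the recursive copy: it walks an arbitrarily nested workflow definition, collecting keep: locations and docker image hints, which copy_workflow then feeds to copy_collections and copy_docker_image. A small driver with an invented CWL-style fragment shows what it accumulates (this condensed copy of the function omits the acrContainerImage hint checked by the version in the diff):

def workflow_collections(obj, locations, docker_images):
    if isinstance(obj, dict):
        loc = obj.get('location', None)
        if loc is not None and loc.startswith("keep:"):
            locations.append(loc[5:])
        docker_image = obj.get('dockerImageId', None) or obj.get('dockerPull', None)
        if docker_image is not None:
            ds = docker_image.split(":", 1)
            docker_images[ds[0]] = ds[1] if len(ds) == 2 else 'latest'
        for x in obj:
            workflow_collections(obj[x], locations, docker_images)
    elif isinstance(obj, list):
        for x in obj:
            workflow_collections(x, locations, docker_images)

# Hypothetical definition fragment, for illustration only:
wf_def = {
    'inputs': [{'location': 'keep:1f4b0bc7583c2a7f9102c395f4ffc5e3+45/input.txt'}],
    'hints': [{'dockerPull': 'ubuntu:18.04'}],
}
locations, docker_images = [], {}
workflow_collections(wf_def, locations, docker_images)
print(locations)      # ['1f4b0bc7583c2a7f9102c395f4ffc5e3+45/input.txt']
print(docker_images)  # {'ubuntu': '18.04'}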
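
Finally, the --src/--dst shorthand resolved by api_for_instance: a name containing a slash is treated as a literal config file path, while a bare name selects a file under $HOME/.config/arvados/. Only the path lookup is visible in the hunk above, so this sketch reproduces just that part, assuming the instance_name.conf layout described in the surrounding comments; parsing ARVADOS_API_HOST/ARVADOS_API_TOKEN out of the file and constructing the client are elided.

import os

def config_file_for(instance_name):
    # Names containing '/' are used as-is; anything else is shorthand
    # for a config file in $HOME/.config/arvados/.
    if '/' in instance_name:
        return instance_name
    return os.path.join(os.environ['HOME'], '.config', 'arvados',
                        '{}.conf'.format(instance_name))

print(config_file_for('foo'))           # $HOME/.config/arvados/foo.conf
print(config_file_for('./qr1hi.conf'))  # ./qr1hi.conf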