-#! /usr/bin/env python
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
-# arv-copy [--recursive] [--no-recursive] object-uuid src dst
+# arv-copy [--src src] [--dst dst] [--recursive] [--no-recursive] object-uuid
#
# Copies an object from Arvados instance src to instance dst.
#
# By default, arv-copy recursively copies any dependent objects
# necessary to make the object functional in the new instance
-# (e.g. for a pipeline instance, arv-copy copies the pipeline
-# template, input collection, docker images, git repositories). If
+# (e.g. for a workflow, arv-copy copies the workflow
+# definition, input collections, and docker images). If
# --no-recursive is given, arv-copy copies only the single record
# identified by object-uuid.
#
# instances src and dst. If either of these files is not found,
# arv-copy will issue an error.
+from __future__ import division
+from future import standard_library
+from future.utils import listvalues
+standard_library.install_aliases()
+from past.builtins import basestring
+from builtins import object
import argparse
+import contextlib
import getpass
import os
import re
import sys
import logging
import tempfile
+import urllib.parse
+import io
import arvados
import arvados.config
import arvados.util
import arvados.commands._util as arv_cmd
import arvados.commands.keepdocker
+import ruamel.yaml as yaml
from arvados.api import OrderedJsonModel
+from arvados._version import __version__
+
+COMMIT_HASH_RE = re.compile(r'^[0-9a-f]{1,40}$')
logger = logging.getLogger('arvados.arv-copy')
def main():
copy_opts = argparse.ArgumentParser(add_help=False)
+ copy_opts.add_argument(
+ '--version', action='version', version="%s %s" % (sys.argv[0], __version__),
+ help='Print version and exit.')
copy_opts.add_argument(
'-v', '--verbose', dest='verbose', action='store_true',
help='Verbose output.')
'-f', '--force', dest='force', action='store_true',
help='Perform copy even if the object appears to exist at the remote destination.')
copy_opts.add_argument(
- '--src', dest='source_arvados', required=True,
- help='The name of the source Arvados instance (required). May be either a pathname to a config file, or the basename of a file in $HOME/.config/arvados/instance_name.conf.')
+ '--src', dest='source_arvados',
+ help='The name of the source Arvados instance - points at an Arvados config file. May be either a pathname to a config file, or (for example) "foo" as shorthand for $HOME/.config/arvados/foo.conf. If not provided, the cluster id prefix of object-uuid is used.')
copy_opts.add_argument(
- '--dst', dest='destination_arvados', required=True,
- help='The name of the destination Arvados instance (required). May be either a pathname to a config file, or the basename of a file in $HOME/.config/arvados/instance_name.conf.')
+ '--dst', dest='destination_arvados',
+ help='The name of the destination Arvados instance - points at an Arvados config file. May be either a pathname to a config file, or (for example) "foo" as shorthand for $HOME/.config/arvados/foo.conf. If not provided, the ARVADOS_API_HOST and ARVADOS_API_TOKEN environment variables are used.')
copy_opts.add_argument(
'--recursive', dest='recursive', action='store_true',
- help='Recursively copy any dependencies for this object. (default)')
+ help='Recursively copy any dependencies for this object and, when copying a project, its subprojects. (default)')
copy_opts.add_argument(
'--no-recursive', dest='recursive', action='store_false',
- help='Do not copy any dependencies. NOTE: if this option is given, the copied object will need to be updated manually in order to be functional.')
- copy_opts.add_argument(
- '--dst-git-repo', dest='dst_git_repo',
- help='The name of the destination git repository. Required when copying a pipeline recursively.')
+ help='Do not copy any dependencies or subprojects.')
copy_opts.add_argument(
'--project-uuid', dest='project_uuid',
- help='The UUID of the project at the destination to which the pipeline should be copied.')
+ help='The UUID of the project at the destination to which the collection, workflow, or project should be copied.')
+
copy_opts.add_argument(
'object_uuid',
help='The UUID of the object to be copied.')
copy_opts.set_defaults(recursive=True)
parser = argparse.ArgumentParser(
- description='Copy a pipeline instance, template or collection from one Arvados instance to another.',
+ description='Copy a workflow or collection from one Arvados instance to another.',
parents=[copy_opts, arv_cmd.retry_opt])
args = parser.parse_args()
else:
logger.setLevel(logging.INFO)
+ if not args.source_arvados:
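+ # Default to the cluster indicated by the object's UUID prefix,
+ # i.e. the config file $HOME/.config/arvados/<prefix>.conf.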
+ args.source_arvados = args.object_uuid[:5]
+
# Create API clients for the source and destination instances
src_arv = api_for_instance(args.source_arvados)
dst_arv = api_for_instance(args.destination_arvados)
result = copy_collection(args.object_uuid,
src_arv, dst_arv,
args)
- elif t == 'PipelineInstance':
- set_src_owner_uuid(src_arv.pipeline_instances(), args.object_uuid, args)
- result = copy_pipeline_instance(args.object_uuid,
- src_arv, dst_arv,
- args)
- elif t == 'PipelineTemplate':
- set_src_owner_uuid(src_arv.pipeline_templates(), args.object_uuid, args)
- result = copy_pipeline_template(args.object_uuid,
- src_arv, dst_arv, args)
+ elif t == 'Workflow':
+ set_src_owner_uuid(src_arv.workflows(), args.object_uuid, args)
+ result = copy_workflow(args.object_uuid, src_arv, dst_arv, args)
+ elif t == 'Group':
+ set_src_owner_uuid(src_arv.groups(), args.object_uuid, args)
+ result = copy_project(args.object_uuid, src_arv, dst_arv, args.project_uuid, args)
else:
abort("cannot copy object {} of type {}".format(args.object_uuid, t))
# Clean up any outstanding temp git repositories.
- for d in local_repo_dir.values():
+ for d in listvalues(local_repo_dir):
shutil.rmtree(d, ignore_errors=True)
# If no exception was thrown and the response does not have an
# $HOME/.config/arvados/instance_name.conf
#
def api_for_instance(instance_name):
+ if not instance_name:
+ # Use ARVADOS_API_HOST and ARVADOS_API_TOKEN from the environment
+ return arvados.api('v1', model=OrderedJsonModel())
+
if '/' in instance_name:
config_file = instance_name
else:
abort('need ARVADOS_API_HOST and ARVADOS_API_TOKEN for {}'.format(instance_name))
return client
-# copy_pipeline_instance(pi_uuid, src, dst, args)
-#
-# Copies a pipeline instance identified by pi_uuid from src to dst.
-#
-# If the args.recursive option is set:
-# 1. Copies all input collections
-# * For each component in the pipeline, include all collections
-# listed as job dependencies for that component)
-# 2. Copy docker images
-# 3. Copy git repositories
-# 4. Copy the pipeline template
-#
-# The only changes made to the copied pipeline instance are:
-# 1. The original pipeline instance UUID is preserved in
-# the 'properties' hash as 'copied_from_pipeline_instance_uuid'.
-# 2. The pipeline_template_uuid is changed to the new template uuid.
-# 3. The owner_uuid of the instance is changed to the user who
-# copied it.
-#
-def copy_pipeline_instance(pi_uuid, src, dst, args):
- # Fetch the pipeline instance record.
- pi = src.pipeline_instances().get(uuid=pi_uuid).execute(num_retries=args.retries)
+# Check if git is available
+def check_git_availability():
+ try:
+ arvados.util.run_command(['git', '--help'])
+ except Exception:
+ abort('git command is not available. Please ensure git is installed.')
- if args.recursive:
- if not args.dst_git_repo:
- abort('--dst-git-repo is required when copying a pipeline recursively.')
- # Copy the pipeline template and save the copied template.
- if pi.get('pipeline_template_uuid', None):
- pt = copy_pipeline_template(pi['pipeline_template_uuid'],
- src, dst, args)
-
- # Copy input collections, docker images and git repos.
- pi = copy_collections(pi, src, dst, args)
- copy_git_repos(pi, src, dst, args.dst_git_repo, args)
- copy_docker_images(pi, src, dst, args)
-
- # Update the fields of the pipeline instance with the copied
- # pipeline template.
- if pi.get('pipeline_template_uuid', None):
- pi['pipeline_template_uuid'] = pt['uuid']
+def filter_iter(arg):
+ """Iterate a filter string-or-list.
+
+ Pass in a filter field that can either be a string or list.
+ This will iterate elements as if the field had been written as a list.
+ """
+ if isinstance(arg, basestring):
+ return iter((arg,))
+ else:
+ return iter(arg)
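+# Example (illustrative): either spelling of a filter field iterates the same:
+#   list(filter_iter('script_version')) -> ['script_version']
+#   list(filter_iter(['repository', 'script_version'])) -> ['repository', 'script_version']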
+
+def migrate_repository_filter(repo_filter, src_repository, dst_repository):
+ """Update a single repository filter in-place for the destination.
+
+ If the filter checks that the repository is src_repository, it is
+ updated to check that the repository is dst_repository. If it does
+ anything else, this function raises ValueError.
+ """
+ if src_repository is None:
+ raise ValueError("component does not specify a source repository")
+ elif dst_repository is None:
+ raise ValueError("no destination repository specified to update repository filter")
+ elif repo_filter[1:] == ['=', src_repository]:
+ repo_filter[2] = dst_repository
+ elif repo_filter[1:] == ['in', [src_repository]]:
+ repo_filter[2] = [dst_repository]
else:
- # not recursive
- logger.info("Copying only pipeline instance %s.", pi_uuid)
- logger.info("You are responsible for making sure all pipeline dependencies have been updated.")
+ raise ValueError("repository filter is not a simple source match")
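+# Example (illustrative): ['repository', '=', 'src/repo'] is rewritten in
+# place to ['repository', '=', 'dst/repo'], and ['repository', 'in',
+# ['src/repo']] to ['repository', 'in', ['dst/repo']]; any other filter
+# shape raises ValueError.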
- # Update the pipeline instance properties, and create the new
- # instance at dst.
- pi['properties']['copied_from_pipeline_instance_uuid'] = pi_uuid
- pi['description'] = "Pipeline copied from {}\n\n{}".format(
- pi_uuid,
- pi['description'] if pi.get('description', None) else '')
+def migrate_script_version_filter(version_filter):
+ """Update a single script_version filter in-place for the destination.
- pi['owner_uuid'] = args.project_uuid
+ Currently this function checks that all the filter operands are Git
+ commit hashes. If they're not, it raises ValueError to indicate that
+ the filter is not portable. It could be extended to make other
+ transformations in the future.
+ """
+ if not all(COMMIT_HASH_RE.match(v) for v in filter_iter(version_filter[2])):
+ raise ValueError("script_version filter is not limited to commit hashes")
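+# Example (illustrative): ['script_version', 'in', ['1de84a8', 'master']]
+# raises ValueError because 'master' is not a commit hash, while
+# ['script_version', '=', '1de84a8'] passes the check.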
+
+def attr_filtered(filter_, *attr_names):
+ """Return True if filter_ applies to any of attr_names, else False."""
+ return any((name == 'any') or (name in attr_names)
+ for name in filter_iter(filter_[0]))
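+# Example (illustrative): attr_filtered(['script_version', '=', 'x'],
+# 'script_version') is True, and attr_filtered(['any', '=', 'x'],
+# 'docker_image') is also True, because the attribute name 'any' matches
+# every field.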
- del pi['uuid']
+@contextlib.contextmanager
+def exception_handler(handler, *exc_types):
+ """If any exc_types are raised in the block, call handler on the exception."""
+ try:
+ yield
+ except exc_types as error:
+ handler(error)
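+# Example (illustrative) usage:
+#   with exception_handler(logger.warning, OSError):
+#       shutil.rmtree(tmpdir)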
- new_pi = dst.pipeline_instances().create(body=pi, ensure_unique_name=True).execute(num_retries=args.retries)
- return new_pi
-# copy_pipeline_template(pt_uuid, src, dst, args)
+# copy_workflow(wf_uuid, src, dst, args)
#
-# Copies a pipeline template identified by pt_uuid from src to dst.
+# Copies a workflow identified by wf_uuid from src to dst.
#
-# If args.recursive is True, also copy any collections, docker
-# images and git repositories that this template references.
+# If args.recursive is True, also copy any collections
+# referenced in the workflow definition yaml.
#
-# The owner_uuid of the new template is changed to that of the user
-# who copied the template.
+# The owner_uuid of the new workflow is set to any given
+# project_uuid or the user who copied the template.
#
-# Returns the copied pipeline template object.
+# Returns the copied workflow object.
#
-def copy_pipeline_template(pt_uuid, src, dst, args):
- # fetch the pipeline template from the source instance
- pt = src.pipeline_templates().get(uuid=pt_uuid).execute(num_retries=args.retries)
+def copy_workflow(wf_uuid, src, dst, args):
+ # fetch the workflow from the source instance
+ wf = src.workflows().get(uuid=wf_uuid).execute(num_retries=args.retries)
+ # copy collections and docker images
if args.recursive:
- if not args.dst_git_repo:
- abort('--dst-git-repo is required when copying a pipeline recursively.')
- # Copy input collections, docker images and git repos.
- pt = copy_collections(pt, src, dst, args)
- copy_git_repos(pt, src, dst, args.dst_git_repo, args)
- copy_docker_images(pt, src, dst, args)
+ wf_def = yaml.safe_load(wf["definition"])
+ if wf_def is not None:
+ locations = []
+ docker_images = {}
+ graph = wf_def.get('$graph', None)
+ if graph is not None:
+ workflow_collections(graph, locations, docker_images)
+ else:
+ workflow_collections(wf_def, locations, docker_images)
- pt['description'] = "Pipeline template copied from {}\n\n{}".format(
- pt_uuid,
- pt['description'] if pt.get('description', None) else '')
- pt['name'] = "{} copied from {}".format(pt.get('name', ''), pt_uuid)
- del pt['uuid']
+ if locations:
+ copy_collections(locations, src, dst, args)
- pt['owner_uuid'] = args.project_uuid
+ for image in docker_images:
+ copy_docker_image(image, docker_images[image], src, dst, args)
- return dst.pipeline_templates().create(body=pt, ensure_unique_name=True).execute(num_retries=args.retries)
+ # copy the workflow itself
+ del wf['uuid']
+ wf['owner_uuid'] = args.project_uuid
+
+ existing = dst.workflows().list(filters=[["owner_uuid", "=", args.project_uuid],
+ ["name", "=", wf["name"]]]).execute(num_retries=args.retries)
+ if len(existing["items"]) == 0:
+ return dst.workflows().create(body=wf).execute(num_retries=args.retries)
+ else:
+ return dst.workflows().update(uuid=existing["items"][0]["uuid"], body=wf).execute(num_retries=args.retries)
+
+
+def workflow_collections(obj, locations, docker_images):
+ if isinstance(obj, dict):
+ loc = obj.get('location', None)
+ if loc is not None:
+ if loc.startswith("keep:"):
+ locations.append(loc[5:])
+
+ docker_image = obj.get('dockerImageId', None) or obj.get('dockerPull', None) or obj.get('acrContainerImage', None)
+ if docker_image is not None:
+ ds = docker_image.split(":", 1)
+ tag = ds[1] if len(ds) == 2 else 'latest'
+ docker_images[ds[0]] = tag
+
+ for x in obj:
+ workflow_collections(obj[x], locations, docker_images)
+ elif isinstance(obj, list):
+ for x in obj:
+ workflow_collections(x, locations, docker_images)
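+
+# Example (illustrative): given a definition fragment such as
+#   {"location": "keep:1f4b0bc7583c2a7f9102c395f4ffc5e3+45/foo.txt",
+#    "dockerPull": "python:3.8"}
+# workflow_collections() appends "1f4b0bc7583c2a7f9102c395f4ffc5e3+45/foo.txt"
+# to locations and records docker_images["python"] = "3.8".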
# copy_collections(obj, src, dst, args)
#
obj = arvados.util.portable_data_hash_pattern.sub(copy_collection_fn, obj)
obj = arvados.util.collection_uuid_pattern.sub(copy_collection_fn, obj)
return obj
- elif type(obj) == dict:
- return {v: copy_collections(obj[v], src, dst, args) for v in obj}
- elif type(obj) == list:
- return [copy_collections(v, src, dst, args) for v in obj]
+ elif isinstance(obj, dict):
+ return type(obj)((v, copy_collections(obj[v], src, dst, args))
+ for v in obj)
+ elif isinstance(obj, list):
+ return type(obj)(copy_collections(v, src, dst, args) for v in obj)
return obj
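+
+# Example (illustrative): copy_collections() rewrites collection references
+# wherever they appear, so a value like "zzzzz-4zz18-aaaaaaaaaaaaaaa" embedded
+# in a parameter becomes the identifier of the copied collection on dst, while
+# type(obj)(...) preserves the concrete dict/list type (e.g. OrderedDict) of
+# the surrounding structure.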
-def migrate_jobspec(jobspec, src, dst, dst_repo, args):
- """Copy a job's script to the destination repository, and update its record.
-
- Given a jobspec dictionary, this function finds the referenced script from
- src and copies it to dst and dst_repo. It also updates jobspec in place to
- refer to names on the destination.
- """
- repo = jobspec.get('repository')
- if repo is None:
- return
- # script_version is the "script_version" parameter from the source
- # component or job. If no script_version was supplied in the
- # component or job, it is a mistake in the pipeline, but for the
- # purposes of copying the repository, default to "master".
- script_version = jobspec.get('script_version') or 'master'
- script_key = (repo, script_version)
- if script_key not in scripts_copied:
- copy_git_repo(repo, src, dst, dst_repo, script_version, args)
- scripts_copied.add(script_key)
- jobspec['repository'] = dst_repo
- repo_dir = local_repo_dir[repo]
- for version_key in ['script_version', 'supplied_script_version']:
- if version_key in jobspec:
- jobspec[version_key] = git_rev_parse(jobspec[version_key], repo_dir)
-
-# copy_git_repos(p, src, dst, dst_repo, args)
-#
-# Copies all git repositories referenced by pipeline instance or
-# template 'p' from src to dst.
-#
-# For each component c in the pipeline:
-# * Copy git repositories named in c['repository'] and c['job']['repository'] if present
-# * Rename script versions:
-# * c['script_version']
-# * c['job']['script_version']
-# * c['job']['supplied_script_version']
-# to the commit hashes they resolve to, since any symbolic
-# names (tags, branches) are not preserved in the destination repo.
-#
-# The pipeline object is updated in place with the new repository
-# names. The return value is undefined.
-#
-def copy_git_repos(p, src, dst, dst_repo, args):
- for component in p['components'].itervalues():
- migrate_jobspec(component, src, dst, dst_repo, args)
- if 'job' in component:
- migrate_jobspec(component['job'], src, dst, dst_repo, args)
def total_collection_size(manifest_text):
"""Return the total number of bytes in this collection (excluding
available."""
collection_uuid = c['uuid']
- del c['uuid']
-
- if not c["name"]:
- c['name'] = "copied from " + collection_uuid
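+ # Copy only a whitelist of fields, so server-managed attributes
+ # (uuid, owner_uuid, timestamps) are not carried over from the source.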
+ body = {}
+ for d in ('description', 'manifest_text', 'name', 'portable_data_hash', 'properties'):
+ body[d] = c[d]
- if 'properties' in c:
- del c['properties']
+ if not body["name"]:
+ body['name'] = "copied from " + collection_uuid
- c['owner_uuid'] = args.project_uuid
+ body['owner_uuid'] = args.project_uuid
- dst_collection = dst.collections().create(body=c, ensure_unique_name=True).execute(num_retries=args.retries)
+ dst_collection = dst.collections().create(body=body, ensure_unique_name=True).execute(num_retries=args.retries)
# Create docker_image_repo+tag and docker_image_hash links
# at the destination.
#
def copy_collection(obj_uuid, src, dst, args):
if arvados.util.keep_locator_pattern.match(obj_uuid):
- # If the obj_uuid is a portable data hash, it might not be uniquely
- # identified with a particular collection. As a result, it is
- # ambigious as to what name to use for the copy. Apply some heuristics
- # to pick which collection to get the name from.
+ # If the obj_uuid is a portable data hash, it might not be
+ # uniquely identified with a particular collection. As a
+ # result, it is ambiguous as to what name to use for the copy.
+ # Apply some heuristics to pick which collection to get the
+ # name from.
srccol = src.collections().list(
filters=[['portable_data_hash', '=', obj_uuid]],
order="created_at asc"
c = items[0]
if not c:
# See if there is a collection that's in the same project
- # as the root item (usually a pipeline) being copied.
+ # as the root item (usually a workflow) being copied.
for i in items:
if i.get("owner_uuid") == src_owner_uuid and i.get("name"):
c = i
# a new manifest as we go.
src_keep = arvados.keep.KeepClient(api_client=src, num_retries=args.retries)
dst_keep = arvados.keep.KeepClient(api_client=dst, num_retries=args.retries)
- dst_manifest = ""
+ dst_manifest = io.StringIO()
dst_locators = {}
bytes_written = 0
bytes_expected = total_collection_size(manifest)
else:
progress_writer = None
- for line in manifest.splitlines(True):
+ for line in manifest.splitlines():
words = line.split()
- dst_manifest_line = words[0]
+ dst_manifest.write(words[0])
for word in words[1:]:
try:
loc = arvados.KeepLocator(word)
- blockhash = loc.md5sum
- # copy this block if we haven't seen it before
- # (otherwise, just reuse the existing dst_locator)
- if blockhash not in dst_locators:
- logger.debug("Copying block %s (%s bytes)", blockhash, loc.size)
- if progress_writer:
- progress_writer.report(obj_uuid, bytes_written, bytes_expected)
- data = src_keep.get(word)
- dst_locator = dst_keep.put(data)
- dst_locators[blockhash] = dst_locator
- bytes_written += loc.size
- dst_manifest_line += ' ' + dst_locators[blockhash]
except ValueError:
# If 'word' can't be parsed as a locator,
# presume it's a filename.
- dst_manifest_line += ' ' + word
- dst_manifest += dst_manifest_line
- if line.endswith("\n"):
- dst_manifest += "\n"
+ dst_manifest.write(' ')
+ dst_manifest.write(word)
+ continue
+ blockhash = loc.md5sum
+ # copy this block if we haven't seen it before
+ # (otherwise, just reuse the existing dst_locator)
+ if blockhash not in dst_locators:
+ logger.debug("Copying block %s (%s bytes)", blockhash, loc.size)
+ if progress_writer:
+ progress_writer.report(obj_uuid, bytes_written, bytes_expected)
+ data = src_keep.get(word)
+ dst_locator = dst_keep.put(data)
+ dst_locators[blockhash] = dst_locator
+ bytes_written += loc.size
+ dst_manifest.write(' ')
+ dst_manifest.write(dst_locators[blockhash])
+ dst_manifest.write("\n")
if progress_writer:
progress_writer.report(obj_uuid, bytes_written, bytes_expected)
progress_writer.finish()
# Copy the manifest and save the collection.
- logger.debug('saving %s with manifest: <%s>', obj_uuid, dst_manifest)
+ logger.debug('saving %s with manifest: <%s>', obj_uuid, dst_manifest.getvalue())
- dst_keep.put(dst_manifest.encode('utf-8'))
- c['manifest_text'] = dst_manifest
+ c['manifest_text'] = dst_manifest.getvalue()
return create_collection_from(c, src, dst, args)
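+
+# Example (illustrative): a manifest line such as
+#   . acbd18db4cc2f85cedef654fccc4a4d8+3+Axxx@ffffffff 0:3:foo.txt
+# keeps its stream name (".") and file token ("0:3:foo.txt") unchanged, but
+# the block locator is replaced by the locator returned from dst_keep.put().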
-# copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args)
-#
-# Copies commits from git repository 'src_git_repo' on Arvados
-# instance 'src' to 'dst_git_repo' on 'dst'. Both src_git_repo
-# and dst_git_repo are repository names, not UUIDs (i.e. "arvados"
-# or "jsmith")
-#
-# All commits will be copied to a destination branch named for the
-# source repository URL.
-#
-# The destination repository must already exist.
-#
-# The user running this command must be authenticated
-# to both repositories.
-#
-def copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args):
- # Identify the fetch and push URLs for the git repositories.
- r = src.repositories().list(
- filters=[['name', '=', src_git_repo]]).execute(num_retries=args.retries)
+def select_git_url(api, repo_name, retries, allow_insecure_http, allow_insecure_http_opt):
+ r = api.repositories().list(
+ filters=[['name', '=', repo_name]]).execute(num_retries=retries)
if r['items_available'] != 1:
- raise Exception('cannot identify source repo {}; {} repos found'
- .format(src_git_repo, r['items_available']))
- src_git_url = r['items'][0]['fetch_url']
- logger.debug('src_git_url: {}'.format(src_git_url))
+ raise Exception('cannot identify repo {}; {} repos found'
+ .format(repo_name, r['items_available']))
+
+ https_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("https:")]
+ http_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("http:")]
+ other_url = [c for c in r['items'][0]["clone_urls"] if not c.startswith("http")]
+
+ priority = https_url + other_url + http_url
+
+ git_config = []
+ git_url = None
+ for url in priority:
+ if url.startswith("http"):
+ u = urllib.parse.urlsplit(url)
+ baseurl = urllib.parse.urlunsplit((u.scheme, u.netloc, "", "", ""))
+ git_config = ["-c", "credential.%s/.username=none" % baseurl,
+ "-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl]
+ else:
+ git_config = []
+
+ try:
+ logger.debug("trying %s", url)
+ arvados.util.run_command(["git"] + git_config + ["ls-remote", url],
+ env={"HOME": os.environ["HOME"],
+ "ARVADOS_API_TOKEN": api.api_token,
+ "GIT_ASKPASS": "/bin/false"})
+ except arvados.errors.CommandFailedError:
+ pass
+ else:
+ git_url = url
+ break
- r = dst.repositories().list(
- filters=[['name', '=', dst_git_repo]]).execute(num_retries=args.retries)
- if r['items_available'] != 1:
- raise Exception('cannot identify destination repo {}; {} repos found'
- .format(dst_git_repo, r['items_available']))
- dst_git_push_url = r['items'][0]['push_url']
- logger.debug('dst_git_push_url: {}'.format(dst_git_push_url))
-
- dst_branch = re.sub(r'\W+', '_', "{}_{}".format(src_git_url, script_version))
-
- # Copy git commits from src repo to dst repo.
- if src_git_repo not in local_repo_dir:
- local_repo_dir[src_git_repo] = tempfile.mkdtemp()
- arvados.util.run_command(
- ["git", "clone", "--bare", src_git_url,
- local_repo_dir[src_git_repo]],
- cwd=os.path.dirname(local_repo_dir[src_git_repo]))
- arvados.util.run_command(
- ["git", "remote", "add", "dst", dst_git_push_url],
- cwd=local_repo_dir[src_git_repo])
- arvados.util.run_command(
- ["git", "branch", dst_branch, script_version],
- cwd=local_repo_dir[src_git_repo])
- arvados.util.run_command(["git", "push", "dst", dst_branch],
- cwd=local_repo_dir[src_git_repo])
-
-def copy_docker_images(pipeline, src, dst, args):
- """Copy any docker images named in the pipeline components'
- runtime_constraints field from src to dst."""
-
- logger.debug('copy_docker_images: {}'.format(pipeline['uuid']))
- for c_name, c_info in pipeline['components'].iteritems():
- if ('runtime_constraints' in c_info and
- 'docker_image' in c_info['runtime_constraints']):
- copy_docker_image(
- c_info['runtime_constraints']['docker_image'],
- c_info['runtime_constraints'].get('docker_image_tag', 'latest'),
- src, dst, args)
+ if not git_url:
+ raise Exception('Cannot access git repository, tried {}'
+ .format(priority))
+
+ if git_url.startswith("http:"):
+ if allow_insecure_http:
+ logger.warning("Using insecure git url %s but will allow this because %s", git_url, allow_insecure_http_opt)
+ else:
+ raise Exception("Refusing to use insecure git url %s, use %s if you really want this." % (git_url, allow_insecure_http_opt))
+
+ return (git_url, git_config)
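+
+# Example (illustrative): if a repository advertises clone_urls
+#   ["https://git.zzzzz.example.org/foo.git", "git@git.zzzzz.example.org:foo.git"]
+# the https URL is tried first with `git ls-remote`, and the returned
+# git_config supplies the API token as the password via a credential helper.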
def copy_docker_image(docker_image, docker_image_tag, src, dst, args):
else:
logger.warning('Could not find docker image {}:{}'.format(docker_image, docker_image_tag))
+def copy_project(obj_uuid, src, dst, owner_uuid, args):
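+ """Copy the project record obj_uuid from src to dst, under owner_uuid.
+
+ Also copies the collections and workflows owned by the project and,
+ when args.recursive is set, its subprojects.
+ """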
+
+ src_project_record = src.groups().get(uuid=obj_uuid).execute(num_retries=args.retries)
+
+ # Create/update the destination project
+ existing = dst.groups().list(filters=[["owner_uuid", "=", owner_uuid],
+ ["name", "=", src_project_record["name"]]]).execute(num_retries=args.retries)
+ if len(existing["items"]) == 0:
+ project_record = dst.groups().create(body={"group": {"group_class": "project",
+ "owner_uuid": owner_uuid,
+ "name": src_project_record["name"]}}).execute(num_retries=args.retries)
+ else:
+ project_record = existing["items"][0]
+
+ dst.groups().update(uuid=project_record["uuid"],
+ body={"group": {
+ "description": src_project_record["description"]}}).execute(num_retries=args.retries)
+
+ args.project_uuid = project_record["uuid"]
+
+ logger.debug('Copying %s to %s', obj_uuid, project_record["uuid"])
+
+ # Copy collections
+ copy_collections([col["uuid"] for col in arvados.util.list_all(src.collections().list, filters=[["owner_uuid", "=", obj_uuid]])],
+ src, dst, args)
+
+ # Copy workflows
+ for w in arvados.util.list_all(src.workflows().list, filters=[["owner_uuid", "=", obj_uuid]]):
+ copy_workflow(w["uuid"], src, dst, args)
+
+ if args.recursive:
+ for g in arvados.util.list_all(src.groups().list, filters=[["owner_uuid", "=", obj_uuid]]):
+ copy_project(g["uuid"], src, dst, project_record["uuid"], args)
+
+ return project_record
+
# git_rev_parse(rev, repo)
#
# Returns the 40-character commit hash corresponding to 'rev' in
# the second field of the uuid. This function consults the api's
# schema to identify the object class.
#
-# It returns a string such as 'Collection', 'PipelineInstance', etc.
+# It returns a string such as 'Collection', 'Workflow', etc.
#
# Special case: if handed a Keep locator hash, return 'Collection'.
#
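+# Example (illustrative): uuid_type(api, 'zzzzz-4zz18-aaaaaaaaaaaaaaa')
+# looks up the '4zz18' infix in the API schema and returns 'Collection'.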
def uuid_type(api, object_uuid):
- if re.match(r'^[a-f0-9]{32}\+[0-9]+(\+[A-Za-z0-9+-]+)?$', object_uuid):
+ if re.match(arvados.util.keep_locator_pattern, object_uuid):
return 'Collection'
p = object_uuid.split('-')
if len(p) == 3: