src_arv = api_for_instance(args.source_arvados)
dst_arv = api_for_instance(args.destination_arvados)
+ if not args.project_uuid:
+ args.project_uuid = dst_arv.users().current().execute(num_retries=args.retries)["uuid"]
+
# Identify the kind of object we have been given, and begin copying.
t = uuid_type(src_arv, args.object_uuid)
if t == 'Collection':
pi['description'] = "Pipeline copied from {}\n\n{}".format(
pi_uuid,
pi['description'] if pi.get('description', None) else '')
- if args.project_uuid:
- pi['owner_uuid'] = args.project_uuid
- else:
- del pi['owner_uuid']
+
+ pi['owner_uuid'] = args.project_uuid
+
del pi['uuid']
new_pi = dst.pipeline_instances().create(body=pi, ensure_unique_name=True).execute()
pt['description'] if pt.get('description', None) else '')
pt['name'] = "{} copied from {}".format(pt.get('name', ''), pt_uuid)
del pt['uuid']
- del pt['owner_uuid']
+
+ pt['owner_uuid'] = args.project_uuid
return dst.pipeline_templates().create(body=pt, ensure_unique_name=True).execute()
return total_bytes
+def create_collection_from(c, src, dst, args):
+ """Create a new collection record on dst, and copy Docker metadata if
+ available.
+
+ c    -- source collection record (dict); mutated in place: 'uuid' and
+         'properties' are removed and 'owner_uuid' is overwritten.
+ src  -- API client for the source Arvados instance (used to read links).
+ dst  -- API client for the destination instance (collection and links
+         are created there).
+ args -- parsed command-line args; uses args.project_uuid (destination
+         owner) and args.retries.
+
+ Returns the newly created destination collection record.
+ """
+
+ # Remember the source collection's uuid before deleting it; it is
+ # needed below to look up the Docker links that point at it on src.
+ collection_uuid = c['uuid']
+
+ # The destination server assigns its own uuid.
+ del c['uuid']
+
+ # Drop source-side properties rather than copying them across.
+ if 'properties' in c:
+ del c['properties']
+
+ # Re-home the collection under the requested destination project.
+ c['owner_uuid'] = args.project_uuid
+
+ dst_collection = dst.collections().create(body=c, ensure_unique_name=True).execute(num_retries=args.retries)
+
+ # Create docker_image_repo+tag and docker_image_hash links
+ # at the destination.
+ for link_class in ("docker_image_repo+tag", "docker_image_hash"):
+ docker_links = src.links().list(filters=[["head_uuid", "=", collection_uuid], ["link_class", "=", link_class]]).execute(num_retries=args.retries)['items']
+
+ for d in docker_links:
+ body={
+ 'head_uuid': dst_collection['uuid'],
+ 'link_class': link_class,
+ 'name': d['name'],
+ }
+ # Links are owned by the same destination project as the
+ # collection they describe.
+ body['owner_uuid'] = args.project_uuid
+
+ lk = dst.links().create(body=body).execute(num_retries=args.retries)
+ logger.debug('created dst link {}'.format(lk))
+
+ return dst_collection
+
+
# copy_collection(obj_uuid, src, dst, args)
#
# Copies the collection identified by obj_uuid from src to dst.
filters=[['portable_data_hash', '=', colhash]]
).execute()
if dstcol['items_available'] > 0:
- logger.debug("Skipping collection %s (already at dst)", obj_uuid)
- return dstcol['items'][0]
+ for d in dstcol['items']:
+ if ((args.project_uuid == d['owner_uuid']) and
+ (c['name'] == d['name']) and
+ (c['portable_data_hash'] == d['portable_data_hash'])):
+ return d
+
+ return create_collection_from(c, src, dst, args)
# Fetch the collection's manifest.
manifest = c['manifest_text']
# Copy the manifest and save the collection.
logger.debug('saving %s with manifest: <%s>', obj_uuid, dst_manifest)
- dst_keep.put(dst_manifest)
- if 'uuid' in c:
- del c['uuid']
- if 'owner_uuid' in c:
- del c['owner_uuid']
+ dst_keep.put(dst_manifest.encode('utf-8'))
c['manifest_text'] = dst_manifest
- return dst.collections().create(body=c, ensure_unique_name=True).execute()
+ return create_collection_from(c, src, dst, args)
# copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version)
#
# Copy the collection it refers to.
dst_image_col = copy_collection(image_uuid, src, dst, args)
- # Create docker_image_repo+tag and docker_image_hash links
- # at the destination.
- lk = dst.links().create(
- body={
- 'head_uuid': dst_image_col['uuid'],
- 'link_class': 'docker_image_repo+tag',
- 'name': "{}:{}".format(docker_image, docker_image_tag),
- }
- ).execute(num_retries=args.retries)
- logger.debug('created dst link {}'.format(lk))
-
- lk = dst.links().create(
- body={
- 'head_uuid': dst_image_col['uuid'],
- 'link_class': 'docker_image_hash',
- 'name': dst_image_col['portable_data_hash'],
- }
- ).execute(num_retries=args.retries)
- logger.debug('created dst link {}'.format(lk))
-
# git_rev_parse(rev, repo)
#