+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
require 'arvados/keep'
require 'sweep_trashed_collections'
+require 'trashable'
class Collection < ArvadosModel
+ extend CurrentApiClient
extend DbCurrentTime
include HasUuid
include KindAndEtag
include CommonApiTemplate
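+ # Trash/delete timestamp handling (trash_at, delete_at, is_trashed) is
+ # shared via the Trashable concern.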
+ include Trashable
serialize :properties, Hash
- before_validation :set_validation_timestamp
before_validation :default_empty_manifest
before_validation :check_encoding
before_validation :check_manifest_validity
before_validation :check_signatures
before_validation :strip_signatures_and_update_replication_confirmed
- before_validation :ensure_trash_at_not_in_past
- before_validation :sync_trash_state
- before_validation :default_trash_interval
validate :ensure_pdh_matches_manifest_text
- validate :validate_trash_and_delete_timing
before_save :set_file_names
- # Query only untrashed collections by default.
- default_scope where("is_trashed = false")
-
api_accessible :user, extend: :common do |t|
t.add :name
t.add :description
t.add :properties
t.add :portable_data_hash
t.add :signed_manifest_text, as: :manifest_text
+ t.add :manifest_text, as: :unsigned_manifest_text
t.add :replication_desired
t.add :replication_confirmed
t.add :replication_confirmed_at
# We need trash_at and is_trashed to determine the
# correct timestamp in signed_manifest_text.
'manifest_text' => ['manifest_text', 'trash_at', 'is_trashed'],
+ 'unsigned_manifest_text' => ['manifest_text'],
)
end
super + ["updated_at", "file_names"]
end
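+ # manifest_text can be very large; flag it so index (list) requests can
+ # limit how much of it they read from the database.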
+ def self.limit_index_columns_read
+ ["manifest_text"]
+ end
+
FILE_TOKEN = /^[[:digit:]]+:[[:digit:]]+:/
def check_signatures
return false if self.manifest_text.nil?
false
elsif portable_data_hash[0..31] != computed_pdh[0..31]
errors.add(:portable_data_hash,
- "does not match computed hash #{computed_pdh}")
+ "'#{portable_data_hash}' does not match computed hash '#{computed_pdh}'")
false
else
# Ignore the client-provided size part: always store
[hash_part, size_part].compact.join '+'
end
- # Return array of Collection objects
- def self.find_all_for_docker_image(search_term, search_tag=nil, readers=nil)
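+ # Return the subset of +collections+ that are usable as Docker images:
+ # each must contain exactly one file whose name matches +pattern+, or
+ # have a 'docker_image_migration' link to a readable collection that
+ # does (in which case the migrated collection is returned instead).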
+ def self.get_compatible_images(readers, pattern, collections)
+ if collections.empty?
+ return []
+ end
+
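+ # Map each collection's portable data hash to the portable data hash of
+ # its migration target (the most recent 'docker_image_migration' link
+ # owned by the system user wins).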
+ migrations = Hash[
+ Link.where('tail_uuid in (?) AND link_class=? AND links.owner_uuid=?',
+ collections.map(&:portable_data_hash),
+ 'docker_image_migration',
+ system_user_uuid).
+ order('links.created_at asc').
+ map { |l|
+ [l.tail_uuid, l.head_uuid]
+ }]
+
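+ # Fetch the migration-target collections the caller can read, keyed by
+ # portable data hash.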
+ migrated_collections = Hash[
+ Collection.readable_by(*readers).
+ where('portable_data_hash in (?)', migrations.values).
+ map { |c|
+ [c.portable_data_hash, c]
+ }]
+
+ collections.map { |c|
+ # Check whether the listed image is compatible first; if not, try the
+ # migration link.
+ manifest = Keep::Manifest.new(c.manifest_text)
+ if manifest.exact_file_count?(1) and manifest.files[0][1] =~ pattern
+ c
+ elsif m = migrated_collections[migrations[c.portable_data_hash]]
+ manifest = Keep::Manifest.new(m.manifest_text)
+ if manifest.exact_file_count?(1) and manifest.files[0][1] =~ pattern
+ m
+ end
+ end
+ }.compact
+ end
+
+ # Resolve a Docker repo+tag, hash, or collection PDH to an array of
+ # Collection objects, sorted by timestamp starting with the most recent
+ # match.
+ #
+ # If filter_compatible_format is true (the default), only return image
+ # collections which are supported by the installation, as indicated by
+ # Rails.configuration.docker_image_formats. Follows 'docker_image_migration'
+ # links if search_term resolves to an incompatible image but an equivalent
+ # compatible image is available.
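+ #
+ # Illustrative call (repository name and tag are placeholder values):
+ #   Collection.find_all_for_docker_image("arvados/jobs", "latest")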
+ def self.find_all_for_docker_image(search_term, search_tag=nil, readers=nil, filter_compatible_format: true)
readers ||= [Thread.current[:user]]
base_search = Link.
readable_by(*readers).
joins("JOIN collections ON links.head_uuid = collections.uuid").
order("links.created_at DESC")
+ if (Rails.configuration.docker_image_formats.include? 'v1' and
+ Rails.configuration.docker_image_formats.include? 'v2') or filter_compatible_format == false
+ pattern = /^(sha256:)?[0-9A-Fa-f]{64}\.tar$/
+ elsif Rails.configuration.docker_image_formats.include? 'v2'
+ pattern = /^(sha256:)[0-9A-Fa-f]{64}\.tar$/
+ elsif Rails.configuration.docker_image_formats.include? 'v1'
+ pattern = /^[0-9A-Fa-f]{64}\.tar$/
+ else
+ raise "Unrecognized configuration for docker_image_formats #{Rails.configuration.docker_image_formats}"
+ end
+
# If the search term is a Collection locator that contains one file
# that looks like a Docker image, return it.
if loc = Keep::Locator.parse(search_term)
loc.strip_hints!
- coll_match = readable_by(*readers).where(portable_data_hash: loc.to_s).limit(1).first
- if coll_match
- # Check if the Collection contains exactly one file whose name
- # looks like a saved Docker image.
- manifest = Keep::Manifest.new(coll_match.manifest_text)
- if manifest.exact_file_count?(1) and
- (manifest.files[0][1] =~ /^(sha256:)?[0-9A-Fa-f]{64}\.tar$/)
- return [coll_match]
- end
- end
+ coll_match = readable_by(*readers).where(portable_data_hash: loc.to_s).limit(1)
+ return get_compatible_images(readers, pattern, coll_match)
end
if search_tag.nil? and (n = search_term.index(":"))
# so that anything with an image timestamp is considered more recent than
# anything without; then we use the link's created_at as a tiebreaker.
uuid_timestamps = {}
- matches.all.map do |link|
+ matches.each do |link|
uuid_timestamps[link.head_uuid] = [(-link.properties["image_timestamp"].to_datetime.to_i rescue 0),
-link.created_at.to_i]
+ end
+
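+ # Order the matching collections newest-first using the timestamps
+ # gathered above, then keep only images compatible with the configured
+ # formats; if the tag matched but no compatible image exists, raise
+ # instead of silently returning nothing.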
+ sorted = Collection.where('uuid in (?)', uuid_timestamps.keys).sort_by { |c|
+ uuid_timestamps[c.uuid]
+ }
+ compatible = get_compatible_images(readers, pattern, sorted)
+ if sorted.length > 0 and compatible.empty?
+ raise ArvadosModel::UnresolvableContainerError.new "Matching Docker image is incompatible with 'docker_image_formats' configuration."
end
- Collection.where('uuid in (?)', uuid_timestamps.keys).sort_by { |c| uuid_timestamps[c.uuid] }
+ compatible
end
def self.for_latest_docker_image(search_term, search_tag=nil, readers=nil)
super
end
- # Use a single timestamp for all validations, even though each
- # validation runs at a different time.
- def set_validation_timestamp
- @validation_timestamp = db_current_time
- end
-
- # If trash_at is being changed to a time in the past, change it to
- # now. This allows clients to say "expires {client-current-time}"
- # without failing due to clock skew, while avoiding odd log entries
- # like "expiry date changed to {1 year ago}".
- def ensure_trash_at_not_in_past
- if trash_at_changed? && trash_at
- self.trash_at = [@validation_timestamp, trash_at].max
- end
- end
-
- # Caller can move into/out of trash by setting/clearing is_trashed
- # -- however, if the caller also changes trash_at, then any changes
- # to is_trashed are ignored.
- def sync_trash_state
- if is_trashed_changed? && !trash_at_changed?
- if is_trashed
- self.trash_at = @validation_timestamp
- else
- self.trash_at = nil
- self.delete_at = nil
- end
- end
- self.is_trashed = trash_at && trash_at <= @validation_timestamp || false
- true
- end
-
- # If trash_at is updated without touching delete_at, automatically
- # update delete_at to a sensible value.
- def default_trash_interval
- if trash_at_changed? && !delete_at_changed?
- if trash_at.nil?
- self.delete_at = nil
- else
- self.delete_at = trash_at + Rails.configuration.default_trash_lifetime.seconds
- end
- end
- end
-
- def validate_trash_and_delete_timing
- if trash_at.nil? != delete_at.nil?
- errors.add :delete_at, "must be set if trash_at is set, and must be nil otherwise"
- end
-
- earliest_delete = ([@validation_timestamp, trash_at_was].compact.min +
- Rails.configuration.blob_signature_ttl.seconds)
- if delete_at && delete_at < earliest_delete
- errors.add :delete_at, "#{delete_at} is too soon: earliest allowed is #{earliest_delete}"
- end
-
- if delete_at && delete_at < trash_at
- errors.add :delete_at, "must not be earlier than trash_at"
- end
-
- true
- end
end