}, status: :unprocessable_entity
end
end
- load_filters_param
# We used to ask for the minimum_, exclude_, and no_reuse params
# in the job resource. Now we advertise them as flags that alter
end
if params[:find_or_create]
- # Convert old special-purpose creation parameters to the new
- # filters-based method.
- minimum_script_version = params[:minimum_script_version]
- exclude_script_versions = params.fetch(:exclude_script_versions, [])
- @filters.select do |(col_name, operand, operator)|
- case col_name
- when "script_version"
- case operand
- when "in range"
- minimum_script_version = operator
+ load_filters_param
+ if @filters.empty? # Translate older creation parameters into filters.
+ @filters = [:repository, :script].map do |attrsym|
+ [attrsym.to_s, "=", resource_attrs[attrsym]]
+ end
+ @filters.append(["script_version", "in",
+ Commit.find_commit_range(current_user,
+ resource_attrs[:repository],
+ params[:minimum_script_version],
+ resource_attrs[:script_version],
+ params[:exclude_script_versions])])
+ if image_search = resource_attrs[:runtime_constraints].andand["docker_image"]
+ image_tag = resource_attrs[:runtime_constraints]["docker_image_tag"]
+ image_locator = Collection.
+ uuids_for_docker_image(image_search, image_tag, @read_users).first
+ return super if image_locator.nil? # We won't find anything to reuse.
+ @filters.append(["docker_image_locator", "=", image_locator])
+ else
+ @filters.append(["docker_image_locator", "=", nil])
+ end
+ else
+ script_info = {"repository" => nil, "script" => nil}
+ # NOTE(review): Hash.new's default-value block is called with (hash, key);
+ # a single |key| param here actually binds the hash itself. The block still
+ # returns [] as the default, and the code below stores values explicitly
+ # (direct assignment and "+="), so behavior is correct — but the idiomatic
+ # form is Hash.new { |h, k| h[k] = [] }.
+ script_range = Hash.new { |key| [] }
+ @filters.select! do |filter|
+ if script_info.has_key? filter[0]
+ script_info[filter[0]] = filter[2]
+ end
+ case filter[0..1]
+ when ["script_version", "in git"]
+ script_range["min_version"] = filter.last
false
- when "not in", "not in range"
+ when ["script_version", "not in"], ["script_version", "not in git"]
begin
- exclude_script_versions += operator
+ script_range["exclude_versions"] += filter.last
rescue TypeError
- exclude_script_versions << operator
+ script_range["exclude_versions"] << filter.last
end
false
+ when ["docker_image_locator", "in docker"], ["docker_image_locator", "not in docker"]
+ filter[1].sub!(/ docker$/, '')
+ filter[2] = Collection.uuids_for_docker_image(filter[2])
+ true
else
true
end
- else
- true
end
- end
- @filters.append(["script_version", "in",
- Commit.find_commit_range(current_user,
- resource_attrs[:repository],
- minimum_script_version,
- resource_attrs[:script_version],
- exclude_script_versions)])
- # Set up default filters for specific parameters.
- if @filters.select { |f| f.first == "script" }.empty?
- @filters.append(["script", "=", resource_attrs[:script]])
+ script_info.each_pair do |key, value|
+ raise ArgumentError.new("#{key} filter required") if value.nil?
+ end
+ unless script_range.empty?
+ @filters.append(["script_version", "in",
+ Commit.find_commit_range(current_user,
+ script_info["repository"],
+ script_range["min_version"],
+ resource_attrs[:script_version],
+ script_range["exclude_versions"])])
+ end
end
+ # Search for a reusable Job, and return it if found.
@objects = Job.readable_by(current_user)
apply_filters
@object = nil
[hash_part, size_part].compact.join '+'
end
- def self.for_latest_docker_image(search_term, search_tag=nil, readers=nil)
+ def self.uuids_for_docker_image(search_term, search_tag=nil, readers=nil)
readers ||= [Thread.current[:user]]
base_search = Link.
readable_by(*readers).
coll_matches = base_search.
where(link_class: "docker_image_hash", collections: {uuid: search_term})
if match = coll_matches.first
- return find_by_uuid(match.head_uuid)
+ return [match.head_uuid]
end
# Find Collections with matching Docker image repository+tag pairs.
"docker_image_hash", "#{search_term}%")
end
- # Select the image that was created most recently. Note that the
- # SQL search order and fallback timestamp values are chosen so
- # that if image timestamps are missing, we use the image with the
- # newest link.
- latest_image_link = nil
- latest_image_timestamp = "1900-01-01T00:00:00Z"
+ # Generate an order key for each result. We want to order the results
+ # so that anything with an image timestamp is considered more recent than
+ # anything without; then we use the link's created_at as a tiebreaker.
+ uuid_timestamps = {}
matches.find_each do |link|
- link_timestamp = link.properties.fetch("image_timestamp",
- "1900-01-01T00:00:01Z")
- if link_timestamp > latest_image_timestamp
- latest_image_link = link
- latest_image_timestamp = link_timestamp
- end
+ uuid_timestamps[link.head_uuid] =
+ [(-link.properties["image_timestamp"].to_datetime.to_i rescue 0),
+ -link.created_at.to_i]
+ end
+ uuid_timestamps.keys.sort_by { |uuid| uuid_timestamps[uuid] }
+ end
+
+ # Return the Collection for the most recent Docker image matching
+ # search_term/search_tag (nil if none found). Ordering is delegated to
+ # uuids_for_docker_image, which sorts newest-first.
+ def self.for_latest_docker_image(search_term, search_tag=nil, readers=nil)
+ image_uuid = uuids_for_docker_image(search_term, search_tag, readers).first
+ if image_uuid.nil?
+ nil
+ else
+ find_by_uuid(image_uuid)
end
- latest_image_link.nil? ? nil : find_by_uuid(latest_image_link.head_uuid)
end
end
previous_job_run:
uuid: zzzzz-8i9sb-cjs4pklxxjykqqq
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ repository: foo
script: hash
script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
script_parameters:
success: true
output: ea10d51bcf88862dbcc36eb292017dfd+45
+# Like previous_job_run, but with a docker_image_locator set, so tests can
+# exercise job reuse with Docker image filters.
+previous_docker_job_run:
+ uuid: zzzzz-8i9sb-k6emstgk4kw4yhi
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ repository: foo
+ script: hash
+ script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
+ script_parameters:
+ input: fa7aeb5140e2848d39b416daeef4ffc5+45
+ an_integer: "1"
+ success: true
+ output: ea10d51bcf88862dbcc36eb292017dfd+45
+ docker_image_locator: fa3c1a9cb6783f85f2ecda037e07b8c3+167
+
previous_job_run_no_output:
uuid: zzzzz-8i9sb-cjs4pklxxjykppp
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ repository: foo
script: hash
script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
script_parameters:
nondeterminisic_job_run:
uuid: zzzzz-8i9sb-cjs4pklxxjykyyy
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ repository: foo
script: hash2
script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
script_parameters:
tail_uuid: ~
head_uuid: fa3c1a9cb6783f85f2ecda037e07b8c3+167
properties:
- image_timestamp: 2014-06-10T14:30:00.184019565Z
+ image_timestamp: "2014-06-10T14:30:00.184019565Z"
docker_image_collection_repository:
uuid: zzzzz-o0j2j-dockercollrepos
tail_uuid: ~
head_uuid: fa3c1a9cb6783f85f2ecda037e07b8c3+167
properties:
- image_timestamp: 2014-06-10T14:30:00.184019565Z
+ image_timestamp: "2014-06-10T14:30:00.184019565Z"
docker_image_collection_tag:
uuid: zzzzz-o0j2j-dockercolltagbb
tail_uuid: ~
head_uuid: fa3c1a9cb6783f85f2ecda037e07b8c3+167
properties:
- image_timestamp: 2014-06-10T14:30:00.184019565Z
+ image_timestamp: "2014-06-10T14:30:00.184019565Z"
docker_image_collection_tag2:
uuid: zzzzz-o0j2j-dockercolltagbc
tail_uuid: ~
head_uuid: fa3c1a9cb6783f85f2ecda037e07b8c3+167
properties:
- image_timestamp: 2014-06-10T14:30:00.184019565Z
+ image_timestamp: "2014-06-10T14:30:00.184019565Z"
ancient_docker_image_collection_hash:
# This image helps test that searches for Docker images find
tail_uuid: ~
head_uuid: b519d9cb706a29fc7ea24dbea2f05851+249025
properties:
- image_timestamp: 2010-06-10T14:30:00.184019565Z
+ image_timestamp: "2010-06-10T14:30:00.184019565Z"
assert_equal '077ba2ad3ea24a929091a9e6ce545c93199b8e57', new_job['script_version']
end
+ # Canonical filter set matching the previous_job_run fixture; individual
+ # tests merge/reject entries to vary one dimension at a time.
+ BASE_FILTERS = {
+ 'repository' => ['=', 'foo'],
+ 'script' => ['=', 'hash'],
+ 'script_version' => ['in git', 'master'],
+ 'docker_image_locator' => ['=', nil],
+ }
+
+ # Convert {name => [operator, operand]} into the API's filter-list form:
+ # [[name, operator, operand], ...].
+ def filters_from_hash(hash)
+ hash.each_pair.map { |name, filter| [name] + filter }
+ end
+
+
test "can reuse a Job based on filters" do
+ # Replaces the old special-purpose "in range" creation parameter with
+ # the new "in git" filter form.
+ filter_h = BASE_FILTERS.
+ merge('script_version' => ['in git', 'tag1'])
post(:create, {
job: {
script: "hash",
an_integer: '1'
}
},
- filters: [["script_version", "in range", "tag1"]],
+ filters: filters_from_hash(filter_h),
find_or_create: true,
})
assert_response :success
end
test "can not reuse a Job based on filters" do
+ # Uses the renamed "in git"/"not in git" operators; the version range
+ # excludes the fixture's commit, so no existing job should match.
+ filter_a = filters_from_hash(BASE_FILTERS.reject { |k| k == 'script_version' })
+ filter_a += [["script_version", "in git",
+ "31ce37fe365b3dc204300a3e4c396ad333ed0556"],
+ ["script_version", "not in git", ["tag1"]]]
post(:create, {
job: {
script: "hash",
an_integer: '1'
}
},
- filters: [["script_version", "in range",
- "31ce37fe365b3dc204300a3e4c396ad333ed0556"],
- ["script_version", "not in", ["tag1"]]],
+ filters: filter_a,
find_or_create: true,
})
assert_response :success
end
test "can not reuse a Job based on arbitrary filters" do
+ # The created_at cutoff predates every fixture job, so a new job must be
+ # created instead of reusing one.
+ filter_h = BASE_FILTERS.
+ merge("created_at" => ["<", "2010-01-01T00:00:00Z"])
post(:create, {
job: {
script: "hash",
an_integer: '1'
}
},
- filters: [["created_at", "<", "2010-01-01T00:00:00Z"]],
+ filters: filters_from_hash(filter_h),
find_or_create: true,
})
assert_response :success
assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
end
+
+
+ # Reuse keyed on runtime_constraints["docker_image"]: the request should
+ # match the previous_docker_job_run fixture rather than create a new job.
+ test "can reuse a Job with a Docker image" do
+ post(:create, {
+ job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ },
+ runtime_constraints: {
+ docker_image: 'arvados/apitestfixture',
+ }
+ },
+ find_or_create: true,
+ })
+ assert_response :success
+ new_job = assigns(:object)
+ assert_not_nil new_job
+ target_job = jobs(:previous_docker_job_run)
+ [:uuid, :script_version, :docker_image_locator].each do |attr|
+ assert_equal(target_job.send(attr), new_job.send(attr))
+ end
+ end
+
+
+ # Same reuse outcome as above, but requested via an explicit
+ # "in docker" filter on docker_image_locator instead of
+ # runtime_constraints.
+ test "can reuse a Job with a Docker image hash filter" do
+ filter_h = BASE_FILTERS.
+ merge("script_version" =>
+ ["=", "4fe459abe02d9b365932b8f5dc419439ab4e2577"],
+ "docker_image_locator" =>
+ ["in docker", links(:docker_image_collection_hash).name])
+ post(:create, {
+ job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ },
+ },
+ filters: filters_from_hash(filter_h),
+ find_or_create: true,
+ })
+ assert_response :success
+ new_job = assigns(:object)
+ assert_not_nil new_job
+ target_job = jobs(:previous_docker_job_run)
+ [:uuid, :script_version, :docker_image_locator].each do |attr|
+ assert_equal(target_job.send(attr), new_job.send(attr))
+ end
+ end
+
+
+ # An "in docker" filter that matches no image should fall through to
+ # creating a brand-new job (no reuse of the Docker fixture job).
+ test "new job with unknown Docker image filter" do
+ filter_h = BASE_FILTERS.
+ merge("docker_image_locator" => ["in docker", "_nonesuchname_"])
+ post(:create, {
+ job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ },
+ },
+ filters: filters_from_hash(filter_h),
+ find_or_create: true,
+ })
+ assert_response :success
+ new_job = assigns(:object)
+ assert_not_nil new_job
+ assert_not_equal(jobs(:previous_docker_job_run).uuid, new_job.uuid)
+ end
+
+
+ # The controller requires repository and script filters when
+ # find_or_create is set; omitting either should produce an error status.
+ # NOTE(review): 405..599 is a loose range — presumably the ArgumentError
+ # maps to a 4xx/5xx whose exact code isn't pinned down; verify against
+ # the error-rendering middleware.
+ ["repository", "script"].each do |skip_key|
+ test "missing #{skip_key} filter raises an error" do
+ filter_a = filters_from_hash(BASE_FILTERS.reject { |k| k == skip_key })
+ post(:create, {
+ job: {
+ script: "hash",
+ script_version: "master",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ },
+ filters: filter_a,
+ find_or_create: true,
+ })
+ assert_includes(405..599, @response.code.to_i,
+ "bad status code with missing #{skip_key} filter")
+ end
+ end
end