# Job: ActiveRecord model for an Arvados batch job ("crunch v1").
# NOTE(review): this listing is elided — embedded line numbers are
# non-contiguous, so interior lines are missing from view.
1 class Job < ArvadosModel
4 include CommonApiTemplate
5 extend CurrentApiClient
# Hash columns stored serialized in the database.
6 serialize :components, Hash
# Derived by validations below (git/docker resolution); clients may not
# set these directly.
7 attr_protected :arvados_sdk_version, :docker_image_locator
8 serialize :script_parameters, Hash
9 serialize :runtime_constraints, Hash
10 serialize :tasks_summary, Hash
# Callback/validation pipeline. Ordering is significant: legacy state
# attrs are normalized (update_state_from_old_state_attrs) before the
# state-dependent validations run.
11 before_create :ensure_unique_submit_id
12 after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
13 before_validation :set_priority
14 before_validation :update_state_from_old_state_attrs
15 before_validation :update_script_parameters_digest
16 validate :ensure_script_version_is_commit
17 validate :find_docker_image_locator
18 validate :find_arvados_sdk_version
19 validate :validate_status
20 validate :validate_state_change
21 validate :ensure_no_collection_uuids_in_script_params
22 before_save :tag_version_in_internal_repository
23 before_save :update_timestamps_when_state_changes
25 has_many :commit_ancestors, :foreign_key => :descendant, :primary_key => :script_version
26 has_many(:nodes, foreign_key: :job_uuid, primary_key: :uuid)
# Raised by ensure_unique_submit_id when a client reuses a submit_id.
28 class SubmitIdReused < StandardError
# Attributes exposed via the common API template (fragment: additional
# t.add lines are elided from this listing).
31 api_accessible :user, extend: :common do |t|
35 t.add :script_parameters
38 t.add :cancelled_by_client_uuid
39 t.add :cancelled_by_user_uuid
46 t.add :is_locked_by_uuid
48 t.add :runtime_constraints
50 t.add :nondeterministic
52 t.add :supplied_script_version
53 t.add :arvados_sdk_version
54 t.add :docker_image_locator
61 # Supported states for a job
# NOTE(review): the enclosing States array literal (and at least the
# Queued/Failed entries referenced elsewhere in this file) is elided.
64 (Running = 'Running'),
65 (Cancelled = 'Cancelled'),
67 (Complete = 'Complete'),
# Fragment of a finish/cancel helper: stamps finished_at with the DB
# clock if unset, and coerces a nil success flag to false.
71 update_attributes(finished_at: finished_at || db_current_time,
72 success: success.nil? ? false : success,
# Queued jobs, highest priority first, oldest first within a priority.
81 self.where('state = ?', Queued).order('priority desc, created_at')
85 # We used to report this accurately, but the implementation made queue
86 # API requests O(n**2) for the size of the queue. See #8800.
87 # We've soft-disabled it because it's not clear we even want this
88 # functionality: now that we have Node Manager with support for multiple
89 # node sizes, "queue position" tells you very little about when a job will
# Soft-disabled queue position: 0 for any queued job, nil otherwise.
91 state == Queued ? 0 : nil
# Running jobs, same ordering as the queue.
95 self.where('running = ?', true).
96 order('priority desc, created_at')
# Take the dispatch lock on this job for locked_by_uuid. Only a queued,
# currently-unlocked job may be locked; otherwise AlreadyLockedError.
# (Elided lines presumably flip state/running and save — TODO confirm.)
99 def lock locked_by_uuid
101 unless self.state == Queued and self.is_locked_by_uuid.nil?
102 raise AlreadyLockedError
105 self.is_locked_by_uuid = locked_by_uuid
# Maintain an order-insensitive digest of script_parameters, used for
# cheap equality lookups during job reuse (see find_reusable).
110 def update_script_parameters_digest
111 Job.benchmark("Job.update_script_parameters_digest #{self.uuid}") do
112 self.script_parameters_digest = self.class.sorted_hash_digest(script_parameters)
# The digest column is an internal index; hide it from generic search.
116 def self.searchable_columns operator
117 super - ["script_parameters_digest"]
120 def self.load_job_specific_filters attrs, orig_filters, read_users
121 # Convert Job-specific @filters entries into general SQL filters.
122 script_info = {"repository" => nil, "script" => nil}
# Per-attribute git version constraints. Block-form Hash.new gives each
# key its own fresh default hash (avoids the shared-default pitfall).
123 git_filters = Hash.new do |hash, key|
124 hash[key] = {"max_version" => "HEAD", "exclude_versions" => []}
127 orig_filters.each do |attr, operator, operand|
# "repository"/"script" equality filters must not contradict each other.
128 if (script_info.has_key? attr) and (operator == "=")
129 if script_info[attr].nil?
130 script_info[attr] = operand
131 elsif script_info[attr] != operand
132 raise ArgumentError.new("incompatible #{attr} filters")
137 git_filters[attr]["min_version"] = operand
139 git_filters[attr]["exclude_versions"] += Array.wrap(operand)
# Translate docker image searches into portable-data-hash membership.
140 when "in docker", "not in docker"
141 image_hashes = Array.wrap(operand).flat_map do |search_term|
# Optional ":tag" suffix on the search term.
142 image_search, image_tag = search_term.split(':', 2)
144 find_all_for_docker_image(image_search, image_tag, read_users).
145 map(&:portable_data_hash)
147 filters << [attr, operator.sub(/ docker$/, ""), image_hashes]
# Any other filter passes through unchanged.
149 filters << [attr, operator, operand]
153 # Build a real script_version filter from any "not? in git" filters.
154 git_filters.each_pair do |attr, filter|
156 when "script_version"
# A git range on script_version needs repository and script filters.
157 script_info.each_pair do |key, value|
159 raise ArgumentError.new("script_version filter needs #{key} filter")
162 filter["repository"] = script_info["repository"]
163 if attrs[:script_version]
164 filter["max_version"] = attrs[:script_version]
166 # Using HEAD, set earlier by the hash default, is fine.
168 when "arvados_sdk_version"
169 filter["repository"] = "arvados"
171 raise ArgumentError.new("unknown attribute for git filter: #{attr}")
# Expand the git range into a concrete list of commit hashes.
173 revisions = Commit.find_commit_range(filter["repository"],
174 filter["min_version"],
175 filter["max_version"],
176 filter["exclude_versions"])
179 new("error searching #{filter['repository']} from " +
180 "'#{filter['min_version']}' to '#{filter['max_version']}', " +
181 "excluding #{filter['exclude_versions']}")
183 filters.append([attr, "in", revisions])
# Look for an existing job (same script, version range, parameters,
# docker image) whose result can be reused instead of running anew.
189 def self.find_reusable attrs, params, filters, read_users
190 if filters.empty? # Translate older creation parameters into filters.
192 [["repository", "=", attrs[:repository]],
193 ["script", "=", attrs[:script]],
194 ["script_version", "not in git", params[:exclude_script_versions]],
195 ].reject { |filter| filter.last.nil? or filter.last.empty? }
196 if !params[:minimum_script_version].blank?
197 filters << ["script_version", "in git",
198 params[:minimum_script_version]]
200 filters += default_git_filters("script_version", attrs[:repository],
201 attrs[:script_version])
# Optional docker constraint, resolved to a portable data hash.
203 if image_search = attrs[:runtime_constraints].andand["docker_image"]
204 if image_tag = attrs[:runtime_constraints]["docker_image_tag"]
205 image_search += ":#{image_tag}"
207 image_locator = Collection.
208 for_latest_docker_image(image_search).andand.portable_data_hash
212 filters << ["docker_image_locator", "=", image_locator]
213 if sdk_version = attrs[:runtime_constraints].andand["arvados_sdk_version"]
214 filters += default_git_filters("arvados_sdk_version", "arvados", sdk_version)
216 filters = load_job_specific_filters(attrs, filters, read_users)
219 # Check specified filters for some reasonableness.
220 filter_names = filters.map { |f| f.first }.uniq
221 ["repository", "script"].each do |req_filter|
222 if not filter_names.include?(req_filter)
223 return send_error("#{req_filter} filter required")
227 # Search for a reusable Job, and return it if found.
# Reusable: any readable Complete job, or the current user's own
# Queued/Running jobs; must match the parameter digest and not be
# flagged nondeterministic.
229 readable_by(current_user).
230 where('state = ? or (owner_uuid = ? and state in (?))',
231 Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
232 where('script_parameters_digest = ?', Job.sorted_hash_digest(attrs[:script_parameters])).
233 where('nondeterministic is distinct from ?', true).
234 order('state desc, created_at') # prefer Running jobs over Queued
235 candidates = apply_filters candidates, filters
238 candidates.each do |j|
239 if j.state != Job::Complete
240 # We'll use this if we don't find a job that has completed
246 # We have already decided not to reuse any completed job
249 if chosen.output != j.output
250 # If two matching jobs produced different outputs, run a new
251 # job (or use one that's already running/queued) instead of
252 # choosing one arbitrarily.
255 # ...and that's the only thing we need to do once we've chosen
257 elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
258 # As soon as the output we will end up returning (if any) is
259 # decided, check whether it will be visible to the user; if
260 # not, any further investigation of reusable jobs is futile.
# Prefer a matching completed job; fall back to a queued/running one.
266 chosen || incomplete_job
269 def self.default_git_filters(attr_name, repo_name, refspec)
270 # Add a filter to @filters for `attr_name` = the latest commit available
271 # in `repo_name` at `refspec`. No filter is added if refspec can't be
273 commits = Commit.find_commit_range(repo_name, nil, refspec, nil)
274 if commit_hash = commits.first
275 [[attr_name, "=", commit_hash]]
# Order-insensitive MD5 digest of a hash, used to compare
# script_parameters across jobs (see find_reusable).
283 def self.sorted_hash_digest h
284 Digest::MD5.hexdigest(Oj.dump(deep_sort_hash(h)))
# output and log hold collection references; include them in the
# generic foreign-key attribute checks.
287 def foreign_key_attributes
288 super + %w(output log)
291 def skip_uuid_read_permission_check
292 super + %w(cancelled_by_client_uuid)
295 def skip_uuid_existence_check
296 super + %w(output log)
# set_priority fragment: defaults a nil priority (default value elided).
300 if self.priority.nil?
# Resolve script_version to a full commit sha1 (or add a validation
# error), keeping the human-supplied ref in supplied_script_version.
306 def ensure_script_version_is_commit
307 Job.benchmark("Job.ensure_script_version_is_commit #{self.uuid}") do
309 # Apparently client has already decided to go for it. This is
310 # needed to run a local job using a local working directory
311 # instead of a commit-ish.
314 if new_record? or repository_changed? or script_version_changed?
315 sha1 = Commit.find_commit_range(repository,
316 nil, script_version, nil).first
318 errors.add :script_version, "#{script_version} does not resolve to a commit"
321 if supplied_script_version.nil? or supplied_script_version.empty?
322 self.supplied_script_version = script_version
324 self.script_version = sha1
# Tag the resolved commit with this job's uuid in the internal
# repository so the exact version survives upstream branch changes.
330 def tag_version_in_internal_repository
331 Job.benchmark("Job.tag_version_in_internal_repository #{self.uuid}") do
333 # No point now. See ensure_script_version_is_commit.
336 # Won't be saved, and script_version might not even be valid.
338 elsif new_record? or repository_changed? or script_version_changed?
342 Commit.tag_in_internal_repository repository, script_version, uuid
# Reject a duplicate client-supplied submit_id (idempotency token).
351 def ensure_unique_submit_id
353 if Job.where('submit_id=?',self.submit_id).first
354 raise SubmitIdReused.new
# Shared resolver: look up `key` in runtime_constraints, yield the
# value to the block (which returns [ok, result]); on success assign
# result to attr_sym, otherwise record result as a validation error.
360 def resolve_runtime_constraint(key, attr_sym)
361 if ((runtime_constraints.is_a? Hash) and
362 (search = runtime_constraints[key]))
363 ok, result = yield search
# No constraint given: succeed with a nil attribute value.
365 ok, result = true, nil
368 send("#{attr_sym}=".to_sym, result)
370 errors.add(attr_sym, result)
# Resolve the arvados_sdk_version constraint to a commit in the
# "arvados" repository; only valid alongside a docker_image constraint.
375 def find_arvados_sdk_version
376 Job.benchmark("Job.find_arvados_sdk_version #{self.uuid}") do
377 resolve_runtime_constraint("arvados_sdk_version",
378 :arvados_sdk_version) do |git_search|
379 commits = Commit.find_commit_range("arvados",
380 nil, git_search, nil)
382 [false, "#{git_search} does not resolve to a commit"]
383 elsif not runtime_constraints["docker_image"]
384 [false, "cannot be specified without a Docker image constraint"]
386 [true, commits.first]
# Resolve the docker_image constraint (falling back to the site-wide
# default image, if configured) to a collection portable data hash.
392 def find_docker_image_locator
393 Job.benchmark("Job.find_docker_image_locator #{self.uuid}") do
394 runtime_constraints['docker_image'] =
395 Rails.configuration.default_docker_image_for_jobs if ((runtime_constraints.is_a? Hash) and
396 (runtime_constraints['docker_image']).nil? and
397 Rails.configuration.default_docker_image_for_jobs)
398 resolve_runtime_constraint("docker_image",
399 :docker_image_locator) do |image_search|
400 image_tag = runtime_constraints['docker_image_tag']
401 if coll = Collection.for_latest_docker_image(image_search, image_tag)
402 [true, coll.portable_data_hash]
404 [false, "not found for #{image_search}"]
# Authorization for updates. While a job is locked, only the lock
# holder or the system user may change protected attributes or the
# lock; violations are logged and (per the elided lines, presumably)
# rejected — TODO confirm return values against the full source.
410 def permission_to_update
411 if is_locked_by_uuid_was and !(current_user and
412 (current_user.uuid == is_locked_by_uuid_was or
413 current_user.uuid == system_user.uuid))
# Protected attributes: script identity, cancellation bookkeeping
# (once cancelled), timestamps, task summary (list partially elided).
414 if script_changed? or
415 script_parameters_changed? or
416 script_version_changed? or
417 (!cancelled_at_was.nil? and
418 (cancelled_by_client_uuid_changed? or
419 cancelled_by_user_uuid_changed? or
420 cancelled_at_changed?)) or
421 started_at_changed? or
422 finished_at_changed? or
427 tasks_summary_changed? or
430 logger.warn "User #{current_user.uuid if current_user} tried to change protected job attributes on locked #{self.class.to_s} #{uuid_was}"
434 if !is_locked_by_uuid_changed?
# Lock ownership rules: anonymous users may not touch the lock, a lock
# may not be stolen, and a user may only lock with their own uuid.
438 logger.warn "Anonymous user tried to change lock on #{self.class.to_s} #{uuid_was}"
440 elsif is_locked_by_uuid_was and is_locked_by_uuid_was != current_user.uuid
441 logger.warn "User #{current_user.uuid} tried to steal lock on #{self.class.to_s} #{uuid_was} from #{is_locked_by_uuid_was}"
443 elsif !is_locked_by_uuid.nil? and is_locked_by_uuid != current_user.uuid
444 logger.warn "User #{current_user.uuid} tried to lock #{self.class.to_s} #{uuid_was} with uuid #{is_locked_by_uuid}"
452 def update_modified_by_fields
453 if self.cancelled_at_changed?
454 # Ensure cancelled_at cannot be set to arbitrary non-now times,
455 # or changed once it is set.
456 if self.cancelled_at and not self.cancelled_at_was
# First cancellation: stamp server time and record who/what cancelled.
457 self.cancelled_at = db_current_time
458 self.cancelled_by_user_uuid = current_user.uuid
459 self.cancelled_by_client_uuid = current_api_client.andand.uuid
460 @need_crunch_dispatch_trigger = true
# Any other change to the cancellation fields: restore prior values.
462 self.cancelled_at = self.cancelled_at_was
463 self.cancelled_by_user_uuid = self.cancelled_by_user_uuid_was
464 self.cancelled_by_client_uuid = self.cancelled_by_client_uuid_was
# after_commit hook: poke crunch-dispatch when a cancellation happened.
470 def trigger_crunch_dispatch_if_cancelled
471 if @need_crunch_dispatch_trigger
472 File.open(Rails.configuration.crunch_refresh_trigger, 'wb') do
473 # That's all, just create/touch a file for crunch-job to see.
# before_save hook: stamp started_at/finished_at/cancelled_at as the
# state attribute transitions (case/when structure partially elided).
478 def update_timestamps_when_state_changes
479 return if not (state_changed? or new_record?)
483 self.started_at ||= db_current_time
484 when Failed, Complete
485 self.finished_at ||= db_current_time
487 self.cancelled_at ||= db_current_time
490 # TODO: Remove the following case block when old "success" and
491 # "running" attrs go away. Until then, this ensures we still
492 # expose correct success/running flags to older clients, even if
493 # some new clients are writing only the new state attribute.
501 when Cancelled, Failed
508 self.running ||= false # Default to false instead of nil.
510 @need_crunch_dispatch_trigger = true
515 def update_state_from_old_state_attrs
516 # If a client has touched the legacy state attrs, update the
517 # "state" attr to agree with the updated values of the legacy
520 # TODO: Remove this method when old "success" and "running" attrs
522 if cancelled_at_changed? or
# Legacy mapping (branches partially elided): cancelled_at set =>
# Cancelled; success false/true => Failed?/Complete; running => Running.
527 self.state = Cancelled
528 elsif success == false
530 elsif success == true
531 self.state = Complete
532 elsif running == true
# validate_status fragment: state must be one of the States constants.
542 if self.state.in?(States)
545 errors.add :state, "#{state.inspect} must be one of: #{States.inspect}"
# Enforce the legal state machine transitions.
550 def validate_state_change
552 if self.state_changed?
553 ok = case self.state_was
555 # state isn't set yet
558 # Permit going from queued to any state
561 # From running, may only transition to a finished state
562 [Complete, Failed, Cancelled].include? self.state
563 when Complete, Failed, Cancelled
564 # Once in a finished state, don't permit any more state changes
567 # Any other state transition is also invalid
571 errors.add :state, "invalid change from #{self.state_was} to #{self.state}"
# Reject script_parameters that embed collection uuids; callers must
# reference collections by portable_data_hash instead.
577 def ensure_no_collection_uuids_in_script_params
578 # recursive_hash_search searches recursively through hashes and
579 # arrays in 'thing' for string fields matching regular expression
580 # 'pattern'. Returns true if pattern is found, false otherwise.
581 def recursive_hash_search thing, pattern
584 return true if recursive_hash_search v, pattern
586 elsif thing.is_a? Array
588 return true if recursive_hash_search k, pattern
590 elsif thing.is_a? String
591 return true if thing.match pattern
596 Job.benchmark("Job.ensure_no_collection_uuids_in_script_params #{self.uuid}") do
597 # Fail validation if any script_parameters field includes a string containing a
598 # collection uuid pattern.
599 if self.script_parameters_changed?
600 if recursive_hash_search(self.script_parameters, Collection.uuid_regex)
601 self.errors.add :script_parameters, "must use portable_data_hash instead of collection uuid"