11017: When compute nodes use image format v2, prefer migrated docker images.
[arvados.git] services/api/app/models/job.rb
class Job < ArvadosModel
  include HasUuid
  include KindAndEtag
  include CommonApiTemplate
  extend CurrentApiClient
  serialize :components, Hash
  attr_protected :arvados_sdk_version, :docker_image_locator
  serialize :script_parameters, Hash
  serialize :runtime_constraints, Hash
  serialize :tasks_summary, Hash
  before_create :ensure_unique_submit_id
  after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
  before_validation :set_priority
  before_validation :update_state_from_old_state_attrs
  before_validation :update_script_parameters_digest
  validate :ensure_script_version_is_commit
  validate :find_docker_image_locator
  validate :find_arvados_sdk_version
  validate :validate_status
  validate :validate_state_change
  validate :ensure_no_collection_uuids_in_script_params
  before_save :tag_version_in_internal_repository
  before_save :update_timestamps_when_state_changes

  has_many :commit_ancestors, :foreign_key => :descendant, :primary_key => :script_version
  has_many(:nodes, foreign_key: :job_uuid, primary_key: :uuid)

  class SubmitIdReused < StandardError
  end

  api_accessible :user, extend: :common do |t|
    t.add :submit_id
    t.add :priority
    t.add :script
    t.add :script_parameters
    t.add :script_version
    t.add :cancelled_at
    t.add :cancelled_by_client_uuid
    t.add :cancelled_by_user_uuid
    t.add :started_at
    t.add :finished_at
    t.add :output
    t.add :success
    t.add :running
    t.add :state
    t.add :is_locked_by_uuid
    t.add :log
    t.add :runtime_constraints
    t.add :tasks_summary
    t.add :nondeterministic
    t.add :repository
    t.add :supplied_script_version
    t.add :arvados_sdk_version
    t.add :docker_image_locator
    t.add :queue_position
    t.add :node_uuids
    t.add :description
    t.add :components
  end

  # Supported states for a job
  States = [
            (Queued = 'Queued'),
            (Running = 'Running'),
            (Cancelled = 'Cancelled'),
            (Failed = 'Failed'),
            (Complete = 'Complete'),
           ]

  after_initialize do
    @need_crunch_dispatch_trigger = false
  end

  def assert_finished
    update_attributes(finished_at: finished_at || db_current_time,
                      success: success.nil? ? false : success,
                      running: false)
  end

  def node_uuids
    nodes.map(&:uuid)
  end

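  # Queued jobs in the order a dispatcher would consider them: highest
  # priority first, oldest first within the same priority. Illustrative
  # (caller assumed, not defined in this file): Job.queue.first is,
  # roughly, the next candidate for dispatch.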
  def self.queue
    self.where('state = ?', Queued).order('priority desc, created_at')
  end

  def queue_position
    # We used to report this accurately, but the implementation made queue
    # API requests O(n**2) for the size of the queue.  See #8800.
    # We've soft-disabled it because it's not clear we even want this
    # functionality: now that we have Node Manager with support for multiple
    # node sizes, "queue position" tells you very little about when a job will
    # run.
    state == Queued ? 0 : nil
  end

  def self.running
    self.where('running = ?', true).
      order('priority desc, created_at')
  end

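  # Take the dispatcher lock on a Queued job and move it to Running.
  # Raises AlreadyLockedError if the job is not Queued or is already
  # locked. Illustrative use (caller and uuid assumed):
  #
  #   job.lock(dispatcher_uuid)  # succeeds once; a second caller
  #                              # gets AlreadyLockedError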
  def lock locked_by_uuid
    with_lock do
      unless self.state == Queued and self.is_locked_by_uuid.nil?
        raise AlreadyLockedError
      end
      self.state = Running
      self.is_locked_by_uuid = locked_by_uuid
      self.save!
    end
  end

  def update_script_parameters_digest
    self.script_parameters_digest = self.class.sorted_hash_digest(script_parameters)
  end

  def self.searchable_columns operator
    super - ["script_parameters_digest"]
  end

  def self.full_text_searchable_columns
    super - ["script_parameters_digest"]
  end

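  # Illustrative filter translation (operands made up): an input filter
  # like
  #   ["docker_image_locator", "in docker", "arvados/jobs"]
  # becomes
  #   ["docker_image_locator", "in", [<portable data hashes of matching images>]]
  # and a git operator such as
  #   ["script_version", "in git", "mybranch"]
  # becomes
  #   ["script_version", "in", [<commit sha1s in the resolved range>]]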
  def self.load_job_specific_filters attrs, orig_filters, read_users
    # Convert Job-specific @filters entries into general SQL filters.
    script_info = {"repository" => nil, "script" => nil}
    git_filters = Hash.new do |hash, key|
      hash[key] = {"max_version" => "HEAD", "exclude_versions" => []}
    end
    filters = []
    orig_filters.each do |attr, operator, operand|
      if (script_info.has_key? attr) and (operator == "=")
        if script_info[attr].nil?
          script_info[attr] = operand
        elsif script_info[attr] != operand
          raise ArgumentError.new("incompatible #{attr} filters")
        end
      end
      case operator
      when "in git"
        git_filters[attr]["min_version"] = operand
      when "not in git"
        git_filters[attr]["exclude_versions"] += Array.wrap(operand)
      when "in docker", "not in docker"
        image_hashes = Array.wrap(operand).flat_map do |search_term|
          image_search, image_tag = search_term.split(':', 2)
          Collection.
            find_all_for_docker_image(image_search, image_tag, read_users).
            map(&:portable_data_hash)
        end
        filters << [attr, operator.sub(/ docker$/, ""), image_hashes]
      else
        filters << [attr, operator, operand]
      end
    end

    # Build a real script_version filter from any "not? in git" filters.
    git_filters.each_pair do |attr, filter|
      case attr
      when "script_version"
        script_info.each_pair do |key, value|
          if value.nil?
            raise ArgumentError.new("script_version filter needs #{key} filter")
          end
        end
        filter["repository"] = script_info["repository"]
        if attrs[:script_version]
          filter["max_version"] = attrs[:script_version]
        else
          # Using HEAD, set earlier by the hash default, is fine.
        end
      when "arvados_sdk_version"
        filter["repository"] = "arvados"
      else
        raise ArgumentError.new("unknown attribute for git filter: #{attr}")
      end
      revisions = Commit.find_commit_range(filter["repository"],
                                           filter["min_version"],
                                           filter["max_version"],
                                           filter["exclude_versions"])
      if revisions.empty?
        raise ArgumentError.
          new("error searching #{filter['repository']} from " +
              "'#{filter['min_version']}' to '#{filter['max_version']}', " +
              "excluding #{filter['exclude_versions']}")
      end
      filters.append([attr, "in", revisions])
    end

    filters
  end

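  # Reuse decision, in brief (restating the logic below): among jobs
  # with the same sorted script_parameters digest that pass `filters`,
  # prefer a Completed job whose output collection is readable by the
  # current user, but only if every matching Completed job produced the
  # same output; otherwise fall back to the user's own Queued/Running
  # candidate, or return nil so the caller submits a new job.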
  def self.find_reusable attrs, params, filters, read_users
    if filters.empty?  # Translate older creation parameters into filters.
      filters =
        [["repository", "=", attrs[:repository]],
         ["script", "=", attrs[:script]],
         ["script_version", "not in git", params[:exclude_script_versions]],
        ].reject { |filter| filter.last.nil? or filter.last.empty? }
      if !params[:minimum_script_version].blank?
        filters << ["script_version", "in git",
                     params[:minimum_script_version]]
      else
        filters += default_git_filters("script_version", attrs[:repository],
                                       attrs[:script_version])
      end
      if image_search = attrs[:runtime_constraints].andand["docker_image"]
        if image_tag = attrs[:runtime_constraints]["docker_image_tag"]
          image_search += ":#{image_tag}"
        end
        image_locator = Collection.
          for_latest_docker_image(image_search).andand.portable_data_hash
      else
        image_locator = nil
      end
      filters << ["docker_image_locator", "=",
                  Collection.docker_migration_pdh(read_users, image_locator)]
      if sdk_version = attrs[:runtime_constraints].andand["arvados_sdk_version"]
        filters += default_git_filters("arvados_sdk_version", "arvados", sdk_version)
      end
      filters = load_job_specific_filters(attrs, filters, read_users)
    end

    # Check specified filters for some reasonableness.
    filter_names = filters.map { |f| f.first }.uniq
    ["repository", "script"].each do |req_filter|
      if not filter_names.include?(req_filter)
        return send_error("#{req_filter} filter required")
      end
    end

    # Search for a reusable Job, and return it if found.
    candidates = Job.
      readable_by(current_user).
      where('state = ? or (owner_uuid = ? and state in (?))',
            Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
      where('script_parameters_digest = ?', Job.sorted_hash_digest(attrs[:script_parameters])).
      where('nondeterministic is distinct from ?', true).
      order('state desc, created_at') # prefer Running jobs over Queued
    candidates = apply_filters candidates, filters
    chosen = nil
    incomplete_job = nil
    candidates.each do |j|
      if j.state != Job::Complete
        # We'll use this if we don't find a job that has completed
        incomplete_job ||= j
        next
      end

      if chosen == false
        # We have already decided not to reuse any completed job
        next
      elsif chosen
        if chosen.output != j.output
          # If two matching jobs produced different outputs, run a new
          # job (or use one that's already running/queued) instead of
          # choosing one arbitrarily.
          chosen = false
        end
        # ...and that's the only thing we need to do once we've chosen
        # a job to reuse.
      elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
        # As soon as the output we will end up returning (if any) is
        # decided, check whether it will be visible to the user; if
        # not, any further investigation of reusable jobs is futile.
        chosen = false
      else
        chosen = j
      end
    end
    chosen || incomplete_job
  end

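  # Illustrative (repository and ref assumed):
  #   default_git_filters("script_version", "foo", "main")
  # yields [["script_version", "=", "<sha1 that main resolves to>"]],
  # or [] if "main" can't be resolved in repository "foo".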
  def self.default_git_filters(attr_name, repo_name, refspec)
    # Add a filter to @filters for `attr_name` = the latest commit available
    # in `repo_name` at `refspec`.  No filter is added if refspec can't be
    # resolved.
    commits = Commit.find_commit_range(repo_name, nil, refspec, nil)
    if commit_hash = commits.first
      [[attr_name, "=", commit_hash]]
    else
      []
    end
  end

  protected

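  # Digest of a hash whose keys are deep-sorted first, so logically
  # equal script_parameters hash to the same value regardless of key
  # order. Illustrative:
  #
  #   sorted_hash_digest({"a" => 1, "b" => 2}) ==
  #     sorted_hash_digest({"b" => 2, "a" => 1})   # => true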
  def self.sorted_hash_digest h
    Digest::MD5.hexdigest(Oj.dump(deep_sort_hash(h)))
  end

  def foreign_key_attributes
    super + %w(output log)
  end

  def skip_uuid_read_permission_check
    super + %w(cancelled_by_client_uuid)
  end

  def skip_uuid_existence_check
    super + %w(output log)
  end

  def set_priority
    if self.priority.nil?
      self.priority = 0
    end
    true
  end

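  # Illustrative (ref name assumed): a job submitted with
  # script_version "main" is saved with script_version set to the
  # resolved commit sha1 and supplied_script_version set to "main".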
  def ensure_script_version_is_commit
    if state == Running
      # Apparently client has already decided to go for it. This is
      # needed to run a local job using a local working directory
      # instead of a commit-ish.
      return true
    end
    if new_record? or repository_changed? or script_version_changed?
      sha1 = Commit.find_commit_range(repository,
                                      nil, script_version, nil).first
      if not sha1
        errors.add :script_version, "#{script_version} does not resolve to a commit"
        return false
      end
      if supplied_script_version.nil? or supplied_script_version.empty?
        self.supplied_script_version = script_version
      end
      self.script_version = sha1
    end
    true
  end

  def tag_version_in_internal_repository
    if state == Running
      # No point now. See ensure_script_version_is_commit.
      true
    elsif errors.any?
      # Won't be saved, and script_version might not even be valid.
      true
    elsif new_record? or repository_changed? or script_version_changed?
      uuid_was = uuid
      begin
        assign_uuid
        Commit.tag_in_internal_repository repository, script_version, uuid
      rescue
        self.uuid = uuid_was
        raise
      end
    end
  end

  def ensure_unique_submit_id
    if !submit_id.nil?
      if Job.where('submit_id=?',self.submit_id).first
        raise SubmitIdReused.new
      end
    end
    true
  end

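  # Shared helper for the find_* validations below. If
  # runtime_constraints[key] is present, the block must return
  # [ok, result]: on success the resolved result is assigned to
  # attr_sym; otherwise result is added as a validation error on
  # attr_sym. Returns ok.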
  def resolve_runtime_constraint(key, attr_sym)
    if ((runtime_constraints.is_a? Hash) and
        (search = runtime_constraints[key]))
      ok, result = yield search
    else
      ok, result = true, nil
    end
    if ok
      send("#{attr_sym}=".to_sym, result)
    else
      errors.add(attr_sym, result)
    end
    ok
  end

  def find_arvados_sdk_version
    resolve_runtime_constraint("arvados_sdk_version",
                               :arvados_sdk_version) do |git_search|
      commits = Commit.find_commit_range("arvados",
                                         nil, git_search, nil)
      if commits.empty?
        [false, "#{git_search} does not resolve to a commit"]
      elsif not runtime_constraints["docker_image"]
        [false, "cannot be specified without a Docker image constraint"]
      else
        [true, commits.first]
      end
    end
  end

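  # Resolve the "docker_image" runtime constraint to a collection
  # portable_data_hash, then (per #11017) ask Collection for the
  # migrated image's PDH so that compute nodes using image format v2
  # prefer the migrated Docker image. The substitution rule itself
  # lives in Collection.docker_migration_pdh.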
  def find_docker_image_locator
    if runtime_constraints.is_a? Hash
      runtime_constraints['docker_image'] ||=
        Rails.configuration.default_docker_image_for_jobs
    end

    resolve_runtime_constraint("docker_image",
                               :docker_image_locator) do |image_search|
      image_tag = runtime_constraints['docker_image_tag']
      if coll = Collection.for_latest_docker_image(image_search, image_tag)
        [true, coll.portable_data_hash]
      else
        [false, "not found for #{image_search}"]
      end
    end
    Rails.logger.info("docker_image_locator is #{docker_image_locator}")
    if docker_image_locator && docker_image_locator_changed?
      self.docker_image_locator =
        Collection.docker_migration_pdh([current_user], docker_image_locator)
    end
    Rails.logger.info("docker_image_locator is #{docker_image_locator}")
  end

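  # While a job is locked, only the locking user (or the system user)
  # may modify its protected attributes; other users' changes to
  # script, output, log, state, etc. are rejected below. Changing
  # state to Cancelled is still permitted, subject to the normal
  # write-permission check in super.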
  def permission_to_update
    if is_locked_by_uuid_was and !(current_user and
                                   (current_user.uuid == is_locked_by_uuid_was or
                                    current_user.uuid == system_user.uuid))
      if script_changed? or
          script_parameters_changed? or
          script_version_changed? or
          (!cancelled_at_was.nil? and
           (cancelled_by_client_uuid_changed? or
            cancelled_by_user_uuid_changed? or
            cancelled_at_changed?)) or
          started_at_changed? or
          finished_at_changed? or
          running_changed? or
          success_changed? or
          output_changed? or
          log_changed? or
          tasks_summary_changed? or
          (state_changed? && state != Cancelled) or
          components_changed?
        logger.warn "User #{current_user.uuid if current_user} tried to change protected job attributes on locked #{self.class.to_s} #{uuid_was}"
        return false
      end
    end
    if !is_locked_by_uuid_changed?
      super
    else
      if !current_user
        logger.warn "Anonymous user tried to change lock on #{self.class.to_s} #{uuid_was}"
        false
      elsif is_locked_by_uuid_was and is_locked_by_uuid_was != current_user.uuid
        logger.warn "User #{current_user.uuid} tried to steal lock on #{self.class.to_s} #{uuid_was} from #{is_locked_by_uuid_was}"
        false
      elsif !is_locked_by_uuid.nil? and is_locked_by_uuid != current_user.uuid
        logger.warn "User #{current_user.uuid} tried to lock #{self.class.to_s} #{uuid_was} with uuid #{is_locked_by_uuid}"
        false
      else
        super
      end
    end
  end

  def update_modified_by_fields
    if self.cancelled_at_changed?
      # Ensure cancelled_at cannot be set to arbitrary non-now times,
      # or changed once it is set.
      if self.cancelled_at and not self.cancelled_at_was
        self.cancelled_at = db_current_time
        self.cancelled_by_user_uuid = current_user.uuid
        self.cancelled_by_client_uuid = current_api_client.andand.uuid
        @need_crunch_dispatch_trigger = true
      else
        self.cancelled_at = self.cancelled_at_was
        self.cancelled_by_user_uuid = self.cancelled_by_user_uuid_was
        self.cancelled_by_client_uuid = self.cancelled_by_client_uuid_was
      end
    end
    super
  end

  def trigger_crunch_dispatch_if_cancelled
    if @need_crunch_dispatch_trigger
      File.open(Rails.configuration.crunch_refresh_trigger, 'wb') do
        # That's all, just create/touch a file for crunch-job to see.
      end
    end
  end

  def update_timestamps_when_state_changes
    return if not (state_changed? or new_record?)

    case state
    when Running
      self.started_at ||= db_current_time
    when Failed, Complete
      self.finished_at ||= db_current_time
    when Cancelled
      self.cancelled_at ||= db_current_time
    end

    # TODO: Remove the following case block when old "success" and
    # "running" attrs go away. Until then, this ensures we still
    # expose correct success/running flags to older clients, even if
    # some new clients are writing only the new state attribute.
    case state
    when Queued
      self.running = false
      self.success = nil
    when Running
      self.running = true
      self.success = nil
    when Cancelled, Failed
      self.running = false
      self.success = false
    when Complete
      self.running = false
      self.success = true
    end
    self.running ||= false # Default to false instead of nil.

    @need_crunch_dispatch_trigger = true

    true
  end

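  # Legacy-attr mapping, restating the logic below: an older client
  # that writes success=true ends up with state Complete; running=true
  # with success unset maps to Running; a set cancelled_at maps to
  # Cancelled.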
  def update_state_from_old_state_attrs
    # If a client has touched the legacy state attrs, update the
    # "state" attr to agree with the updated values of the legacy
    # attrs.
    #
    # TODO: Remove this method when old "success" and "running" attrs
    # go away.
    if cancelled_at_changed? or
        success_changed? or
        running_changed? or
        state.nil?
      if cancelled_at
        self.state = Cancelled
      elsif success == false
        self.state = Failed
      elsif success == true
        self.state = Complete
      elsif running == true
        self.state = Running
      else
        self.state = Queued
      end
    end
    true
  end

  def validate_status
    if self.state.in?(States)
      true
    else
      errors.add :state, "#{state.inspect} must be one of: #{States.inspect}"
      false
    end
  end

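  # Illustrative: an update attempting Complete -> Running fails
  # validation with "invalid change from Complete to Running".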
  def validate_state_change
    ok = true
    if self.state_changed?
      ok = case self.state_was
           when nil
             # state isn't set yet
             true
           when Queued
             # Permit going from queued to any state
             true
           when Running
             # From running, may only transition to a finished state
             [Complete, Failed, Cancelled].include? self.state
           when Complete, Failed, Cancelled
             # Once in a finished state, don't permit any more state changes
             false
           else
             # Any other state transition is also invalid
             false
           end
      if not ok
        errors.add :state, "invalid change from #{self.state_was} to #{self.state}"
      end
    end
    ok
  end

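  # Illustrative (identifiers assumed): a script_parameters value like
  # "zzzzz-4zz18-0123456789abcde" (a collection uuid) fails validation;
  # the equivalent portable data hash, e.g.
  # "d41d8cd98f00b204e9800998ecf8427e+0", is accepted.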
  def ensure_no_collection_uuids_in_script_params
    # Fail validation if any script_parameters field includes a string containing a
    # collection uuid pattern.
    if self.script_parameters_changed?
      if recursive_hash_search(self.script_parameters, Collection.uuid_regex)
        self.errors.add :script_parameters, "must use portable_data_hash instead of collection uuid"
        return false
      end
    end
    true
  end

  # recursive_hash_search searches recursively through hashes and
  # arrays in 'thing' for string fields matching regular expression
  # 'pattern'.  Returns true if pattern is found, false otherwise.
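  #
  # Illustrative:
  #   recursive_hash_search({"input" => ["foo", {"x" => "bar"}]}, /bar/)
  # returns true; the same call with /baz/ returns false.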
  def recursive_hash_search thing, pattern
    if thing.is_a? Hash
      thing.each do |k, v|
        return true if recursive_hash_search v, pattern
      end
    elsif thing.is_a? Array
      thing.each do |k|
        return true if recursive_hash_search k, pattern
      end
    elsif thing.is_a? String
      return true if thing.match pattern
    end
    false
  end
end