class Job < ArvadosModel
  include HasUuid
  include KindAndEtag
  include CommonApiTemplate
  serialize :components, Hash
  attr_protected :arvados_sdk_version, :docker_image_locator
  serialize :script_parameters, Hash
  serialize :runtime_constraints, Hash
  serialize :tasks_summary, Hash
  before_create :ensure_unique_submit_id
  after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
  before_validation :set_priority
  before_validation :update_state_from_old_state_attrs
  before_validation :update_script_parameters_digest
  validate :ensure_script_version_is_commit
  validate :find_docker_image_locator
  validate :find_arvados_sdk_version
  validate :validate_status
  validate :validate_state_change
  validate :ensure_no_collection_uuids_in_script_params
  before_save :tag_version_in_internal_repository
  before_save :update_timestamps_when_state_changes

  has_many :commit_ancestors, :foreign_key => :descendant, :primary_key => :script_version
  has_many(:nodes, foreign_key: :job_uuid, primary_key: :uuid)

  class SubmitIdReused < StandardError
  end

  api_accessible :user, extend: :common do |t|
    t.add :submit_id
    t.add :priority
    t.add :script
    t.add :script_parameters
    t.add :script_version
    t.add :cancelled_at
    t.add :cancelled_by_client_uuid
    t.add :cancelled_by_user_uuid
    t.add :started_at
    t.add :finished_at
    t.add :output
    t.add :success
    t.add :running
    t.add :state
    t.add :is_locked_by_uuid
    t.add :log
    t.add :runtime_constraints
    t.add :tasks_summary
    t.add :nondeterministic
    t.add :repository
    t.add :supplied_script_version
    t.add :arvados_sdk_version
    t.add :docker_image_locator
    t.add :queue_position
    t.add :node_uuids
    t.add :description
    t.add :components
  end

  # Supported states for a job
  States = [
            (Queued = 'Queued'),
            (Running = 'Running'),
            (Cancelled = 'Cancelled'),
            (Failed = 'Failed'),
            (Complete = 'Complete'),
           ]

  def assert_finished
    update_attributes(finished_at: finished_at || db_current_time,
                      success: success.nil? ? false : success,
                      running: false)
  end

  def node_uuids
    nodes.map(&:uuid)
  end

  def self.queue
    self.where('state = ?', Queued).order('priority desc, created_at')
  end

  def queue_position
    # We used to report this accurately, but the implementation made queue
    # API requests O(n**2) for the size of the queue.  See #8800.
    # We've soft-disabled it because it's not clear we even want this
    # functionality: now that we have Node Manager with support for multiple
    # node sizes, "queue position" tells you very little about when a job will
    # run.
    state == Queued ? 0 : nil
  end

  def self.running
    self.where('running = ?', true).
      order('priority desc, created_at')
  end

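  # Atomically transition a Queued, unlocked job to Running, recording
  # locked_by_uuid as the lock holder.  Raises AlreadyLockedError if the
  # job is not in a lockable state.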
  def lock locked_by_uuid
    with_lock do
      unless self.state == Queued and self.is_locked_by_uuid.nil?
        raise AlreadyLockedError
      end
      self.state = Running
      self.is_locked_by_uuid = locked_by_uuid
      self.save!
    end
  end

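  # Cache a canonical digest of script_parameters so that find_reusable
  # can match jobs whose parameters are equivalent regardless of hash key
  # order (see sorted_hash_digest / deep_sort_hash below).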
  def update_script_parameters_digest
    self.script_parameters_digest = self.class.sorted_hash_digest(script_parameters)
  end

  def self.searchable_columns operator
    super - ["script_parameters_digest"]
  end

  def self.load_job_specific_filters attrs, orig_filters, read_users
    # Convert Job-specific @filters entries into general SQL filters.
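    # For example (illustrative values), a filter like
    #   ["script_version", "in git", "main"]
    # is rewritten below into a plain filter such as
    #   ["script_version", "in", [<commit hashes resolved by Commit.find_commit_range>]]
    # that apply_filters can evaluate directly against the jobs table.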
    script_info = {"repository" => nil, "script" => nil}
    git_filters = Hash.new do |hash, key|
      hash[key] = {"max_version" => "HEAD", "exclude_versions" => []}
    end
    filters = []
    orig_filters.each do |attr, operator, operand|
      if (script_info.has_key? attr) and (operator == "=")
        if script_info[attr].nil?
          script_info[attr] = operand
        elsif script_info[attr] != operand
          raise ArgumentError.new("incompatible #{attr} filters")
        end
      end
      case operator
      when "in git"
        git_filters[attr]["min_version"] = operand
      when "not in git"
        git_filters[attr]["exclude_versions"] += Array.wrap(operand)
      when "in docker", "not in docker"
        image_hashes = Array.wrap(operand).flat_map do |search_term|
          image_search, image_tag = search_term.split(':', 2)
          Collection.
            find_all_for_docker_image(image_search, image_tag, read_users).
            map(&:portable_data_hash)
        end
        filters << [attr, operator.sub(/ docker$/, ""), image_hashes]
      else
        filters << [attr, operator, operand]
      end
    end

    # Build a real script_version filter from any "not? in git" filters.
    git_filters.each_pair do |attr, filter|
      case attr
      when "script_version"
        script_info.each_pair do |key, value|
          if value.nil?
            raise ArgumentError.new("script_version filter needs #{key} filter")
          end
        end
        filter["repository"] = script_info["repository"]
        if attrs[:script_version]
          filter["max_version"] = attrs[:script_version]
        else
          # Using HEAD, set earlier by the hash default, is fine.
        end
      when "arvados_sdk_version"
        filter["repository"] = "arvados"
      else
        raise ArgumentError.new("unknown attribute for git filter: #{attr}")
      end
      revisions = Commit.find_commit_range(filter["repository"],
                                           filter["min_version"],
                                           filter["max_version"],
                                           filter["exclude_versions"])
      if revisions.empty?
        raise ArgumentError.
          new("error searching #{filter['repository']} from " +
              "'#{filter['min_version']}' to '#{filter['max_version']}', " +
              "excluding #{filter['exclude_versions']}")
      end
      filters.append([attr, "in", revisions])
    end

    filters
  end

  def self.find_reusable attrs, params, filters, read_users
    if filters.empty?  # Translate older creation parameters into filters.
      filters =
        [["repository", "=", attrs[:repository]],
         ["script", "=", attrs[:script]],
         ["script_version", "not in git", params[:exclude_script_versions]],
        ].reject { |filter| filter.last.nil? or filter.last.empty? }
      if !params[:minimum_script_version].blank?
        filters << ["script_version", "in git",
                    params[:minimum_script_version]]
      else
        filters += default_git_filters("script_version", attrs[:repository],
                                       attrs[:script_version])
      end
      if image_search = attrs[:runtime_constraints].andand["docker_image"]
        if image_tag = attrs[:runtime_constraints]["docker_image_tag"]
          image_search += ":#{image_tag}"
        end
        image_locator = Collection.
          for_latest_docker_image(image_search).andand.portable_data_hash
      else
        image_locator = nil
      end
      filters << ["docker_image_locator", "=", image_locator]
      if sdk_version = attrs[:runtime_constraints].andand["arvados_sdk_version"]
        filters += default_git_filters("arvados_sdk_version", "arvados", sdk_version)
      end
      filters = load_job_specific_filters(attrs, filters, read_users)
    end

    # Check specified filters for some reasonableness.
    filter_names = filters.map { |f| f.first }.uniq
    ["repository", "script"].each do |req_filter|
      if not filter_names.include?(req_filter)
        return send_error("#{req_filter} filter required")
      end
    end

    # Search for a reusable Job, and return it if found.
    candidates = Job.
      readable_by(current_user).
      where('state = ? or (owner_uuid = ? and state in (?))',
            Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
      where('script_parameters_digest = ?', Job.sorted_hash_digest(attrs[:script_parameters])).
      where('nondeterministic is distinct from ?', true).
      order('state desc, created_at') # prefer Running jobs over Queued
    candidates = apply_filters candidates, filters
    chosen = nil
    incomplete_job = nil
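    # In the loop below, chosen takes one of three values: nil (no completed
    # candidate examined yet), false (reuse of completed jobs has been ruled
    # out), or the completed Job selected for reuse.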
    candidates.each do |j|
      if j.state != Job::Complete
        # We'll use this if we don't find a job that has completed
        incomplete_job ||= j
        next
      end

      if chosen == false
        # We have already decided not to reuse any completed job
        next
      elsif chosen
        if chosen.output != j.output
          # If two matching jobs produced different outputs, run a new
          # job (or use one that's already running/queued) instead of
          # choosing one arbitrarily.
          chosen = false
        end
        # ...and that's the only thing we need to do once we've chosen
        # a job to reuse.
      elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
        # As soon as the output we will end up returning (if any) is
        # decided, check whether it will be visible to the user; if
        # not, any further investigation of reusable jobs is futile.
        chosen = false
      else
        chosen = j
      end
    end
    chosen || incomplete_job
  end

  def self.default_git_filters(attr_name, repo_name, refspec)
    # Add a filter to @filters for `attr_name` = the latest commit available
    # in `repo_name` at `refspec`.  No filter is added if refspec can't be
    # resolved.
    commits = Commit.find_commit_range(repo_name, nil, refspec, nil)
    if commit_hash = commits.first
      [[attr_name, "=", commit_hash]]
    else
      []
    end
  end

  protected

  def self.sorted_hash_digest h
    Digest::MD5.hexdigest(Oj.dump(deep_sort_hash(h)))
  end

  def self.deep_sort_hash x
    if x.is_a? Hash
      x.sort.collect do |k, v|
        [k, deep_sort_hash(v)]
      end.to_h
    elsif x.is_a? Array
      x.collect { |v| deep_sort_hash(v) }
    else
      x
    end
  end
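
  # Illustrative example: deep_sort_hash normalizes key order even for
  # hashes nested inside arrays, so parameter hashes that differ only in
  # key order produce identical digests:
  #   deep_sort_hash({"a" => [{"y" => 1, "x" => 2}]})
  #     #=> {"a" => [{"x" => 2, "y" => 1}]}
  #   sorted_hash_digest({"a" => [{"y" => 1, "x" => 2}]}) ==
  #     sorted_hash_digest({"a" => [{"x" => 2, "y" => 1}]})   #=> true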

  def foreign_key_attributes
    super + %w(output log)
  end

  def skip_uuid_read_permission_check
    super + %w(cancelled_by_client_uuid)
  end

  def skip_uuid_existence_check
    super + %w(output log)
  end

  def set_priority
    if self.priority.nil?
      self.priority = 0
    end
    true
  end

  def ensure_script_version_is_commit
    if state == Running
      # Apparently client has already decided to go for it. This is
      # needed to run a local job using a local working directory
      # instead of a commit-ish.
      return true
    end
    if new_record? or repository_changed? or script_version_changed?
      sha1 = Commit.find_commit_range(repository,
                                      nil, script_version, nil).first
      if not sha1
        errors.add :script_version, "#{script_version} does not resolve to a commit"
        return false
      end
      if supplied_script_version.nil? or supplied_script_version.empty?
        self.supplied_script_version = script_version
      end
      self.script_version = sha1
    end
    true
  end

  def tag_version_in_internal_repository
    if state == Running
      # No point now. See ensure_script_version_is_commit.
      true
    elsif errors.any?
      # Won't be saved, and script_version might not even be valid.
      true
    elsif new_record? or repository_changed? or script_version_changed?
      uuid_was = uuid
      begin
        assign_uuid
        Commit.tag_in_internal_repository repository, script_version, uuid
      rescue
        uuid = uuid_was
        raise
      end
    end
  end

  def ensure_unique_submit_id
    if !submit_id.nil?
      if Job.where('submit_id=?', self.submit_id).first
        raise SubmitIdReused.new
      end
    end
    true
  end

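  # Shared helper for the find_* validators below.  If runtime_constraints
  # has an entry for `key`, its value is yielded to the block, which must
  # return [ok, result]; on success `result` is assigned to the attribute
  # named by attr_sym, otherwise it is recorded as a validation error.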
  def resolve_runtime_constraint(key, attr_sym)
    if ((runtime_constraints.is_a? Hash) and
        (search = runtime_constraints[key]))
      ok, result = yield search
    else
      ok, result = true, nil
    end
    if ok
      send("#{attr_sym}=".to_sym, result)
    else
      errors.add(attr_sym, result)
    end
    ok
  end

  def find_arvados_sdk_version
    resolve_runtime_constraint("arvados_sdk_version",
                               :arvados_sdk_version) do |git_search|
      commits = Commit.find_commit_range("arvados",
                                         nil, git_search, nil)
      if commits.empty?
        [false, "#{git_search} does not resolve to a commit"]
      elsif not runtime_constraints["docker_image"]
        [false, "cannot be specified without a Docker image constraint"]
      else
        [true, commits.first]
      end
    end
  end

  def find_docker_image_locator
    if (runtime_constraints.is_a? Hash) and
        runtime_constraints['docker_image'].nil? and
        Rails.configuration.default_docker_image_for_jobs
      runtime_constraints['docker_image'] =
        Rails.configuration.default_docker_image_for_jobs
    end
    resolve_runtime_constraint("docker_image",
                               :docker_image_locator) do |image_search|
      image_tag = runtime_constraints['docker_image_tag']
      if coll = Collection.for_latest_docker_image(image_search, image_tag)
        [true, coll.portable_data_hash]
      else
        [false, "not found for #{image_search}"]
      end
    end
  end

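  # Guard work-state attributes while a job is locked: only the locking
  # user or the system user may change them.  Lock ownership changes are
  # checked separately below; everything else defers to super.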
  def permission_to_update
    if is_locked_by_uuid_was and !(current_user and
                                   (current_user.uuid == is_locked_by_uuid_was or
                                    current_user.uuid == system_user.uuid))
      if script_changed? or
          script_parameters_changed? or
          script_version_changed? or
          (!cancelled_at_was.nil? and
           (cancelled_by_client_uuid_changed? or
            cancelled_by_user_uuid_changed? or
            cancelled_at_changed?)) or
          started_at_changed? or
          finished_at_changed? or
          running_changed? or
          success_changed? or
          output_changed? or
          log_changed? or
          tasks_summary_changed? or
          state_changed? or
          components_changed?
        logger.warn "User #{current_user.uuid if current_user} tried to change protected job attributes on locked #{self.class.to_s} #{uuid_was}"
        return false
      end
    end
    if !is_locked_by_uuid_changed?
      super
    else
      if !current_user
        logger.warn "Anonymous user tried to change lock on #{self.class.to_s} #{uuid_was}"
        false
      elsif is_locked_by_uuid_was and is_locked_by_uuid_was != current_user.uuid
        logger.warn "User #{current_user.uuid} tried to steal lock on #{self.class.to_s} #{uuid_was} from #{is_locked_by_uuid_was}"
        false
      elsif !is_locked_by_uuid.nil? and is_locked_by_uuid != current_user.uuid
        logger.warn "User #{current_user.uuid} tried to lock #{self.class.to_s} #{uuid_was} with uuid #{is_locked_by_uuid}"
        false
      else
        super
      end
    end
  end

  def update_modified_by_fields
    if self.cancelled_at_changed?
      # Ensure cancelled_at cannot be set to arbitrary non-now times,
      # or changed once it is set.
      if self.cancelled_at and not self.cancelled_at_was
        self.cancelled_at = db_current_time
        self.cancelled_by_user_uuid = current_user.uuid
        self.cancelled_by_client_uuid = current_api_client.andand.uuid
        @need_crunch_dispatch_trigger = true
      else
        self.cancelled_at = self.cancelled_at_was
        self.cancelled_by_user_uuid = self.cancelled_by_user_uuid_was
        self.cancelled_by_client_uuid = self.cancelled_by_client_uuid_was
      end
    end
    super
  end

  def trigger_crunch_dispatch_if_cancelled
    if @need_crunch_dispatch_trigger
      File.open(Rails.configuration.crunch_refresh_trigger, 'wb') do
        # That's all, just create/touch a file for crunch-job to see.
      end
    end
  end

  def update_timestamps_when_state_changes
    return if not (state_changed? or new_record?)

    case state
    when Running
      self.started_at ||= db_current_time
    when Failed, Complete
      self.finished_at ||= db_current_time
    when Cancelled
      self.cancelled_at ||= db_current_time
    end

    # TODO: Remove the following case block when old "success" and
    # "running" attrs go away. Until then, this ensures we still
    # expose correct success/running flags to older clients, even if
    # some new clients are writing only the new state attribute.
    case state
    when Queued
      self.running = false
      self.success = nil
    when Running
      self.running = true
      self.success = nil
    when Cancelled, Failed
      self.running = false
      self.success = false
    when Complete
      self.running = false
      self.success = true
    end
    self.running ||= false # Default to false instead of nil.

    @need_crunch_dispatch_trigger = true

    true
  end

  def update_state_from_old_state_attrs
    # If a client has touched the legacy state attrs, update the
    # "state" attr to agree with the updated values of the legacy
    # attrs.
    #
    # TODO: Remove this method when old "success" and "running" attrs
    # go away.
    if cancelled_at_changed? or
        success_changed? or
        running_changed? or
        state.nil?
      if cancelled_at
        self.state = Cancelled
      elsif success == false
        self.state = Failed
      elsif success == true
        self.state = Complete
      elsif running == true
        self.state = Running
      else
        self.state = Queued
      end
    end
    true
  end

  def validate_status
    if self.state.in?(States)
      true
    else
      errors.add :state, "#{state.inspect} must be one of: #{States.inspect}"
      false
    end
  end

  def validate_state_change
    ok = true
    if self.state_changed?
      ok = case self.state_was
           when nil
             # state isn't set yet
             true
           when Queued
             # Permit going from queued to any state
             true
           when Running
             # From running, may only transition to a finished state
             [Complete, Failed, Cancelled].include? self.state
           when Complete, Failed, Cancelled
             # Once in a finished state, don't permit any more state changes
             false
           else
             # Any other state transition is also invalid
             false
           end
      if not ok
        errors.add :state, "invalid change from #{self.state_was} to #{self.state}"
      end
    end
    ok
  end

  def ensure_no_collection_uuids_in_script_params
    # recursive_hash_search searches recursively through hashes and
    # arrays in 'thing' for string fields matching regular expression
    # 'pattern'.  Returns true if pattern is found, false otherwise.
    def recursive_hash_search thing, pattern
      if thing.is_a? Hash
        thing.each do |k, v|
          return true if recursive_hash_search v, pattern
        end
      elsif thing.is_a? Array
        thing.each do |k|
          return true if recursive_hash_search k, pattern
        end
      elsif thing.is_a? String
        return true if thing.match pattern
      end
      false
    end

    # Fail validation if any script_parameters field includes a string containing a
    # collection uuid pattern.
    if self.script_parameters_changed?
      if recursive_hash_search(self.script_parameters, Collection.uuid_regex)
        self.errors.add :script_parameters, "must use portable_data_hash instead of collection uuid"
        return false
      end
    end
    true
  end
end
607 end