Merge branch '11138-docker-load-fail'
[arvados.git] / services / api / app / models / job.rb
1 require 'safe_json'
2
# Job is the legacy (pre-containers) unit of compute work: a crunch
# script pinned to a git commit, run under optional runtime constraints
# such as a specific Docker image and Arvados SDK version.
class Job < ArvadosModel
  include HasUuid
  include KindAndEtag
  include CommonApiTemplate
  extend CurrentApiClient
  # Hash-valued attributes are serialized into text columns.
  serialize :components, Hash
  # Clients may not write these directly; they are derived from
  # runtime_constraints by the find_* validations below.
  attr_protected :arvados_sdk_version, :docker_image_locator
  serialize :script_parameters, Hash
  serialize :runtime_constraints, Hash
  serialize :tasks_summary, Hash
  before_create :ensure_unique_submit_id
  after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
  before_validation :set_priority
  before_validation :update_state_from_old_state_attrs
  before_validation :update_script_parameters_digest
  validate :ensure_script_version_is_commit
  validate :find_docker_image_locator
  validate :find_arvados_sdk_version
  validate :validate_status
  validate :validate_state_change
  validate :ensure_no_collection_uuids_in_script_params
  before_save :tag_version_in_internal_repository
  before_save :update_timestamps_when_state_changes

  has_many :commit_ancestors, :foreign_key => :descendant, :primary_key => :script_version
  has_many(:nodes, foreign_key: :job_uuid, primary_key: :uuid)

  # Raised by ensure_unique_submit_id when a job with the same
  # submit_id already exists.
  class SubmitIdReused < StandardError
  end

  # Attributes exposed through the common API response template.
  api_accessible :user, extend: :common do |t|
    t.add :submit_id
    t.add :priority
    t.add :script
    t.add :script_parameters
    t.add :script_version
    t.add :cancelled_at
    t.add :cancelled_by_client_uuid
    t.add :cancelled_by_user_uuid
    t.add :started_at
    t.add :finished_at
    t.add :output
    t.add :success
    t.add :running
    t.add :state
    t.add :is_locked_by_uuid
    t.add :log
    t.add :runtime_constraints
    t.add :tasks_summary
    t.add :nondeterministic
    t.add :repository
    t.add :supplied_script_version
    t.add :arvados_sdk_version
    t.add :docker_image_locator
    t.add :queue_position
    t.add :node_uuids
    t.add :description
    t.add :components
  end

  # Supported states for a job
  States = [
            (Queued = 'Queued'),
            (Running = 'Running'),
            (Cancelled = 'Cancelled'),
            (Failed = 'Failed'),
            (Complete = 'Complete'),
           ]

  after_initialize do
    # Set when a state change should poke crunch-dispatch; consumed by
    # the trigger_crunch_dispatch_if_cancelled after_commit hook.
    @need_crunch_dispatch_trigger = false
  end
75
76   def assert_finished
77     update_attributes(finished_at: finished_at || db_current_time,
78                       success: success.nil? ? false : success,
79                       running: false)
80   end
81
82   def node_uuids
83     nodes.map(&:uuid)
84   end
85
86   def self.queue
87     self.where('state = ?', Queued).order('priority desc, created_at')
88   end
89
90   def queue_position
91     # We used to report this accurately, but the implementation made queue
92     # API requests O(n**2) for the size of the queue.  See #8800.
93     # We've soft-disabled it because it's not clear we even want this
94     # functionality: now that we have Node Manager with support for multiple
95     # node sizes, "queue position" tells you very little about when a job will
96     # run.
97     state == Queued ? 0 : nil
98   end
99
100   def self.running
101     self.where('running = ?', true).
102       order('priority desc, created_at')
103   end
104
105   def lock locked_by_uuid
106     with_lock do
107       unless self.state == Queued and self.is_locked_by_uuid.nil?
108         raise AlreadyLockedError
109       end
110       self.state = Running
111       self.is_locked_by_uuid = locked_by_uuid
112       self.save!
113     end
114   end
115
116   def update_script_parameters_digest
117     self.script_parameters_digest = self.class.sorted_hash_digest(script_parameters)
118   end
119
120   def self.searchable_columns operator
121     super - ["script_parameters_digest"]
122   end
123
124   def self.full_text_searchable_columns
125     super - ["script_parameters_digest"]
126   end
127
  # Translate Job-specific filter operators ("in git", "not in git",
  # "in docker", "not in docker") from orig_filters into plain filters
  # the generic SQL filter machinery understands; all other filters
  # pass through unchanged.
  #
  # attrs        - job attributes (used to cap script_version at the
  #                submitted version when no explicit max is given)
  # orig_filters - [[attr, operator, operand], ...] from the client
  # read_users   - users whose read permissions scope docker image
  #                lookups
  #
  # Returns the translated filter list. Raises ArgumentError on
  # contradictory or underspecified filters, or when a git range
  # resolves to no commits.
  def self.load_job_specific_filters attrs, orig_filters, read_users
    # Convert Job-specific @filters entries into general SQL filters.
    script_info = {"repository" => nil, "script" => nil}
    # Each git-filtered attribute accumulates a version range; the
    # default is "anything up to HEAD, excluding nothing".
    git_filters = Hash.new do |hash, key|
      hash[key] = {"max_version" => "HEAD", "exclude_versions" => []}
    end
    filters = []
    orig_filters.each do |attr, operator, operand|
      if (script_info.has_key? attr) and (operator == "=")
        # Remember repository/script equality filters: the git range
        # resolution below needs them, and they must not contradict.
        if script_info[attr].nil?
          script_info[attr] = operand
        elsif script_info[attr] != operand
          raise ArgumentError.new("incompatible #{attr} filters")
        end
      end
      case operator
      when "in git"
        git_filters[attr]["min_version"] = operand
      when "not in git"
        git_filters[attr]["exclude_versions"] += Array.wrap(operand)
      when "in docker", "not in docker"
        # Resolve image name[:tag] search terms to portable data
        # hashes, and rewrite the operator to plain "in"/"not in".
        image_hashes = Array.wrap(operand).flat_map do |search_term|
          image_search, image_tag = search_term.split(':', 2)
          Collection.
            find_all_for_docker_image(image_search, image_tag, read_users).
            map(&:portable_data_hash)
        end
        filters << [attr, operator.sub(/ docker$/, ""), image_hashes]
      else
        filters << [attr, operator, operand]
      end
    end

    # Build a real script_version filter from any "not? in git" filters.
    git_filters.each_pair do |attr, filter|
      case attr
      when "script_version"
        script_info.each_pair do |key, value|
          if value.nil?
            raise ArgumentError.new("script_version filter needs #{key} filter")
          end
        end
        filter["repository"] = script_info["repository"]
        if attrs[:script_version]
          filter["max_version"] = attrs[:script_version]
        else
          # Using HEAD, set earlier by the hash default, is fine.
        end
      when "arvados_sdk_version"
        # SDK versions are always commits in the "arvados" repository.
        filter["repository"] = "arvados"
      else
        raise ArgumentError.new("unknown attribute for git filter: #{attr}")
      end
      revisions = Commit.find_commit_range(filter["repository"],
                                           filter["min_version"],
                                           filter["max_version"],
                                           filter["exclude_versions"])
      if revisions.empty?
        raise ArgumentError.
          new("error searching #{filter['repository']} from " +
              "'#{filter['min_version']}' to '#{filter['max_version']}', " +
              "excluding #{filter['exclude_versions']}")
      end
      filters.append([attr, "in", revisions])
    end

    filters
  end
196
  # Look for an existing job this request could reuse instead of
  # creating a new one.
  #
  # attrs      - attributes of the job being submitted
  # params     - creation params (minimum/exclude script versions)
  # filters    - client-supplied filters; when empty, equivalent
  #              filters are synthesized from attrs/params
  # read_users - users whose read permissions scope docker lookups
  #
  # Returns a completed Job whose output is readable by current_user,
  # else a queued/running job owned by current_user, else nil.
  def self.find_reusable attrs, params, filters, read_users
    if filters.empty?  # Translate older creation parameters into filters.
      filters =
        [["repository", "=", attrs[:repository]],
         ["script", "=", attrs[:script]],
         ["script_version", "not in git", params[:exclude_script_versions]],
        ].reject { |filter| filter.last.nil? or filter.last.empty? }
      if !params[:minimum_script_version].blank?
        filters << ["script_version", "in git",
                     params[:minimum_script_version]]
      else
        filters += default_git_filters("script_version", attrs[:repository],
                                       attrs[:script_version])
      end
      # Pin docker_image_locator to the portable data hash of the
      # requested image (after any docker-format migration), or to
      # whatever docker_migration_pdh returns for "no image".
      if image_search = attrs[:runtime_constraints].andand["docker_image"]
        if image_tag = attrs[:runtime_constraints]["docker_image_tag"]
          image_search += ":#{image_tag}"
        end
        image_locator = Collection.
          for_latest_docker_image(image_search).andand.portable_data_hash
      else
        image_locator = nil
      end
      filters << ["docker_image_locator", "=",
                  Collection.docker_migration_pdh(read_users, image_locator)]
      if sdk_version = attrs[:runtime_constraints].andand["arvados_sdk_version"]
        filters += default_git_filters("arvados_sdk_version", "arvados", sdk_version)
      end
      filters = load_job_specific_filters(attrs, filters, read_users)
    end

    # Check specified filters for some reasonableness.
    filter_names = filters.map { |f| f.first }.uniq
    ["repository", "script"].each do |req_filter|
      if not filter_names.include?(req_filter)
        return send_error("#{req_filter} filter required")
      end
    end

    # Search for a reusable Job, and return it if found.
    candidates = Job.
      readable_by(current_user).
      where('state = ? or (owner_uuid = ? and state in (?))',
            Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
      where('script_parameters_digest = ?', Job.sorted_hash_digest(attrs[:script_parameters])).
      where('nondeterministic is distinct from ?', true).
      order('state desc, created_at') # prefer Running jobs over Queued
    candidates = apply_filters candidates, filters
    # chosen is nil (nothing picked yet), false (reuse of completed
    # jobs ruled out), or the completed Job to reuse.
    chosen = nil
    incomplete_job = nil
    candidates.each do |j|
      if j.state != Job::Complete
        # We'll use this if we don't find a job that has completed
        incomplete_job ||= j
        next
      end

      if chosen == false
        # We have already decided not to reuse any completed job
        next
      elsif chosen
        if chosen.output != j.output
          # If two matching jobs produced different outputs, run a new
          # job (or use one that's already running/queued) instead of
          # choosing one arbitrarily.
          chosen = false
        end
        # ...and that's the only thing we need to do once we've chosen
        # a job to reuse.
      elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
        # As soon as the output we will end up returning (if any) is
        # decided, check whether it will be visible to the user; if
        # not, any further investigation of reusable jobs is futile.
        chosen = false
      else
        chosen = j
      end
    end
    chosen || incomplete_job
  end
277
278   def self.default_git_filters(attr_name, repo_name, refspec)
279     # Add a filter to @filters for `attr_name` = the latest commit available
280     # in `repo_name` at `refspec`.  No filter is added if refspec can't be
281     # resolved.
282     commits = Commit.find_commit_range(repo_name, nil, refspec, nil)
283     if commit_hash = commits.first
284       [[attr_name, "=", commit_hash]]
285     else
286       []
287     end
288   end
289
  # Cancel this job and, when cascade is true, every child job or
  # pipeline instance listed in its components hash. The whole cascade
  # runs inside one transaction; need_transaction: false is used by
  # the recursive calls to avoid re-opening it.
  #
  # Raises InvalidStateTransitionError when the job is already in a
  # finished state other than Cancelled. Cancelling an
  # already-Cancelled job is a no-op.
  def cancel(cascade: false, need_transaction: true)
    if need_transaction
      ActiveRecord::Base.transaction do
        cancel(cascade: cascade, need_transaction: false)
      end
      return
    end

    if self.state.in?([Queued, Running])
      self.state = Cancelled
      self.save!
    elsif self.state != Cancelled
      raise InvalidStateTransitionError
    end

    return if !cascade

    # cancel all children; they could be jobs or pipeline instances
    children = self.components.andand.collect{|_, u| u}.compact

    return if children.empty?

    # cancel any child jobs
    Job.where(uuid: children, state: [Queued, Running]).each do |job|
      job.cancel(cascade: cascade, need_transaction: false)
    end

    # cancel any child pipelines
    PipelineInstance.where(uuid: children, state: [PipelineInstance::RunningOnServer, PipelineInstance::RunningOnClient]).each do |pi|
      pi.cancel(cascade: cascade, need_transaction: false)
    end
  end
322
323   protected
324
  # MD5 hex digest of `h` after recursively sorting nested hashes by
  # key, so logically equal parameter hashes always hash identically
  # regardless of insertion order.
  def self.sorted_hash_digest h
    Digest::MD5.hexdigest(Oj.dump(deep_sort_hash(h)))
  end
328
329   def foreign_key_attributes
330     super + %w(output log)
331   end
332
333   def skip_uuid_read_permission_check
334     super + %w(cancelled_by_client_uuid)
335   end
336
337   def skip_uuid_existence_check
338     super + %w(output log)
339   end
340
341   def set_priority
342     if self.priority.nil?
343       self.priority = 0
344     end
345     true
346   end
347
  # Validation: resolve script_version (a branch, tag, or other
  # commit-ish) to a full commit sha1, preserving the client's
  # original value in supplied_script_version. Skipped for Running
  # jobs, which may point at a local working directory instead.
  def ensure_script_version_is_commit
    if state == Running
      # Apparently client has already decided to go for it. This is
      # needed to run a local job using a local working directory
      # instead of a commit-ish.
      return true
    end
    if new_record? or repository_changed? or script_version_changed?
      sha1 = Commit.find_commit_range(repository,
                                      nil, script_version, nil).first
      if not sha1
        errors.add :script_version, "#{script_version} does not resolve to a commit"
        return false
      end
      if supplied_script_version.nil? or supplied_script_version.empty?
        self.supplied_script_version = script_version
      end
      self.script_version = sha1
    end
    true
  end
369
  # before_save: tag the resolved script_version in the internal git
  # repository under this job's UUID, so the exact commit stays
  # reachable. The UUID is assigned early (assign_uuid) because the
  # tag is named after it; if tagging fails, the previous uuid value
  # is restored before re-raising so the record isn't half-mutated.
  def tag_version_in_internal_repository
    if state == Running
      # No point now. See ensure_script_version_is_commit.
      true
    elsif errors.any?
      # Won't be saved, and script_version might not even be valid.
      true
    elsif new_record? or repository_changed? or script_version_changed?
      uuid_was = uuid
      begin
        assign_uuid
        Commit.tag_in_internal_repository repository, script_version, uuid
      rescue
        self.uuid = uuid_was
        raise
      end
    end
  end
388
389   def ensure_unique_submit_id
390     if !submit_id.nil?
391       if Job.where('submit_id=?',self.submit_id).first
392         raise SubmitIdReused.new
393       end
394     end
395     true
396   end
397
398   def resolve_runtime_constraint(key, attr_sym)
399     if ((runtime_constraints.is_a? Hash) and
400         (search = runtime_constraints[key]))
401       ok, result = yield search
402     else
403       ok, result = true, nil
404     end
405     if ok
406       send("#{attr_sym}=".to_sym, result)
407     else
408       errors.add(attr_sym, result)
409     end
410     ok
411   end
412
  # Validation: resolve the "arvados_sdk_version" runtime constraint
  # (a commit-ish in the "arvados" repository) to a concrete commit
  # and store it in arvados_sdk_version. Fails validation when the
  # commit can't be resolved, or when no docker_image constraint
  # accompanies it.
  def find_arvados_sdk_version
    resolve_runtime_constraint("arvados_sdk_version",
                               :arvados_sdk_version) do |git_search|
      commits = Commit.find_commit_range("arvados",
                                         nil, git_search, nil)
      if commits.empty?
        [false, "#{git_search} does not resolve to a commit"]
      elsif not runtime_constraints["docker_image"]
        [false, "cannot be specified without a Docker image constraint"]
      else
        [true, commits.first]
      end
    end
  end
427
428   def find_docker_image_locator
429     if runtime_constraints.is_a? Hash
430       runtime_constraints['docker_image'] ||=
431         Rails.configuration.default_docker_image_for_jobs
432     end
433
434     resolve_runtime_constraint("docker_image",
435                                :docker_image_locator) do |image_search|
436       image_tag = runtime_constraints['docker_image_tag']
437       if coll = Collection.for_latest_docker_image(image_search, image_tag)
438         [true, coll.portable_data_hash]
439       else
440         [false, "not found for #{image_search}"]
441       end
442     end
443     Rails.logger.info("docker_image_locator is #{docker_image_locator}")
444     if docker_image_locator && docker_image_locator_changed?
445       self.docker_image_locator =
446         Collection.docker_migration_pdh([current_user], docker_image_locator)
447     end
448     Rails.logger.info("docker_image_locator is #{docker_image_locator}")
449   end
450
  # Update permission policy for locked jobs:
  # - While locked by someone else (and the updater isn't the lock
  #   holder or the system user), the "protected" runtime attributes
  #   may not change; a transition to Cancelled is still allowed.
  # - The lock field itself may only be set or cleared by the lock
  #   holder; anonymous users and lock "stealing" are refused.
  # Falls through to super for the ordinary permission checks.
  def permission_to_update
    if is_locked_by_uuid_was and !(current_user and
                                   (current_user.uuid == is_locked_by_uuid_was or
                                    current_user.uuid == system_user.uuid))
      # Job was locked before this update, and the updater is neither
      # the lock holder nor the system user: protected attrs frozen.
      if script_changed? or
          script_parameters_changed? or
          script_version_changed? or
          (!cancelled_at_was.nil? and
           (cancelled_by_client_uuid_changed? or
            cancelled_by_user_uuid_changed? or
            cancelled_at_changed?)) or
          started_at_changed? or
          finished_at_changed? or
          running_changed? or
          success_changed? or
          output_changed? or
          log_changed? or
          tasks_summary_changed? or
          (state_changed? && state != Cancelled) or
          components_changed?
        logger.warn "User #{current_user.uuid if current_user} tried to change protected job attributes on locked #{self.class.to_s} #{uuid_was}"
        return false
      end
    end
    if !is_locked_by_uuid_changed?
      super
    else
      if !current_user
        logger.warn "Anonymous user tried to change lock on #{self.class.to_s} #{uuid_was}"
        false
      elsif is_locked_by_uuid_was and is_locked_by_uuid_was != current_user.uuid
        logger.warn "User #{current_user.uuid} tried to steal lock on #{self.class.to_s} #{uuid_was} from #{is_locked_by_uuid_was}"
        false
      elsif !is_locked_by_uuid.nil? and is_locked_by_uuid != current_user.uuid
        logger.warn "User #{current_user.uuid} tried to lock #{self.class.to_s} #{uuid_was} with uuid #{is_locked_by_uuid}"
        false
      else
        super
      end
    end
  end
492
  # In addition to the usual modified_by bookkeeping (super), police
  # the cancellation fields: cancelled_at may only transition from nil
  # to the server's current time — never to an arbitrary
  # client-supplied time, and never changed once set — and the
  # cancelled_by_* fields are stamped server-side at that moment.
  def update_modified_by_fields
    if self.cancelled_at_changed?
      # Ensure cancelled_at cannot be set to arbitrary non-now times,
      # or changed once it is set.
      if self.cancelled_at and not self.cancelled_at_was
        self.cancelled_at = db_current_time
        self.cancelled_by_user_uuid = current_user.uuid
        self.cancelled_by_client_uuid = current_api_client.andand.uuid
        # Cancellation must be announced to crunch-dispatch after
        # commit; see trigger_crunch_dispatch_if_cancelled.
        @need_crunch_dispatch_trigger = true
      else
        # Any other attempted change is reverted to the stored values.
        self.cancelled_at = self.cancelled_at_was
        self.cancelled_by_user_uuid = self.cancelled_by_user_uuid_was
        self.cancelled_by_client_uuid = self.cancelled_by_client_uuid_was
      end
    end
    super
  end
510
511   def trigger_crunch_dispatch_if_cancelled
512     if @need_crunch_dispatch_trigger
513       File.open(Rails.configuration.crunch_refresh_trigger, 'wb') do
514         # That's all, just create/touch a file for crunch-job to see.
515       end
516     end
517   end
518
  # before_save: when the state changes (or on first save), stamp the
  # timestamp corresponding to the new state and keep the legacy
  # running/success flags consistent with it.
  def update_timestamps_when_state_changes
    return if not (state_changed? or new_record?)

    # Stamp the timestamp for the state being entered, if not already
    # set by the client.
    case state
    when Running
      self.started_at ||= db_current_time
    when Failed, Complete
      self.finished_at ||= db_current_time
    when Cancelled
      self.cancelled_at ||= db_current_time
    end

    # TODO: Remove the following case block when old "success" and
    # "running" attrs go away. Until then, this ensures we still
    # expose correct success/running flags to older clients, even if
    # some new clients are writing only the new state attribute.
    case state
    when Queued
      self.running = false
      self.success = nil
    when Running
      self.running = true
      self.success = nil
    when Cancelled, Failed
      self.running = false
      self.success = false
    when Complete
      self.running = false
      self.success = true
    end
    self.running ||= false # Default to false instead of nil.

    # Any state change may be interesting to crunch-dispatch.
    @need_crunch_dispatch_trigger = true

    true
  end
555
  # before_validation: if a client has touched the legacy state attrs
  # (cancelled_at, success, running), update the "state" attr to agree
  # with the updated values of the legacy attrs. Precedence:
  # cancelled_at, then success, then running, else Queued.
  #
  # TODO: Remove this method when old "success" and "running" attrs
  # go away.
  def update_state_from_old_state_attrs
    if cancelled_at_changed? or
        success_changed? or
        running_changed? or
        state.nil?
      if cancelled_at
        self.state = Cancelled
      elsif success == false
        self.state = Failed
      elsif success == true
        self.state = Complete
      elsif running == true
        self.state = Running
      else
        self.state = Queued
      end
    end
    true
  end
581
582   def validate_status
583     if self.state.in?(States)
584       true
585     else
586       errors.add :state, "#{state.inspect} must be one of: #{States.inspect}"
587       false
588     end
589   end
590
  # Validation: enforce legal state-machine transitions. From nil or
  # Queued, any state is reachable; from Running, only the finished
  # states (Complete/Failed/Cancelled); finished states are terminal.
  def validate_state_change
    ok = true
    if self.state_changed?
      ok = case self.state_was
           when nil
             # state isn't set yet
             true
           when Queued
             # Permit going from queued to any state
             true
           when Running
             # From running, may only transition to a finished state
             [Complete, Failed, Cancelled].include? self.state
           when Complete, Failed, Cancelled
             # Once in a finished state, don't permit any more state changes
             false
           else
             # Any other state transition is also invalid
             false
           end
      if not ok
        errors.add :state, "invalid change from #{self.state_was} to #{self.state}"
      end
    end
    ok
  end
617
618   def ensure_no_collection_uuids_in_script_params
619     # Fail validation if any script_parameters field includes a string containing a
620     # collection uuid pattern.
621     if self.script_parameters_changed?
622       if recursive_hash_search(self.script_parameters, Collection.uuid_regex)
623         self.errors.add :script_parameters, "must use portable_data_hash instead of collection uuid"
624         return false
625       end
626     end
627     true
628   end
629
630   # recursive_hash_search searches recursively through hashes and
631   # arrays in 'thing' for string fields matching regular expression
632   # 'pattern'.  Returns true if pattern is found, false otherwise.
633   def recursive_hash_search thing, pattern
634     if thing.is_a? Hash
635       thing.each do |k, v|
636         return true if recursive_hash_search v, pattern
637       end
638     elsif thing.is_a? Array
639       thing.each do |k|
640         return true if recursive_hash_search k, pattern
641       end
642     elsif thing.is_a? String
643       return true if thing.match pattern
644     end
645     false
646   end
647 end