Merge branch '9827-no-docker-caching' closes #9827
author Peter Amstutz <peter.amstutz@curoverse.com>
Tue, 23 Aug 2016 13:27:28 +0000 (09:27 -0400)
committer Peter Amstutz <peter.amstutz@curoverse.com>
Tue, 23 Aug 2016 13:27:28 +0000 (09:27 -0400)
16 files changed:
apps/workbench/Gemfile
apps/workbench/Gemfile.lock
apps/workbench/config/initializers/lograge.rb [new file with mode: 0644]
sdk/cwl/test_with_arvbox.sh
services/api/Gemfile
services/api/Gemfile.lock
services/api/app/controllers/arvados/v1/jobs_controller.rb
services/api/app/models/job.rb
services/api/config/initializers/load_config.rb
services/api/config/initializers/lograge.rb [new file with mode: 0644]
services/api/db/migrate/20160819195557_add_script_parameters_digest_to_jobs.rb [new file with mode: 0644]
services/api/db/migrate/20160819195725_populate_script_parameters_digest.rb [new file with mode: 0644]
services/api/db/structure.sql
services/api/test/fixtures/jobs.yml
services/api/test/functional/arvados/v1/job_reuse_controller_test.rb
services/api/test/unit/job_test.rb

diff --git a/apps/workbench/Gemfile b/apps/workbench/Gemfile
index e35cc83ece303aec8b84c4e43e9a15f0258ba7d4..db569c96a5dcddaaa47b4aaf49ff2cbe40244ef8 100644 (file)
@@ -93,3 +93,6 @@ gem "deep_merge", :require => 'deep_merge/rails_compat'
 
 gem 'morrisjs-rails'
 gem 'raphael-rails'
+
+gem 'lograge'
+gem 'logstash-event'
diff --git a/apps/workbench/Gemfile.lock b/apps/workbench/Gemfile.lock
index 2618e47cbf606dbf59ab7ef63d2833e60553c020..03bbbce0614eb37411ac993ea1d8af0d2f3bbdeb 100644 (file)
@@ -142,6 +142,11 @@ GEM
     logging (2.1.0)
       little-plugger (~> 1.1)
       multi_json (~> 1.10)
+    lograge (0.3.6)
+      actionpack (>= 3)
+      activesupport (>= 3)
+      railties (>= 3)
+    logstash-event (1.2.02)
     mail (2.6.3)
       mime-types (>= 1.16, < 3)
     memoist (0.14.0)
@@ -284,6 +289,8 @@ DEPENDENCIES
   jquery-rails
   less
   less-rails
+  lograge
+  logstash-event
   minitest (>= 5.0.0)
   mocha
   morrisjs-rails
@@ -309,3 +316,6 @@ DEPENDENCIES
   therubyracer
   uglifier (>= 1.0.3)
   wiselinks
+
+BUNDLED WITH
+   1.12.1
diff --git a/apps/workbench/config/initializers/lograge.rb b/apps/workbench/config/initializers/lograge.rb
new file mode 100644 (file)
index 0000000..24252c8
--- /dev/null
@@ -0,0 +1,14 @@
+ArvadosWorkbench::Application.configure do
+  config.lograge.enabled = true
+  config.lograge.formatter = Lograge::Formatters::Logstash.new
+  config.lograge.custom_options = lambda do |event|
+    exceptions = %w(controller action format id)
+    params = event.payload[:params].except(*exceptions)
+    params_s = Oj.dump(params)
+    if params_s.length > 1000
+      { params_truncated: params_s[0..1000] + "[...]" }
+    else
+      { params: params }
+    end
+  end
+end
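
The initializer above caps serialized request parameters at 1000 characters before they reach the structured log. A minimal stand-alone sketch of that truncation rule (plain Ruby, with JSON standing in for the Oj serializer and a hypothetical params payload):

    require 'json'                                             # stand-in for Oj
    exceptions = %w(controller action format id)
    params = { 'controller' => 'jobs', 'input' => 'x' * 2000 } # hypothetical payload
    params = params.reject { |k, _| exceptions.include?(k) }   # ~ Hash#except
    params_s = JSON.generate(params)
    entry = if params_s.length > 1000
              { params_truncated: params_s[0..1000] + "[...]" }
            else
              { params: params }
            end
    entry.keys                                                 # => [:params_truncated]
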
diff --git a/sdk/cwl/test_with_arvbox.sh b/sdk/cwl/test_with_arvbox.sh
index 29109a4e6a61309b7968b7b81b5bb0eeca3499ea..3b16bbcc200819f04386db92b698db5b46f276c8 100755 (executable)
@@ -81,13 +81,13 @@ fi
 
 cat >/tmp/cwltest/arv-cwl-jobs <<EOF2
 #!/bin/sh
-exec arvados-cwl-runner --api=jobs \\\$@
+exec arvados-cwl-runner --api=jobs --compute-checksum \\\$@
 EOF2
 chmod +x /tmp/cwltest/arv-cwl-jobs
 
 cat >/tmp/cwltest/arv-cwl-containers <<EOF2
 #!/bin/sh
-exec arvados-cwl-runner --api=containers \\\$@
+exec arvados-cwl-runner --api=containers --compute-checksum \\\$@
 EOF2
 chmod +x /tmp/cwltest/arv-cwl-containers
 
diff --git a/services/api/Gemfile b/services/api/Gemfile
index 4d03da31b2275ab89e4c041867d01cf56cc851e0..5134fc4ce82d68bf4b00da91a4bedaa10e7f7e1e 100644 (file)
@@ -81,3 +81,5 @@ gem 'pg_power'
 gem 'puma'
 gem 'sshkey'
 gem 'safe_yaml'
+gem 'lograge'
+gem 'logstash-event'
diff --git a/services/api/Gemfile.lock b/services/api/Gemfile.lock
index 77e876e926f0991193960aa646cef805b133d556..391a26c2a859945635772864a8d0ef642c7d47be 100644 (file)
@@ -112,6 +112,11 @@ GEM
     launchy (2.4.3)
       addressable (~> 2.3)
     libv8 (3.16.14.3)
+    lograge (0.3.6)
+      actionpack (>= 3)
+      activesupport (>= 3)
+      railties (>= 3)
+    logstash-event (1.2.02)
     mail (2.5.4)
       mime-types (~> 1.16)
       treetop (~> 1.4.8)
@@ -237,6 +242,8 @@ DEPENDENCIES
   factory_girl_rails
   faye-websocket
   jquery-rails
+  lograge
+  logstash-event
   mocha
   multi_json
   oj
@@ -259,3 +266,6 @@ DEPENDENCIES
   therubyracer
   trollop
   uglifier (>= 1.0.3)
+
+BUNDLED WITH
+   1.12.1
diff --git a/services/api/app/controllers/arvados/v1/jobs_controller.rb b/services/api/app/controllers/arvados/v1/jobs_controller.rb
index 67963388639f9fb352ca42b0abff2a05e86d140c..0c68dc26c2ac9c3e32048b700d72d26fb1213bb5 100644 (file)
@@ -28,83 +28,96 @@ class Arvados::V1::JobsController < ApplicationController
       params[:find_or_create] = !resource_attrs.delete(:no_reuse)
     end
 
-    if params[:find_or_create]
-      return if false.equal?(load_filters_param)
-      if @filters.empty?  # Translate older creation parameters into filters.
-        @filters =
-          [["repository", "=", resource_attrs[:repository]],
-           ["script", "=", resource_attrs[:script]],
-           ["script_version", "not in git", params[:exclude_script_versions]],
-          ].reject { |filter| filter.last.nil? or filter.last.empty? }
-        if !params[:minimum_script_version].blank?
-          @filters << ["script_version", "in git",
-                       params[:minimum_script_version]]
-        else
-          add_default_git_filter("script_version", resource_attrs[:repository],
-                                 resource_attrs[:script_version])
-        end
-        if image_search = resource_attrs[:runtime_constraints].andand["docker_image"]
-          if image_tag = resource_attrs[:runtime_constraints]["docker_image_tag"]
-            image_search += ":#{image_tag}"
-          end
-          image_locator = Collection.
-            for_latest_docker_image(image_search).andand.portable_data_hash
-        else
-          image_locator = nil
-        end
-        @filters << ["docker_image_locator", "=", image_locator]
-        if sdk_version = resource_attrs[:runtime_constraints].andand["arvados_sdk_version"]
-          add_default_git_filter("arvados_sdk_version", "arvados", sdk_version)
-        end
-        begin
-          load_job_specific_filters
-        rescue ArgumentError => error
-          return send_error(error.message)
+    return super if !params[:find_or_create]
+    return if !load_filters_param
+
+    if @filters.empty?  # Translate older creation parameters into filters.
+      @filters =
+        [["repository", "=", resource_attrs[:repository]],
+         ["script", "=", resource_attrs[:script]],
+         ["script_version", "not in git", params[:exclude_script_versions]],
+        ].reject { |filter| filter.last.nil? or filter.last.empty? }
+      if !params[:minimum_script_version].blank?
+        @filters << ["script_version", "in git",
+                     params[:minimum_script_version]]
+      else
+        add_default_git_filter("script_version", resource_attrs[:repository],
+                               resource_attrs[:script_version])
+      end
+      if image_search = resource_attrs[:runtime_constraints].andand["docker_image"]
+        if image_tag = resource_attrs[:runtime_constraints]["docker_image_tag"]
+          image_search += ":#{image_tag}"
         end
+        image_locator = Collection.
+          for_latest_docker_image(image_search).andand.portable_data_hash
+      else
+        image_locator = nil
+      end
+      @filters << ["docker_image_locator", "=", image_locator]
+      if sdk_version = resource_attrs[:runtime_constraints].andand["arvados_sdk_version"]
+        add_default_git_filter("arvados_sdk_version", "arvados", sdk_version)
+      end
+      begin
+        load_job_specific_filters
+      rescue ArgumentError => error
+        return send_error(error.message)
+      end
+    end
+
+    # Check specified filters for some reasonableness.
+    filter_names = @filters.map { |f| f.first }.uniq
+    ["repository", "script"].each do |req_filter|
+      if not filter_names.include?(req_filter)
+        return send_error("#{req_filter} filter required")
       end
+    end
 
-      # Check specified filters for some reasonableness.
-      filter_names = @filters.map { |f| f.first }.uniq
-      ["repository", "script"].each do |req_filter|
-        if not filter_names.include?(req_filter)
-          return send_error("#{req_filter} filter required")
-        end
+    # Search for a reusable Job, and return it if found.
+    @objects = Job.
+      readable_by(current_user).
+      where('state = ? or (owner_uuid = ? and state in (?))',
+            Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
+      where('script_parameters_digest = ?', Job.sorted_hash_digest(resource_attrs[:script_parameters])).
+      where('nondeterministic is distinct from ?', true).
+      order('state desc, created_at') # prefer Running jobs over Queued
+    apply_filters
+    @object = nil
+    incomplete_job = nil
+    @objects.each do |j|
+      if j.state != Job::Complete
+        # We'll use this if we don't find a job that has completed
+        incomplete_job ||= j
+        next
       end
 
-      # Search for a reusable Job, and return it if found.
-      @objects = Job.readable_by(current_user)
-      apply_filters
-      @object = nil
-      incomplete_job = nil
-      @objects.each do |j|
-        if j.nondeterministic != true and
-            ["Queued", "Running", "Complete"].include?(j.state) and
-            j.script_parameters == resource_attrs[:script_parameters]
-          if j.state != "Complete" && j.owner_uuid == current_user.uuid
-            # We'll use this if we don't find a job that has completed
-            incomplete_job ||= j
-          else
-            if Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
-              # Record the first job in the list
-              if !@object
-                @object = j
-              end
-              # Ensure that all candidate jobs actually did produce the same output
-              if @object.output != j.output
-                @object = nil
-                break
-              end
-            end
-          end
-        end
-        @object ||= incomplete_job
-        if @object
-          return show
+      if @object == false
+        # We have already decided not to reuse any completed job
+        next
+      elsif @object
+        if @object.output != j.output
+          # If two matching jobs produced different outputs, run a new
+          # job (or use one that's already running/queued) instead of
+          # choosing one arbitrarily.
+          @object = false
         end
+        # ...and that's the only thing we need to do once we've chosen
+        # an @object to reuse.
+      elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
+        # As soon as the output we will end up returning (if any) is
+        # decided, check whether it will be visible to the user; if
+        # not, any further investigation of reusable jobs is futile.
+        return super
+      else
+        @object = j
       end
     end
 
-    super
+    @object ||= incomplete_job
+    if @object
+      show
+    else
+      super
+    end
   end
 
   def cancel
@@ -171,7 +184,7 @@ class Arvados::V1::JobsController < ApplicationController
     load_limit_offset_order_params
     load_where_param
     @where.merge!({state: Job::Queued})
-    return if false.equal?(load_filters_param)
+    return if !load_filters_param
     find_objects_for_index
     index
   end
@@ -293,6 +306,8 @@ class Arvados::V1::JobsController < ApplicationController
     rescue ArgumentError => error
       send_error(error.message)
       false
+    else
+      true
     end
   end
 end
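
The rewritten create action first narrows candidates in SQL (matching script_parameters_digest, excluding nondeterministic jobs, preferring Running over Queued), then walks the result once to pick a job to reuse. A condensed stand-alone sketch of that selection loop, using a hypothetical Candidate struct in place of Job records and Collection visibility checks:

    Candidate = Struct.new(:state, :output, :output_readable)

    def pick_reusable(candidates)
      chosen = nil
      incomplete = nil
      candidates.each do |j|
        if j.state != 'Complete'
          incomplete ||= j            # remember a queued/running fallback
          next
        end
        next if chosen == false       # already decided not to reuse a completed job
        if chosen
          chosen = false if chosen.output != j.output  # conflicting outputs: start fresh
        elsif !j.output_readable
          return nil                  # reusable output would be invisible to the caller
        else
          chosen = j                  # first acceptable completed job
        end
      end
      chosen || incomplete            # nil means "create a new job"
    end

With that ordering, two completed candidates that disagree on output are never reused arbitrarily, and a queued or running match is only used when no consistent completed output exists.
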
diff --git a/services/api/app/models/job.rb b/services/api/app/models/job.rb
index 0ed53535778335d11b2b12d3007058e9ad76adfc..0aaa0bd3f90de2351d01d37b86cb946084eff489 100644 (file)
@@ -11,6 +11,7 @@ class Job < ArvadosModel
   after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
   before_validation :set_priority
   before_validation :update_state_from_old_state_attrs
+  before_validation :update_script_parameters_digest
   validate :ensure_script_version_is_commit
   validate :find_docker_image_locator
   validate :find_arvados_sdk_version
@@ -105,8 +106,27 @@ class Job < ArvadosModel
     end
   end
 
+  def update_script_parameters_digest
+    self.script_parameters_digest = self.class.sorted_hash_digest(script_parameters)
+  end
+
+  def self.searchable_columns operator
+    super - ["script_parameters_digest"]
+  end
+
   protected
 
+  def self.sorted_hash_digest h
+    Digest::MD5.hexdigest(Oj.dump(deep_sort_hash(h)))
+  end
+
+  def self.deep_sort_hash h
+    return h unless h.is_a? Hash
+    h.sort.collect do |k, v|
+      [k, deep_sort_hash(v)]
+    end.to_h
+  end
+
   def foreign_key_attributes
     super + %w(output log)
   end
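
sorted_hash_digest above makes the stored digest independent of hash key order by recursively sorting nested hashes before serializing. A minimal stand-alone sketch (JSON standing in for Oj.dump, so the literal digest may differ from what the model stores):

    require 'digest/md5'
    require 'json'                      # stand-in for Oj.dump

    def deep_sort_hash(h)
      return h unless h.is_a? Hash
      h.sort.collect { |k, v| [k, deep_sort_hash(v)] }.to_h
    end

    a = { 'a' => 'a', 'ddee' => { 'd' => 'd', 'e' => 'e' } }
    b = { 'ddee' => { 'e' => 'e', 'd' => 'd' }, 'a' => 'a' }
    Digest::MD5.hexdigest(JSON.generate(deep_sort_hash(a))) ==
      Digest::MD5.hexdigest(JSON.generate(deep_sort_hash(b)))  # => true
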
diff --git a/services/api/config/initializers/load_config.rb b/services/api/config/initializers/load_config.rb
index de9770d7b7d1b45d7466b87b93f1e2fee9e9afab..76234d3e4b0f6ab148f73cb7a1242af1eacefb6a 100644 (file)
@@ -28,7 +28,7 @@ $application_config = {}
   path = "#{::Rails.root.to_s}/config/#{cfgfile}.yml"
   if File.exists? path
     yaml = ERB.new(IO.read path).result(binding)
-    confs = YAML.load(yaml)
+    confs = YAML.load(yaml, deserialize_symbols: true)
     # Ignore empty YAML file:
     next if confs == false
     $application_config.merge!(confs['common'] || {})
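
The added deserialize_symbols: true option comes from the safe_yaml gem (declared in the API server's Gemfile above), which does not turn YAML scalars like :foo back into Ruby symbols unless asked; the trusted config files need that behaviour. A hedged illustration, assuming safe_yaml is loaded and its safe mode is the default:

    require 'yaml'
    require 'safe_yaml'                                   # gem listed in the Gemfile above
    SafeYAML::OPTIONS[:default_mode] = :safe              # assumption: app-level default
    YAML.load("foo: :bar\n")                              # symbols are not deserialized by default
    YAML.load("foo: :bar\n", deserialize_symbols: true)   # => {"foo"=>:bar}
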
diff --git a/services/api/config/initializers/lograge.rb b/services/api/config/initializers/lograge.rb
new file mode 100644 (file)
index 0000000..e5bd200
--- /dev/null
@@ -0,0 +1,14 @@
+Server::Application.configure do
+  config.lograge.enabled = true
+  config.lograge.formatter = Lograge::Formatters::Logstash.new
+  config.lograge.custom_options = lambda do |event|
+    exceptions = %w(controller action format id)
+    params = event.payload[:params].except(*exceptions)
+    params_s = Oj.dump(params)
+    if params_s.length > 1000
+      { params_truncated: params_s[0..1000] + "[...]" }
+    else
+      { params: params }
+    end
+  end
+end
diff --git a/services/api/db/migrate/20160819195557_add_script_parameters_digest_to_jobs.rb b/services/api/db/migrate/20160819195557_add_script_parameters_digest_to_jobs.rb
new file mode 100644 (file)
index 0000000..8ed3cfe
--- /dev/null
@@ -0,0 +1,6 @@
+class AddScriptParametersDigestToJobs < ActiveRecord::Migration
+  def change
+    add_column :jobs, :script_parameters_digest, :string
+    add_index :jobs, :script_parameters_digest
+  end
+end
diff --git a/services/api/db/migrate/20160819195725_populate_script_parameters_digest.rb b/services/api/db/migrate/20160819195725_populate_script_parameters_digest.rb
new file mode 100644 (file)
index 0000000..9f6c3ee
--- /dev/null
@@ -0,0 +1,21 @@
+class PopulateScriptParametersDigest < ActiveRecord::Migration
+  def up
+    done = false
+    while !done
+      done = true
+      Job.
+        where('script_parameters_digest is null').
+        select([:id, :script_parameters, :script_parameters_digest]).
+        limit(200).
+        each do |j|
+        done = false
+        Job.
+          where('id=? or script_parameters=?', j.id, j.script_parameters.to_yaml).
+          update_all(script_parameters_digest: j.update_script_parameters_digest)
+      end
+    end
+  end
+
+  def down
+  end
+end
diff --git a/services/api/db/structure.sql b/services/api/db/structure.sql
index 1d7a2c67921061312e377d6e30cfc6fa4da576de..bda4e5de872607b4d9c29786b4054889c760d6fb 100644 (file)
@@ -539,7 +539,8 @@ CREATE TABLE jobs (
     description character varying(524288),
     state character varying(255),
     arvados_sdk_version character varying(255),
-    components text
+    components text,
+    script_parameters_digest character varying(255)
 );
 
 
@@ -1852,6 +1853,13 @@ CREATE INDEX index_jobs_on_owner_uuid ON jobs USING btree (owner_uuid);
 CREATE INDEX index_jobs_on_script ON jobs USING btree (script);
 
 
+--
+-- Name: index_jobs_on_script_parameters_digest; Type: INDEX; Schema: public; Owner: -; Tablespace: 
+--
+
+CREATE INDEX index_jobs_on_script_parameters_digest ON jobs USING btree (script_parameters_digest);
+
+
 --
 -- Name: index_jobs_on_started_at; Type: INDEX; Schema: public; Owner: -; Tablespace: 
 --
@@ -2674,4 +2682,8 @@ INSERT INTO schema_migrations (version) VALUES ('20160509143250');
 
 INSERT INTO schema_migrations (version) VALUES ('20160808151459');
 
-INSERT INTO schema_migrations (version) VALUES ('20160808151559');
\ No newline at end of file
+INSERT INTO schema_migrations (version) VALUES ('20160808151559');
+
+INSERT INTO schema_migrations (version) VALUES ('20160819195557');
+
+INSERT INTO schema_migrations (version) VALUES ('20160819195725');
\ No newline at end of file
diff --git a/services/api/test/fixtures/jobs.yml b/services/api/test/fixtures/jobs.yml
index 95cb967ffc1a9c71b418e2949b063ed9f419d4c1..584cc23f391e3613bba1a6b6d009cb20e86561eb 100644 (file)
@@ -23,6 +23,7 @@ running:
     done: 1
   runtime_constraints: {}
   state: Running
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 running_cancelled:
   uuid: zzzzz-8i9sb-4cf0nhn6xte809j
@@ -49,6 +50,7 @@ running_cancelled:
     done: 1
   runtime_constraints: {}
   state: Cancelled
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 uses_nonexistent_script_version:
   uuid: zzzzz-8i9sb-7m339pu0x9mla88
@@ -75,6 +77,7 @@ uses_nonexistent_script_version:
     done: 1
   runtime_constraints: {}
   state: Complete
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 foobar:
   uuid: zzzzz-8i9sb-aceg2bnq7jt7kon
@@ -103,6 +106,7 @@ foobar:
     done: 1
   runtime_constraints: {}
   state: Complete
+  script_parameters_digest: 03a43a7d84f7fb022467b876c2950acd
 
 barbaz:
   uuid: zzzzz-8i9sb-cjs4pklxxjykyuq
@@ -131,6 +135,7 @@ barbaz:
     done: 1
   runtime_constraints: {}
   state: Complete
+  script_parameters_digest: c3d19d3ec50ac0914baa56b149640f73
 
 runningbarbaz:
   uuid: zzzzz-8i9sb-cjs4pklxxjykyuj
@@ -159,6 +164,7 @@ runningbarbaz:
     done: 0
   runtime_constraints: {}
   state: Running
+  script_parameters_digest: c3d19d3ec50ac0914baa56b149640f73
 
 previous_job_run:
   uuid: zzzzz-8i9sb-cjs4pklxxjykqqq
@@ -175,6 +181,7 @@ previous_job_run:
   log: d41d8cd98f00b204e9800998ecf8427e+0
   output: ea10d51bcf88862dbcc36eb292017dfd+45
   state: Complete
+  script_parameters_digest: a5f03bbfb8ba88a2efe4a7852671605b
 
 previous_ancient_job_run:
   uuid: zzzzz-8i9sb-ahd7cie8jah9qui
@@ -191,6 +198,7 @@ previous_ancient_job_run:
   log: d41d8cd98f00b204e9800998ecf8427e+0
   output: ea10d51bcf88862dbcc36eb292017dfd+45
   state: Complete
+  script_parameters_digest: 174dd339d44f2b259fadbab7ebdb8df9
 
 previous_docker_job_run:
   uuid: zzzzz-8i9sb-k6emstgk4kw4yhi
@@ -208,6 +216,7 @@ previous_docker_job_run:
   output: ea10d51bcf88862dbcc36eb292017dfd+45
   docker_image_locator: fa3c1a9cb6783f85f2ecda037e07b8c3+167
   state: Complete
+  script_parameters_digest: a5f03bbfb8ba88a2efe4a7852671605b
 
 previous_ancient_docker_image_job_run:
   uuid: zzzzz-8i9sb-t3b460aolxxuldl
@@ -225,6 +234,7 @@ previous_ancient_docker_image_job_run:
   output: ea10d51bcf88862dbcc36eb292017dfd+45
   docker_image_locator: b519d9cb706a29fc7ea24dbea2f05851+93
   state: Complete
+  script_parameters_digest: 174dd339d44f2b259fadbab7ebdb8df9
 
 previous_job_run_with_arvados_sdk_version:
   uuid: zzzzz-8i9sb-eoo0321or2dw2jg
@@ -244,6 +254,7 @@ previous_job_run_with_arvados_sdk_version:
   success: true
   output: ea10d51bcf88862dbcc36eb292017dfd+45
   state: Complete
+  script_parameters_digest: a5f03bbfb8ba88a2efe4a7852671605b
 
 previous_job_run_no_output:
   uuid: zzzzz-8i9sb-cjs4pklxxjykppp
@@ -258,6 +269,7 @@ previous_job_run_no_output:
   success: true
   output: ~
   state: Complete
+  script_parameters_digest: 174dd339d44f2b259fadbab7ebdb8df9
 
 previous_job_run_superseded_by_hash_branch:
   # This supplied_script_version is a branch name with later commits.
@@ -272,6 +284,7 @@ previous_job_run_superseded_by_hash_branch:
   success: true
   output: d41d8cd98f00b204e9800998ecf8427e+0
   state: Complete
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 nondeterminisic_job_run:
   uuid: zzzzz-8i9sb-cjs4pklxxjykyyy
@@ -286,6 +299,7 @@ nondeterminisic_job_run:
   success: true
   nondeterministic: true
   state: Complete
+  script_parameters_digest: a5f03bbfb8ba88a2efe4a7852671605b
 
 nearly_finished_job:
   uuid: zzzzz-8i9sb-2gx6rz0pjl033w3
@@ -307,6 +321,7 @@ nearly_finished_job:
     done: 0
   runtime_constraints: {}
   state: Complete
+  script_parameters_digest: 7ea26d58a79b7f5db9f90fb1e33d3006
 
 queued:
   uuid: zzzzz-8i9sb-grx15v5mjnsyxk7
@@ -329,6 +344,7 @@ queued:
   tasks_summary: {}
   runtime_constraints: {}
   state: Queued
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 # A job with a log collection that can be parsed by the log viewer.
 job_with_real_log:
@@ -338,6 +354,7 @@ job_with_real_log:
   log: 0b9a7787660e1fce4a93f33e01376ba6+81
   script_version: 7def43a4d3f20789dda4700f703b5514cc3ed250
   state: Complete
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 cancelled:
   uuid: zzzzz-8i9sb-4cf0abc123e809j
@@ -362,6 +379,7 @@ cancelled:
     done: 1
   runtime_constraints: {}
   state: Cancelled
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 job_in_subproject:
   uuid: zzzzz-8i9sb-subprojectjob01
@@ -372,6 +390,7 @@ job_in_subproject:
   script: hash
   script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
   state: Complete
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 running_will_be_completed:
   uuid: zzzzz-8i9sb-rshmckwoma9pjh8
@@ -396,6 +415,7 @@ running_will_be_completed:
     done: 1
   runtime_constraints: {}
   state: Running
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 graph_stage1:
   uuid: zzzzz-8i9sb-graphstage10000
@@ -405,6 +425,7 @@ graph_stage1:
   script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
   state: Complete
   output: fa7aeb5140e2848d39b416daeef4ffc5+45
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
 
 graph_stage2:
   uuid: zzzzz-8i9sb-graphstage20000
@@ -417,6 +438,7 @@ graph_stage2:
     input: fa7aeb5140e2848d39b416daeef4ffc5+45
     input2: "stuff"
   output: 65b17c95fdbc9800fc48acda4e9dcd0b+93
+  script_parameters_digest: 4900033ec5cfaf8a63566f3664aeaa70
 
 graph_stage3:
   uuid: zzzzz-8i9sb-graphstage30000
@@ -429,6 +451,7 @@ graph_stage3:
     input: fa7aeb5140e2848d39b416daeef4ffc5+45
     input2: "stuff2"
   output: ea10d51bcf88862dbcc36eb292017dfd+45
+  script_parameters_digest: 02a085407e751d00b5dc88f1bd5e8247
 
 job_with_latest_version:
   uuid: zzzzz-8i9sb-nj8ioxnrvjtyk2b
@@ -458,6 +481,7 @@ job_with_latest_version:
     done: 1
   runtime_constraints: {}
   state: Complete
+  script_parameters_digest: 03a43a7d84f7fb022467b876c2950acd
 
 running_job_in_publicly_accessible_project:
   uuid: zzzzz-8i9sb-n7omg50bvt0m1nf
@@ -470,6 +494,7 @@ running_job_in_publicly_accessible_project:
   script_parameters:
     input: fa7aeb5140e2848d39b416daeef4ffc5+45
     input2: "stuff2"
+  script_parameters_digest: 02a085407e751d00b5dc88f1bd5e8247
 
 completed_job_in_publicly_accessible_project:
   uuid: zzzzz-8i9sb-jyq01m7in1jlofj
@@ -484,6 +509,7 @@ completed_job_in_publicly_accessible_project:
     input2: "stuff2"
   log: zzzzz-4zz18-4en62shvi99lxd4
   output: b519d9cb706a29fc7ea24dbea2f05851+93
+  script_parameters_digest: 02a085407e751d00b5dc88f1bd5e8247
 
 job_in_publicly_accessible_project_but_other_objects_elsewhere:
   uuid: zzzzz-8i9sb-jyq01muyhgr4ofj
@@ -498,6 +524,7 @@ job_in_publicly_accessible_project_but_other_objects_elsewhere:
     input2: "stuff2"
   log: zzzzz-4zz18-fy296fx3hot09f7
   output: zzzzz-4zz18-bv31uwvy3neko21
+  script_parameters_digest: 02a085407e751d00b5dc88f1bd5e8247
 
 running_job_with_components:
   uuid: zzzzz-8i9sb-with2components
@@ -527,3 +554,4 @@ running_job_with_components:
   components:
     component1: zzzzz-8i9sb-jyq01m7in1jlofj
     component2: zzzzz-d1hrv-partdonepipelin
+  script_parameters_digest: 99914b932bd37a50b983c5e7c90ae93b
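
The value 99914b932bd37a50b983c5e7c90ae93b that recurs throughout these fixtures is the digest of an empty script_parameters hash: Oj serializes {} as the two-character string "{}", and its MD5 is exactly that hex value:

    require 'digest/md5'
    Digest::MD5.hexdigest('{}')   # => "99914b932bd37a50b983c5e7c90ae93b"
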
diff --git a/services/api/test/functional/arvados/v1/job_reuse_controller_test.rb b/services/api/test/functional/arvados/v1/job_reuse_controller_test.rb
index 64d559107c19257ca7a954d323f42cba60c7a9c2..ab79d7bf80d2ac47bf6f94ef39d875a043ee7356 100644 (file)
@@ -19,8 +19,8 @@ class Arvados::V1::JobReuseControllerTest < ActionController::TestCase
       script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
       repository: "active/foo",
       script_parameters: {
-        input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
-        an_integer: '1'
+        an_integer: '1',
+        input: 'fa7aeb5140e2848d39b416daeef4ffc5+45'
       }
     }
     assert_response :success
diff --git a/services/api/test/unit/job_test.rb b/services/api/test/unit/job_test.rb
index 832338a3cc5de1ce742eb5088992ba91d4fe5fdc..1bc8035db67ce414fc23a3e84c9818e266d527d7 100644 (file)
@@ -440,4 +440,20 @@ class JobTest < ActiveSupport::TestCase
     assert_equal('077ba2ad3ea24a929091a9e6ce545c93199b8e57',
                  internal_tag(j.uuid))
   end
+
+  test 'script_parameters_digest is independent of key order' do
+    j1 = Job.new(job_attrs(script_parameters: {'a' => 'a', 'ddee' => {'d' => 'd', 'e' => 'e'}}))
+    j2 = Job.new(job_attrs(script_parameters: {'ddee' => {'e' => 'e', 'd' => 'd'}, 'a' => 'a'}))
+    assert j1.valid?
+    assert j2.valid?
+    assert_equal(j1.script_parameters_digest, j2.script_parameters_digest)
+  end
+
+  test 'job fixtures have correct script_parameters_digest' do
+    Job.all.each do |j|
+      d = j.script_parameters_digest
+      assert_equal(j.update_script_parameters_digest, d,
+                   "wrong script_parameters_digest for #{j.uuid}")
+    end
+  end
 end