Merge branch 'master' into 3140-project-content-tabs
author radhika <radhika@curoverse.com>
Mon, 7 Jul 2014 18:58:01 +0000 (14:58 -0400)
committer radhika <radhika@curoverse.com>
Mon, 7 Jul 2014 18:58:01 +0000 (14:58 -0400)
30 files changed:
apps/workbench/app/helpers/provenance_helper.rb
apps/workbench/app/views/collections/_show_chooser_preview.html.erb
apps/workbench/app/views/collections/_show_files.html.erb
crunch_scripts/arvados-bcbio-nextgen.py [new file with mode: 0755]
crunch_scripts/run-command
doc/user/tutorials/running-external-program.html.textile.liquid
doc/user/tutorials/tutorial-firstscript.html.textile.liquid
doc/user/tutorials/tutorial-new-pipeline.html.textile.liquid
docker/bcbio-nextgen/Dockerfile [new file with mode: 0644]
docker/build_tools/Makefile
docker/java-bwa-samtools/Dockerfile [moved from docker/bwa-samtools/Dockerfile with 90% similarity]
docker/jobs/Dockerfile
sdk/cli/bin/arv-run-pipeline-instance
sdk/cli/bin/crunch-job
services/api/Gemfile
services/api/Gemfile.lock
services/api/app/models/job.rb
services/api/lib/load_param.rb
services/api/test/functional/arvados/v1/commits_controller_test.rb
services/api/test/functional/arvados/v1/git_setup.rb [deleted file]
services/api/test/functional/arvados/v1/job_reuse_controller_test.rb
services/api/test/functional/arvados/v1/jobs_controller_test.rb
services/api/test/helpers/git_test_helper.rb [new file with mode: 0644]
services/api/test/integration/crunch_dispatch_test.rb
services/api/test/integration/serialized_encoding_test.rb
services/api/test/integration/websocket_test.rb
services/api/test/unit/job_test.rb
services/keep/src/keep/keep.go
services/keep/src/keep/volume.go
services/keep/src/keep/volume_unix.go

index 16f68dc620da793f04c43f7b0adc74364e6ed4dc..1cdf53ae1e9dc54d008d6a3f7d9149f4578757eb 100644 (file)
@@ -8,7 +8,7 @@ module ProvenanceHelper
       @jobs = {}
       @node_extra = {}
     end
-    
+
     def self.collection_uuid(uuid)
       m = CollectionsHelper.match(uuid)
       if m
@@ -23,12 +23,12 @@ module ProvenanceHelper
     end
 
     def url_for u
-      p = { :host => @opts[:request].host, 
+      p = { :host => @opts[:request].host,
         :port => @opts[:request].port,
         :protocol => @opts[:request].protocol }
       p.merge! u
-      Rails.application.routes.url_helpers.url_for (p)      
-    end 
+      Rails.application.routes.url_helpers.url_for (p)
+    end
 
     def determine_fillcolor(n)
       fillcolor = %w(aaaaaa aaffaa aaaaff aaaaaa ffaaaa)[n || 0] || 'aaaaaa'
@@ -41,27 +41,27 @@ module ProvenanceHelper
 
       rsc = ArvadosBase::resource_class_for_uuid uuid.to_s
       if rsc
-        href = url_for ({:controller => rsc.to_s.tableize, 
-                          :action => :show, 
+        href = url_for ({:controller => rsc.to_s.tableize,
+                          :action => :show,
                           :id => uuid.to_s })
-      
+
         #"\"#{uuid}\" [label=\"#{rsc}\\n#{uuid}\",href=\"#{href}\"];\n"
         if rsc == Collection
           if Collection.is_empty_blob_locator? uuid.to_s
             # special case
             return "\"#{uuid}\" [label=\"(empty collection)\"];\n"
           end
-          if @pdata[uuid] 
+          if @pdata[uuid]
             if @pdata[uuid][:name]
               return "\"#{uuid}\" [label=\"#{@pdata[uuid][:name]}\",href=\"#{href}\",shape=oval,#{bgcolor}];\n"
-            else              
+            else
               files = nil
               if @pdata[uuid].respond_to? :files
                 files = @pdata[uuid].files
               elsif @pdata[uuid][:files]
                 files = @pdata[uuid][:files]
               end
-              
+
               if files
                 i = 0
                 label = ""
@@ -78,7 +78,7 @@ module ProvenanceHelper
                 }.andand.join ","
                 return "\"#{uuid}\" [label=\"#{label}\",href=\"#{href}\",shape=oval,#{bgcolor},#{extra_s}];\n"
               end
-            end  
+            end
           end
         end
         return "\"#{uuid}\" [label=\"#{rsc}\",href=\"#{href}\",#{bgcolor}];\n"
@@ -87,10 +87,11 @@ module ProvenanceHelper
     end
 
     def job_uuid(job)
+      d = Digest::MD5.hexdigest(job[:script_parameters].to_json)
       if @opts[:combine_jobs] == :script_only
-        uuid = "#{job[:script]}"
+        uuid = "#{job[:script]}_#{d}"
       elsif @opts[:combine_jobs] == :script_and_version
-        uuid = "#{job[:script]}_#{job[:script_version]}"
+        uuid = "#{job[:script]}_#{job[:script_version]}_#{d}"
       else
         uuid = "#{job[:uuid]}"
       end
@@ -131,20 +132,25 @@ module ProvenanceHelper
       when Array
         i = 0
         node = ""
+        count = 0
         sp.each do |v|
           if GenerateGraph::collection_uuid(v)
             gr += script_param_edges(job, "#{prefix}[#{i}]", v)
           elsif @opts[:all_script_parameters]
-            node += "', '" unless node == ""
+            t = "#{v}"
+            nl = (if (count+t.length) > 60 then "\\n" else " " end)
+            count = 0 if (count+t.length) > 60
+            node += "',#{nl}'" unless node == ""
             node = "['" if node == ""
-            node += "#{v}"
+            node += t
+            count += t.length
           end
           i += 1
         end
         unless node == ""
           node += "']"
           gr += "\"#{node}\" [label=\"#{node}\"];\n"
-          gr += edge(job_uuid(job), node, {:label => prefix})        
+          gr += edge(job_uuid(job), node, {:label => prefix})
         end
       when String
         return '' if sp.empty?
@@ -171,7 +177,7 @@ module ProvenanceHelper
         return ""
       end
 
-      if not @pdata[uuid] then 
+      if not @pdata[uuid] then
         return describe_node(uuid)
       else
         @visited[uuid] = true
@@ -221,12 +227,12 @@ module ProvenanceHelper
 
       @pdata.each do |k, link|
         if link[:head_uuid] == uuid.to_s and link[:link_class] == "provenance"
-          href = url_for ({:controller => Link.to_s.tableize, 
-                            :action => :show, 
+          href = url_for ({:controller => Link.to_s.tableize,
+                            :action => :show,
                             :id => link[:uuid] })
 
           gr += describe_node(link[:tail_uuid])
-          gr += edge(link[:head_uuid], link[:tail_uuid], {:label => link[:name], :href => href}) 
+          gr += edge(link[:head_uuid], link[:tail_uuid], {:label => link[:name], :href => href})
           gr += generate_provenance_edges(link[:tail_uuid])
         end
       end
@@ -237,11 +243,11 @@ module ProvenanceHelper
     def describe_jobs
       gr = ""
       @jobs.each do |k, v|
-        href = url_for ({:controller => Job.to_s.tableize, 
+        href = url_for ({:controller => Job.to_s.tableize,
                           :action => :index })
 
         gr += "\"#{k}\" [href=\"#{href}?"
-        
+
         n = 0
         v.each do |u|
           gr += "uuid%5b%5d=#{u[:uuid]}&"
@@ -249,7 +255,7 @@ module ProvenanceHelper
         end
 
         gr += "\",label=\""
-        
+
         if @opts[:combine_jobs] == :script_only
           gr += "#{v[0][:script]}"
         elsif @opts[:combine_jobs] == :script_and_version
@@ -276,7 +282,7 @@ module ProvenanceHelper
     unless pdata.is_a? Hash
       raise "create_provenance_graph accepts Array or Hash for pdata only, pdata is #{pdata.class}"
     end
-    
+
     gr = """strict digraph {
 node [fontsize=10,shape=box];
 edge [fontsize=10];
index 4477357e724ffcb035a5b365a1385d3285795740..4a0a8377cd9f20e658bca395b207a346356cc63b 100644 (file)
@@ -1,2 +1,2 @@
 <%= render partial: "show_source_summary" %>
-<%= render partial: "show_files" %>
+<%= render partial: "show_files", locals: {no_checkboxes: true} %>
index c461163651c570ac635c69a16e91e3eabcf07b23..8576d4044069266e971023eb635f333482de14b8 100644 (file)
@@ -27,6 +27,7 @@
        <div class="collection_files_row">
         <div class="collection_files_buttons pull-right">
           <%= raw(human_readable_bytes_html(size)) %>
+          <% if !defined? no_checkboxes or !no_checkboxes %>
           <%= check_box_tag 'uuids[]', "#{@object.uuid}/#{file_path}", false, {
                 :class => 'persistent-selection',
                 :friendly_type => "File",
@@ -35,6 +36,7 @@
                                  uuid: @object.uuid, file: file_path),
                 :title => "Include #{file_path} in your selections",
               } %>
+          <% end %>
           <%= link_to(raw('<i class="fa fa-search"></i>'),
                       link_params.merge(disposition: 'inline'),
                       {title: "View #{file_path}", class: "btn btn-info btn-sm"}) %>
diff --git a/crunch_scripts/arvados-bcbio-nextgen.py b/crunch_scripts/arvados-bcbio-nextgen.py
new file mode 100755 (executable)
index 0000000..d53e069
--- /dev/null
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+
+import arvados
+import subprocess
+import subst
+import shutil
+import os
+
+if len(arvados.current_task()['parameters']) > 0:
+    p = arvados.current_task()['parameters']
+else:
+    p = arvados.current_job()['script_parameters']
+
+t = arvados.current_task().tmpdir
+
+os.unlink("/usr/local/share/bcbio-nextgen/galaxy")
+os.mkdir("/usr/local/share/bcbio-nextgen/galaxy")
+shutil.copy("/usr/local/share/bcbio-nextgen/config/bcbio_system.yaml", "/usr/local/share/bcbio-nextgen/galaxy")
+
+with open("/usr/local/share/bcbio-nextgen/galaxy/tool_data_table_conf.xml", "w") as f:
+    f.write('''<tables>
+    <!-- Locations of indexes in the BWA mapper format -->
+    <table name="bwa_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bwa_index.loc" />
+    </table>
+    <!-- Locations of indexes in the Bowtie2 mapper format -->
+    <table name="bowtie2_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bowtie2_indices.loc" />
+    </table>
+    <!-- Locations of indexes in the Bowtie2 mapper format for TopHat2 to use -->
+    <table name="tophat2_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bowtie2_indices.loc" />
+    </table>
+    <!-- Location of SAMTools indexes and other files -->
+    <table name="sam_fa_indexes" comment_char="#">
+        <columns>index, value, path</columns>
+        <file path="tool-data/sam_fa_indices.loc" />
+    </table>
+    <!-- Location of Picard dict file and other files -->
+    <table name="picard_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/picard_index.loc" />
+    </table>
+    <!-- Location of Picard dict files valid for GATK -->
+    <table name="gatk_picard_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/gatk_sorted_picard_index.loc" />
+    </table>
+</tables>
+''')
+
+os.mkdir("/usr/local/share/bcbio-nextgen/galaxy/tool-data")
+
+with open("/usr/local/share/bcbio-nextgen/galaxy/tool-data/bowtie2_indices.loc", "w") as f:
+    f.write(subst.do_substitution(p, "GRCh37\tGRCh37\tHuman (GRCh37)\t$(dir $(bowtie2_indices))\n"))
+
+with open("/usr/local/share/bcbio-nextgen/galaxy/tool-data/bwa_index.loc", "w") as f:
+    f.write(subst.do_substitution(p, "GRCh37\tGRCh37\tHuman (GRCh37)\t$(file $(bwa_index))\n"))
+
+with open("/usr/local/share/bcbio-nextgen/galaxy/tool-data/gatk_sorted_picard_index.loc", "w") as f:
+    f.write(subst.do_substitution(p, "GRCh37\tGRCh37\tHuman (GRCh37)\t$(file $(gatk_sorted_picard_index))\n"))
+
+with open("/usr/local/share/bcbio-nextgen/galaxy/tool-data/picard_index.loc", "w") as f:
+    f.write(subst.do_substitution(p, "GRCh37\tGRCh37\tHuman (GRCh37)\t$(file $(picard_index))\n"))
+
+with open("/usr/local/share/bcbio-nextgen/galaxy/tool-data/sam_fa_indices.loc", "w") as f:
+    f.write(subst.do_substitution(p, "index\tGRCh37\t$(file $(sam_fa_indices))\n"))
+
+with open("/tmp/crunch-job/freebayes-variant.yaml", "w") as f:
+    f.write('''
+# Template for whole genome Illumina variant calling with FreeBayes
+# This is a GATK-free pipeline without post-alignment BAM pre-processing
+# (recalibration and realignment)
+---
+details:
+  - analysis: variant2
+    genome_build: GRCh37
+    # to do multi-sample variant calling, assign samples the same metadata / batch
+    # metadata:
+    #   batch: your-arbitrary-batch-name
+    algorithm:
+      aligner: bwa
+      mark_duplicates: true
+      recalibrate: false
+      realign: false
+      variantcaller: freebayes
+      platform: illumina
+      quality_format: Standard
+      # for targeted projects, set the region
+      # variant_regions: /path/to/your.bed
+''')
+
+os.chdir(arvados.current_task().tmpdir)
+
+rcode = subprocess.call(["bcbio_nextgen.py", "--workflow", "template", "/tmp/crunch-job/freebayes-variant.yaml", "project1",
+                         subst.do_substitution(p, "$(file $(R1))"),
+                         subst.do_substitution(p, "$(file $(R2))")])
+
+os.chdir("project1/work")
+
+os.symlink("/usr/local/share/bcbio-nextgen/galaxy/tool-data", "tool-data")
+
+rcode = subprocess.call(["bcbio_nextgen.py", "../config/project1.yaml", "-n", os.environ['CRUNCH_NODE_SLOTS']])
+
+print("run-command: completed with exit code %i (%s)" % (rcode, "success" if rcode == 0 else "failed"))
+
+if rcode == 0:
+    os.chdir("../final")
+
+    print("arvados-bcbio-nextgen: the following output files will be saved to keep:")
+
+    subprocess.call(["find", ".", "-type", "f", "-printf", "arvados-bcbio-nextgen: %12.12s %h/%f\\n"])
+
+    print("arvados-bcbio-nextgen: start writing output to keep")
+
+    done = False
+    while not done:
+        try:
+            out = arvados.CollectionWriter()
+            out.write_directory_tree(".", max_manifest_depth=0)
+            outuuid = out.finish()
+            api.job_tasks().update(uuid=arvados.current_task()['uuid'],
+                                                 body={
+                                                     'output':outuuid,
+                                                     'success': (rcode == 0),
+                                                     'progress':1.0
+                                                 }).execute()
+            done = True
+        except Exception as e:
+            print("arvados-bcbio-nextgen: caught exception: {}".format(e))
+            time.sleep(5)
+
+sys.exit(rcode)
index 528baab4c18c8f0ea23360719c36b6a1605c9f2e..268c038ee9a2fd007375bd25d4099541c6601799 100755 (executable)
@@ -7,11 +7,14 @@ import subprocess
 import sys
 import shutil
 import subst
+import time
 
 os.umask(0077)
 
 t = arvados.current_task().tmpdir
 
+api = arvados.api('v1')
+
 os.chdir(arvados.current_task().tmpdir)
 os.mkdir("tmpdir")
 os.mkdir("output")
@@ -34,8 +37,12 @@ def sub_link(v):
 def sub_tmpdir(v):
     return os.path.join(arvados.current_task().tmpdir, 'tmpdir')
 
+def sub_cores(v):
+     return os.environ['CRUNCH_NODE_SLOTS']
+
 subst.default_subs["link "] = sub_link
 subst.default_subs["tmpdir"] = sub_tmpdir
+subst.default_subs["node.cores"] = sub_cores
 
 rcode = 1
 
@@ -50,24 +57,40 @@ try:
         stdoutname = subst.do_substitution(p, p["stdout"])
         stdoutfile = open(stdoutname, "wb")
 
-    print("Running command: {}{}".format(' '.join(cmd), (" > " + stdoutname) if stdoutname != None else ""))
+    print("run-command: {}{}".format(' '.join(cmd), (" > " + stdoutname) if stdoutname != None else ""))
 
     rcode = subprocess.call(cmd, stdout=stdoutfile)
 
+    print("run-command: completed with exit code %i (%s)" % (rcode, "success" if rcode == 0 else "failed"))
+
 except Exception as e:
-    print("Caught exception {}".format(e))
+    print("run-command: caught exception: {}".format(e))
 
 finally:
     for l in links:
         os.unlink(l)
 
-    out = arvados.CollectionWriter()
-    out.write_directory_tree(".", max_manifest_depth=0)
-    arvados.current_task().set_output(out.finish())
-
-if rcode == 0:
-    os.chdir("..")
-    shutil.rmtree("tmpdir")
-    shutil.rmtree("output")
+    print("run-command: the following output files will be saved to keep:")
+
+    subprocess.call(["find", ".", "-type", "f", "-printf", "run-command: %12.12s %h/%f\\n"])
+
+    print("run-command: start writing output to keep")
+
+    done = False
+    while not done:
+        try:
+            out = arvados.CollectionWriter()
+            out.write_directory_tree(".", max_manifest_depth=0)
+            outuuid = out.finish()
+            api.job_tasks().update(uuid=arvados.current_task()['uuid'],
+                                                 body={
+                                                     'output':outuuid,
+                                                     'success': (rcode == 0),
+                                                     'progress':1.0
+                                                 }).execute()
+            done = True
+        except Exception as e:
+            print("run-command: caught exception: {}".format(e))
+            time.sleep(5)
 
 sys.exit(rcode)
index 26b93ce6b34b84f98290601ace4b18554c62acf4..da6df59c4d279b9ee739f51aee5f55e89aa6f89e 100644 (file)
@@ -65,4 +65,4 @@ EOF
 
 (Your shell should automatically fill in @$USER@ with your login name.  The JSON that gets saved should have @"repository"@ pointed at your personal Git repository.)
 
-Your new pipeline template will appear on the Workbench "Compute %(rarr)&rarr;% Pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_instances page.  You can run the "pipeline using Workbench":tutorial-pipeline-workbench.html.
+Your new pipeline template will appear on the Workbench "Compute %(rarr)&rarr;% Pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_templates page.  You can run the "pipeline using Workbench":tutorial-pipeline-workbench.html.
index 9de1a9c61e6ee206cd39b3cb3b0bbae757c2715b..5365b5aed7e223e5edb98e26b0b3f27afdc41970 100644 (file)
@@ -140,6 +140,6 @@ Now, use @arv pipeline_template create@ to register your pipeline template in Ar
 </code></pre>
 </notextile>
 
-Your new pipeline template will appear on the Workbench "Compute %(rarr)&rarr;% Pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_instances page.  You can run the "pipeline using Workbench":tutorial-pipeline-workbench.html.
+Your new pipeline template will appear on the Workbench "Compute %(rarr)&rarr;% Pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_templates page.  You can run the "pipeline using Workbench":tutorial-pipeline-workbench.html.
 
 For more information and examples for writing pipelines, see the "pipeline template reference":{{site.baseurl}}/api/schema/PipelineTemplate.html
index d3d4f4a1e06030c31645213b3093480a9a6c2d4c..045b8ec41ede92be4a1a7df205748ef4e79060ac 100644 (file)
@@ -75,6 +75,6 @@ Now, use @arv pipeline_template create@ to register your pipeline template in Ar
 </code></pre>
 </notextile>
 
-Your new pipeline template will appear on the Workbench "Compute %(rarr)&rarr;% Pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_instances page.
+Your new pipeline template will appear on the Workbench "Compute %(rarr)&rarr;% Pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_templates page.
 
 For more information and examples for writing pipelines, see the "pipeline template reference":{{site.baseurl}}/api/schema/PipelineTemplate.html
diff --git a/docker/bcbio-nextgen/Dockerfile b/docker/bcbio-nextgen/Dockerfile
new file mode 100644 (file)
index 0000000..8f6e774
--- /dev/null
@@ -0,0 +1,47 @@
+# Install Arvados SDK into bcbio-nextgen Docker image.
+#
+# To build bcbio-nextgen:
+#
+# $ git clone https://github.com/chapmanb/bcbio-nextgen.git
+# $ cd bcbio-nextgen
+# $ docker build
+# $ docker tag <image> bcbio-nextgen
+#
+
+FROM bcbio-nextgen
+MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+
+USER root
+
+# Install Ruby 2.1.0
+RUN apt-get remove --quiet --assume-yes ruby && \
+    curl -L https://get.rvm.io | bash -s stable && \
+    /usr/local/rvm/bin/rvm install 2.1.0 && \
+    /bin/mkdir -p /usr/src/arvados
+
+ADD generated/arvados.tar.gz /usr/src/arvados/
+ENV GEM_HOME /usr/local/rvm/gems/ruby-2.1.0
+ENV GEM_PATH /usr/local/rvm/gems/ruby-2.1.0:/usr/local/rvm/gems/ruby-2.1.0@global
+ENV PATH /usr/local/rvm/gems/ruby-2.1.0/bin:/usr/local/rvm/gems/ruby-2.1.0@global/bin:/usr/local/rvm/rubies/ruby-2.1.0/bin:/usr/local/rvm/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+
+# Install dependencies and set up system.
+# The FUSE packages help ensure that we can install the Python SDK (arv-mount).
+RUN /usr/bin/apt-get update && \
+    /usr/bin/apt-get install --quiet --assume-yes python-dev python-llfuse python-pip \
+      libio-socket-ssl-perl libjson-perl liburi-perl libwww-perl \
+      fuse libattr1-dev libfuse-dev && \
+    /usr/sbin/adduser --disabled-password \
+      --gecos 'Crunch execution user' crunch && \
+    /usr/bin/install --directory --owner=crunch --group=crunch --mode=0700 /keep /tmp/crunch-src /tmp/crunch-job && \
+    /bin/ln -s /usr/src/arvados /usr/local/src/arvados
+
+# Install Arvados packages.
+RUN gem update --system && \
+    find /usr/src/arvados/sdk -name '*.gem' -print0 | \
+      xargs -0rn 1 gem install && \
+    cd /usr/src/arvados/services/fuse && \
+    python setup.py install && \
+    cd /usr/src/arvados/sdk/python && \
+    python setup.py install
+
+USER crunch
index 81fa12f643da9dd42e5b1f83b0e4f29653133d46..d01f955b0c3e370c759bc4edb88b11f4cc323735 100644 (file)
@@ -26,7 +26,7 @@ BASE_DEPS = base/Dockerfile $(BASE_GENERATED)
 
 JOBS_DEPS = jobs/Dockerfile
 
-BWA_SAMTOOLS_DEPS = bwa-samtools/Dockerfile
+JAVA_BWA_SAMTOOLS_DEPS = java-bwa-samtools/Dockerfile
 
 API_DEPS = api/Dockerfile $(API_GENERATED)
 
@@ -40,6 +40,8 @@ KEEP_DEPS = keep/Dockerfile
 
 SSO_DEPS = sso/passenger.conf $(SSO_GENERATED)
 
+BCBIO_NEXTGEN_DEPS = bcbio-nextgen/Dockerfile
+
 BASE_GENERATED = base/generated/arvados.tar.gz
 
 API_GENERATED = \
@@ -132,9 +134,15 @@ jobs-image: base-image $(BUILD) $(JOBS_DEPS)
        $(DOCKER_BUILD) -t arvados/jobs jobs
        date >jobs-image
 
-bwa-samtools-image: jobs-image $(BUILD) $(BWA_SAMTOOLS_DEPS)
-       $(DOCKER_BUILD) -t arvados/jobs-bwa-samtools bwa-samtools
-       date >bwa-samtools-image
+java-bwa-samtools-image: jobs-image $(BUILD) $(JAVA_BWA_SAMTOOLS_DEPS)
+       $(DOCKER_BUILD) -t arvados/jobs-java-bwa-samtools java-bwa-samtools
+       date >java-bwa-samtools-image
+
+bcbio-nextgen-image: $(BUILD) $(BASE_GENERATED) $(BCBIO_NEXTGEN_DEPS)
+       rm -rf bcbio-nextgen/generated
+       cp -r base/generated bcbio-nextgen
+       $(DOCKER_BUILD) -t arvados/bcbio-nextgen bcbio-nextgen
+       date >bcbio-nextgen-image
 
 workbench-image: passenger-image $(BUILD) $(WORKBENCH_DEPS)
        mkdir -p workbench/generated
similarity index 90%
rename from docker/bwa-samtools/Dockerfile
rename to docker/java-bwa-samtools/Dockerfile
index cf19ee9b14a48ac9bab977352bd730e5db1d6dbc..e10f94f6c16c872bae74c5201b8e01f6b6b62c8e 100644 (file)
@@ -3,7 +3,8 @@ MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
 
 USER root
 
-RUN cd /tmp && \
+RUN apt-get install -y -q openjdk-7-jre-headless && \
+    cd /tmp && \
     curl --location http://downloads.sourceforge.net/project/bio-bwa/bwa-0.7.9a.tar.bz2 -o bwa-0.7.9a.tar.bz2 && \
     tar xjf bwa-0.7.9a.tar.bz2 && \
     cd bwa-0.7.9a && \
index 29c9d540b5f402828178586471dadd51a707a2cc..2cad65c52746ecfdd40cb9236440cfbb9c714e11 100644 (file)
@@ -8,12 +8,12 @@ RUN /usr/bin/apt-get install -q -y python-dev python-llfuse python-pip \
       fuse libattr1-dev libfuse-dev && \
     /usr/sbin/adduser --disabled-password \
       --gecos 'Crunch execution user' crunch && \
-    /usr/bin/install -d -o crunch -g crunch -m 0700 /tmp/crunch-job && \
+    /usr/bin/install --directory --owner=crunch --group=crunch --mode=0700 /keep /tmp/crunch-src /tmp/crunch-job && \
     /bin/ln -s /usr/src/arvados /usr/local/src/arvados
 
 # Install Arvados packages.
-RUN find /usr/src/arvados/sdk -name '*.gem' -print0 | \
-      xargs -0rn 1 gem install && \
+RUN (find /usr/src/arvados/sdk -name '*.gem' -print0 | \
+      xargs -0rn 1 gem install) && \
     cd /usr/src/arvados/services/fuse && \
     python setup.py install && \
     cd /usr/src/arvados/sdk/python && \
index 304628f43869ea83cc5871461dbcff0055be0287..8b18d377ed9e17dc96234d4e9fcddd141eb64531 100755 (executable)
@@ -327,7 +327,7 @@ class JobCache
 
     jsonified_create_params = {}
     create_params.each do |k, v|
-      jsonified_create_params[k] = v.to_json
+      jsonified_create_params[k] = v.to_json unless v.nil?
     end
 
     result = $client.execute(:api_method => $arvados.jobs.create,
index b0d779bf3ce84ba3ac9effd9839bd49a82bf9b77..9c9d9c1ec96ed79796ae13e002bfc00538feb874 100755 (executable)
@@ -639,8 +639,7 @@ for (my $todo_ptr = 0; $todo_ptr <= $#jobstep_todo; $todo_ptr ++)
     my $build_script_to_send = "";
     my $command =
        "if [ -e $ENV{TASK_WORK} ]; then rm -rf $ENV{TASK_WORK}; fi; "
-        ."mkdir -p $ENV{JOB_WORK} $ENV{CRUNCH_TMP} $ENV{TASK_WORK} $ENV{TASK_KEEPMOUNT} "
-        ."&& chmod og+wrx $ENV{TASK_WORK}"
+        ."mkdir -p $ENV{JOB_WORK} $ENV{CRUNCH_TMP} $ENV{TASK_WORK} $ENV{TASK_KEEPMOUNT}"
        ."&& cd $ENV{CRUNCH_TMP} ";
     if ($build_script)
     {
@@ -652,41 +651,41 @@ for (my $todo_ptr = 0; $todo_ptr <= $#jobstep_todo; $todo_ptr ++)
     if ($docker_hash)
     {
       $command .= "crunchstat -cgroup-root=/sys/fs/cgroup -cgroup-parent=docker -cgroup-cid=$ENV{TASK_WORK}/docker.cid -poll=10000 ";
-      $command .= "$docker_bin run -i -a stdin -a stdout -a stderr --cidfile=$ENV{TASK_WORK}/docker.cid ";
+      $command .= "$docker_bin run --rm=true --attach=stdout --attach=stderr --user=crunch --cidfile=$ENV{TASK_WORK}/docker.cid ";
       # Dynamically configure the container to use the host system as its
       # DNS server.  Get the host's global addresses from the ip command,
       # and turn them into docker --dns options using gawk.
       $command .=
           q{$(ip -o address show scope global |
               gawk 'match($4, /^([0-9\.:]+)\//, x){print "--dns", x[1]}') };
-      $command .= "-v \Q$ENV{TASK_WORK}:/tmp/crunch-job:rw\E ";
-      $command .= "-v \Q$ENV{CRUNCH_SRC}:/tmp/crunch-src:ro\E ";
-      $command .= "-v \Q$ENV{TASK_KEEPMOUNT}:/mnt:ro\E ";
-      $command .= "-e \QHOME=/tmp/crunch-job\E ";
+      $command .= "--volume=\Q$ENV{CRUNCH_SRC}:/tmp/crunch-src:ro\E ";
+      $command .= "--volume=\Q$ENV{TASK_KEEPMOUNT}:/keep:ro\E ";
+      $command .= "--env=\QHOME=/home/crunch\E ";
       while (my ($env_key, $env_val) = each %ENV)
       {
         if ($env_key =~ /^(ARVADOS|JOB|TASK)_/) {
           if ($env_key eq "TASK_WORK") {
-            $command .= "-\QTASK_WORK=/tmp/crunch-job\E ";
+            $command .= "--env=\QTASK_WORK=/tmp/crunch-job\E ";
           }
           elsif ($env_key eq "TASK_KEEPMOUNT") {
-            $command .= "-e \QTASK_KEEPMOUNT=/mnt\E ";
+            $command .= "--env=\QTASK_KEEPMOUNT=/keep\E ";
           }
           elsif ($env_key eq "CRUNCH_SRC") {
-            $command .= "-\QCRUNCH_SRC=/tmp/crunch-src\E ";
+            $command .= "--env=\QCRUNCH_SRC=/tmp/crunch-src\E ";
           }
           else {
-            $command .= "-\Q$env_key=$env_val\E ";
+            $command .= "--env=\Q$env_key=$env_val\E ";
           }
         }
       }
+      $command .= "--env=\QCRUNCH_NODE_SLOTS=$ENV{CRUNCH_NODE_SLOTS}\E ";
       $command .= "\Q$docker_hash\E ";
-      $command .= "stdbuf -o0 -e0 ";
+      $command .= "stdbuf --output=0 --error=0 ";
       $command .= "/tmp/crunch-src/crunch_scripts/" . $Job->{"script"};
     } else {
       # Non-docker run
       $command .= "crunchstat -cgroup-root=/sys/fs/cgroup -poll=10000 ";
-      $command .= "stdbuf -o0 -e0 ";
+      $command .= "stdbuf --output=0 --error=0 ";
       $command .= "$ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
     }
 
index b0562176524360f99c04a2535c3292f2f885e246..e5235a57a0f54adbce3bf49b2df6ca373780af97 100644 (file)
@@ -70,7 +70,7 @@ gem 'database_cleaner'
 
 gem 'themes_for_rails'
 
-gem 'arvados-cli', '>= 0.1.20140703164940'
+gem 'arvados-cli', '>= 0.1.20140703225421'
 
 # pg_power lets us use partial indexes in schema.rb in Rails 3
 gem 'pg_power'
index e9cfd1f0fec462d0bbaab96833bf583c0de163e4..ea44cc129741b92ff122928c41c7ae02ca3fc71f 100644 (file)
@@ -35,13 +35,13 @@ GEM
     addressable (2.3.6)
     andand (1.3.3)
     arel (3.0.3)
-    arvados (0.1.20140703164940)
+    arvados (0.1.20140703225421)
       activesupport (>= 3.2.13)
       andand
       google-api-client (~> 0.6.3)
       json (>= 1.7.7)
       jwt (>= 0.1.5, < 1.0.0)
-    arvados-cli (0.1.20140703164940)
+    arvados-cli (0.1.20140703225421)
       activesupport (~> 3.2, >= 3.2.13)
       andand (~> 1.3, >= 1.3.3)
       arvados (~> 0.1.0)
@@ -217,7 +217,7 @@ PLATFORMS
 DEPENDENCIES
   acts_as_api
   andand
-  arvados-cli (>= 0.1.20140703164940)
+  arvados-cli (>= 0.1.20140703225421)
   coffee-rails (~> 3.2.0)
   database_cleaner
   faye-websocket
index 654778aa4496ec8f7bf5f663ad933392fe43691b..fc445ae24edb5977a2092f7d92af86c300eb21ad 100644 (file)
@@ -6,11 +6,10 @@ class Job < ArvadosModel
   serialize :script_parameters, Hash
   serialize :runtime_constraints, Hash
   serialize :tasks_summary, Hash
-  before_validation :find_docker_image_locator
   before_create :ensure_unique_submit_id
-  before_create :ensure_script_version_is_commit
-  before_update :ensure_script_version_is_commit
   after_commit :trigger_crunch_dispatch_if_cancelled, :on => :update
+  validate :ensure_script_version_is_commit
+  validate :find_docker_image_locator
 
   has_many :commit_ancestors, :foreign_key => :descendant, :primary_key => :script_version
 
@@ -87,7 +86,8 @@ class Job < ArvadosModel
         self.supplied_script_version = self.script_version if self.supplied_script_version.nil? or self.supplied_script_version.empty?
         self.script_version = sha1
       else
-        raise ArgumentError.new("Specified script_version does not resolve to a commit")
+        self.errors.add :script_version, "#{self.script_version} does not resolve to a commit"
+        return false
       end
     end
   end
@@ -104,16 +104,22 @@ class Job < ArvadosModel
   def find_docker_image_locator
     # Find the Collection that holds the Docker image specified in the
     # runtime constraints, and store its locator in docker_image_locator.
-    if runtime_constraints.nil? then
+    unless runtime_constraints.is_a? Hash
+      # We're still in validation stage, so we can't assume
+      # runtime_constraints isn't something horrible like an array or
+      # a string. Treat those cases as "no docker image supplied";
+      # other validations will fail anyway.
       self.docker_image_locator = nil
-      return false
+      return true
     end
     image_search = runtime_constraints['docker_image']
     image_tag = runtime_constraints['docker_image_tag']
     if image_search.nil?
       self.docker_image_locator = nil
+      true
     elsif coll = Collection.for_latest_docker_image(image_search, image_tag)
       self.docker_image_locator = coll.uuid
+      true
     else
       errors.add(:docker_image_locator, "not found for #{image_search}")
       false
index 70387fe9165e34595d9cae9db9c90adee23433ee..71678cd223739db6649e011b28a5eaa82c3ea31a 100644 (file)
@@ -34,8 +34,10 @@ module LoadParam
     elsif params[:filters].is_a? String and !params[:filters].empty?
       begin
         f = Oj.load params[:filters]
-        raise unless f.is_a? Array
-        @filters += f
+        if not f.nil?
+          raise unless f.is_a? Array
+          @filters += f
+        end
       rescue
         raise ArgumentError.new("Could not parse \"filters\" param as an array")
       end
index 788cd83c797be3a8d7b9940d30f42f454becbea7..f7f99d1bcfc6d6c614160bc7b94b19af807e7397 100644 (file)
@@ -1,5 +1,5 @@
 require 'test_helper'
-load 'test/functional/arvados/v1/git_setup.rb'
+require 'helpers/git_test_helper'
 
 # NOTE: calling Commit.find_commit_range(user, nil, nil, 'rev') will produce
 # an error message "fatal: bad object 'rev'" on stderr if 'rev' does not exist
@@ -13,7 +13,7 @@ class Arvados::V1::CommitsControllerTest < ActionController::TestCase
   fixtures :repositories, :users
 
-  # See git_setup.rb for the commit log for test.git.tar
+  # See git_test_helper.rb for the commit log for test.git.tar
-  include GitSetup
+  include GitTestHelper
 
   test "test_find_commit_range" do
     authorize_with :active
diff --git a/services/api/test/functional/arvados/v1/git_setup.rb b/services/api/test/functional/arvados/v1/git_setup.rb
deleted file mode 100644 (file)
index 46f5f70..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-require 'fileutils'
-require 'tmpdir'
-
-# Commit log for test.git.tar
-# master is the main branch
-# b1 is a branch off of master
-# tag1 is a tag
-#
-# 1de84a8 * b1
-# 077ba2a * master
-# 4fe459a * tag1
-# 31ce37f * foo
-
-module GitSetup
-  def setup
-    @tmpdir = Dir.mktmpdir()
-    #puts "setup #{@tmpdir}"
-    `cp test/test.git.tar #{@tmpdir} && cd #{@tmpdir} && tar xf test.git.tar`
-    Rails.configuration.git_repositories_dir = "#{@tmpdir}/test"
-    Commit.refresh_repositories
-  end
-
-  def teardown
-    #puts "teardown #{@tmpdir}"
-    FileUtils.remove_entry @tmpdir, true
-  end
-end
index b00fbf11c66f49a7799da07ed296535331f69aa0..62bc866a1fc2310c3421efa121679d431ed72851 100644 (file)
@@ -1,11 +1,11 @@
 require 'test_helper'
-load 'test/functional/arvados/v1/git_setup.rb'
+require 'helpers/git_test_helper'
 
 class Arvados::V1::JobReuseControllerTest < ActionController::TestCase
   fixtures :repositories, :users, :jobs, :links, :collections
 
-  # See git_setup.rb for the commit log for test.git.tar
+  # See git_test_helper.rb for the commit log for test.git.tar
-  include GitSetup
+  include GitTestHelper
 
   setup do
     @controller = Arvados::V1::JobsController.new
index 0188bd4b130c245db7a10b3e45f08c8f7b1f324a..86b45952d3383d61f9931ba66d0a5cbdeebdfe14 100644 (file)
@@ -1,9 +1,9 @@
 require 'test_helper'
-load 'test/functional/arvados/v1/git_setup.rb'
+require 'helpers/git_test_helper'
 
 class Arvados::V1::JobsControllerTest < ActionController::TestCase
 
-  include GitSetup
+  include GitTestHelper
 
   test "submit a job" do
     authorize_with :active
diff --git a/services/api/test/helpers/git_test_helper.rb b/services/api/test/helpers/git_test_helper.rb
new file mode 100644 (file)
index 0000000..39e506f
--- /dev/null
@@ -0,0 +1,30 @@
+require 'fileutils'
+require 'tmpdir'
+
+# Commit log for "foo" repository in test.git.tar
+# master is the main branch
+# b1 is a branch off of master
+# tag1 is a tag
+#
+# 1de84a8 * b1
+# 077ba2a * master
+# 4fe459a * tag1
+# 31ce37f * foo
+
+module GitTestHelper
+  def self.included base
+    base.setup do
+      @tmpdir = Dir.mktmpdir()
+      `cp test/test.git.tar #{@tmpdir} && cd #{@tmpdir} && tar xf test.git.tar`
+      @orig_git_repositories_dir = Rails.configuration.git_repositories_dir
+      Rails.configuration.git_repositories_dir = "#{@tmpdir}/test"
+      Commit.refresh_repositories
+    end
+
+    base.teardown do
+      FileUtils.remove_entry @tmpdir, true
+      Rails.configuration.git_repositories_dir = @orig_git_repositories_dir
+      Commit.refresh_repositories
+    end
+  end
+end
index ee1bd9f2a11fb5c0cd78070302df6f7b40e5523f..81767af905cc609f3cfc18a56b404446cbb10bb1 100644 (file)
@@ -1,8 +1,8 @@
 require 'test_helper'
-load 'test/functional/arvados/v1/git_setup.rb'
+require 'helpers/git_test_helper'
 
 class CrunchDispatchTest < ActionDispatch::IntegrationTest
-  include GitSetup
+  include GitTestHelper
 
   fixtures :all
 
index 269018d807ab7de16bdb05e243a5308aadbff834..8a1cb10004f2bcfedf379d3d944c4251828fd02f 100644 (file)
@@ -1,8 +1,8 @@
 require 'test_helper'
-load 'test/functional/arvados/v1/git_setup.rb'
+require 'helpers/git_test_helper'
 
 class SerializedEncodingTest < ActionDispatch::IntegrationTest
-  include GitSetup
+  include GitTestHelper
 
   fixtures :all
 
index 002bc281cec70e078046769cf5e4518f0d431ce5..925d879906030be0b106437d39b0f1f8561cdaa5 100644 (file)
@@ -32,7 +32,7 @@ class WebsocketTest < ActionDispatch::IntegrationTest
         opened = true
         if timeout
           EM::Timer.new 4 do
-            too_long = true
+            too_long = true if close_status.nil?
             EM.stop_event_loop
           end
         end
index 5f53b2ab9bc07be2eeefff75f06d08be46a40ab0..e1ca7c53076e0bde35986ea0b5e07a1443e99e36 100644 (file)
@@ -1,15 +1,27 @@
 require 'test_helper'
+require 'helpers/git_test_helper'
 
 class JobTest < ActiveSupport::TestCase
+  include GitTestHelper
+
   BAD_COLLECTION = "#{'f' * 32}+0"
 
   setup do
     set_user_from_auth :active
   end
 
+  def job_attrs merge_me={}
+    # Default (valid) set of attributes, with given overrides
+    {
+      script: "hash",
+      script_version: "master",
+      repository: "foo",
+    }.merge(merge_me)
+  end
+
   test "Job without Docker image doesn't get locator" do
-    job = Job.new
-    assert job.valid?
+    job = Job.new job_attrs
+    assert job.valid?, job.errors.full_messages.to_s
     assert_nil job.docker_image_locator
   end
 
@@ -19,55 +31,58 @@ class JobTest < ActiveSupport::TestCase
   }.each_pair do |spec_type, (fixture_type, fixture_name, fixture_attr)|
     test "Job initialized with Docker image #{spec_type} gets locator" do
       image_spec = send(fixture_type, fixture_name).send(fixture_attr)
-      job = Job.new(runtime_constraints: {'docker_image' => image_spec})
-      assert(job.valid?, "Docker image #{spec_type} was invalid")
+      job = Job.new job_attrs(runtime_constraints:
+                              {'docker_image' => image_spec})
+      assert job.valid?, job.errors.full_messages.to_s
       assert_equal(collections(:docker_image).uuid, job.docker_image_locator)
     end
 
     test "Job modified with Docker image #{spec_type} gets locator" do
-      job = Job.new
-      assert job.valid?
+      job = Job.new job_attrs
+      assert job.valid?, job.errors.full_messages.to_s
       assert_nil job.docker_image_locator
       image_spec = send(fixture_type, fixture_name).send(fixture_attr)
       job.runtime_constraints['docker_image'] = image_spec
-      assert(job.valid?, "modified Docker image #{spec_type} was invalid")
+      assert job.valid?, job.errors.full_messages.to_s
       assert_equal(collections(:docker_image).uuid, job.docker_image_locator)
     end
   end
 
   test "removing a Docker runtime constraint removes the locator" do
     image_locator = collections(:docker_image).uuid
-    job = Job.new(runtime_constraints: {'docker_image' => image_locator})
-    assert job.valid?
+    job = Job.new job_attrs(runtime_constraints:
+                            {'docker_image' => image_locator})
+    assert job.valid?, job.errors.full_messages.to_s
     assert_equal(image_locator, job.docker_image_locator)
     job.runtime_constraints = {}
-    assert(job.valid?, "clearing runtime constraints made the Job invalid")
     assert job.valid?, job.errors.full_messages.to_s + " after clearing runtime constraints"
     assert_nil job.docker_image_locator
   end
 
   test "locate a Docker image with a repository + tag" do
     image_repo, image_tag =
       links(:docker_image_collection_tag2).name.split(':', 2)
-    job = Job.new(runtime_constraints:
-                  {'docker_image' => image_repo,
-                    'docker_image_tag' => image_tag})
-    assert(job.valid?, "Job with Docker tag search invalid")
+    job = Job.new job_attrs(runtime_constraints:
+                            {'docker_image' => image_repo,
+                              'docker_image_tag' => image_tag})
+    assert job.valid?, job.errors.full_messages.to_s
     assert_equal(collections(:docker_image).uuid, job.docker_image_locator)
   end
 
   test "can't locate a Docker image with a nonexistent tag" do
     image_repo = links(:docker_image_collection_repository).name
     image_tag = '__nonexistent tag__'
-    job = Job.new(runtime_constraints:
-                  {'docker_image' => image_repo,
-                    'docker_image_tag' => image_tag})
+    job = Job.new job_attrs(runtime_constraints:
+                            {'docker_image' => image_repo,
+                              'docker_image_tag' => image_tag})
     assert(job.invalid?, "Job with bad Docker tag valid")
   end
 
   test "locate a Docker image with a partial hash" do
     image_hash = links(:docker_image_collection_hash).name[0..24]
-    job = Job.new(runtime_constraints: {'docker_image' => image_hash})
-    assert(job.valid?, "Job with partial Docker image hash failed")
+    job = Job.new job_attrs(runtime_constraints:
+                            {'docker_image' => image_hash})
+    assert job.valid?, job.errors.full_messages.to_s + " with partial hash #{image_hash}"
     assert_equal(collections(:docker_image).uuid, job.docker_image_locator)
   end
 
@@ -76,20 +91,21 @@ class JobTest < ActiveSupport::TestCase
     'locator' => BAD_COLLECTION,
   }.each_pair do |spec_type, image_spec|
     test "Job validation fails with nonexistent Docker image #{spec_type}" do
-      job = Job.new(runtime_constraints: {'docker_image' => image_spec})
+      job = Job.new job_attrs(runtime_constraints:
+                              {'docker_image' => image_spec})
       assert(job.invalid?, "nonexistent Docker image #{spec_type} was valid")
     end
   end
 
   test "Job validation fails with non-Docker Collection constraint" do
-    job = Job.new(runtime_constraints:
-                  {'docker_image' => collections(:foo_file).uuid})
+    job = Job.new job_attrs(runtime_constraints:
+                            {'docker_image' => collections(:foo_file).uuid})
     assert(job.invalid?, "non-Docker Collection constraint was valid")
   end
 
   test "can't create Job with Docker image locator" do
     begin
-      job = Job.new(docker_image_locator: BAD_COLLECTION)
+      job = Job.new job_attrs(docker_image_locator: BAD_COLLECTION)
     rescue ActiveModel::MassAssignmentSecurity::Error
       # Test passes - expected attribute protection
     else
@@ -98,7 +114,7 @@ class JobTest < ActiveSupport::TestCase
   end
 
   test "can't assign Docker image locator to Job" do
-    job = Job.new
+    job = Job.new job_attrs
     begin
       Job.docker_image_locator = BAD_COLLECTION
     rescue NoMethodError
@@ -106,4 +122,29 @@ class JobTest < ActiveSupport::TestCase
     end
     assert_nil job.docker_image_locator
   end
+
+  [
+   {script_parameters: ""},
+   {script_parameters: []},
+   {script_parameters: {symbols: :are_not_allowed_here}},
+   {runtime_constraints: ""},
+   {runtime_constraints: []},
+   {tasks_summary: ""},
+   {tasks_summary: []},
+   {script_version: "no/branch/could/ever/possibly/have/this/name"},
+  ].each do |invalid_attrs|
+    test "validation failures set error messages: #{invalid_attrs.to_json}" do
+      # Ensure the default job_attrs don't produce errors -- otherwise we will
+      # not know whether errors reported below are actually caused by
+      # invalid_attrs.
+      dummy = Job.create! job_attrs
+
+      job = Job.create job_attrs(invalid_attrs)
+      assert_raises(ActiveRecord::RecordInvalid, ArgumentError,
+                    "save! did not raise the expected exception") do
+        job.save!
+      end
+      assert_not_empty job.errors, "validation failure did not provide errors"
+    end
+  end
 end
index 2a4b28b2cf0345d92f3f178d9891142382611f24..67c628d128db0c3fd4fc40570f56e9f2a4fb33cd 100644 (file)
@@ -413,6 +413,9 @@ func GetBlockHandler(resp http.ResponseWriter, req *http.Request) {
        if err != nil {
                // This type assertion is safe because the only errors
                // GetBlock can return are CorruptError or NotFoundError.
+               if err == NotFoundError {
+                       log.Printf("%s: not found, giving up\n", hash)
+               }
                http.Error(resp, err.Error(), err.(*KeepError).HTTPCode)
                return
        }
@@ -601,7 +604,6 @@ func GetBlock(hash string) ([]byte, error) {
                }
        }
 
-       log.Printf("%s: not found on any volumes, giving up\n", hash)
        return nil, NotFoundError
 }
 
index fffc815d90ddcb56c7cde97a75ecb6e751bb9673..d1956862b10d74d0da40fe7c8809f5bbdb863d4a 100644 (file)
@@ -8,6 +8,7 @@ import (
        "errors"
        "fmt"
+       "os"
        "strings"
 )
 
 type Volume interface {
@@ -38,7 +39,7 @@ func (v *MockVolume) Get(loc string) ([]byte, error) {
        } else if block, ok := v.Store[loc]; ok {
                return block, nil
        }
-       return nil, errors.New("not found")
+       return nil, os.ErrNotExist
 }
 
 func (v *MockVolume) Put(loc string, block []byte) error {
index 7b711d2eac1e7c6f5024cc49f723dc31c6c86952..aafc8debf134ec4bf8c44fbb93d2bc6313892e72 100644 (file)
@@ -111,12 +111,7 @@ func (v *UnixVolume) Put(loc string, block []byte) error {
 func (v *UnixVolume) Read(loc string) ([]byte, error) {
        blockFilename := filepath.Join(v.root, loc[0:3], loc)
        buf, err := ioutil.ReadFile(blockFilename)
-       if err != nil {
-               log.Printf("%s: reading %s: %s\n", v, blockFilename, err)
-               return nil, err
-       }
-
-       return buf, nil
+       return buf, err
 }
 
 // Write stores a block of data identified by the locator string