3114: Merge branch 'master' into 3114-copy-to-project
authorTom Clegg <tom@curoverse.com>
Mon, 18 Aug 2014 22:02:47 +0000 (18:02 -0400)
committerTom Clegg <tom@curoverse.com>
Mon, 18 Aug 2014 22:02:47 +0000 (18:02 -0400)
140 files changed:
.gitignore
apps/workbench/app/assets/javascripts/infinite_scroll.js
apps/workbench/app/assets/javascripts/user_agreements.js [new file with mode: 0644]
apps/workbench/app/controllers/application_controller.rb
apps/workbench/app/controllers/collections_controller.rb
apps/workbench/app/controllers/projects_controller.rb
apps/workbench/app/controllers/user_agreements_controller.rb
apps/workbench/app/controllers/users_controller.rb
apps/workbench/app/helpers/application_helper.rb
apps/workbench/app/models/user.rb
apps/workbench/app/views/application/_choose.html.erb
apps/workbench/app/views/application/_content.html.erb
apps/workbench/app/views/application/_content_layout.html.erb
apps/workbench/app/views/application/_tab_line_buttons.html.erb [new file with mode: 0644]
apps/workbench/app/views/application/show.html.erb
apps/workbench/app/views/collections/_show_files.html.erb
apps/workbench/app/views/collections/_show_recent.html.erb
apps/workbench/app/views/collections/index.html.erb [new file with mode: 0644]
apps/workbench/app/views/collections/show.html.erb
apps/workbench/app/views/keep_disks/_content_layout.html.erb
apps/workbench/app/views/layouts/body.html.erb
apps/workbench/app/views/pipeline_instances/_show_recent.html.erb
apps/workbench/app/views/pipeline_instances/index.html.erb [new file with mode: 0644]
apps/workbench/app/views/pipeline_templates/_show_components.html.erb
apps/workbench/app/views/pipeline_templates/show.html.erb [new file with mode: 0644]
apps/workbench/app/views/projects/_show_contents_rows.html.erb
apps/workbench/app/views/projects/_show_data_collections.html.erb
apps/workbench/app/views/projects/_show_jobs_and_pipelines.html.erb
apps/workbench/app/views/projects/_show_other_objects.html.erb
apps/workbench/app/views/projects/_show_pipeline_templates.html.erb
apps/workbench/app/views/projects/_show_sharing.html.erb
apps/workbench/app/views/projects/_show_subprojects.html.erb
apps/workbench/app/views/projects/_show_tab_contents.html.erb
apps/workbench/app/views/projects/show.html.erb [new file with mode: 0644]
apps/workbench/app/views/user_agreements/index.html.erb
apps/workbench/app/views/users/inactive.html.erb
apps/workbench/app/views/users/profile.html.erb [new file with mode: 0644]
apps/workbench/config/application.default.yml
apps/workbench/config/routes.rb
apps/workbench/test/functional/application_controller_test.rb
apps/workbench/test/functional/collections_controller_test.rb
apps/workbench/test/functional/projects_controller_test.rb
apps/workbench/test/functional/users_controller_test.rb
apps/workbench/test/integration/application_layout_test.rb [new file with mode: 0644]
apps/workbench/test/integration/collections_test.rb
apps/workbench/test/integration/pipeline_instances_test.rb
apps/workbench/test/integration/projects_test.rb
apps/workbench/test/test_helper.rb
crunch_scripts/arvados-bcbio-nextgen.py
crunch_scripts/collection-merge
crunch_scripts/crunchutil/__init__.py [new file with mode: 0644]
crunch_scripts/crunchutil/robust_put.py [new file with mode: 0644]
crunch_scripts/crunchutil/subst.py [moved from crunch_scripts/subst.py with 91% similarity]
crunch_scripts/crunchutil/vwd.py [new file with mode: 0644]
crunch_scripts/decompress-all.py [new file with mode: 0755]
crunch_scripts/run-command
crunch_scripts/split-fastq.py [new file with mode: 0755]
doc/_config.yml
doc/_includes/_concurrent_hash_script_py.liquid [moved from doc/_includes/_parallel_hash_script_py.liquid with 100% similarity]
doc/api/methods/collections.html.textile.liquid
doc/sdk/go/index.html.textile.liquid [new file with mode: 0644]
doc/sdk/index.html.textile.liquid
doc/user/topics/tutorial-job-debug.html.textile.liquid [deleted file]
doc/user/topics/tutorial-parallel.html.textile.liquid
doc/user/topics/tutorial-trait-search.html.textile.liquid
doc/user/tutorials/tutorial-firstscript.html.textile.liquid
doc/user/tutorials/tutorial-keep.html.textile.liquid
doc/user/tutorials/tutorial-submit-job.html.textile.liquid [new file with mode: 0644]
docker/build_tools/Makefile
docker/build_tools/config.rb
docker/jobs/Dockerfile
docker/keep/Dockerfile
docker/keep/run-keep.in
sdk/cli/bin/arv
sdk/cli/bin/arv-run-pipeline-instance
sdk/cli/bin/crunch-job
sdk/go/arvadosclient/arvadosclient.go [moved from sdk/go/src/arvados.org/sdk/sdk.go with 99% similarity]
sdk/go/arvadosclient/arvadosclient_test.go [moved from sdk/go/src/arvados.org/sdk/sdk_test.go with 95% similarity]
sdk/go/go.sh [deleted file]
sdk/go/keepclient/hashcheck.go [moved from sdk/go/src/arvados.org/keepclient/hashcheck.go with 98% similarity]
sdk/go/keepclient/hashcheck_test.go [moved from sdk/go/src/arvados.org/keepclient/hashcheck_test.go with 100% similarity]
sdk/go/keepclient/keepclient.go [moved from sdk/go/src/arvados.org/keepclient/keepclient.go with 97% similarity]
sdk/go/keepclient/keepclient_test.go [moved from sdk/go/src/arvados.org/keepclient/keepclient_test.go with 92% similarity]
sdk/go/keepclient/support.go [moved from sdk/go/src/arvados.org/keepclient/support.go with 99% similarity]
sdk/go/streamer/streamer.go [moved from sdk/go/src/arvados.org/streamer/streamer.go with 100% similarity]
sdk/go/streamer/streamer_test.go [moved from sdk/go/src/arvados.org/streamer/streamer_test.go with 100% similarity]
sdk/go/streamer/transfer.go [moved from sdk/go/src/arvados.org/streamer/transfer.go with 100% similarity]
sdk/python/.gitignore
sdk/python/arvados/__init__.py
sdk/python/arvados/api.py
sdk/python/arvados/collection.py
sdk/python/arvados/commands/put.py
sdk/python/arvados/events.py
sdk/python/arvados/keep.py
sdk/python/arvados/stream.py
sdk/python/bin/arv-get
sdk/python/bin/arv-ls
sdk/python/bin/arv-normalize
sdk/python/bin/arv-ws
sdk/python/tests/run_test_server.py
sdk/python/tests/test_api.py
sdk/python/tests/test_arv_put.py
sdk/ruby/lib/arvados.rb
services/api/Gemfile.lock
services/api/app/controllers/application_controller.rb
services/api/app/controllers/arvados/v1/collections_controller.rb
services/api/app/controllers/arvados/v1/jobs_controller.rb
services/api/app/controllers/arvados/v1/repositories_controller.rb
services/api/app/controllers/arvados/v1/schema_controller.rb
services/api/app/controllers/arvados/v1/users_controller.rb
services/api/app/mailers/profile_notifier.rb [new file with mode: 0644]
services/api/app/models/arvados_model.rb
services/api/app/models/collection.rb
services/api/app/models/job.rb
services/api/app/models/user.rb
services/api/app/views/profile_notifier/profile_created.text.erb [new file with mode: 0644]
services/api/config/application.default.yml
services/api/script/crunch-dispatch.rb
services/api/test/fixtures/api_client_authorizations.yml
services/api/test/fixtures/jobs.yml
services/api/test/fixtures/pipeline_instances.yml
services/api/test/fixtures/users.yml
services/api/test/functional/arvados/v1/collections_controller_test.rb
services/api/test/functional/arvados/v1/users_controller_test.rb
services/crunch/crunchstat/go.sh [deleted file]
services/crunchstat/crunchstat.go [moved from services/crunch/crunchstat/src/arvados.org/crunchstat/crunchstat.go with 100% similarity]
services/fuse/arvados_fuse/__init__.py
services/fuse/bin/arv-mount
services/keep/go.sh [deleted file]
services/keepproxy/keepproxy.go [moved from services/keep/src/arvados.org/keepproxy/keepproxy.go with 98% similarity]
services/keepproxy/keepproxy_test.go [moved from services/keep/src/arvados.org/keepproxy/keepproxy_test.go with 85% similarity]
services/keepstore/handler_test.go [moved from services/keep/src/keep/handler_test.go with 100% similarity]
services/keepstore/handlers.go [moved from services/keep/src/keep/handlers.go with 100% similarity]
services/keepstore/keepstore.go [moved from services/keep/src/keep/keep.go with 100% similarity]
services/keepstore/keepstore_test.go [moved from services/keep/src/keep/keep_test.go with 97% similarity]
services/keepstore/perms.go [moved from services/keep/src/keep/perms.go with 100% similarity]
services/keepstore/perms_test.go [moved from services/keep/src/keep/perms_test.go with 100% similarity]
services/keepstore/volume.go [moved from services/keep/src/keep/volume.go with 100% similarity]
services/keepstore/volume_unix.go [moved from services/keep/src/keep/volume_unix.go with 100% similarity]
services/keepstore/volume_unix_test.go [moved from services/keep/src/keep/volume_unix_test.go with 100% similarity]

index 0cddee596c4cf665b9359f1a6276c7ecdcfd3d49..8cc6b89324311d0cd7db51f9c6a5e7ba400253dc 100644 (file)
@@ -11,11 +11,9 @@ sdk/perl/Makefile
 sdk/perl/blib
 sdk/perl/pm_to_blib
 */vendor/bundle
-services/keep/bin
-services/keep/pkg
-services/keep/src/github.com
 sdk/java/target
 *.class
 apps/workbench/vendor/bundle
 services/api/vendor/bundle
 sdk/java/log
+sdk/cli/vendor
index 2cb9748dadc09e71596b9cd6e4264d11e16389c0..e70ca259e4766c0ca00de11b5dcb9d3dc13200ab 100644 (file)
@@ -111,6 +111,7 @@ $(document).
                 $scroller = $(window);
             $scroller.
                 addClass('infinite-scroller').
-                on('scroll', { container: this }, maybe_load_more_content);
+                on('scroll resize', { container: this }, maybe_load_more_content).
+                trigger('scroll');
         });
     });
diff --git a/apps/workbench/app/assets/javascripts/user_agreements.js b/apps/workbench/app/assets/javascripts/user_agreements.js
new file mode 100644 (file)
index 0000000..688bd0b
--- /dev/null
@@ -0,0 +1,9 @@
+$('#open_user_agreement input[name="checked[]"]').on('click', function() {
+    var dialog = $('#open_user_agreement')[0]
+    $('input[type=submit]', dialog).prop('disabled',false);
+    $('input[name="checked[]"]', dialog).each(function(){
+        if(!this.checked) {
+            $('input[type=submit]', dialog).prop('disabled',true);
+        }
+    });
+});
index 160730331dd28ec9495e933787e9e8d2c3dbb697..98bc53ae4612d5efd25ef7a6dc96287bb2857bf9 100644 (file)
@@ -14,6 +14,7 @@ class ApplicationController < ActionController::Base
   around_filter :require_thread_api_token, except: ERROR_ACTIONS
   before_filter :accept_uuid_as_id_param, except: ERROR_ACTIONS
   before_filter :check_user_agreements, except: ERROR_ACTIONS
+  before_filter :check_user_profile, except: ERROR_ACTIONS
   before_filter :check_user_notifications, except: ERROR_ACTIONS
   before_filter :load_filters_and_paging_params, except: ERROR_ACTIONS
   before_filter :find_object_by_uuid, except: [:index, :choose] + ERROR_ACTIONS
@@ -61,22 +62,31 @@ class ApplicationController < ActionController::Base
     else
       @errors = [e.to_s]
     end
-    # If the user has an active session, and the API server is available,
-    # make user information available on the error page.
+    # Make user information available on the error page, falling back to the
+    # session cache if the API server is unavailable.
     begin
       load_api_token(session[:arvados_api_token])
     rescue ArvadosApiClient::ApiError
-      load_api_token(nil)
+      unless session[:user].nil?
+        begin
+          Thread.current[:user] = User.new(session[:user])
+        rescue ArvadosApiClient::ApiError
+          # This can happen if User's columns are unavailable.  Nothing to do.
+        end
+      end
     end
-    # Preload projects trees for the template.  If that fails, set empty
+    # Preload projects trees for the template.  If that's not doable, set empty
     # trees so error page rendering can proceed.  (It's easier to rescue the
     # exception here than in a template.)
-    begin
-      build_project_trees
-    rescue ArvadosApiClient::ApiError
-      @my_project_tree ||= []
-      @shared_project_tree ||= []
+    unless current_user.nil?
+      begin
+        build_project_trees
+      rescue ArvadosApiClient::ApiError
+        # Fall back to the default-setting code later.
+      end
     end
+    @my_project_tree ||= []
+    @shared_project_tree ||= []
     render_error(err_opts)
   end
 
@@ -89,6 +99,9 @@ class ApplicationController < ActionController::Base
   end
 
   def load_filters_and_paging_params
+    @order = params[:order] || 'created_at desc'
+    @order = [@order] unless @order.is_a? Array
+
     @limit ||= 200
     if params[:limit]
       @limit = params[:limit].to_i
@@ -128,10 +141,8 @@ class ApplicationController < ActionController::Base
     respond_to do |f|
       f.json { render json: @objects }
       f.html {
-        if params['tab_pane']
-          comparable = self.respond_to? :compare
-          render(partial: 'show_' + params['tab_pane'].downcase,
-                 locals: { comparable: comparable, objects: @objects })
+        if params[:tab_pane]
+          render_pane params[:tab_pane]
         else
           render
         end
@@ -140,6 +151,23 @@ class ApplicationController < ActionController::Base
     end
   end
 
+  helper_method :render_pane
+  def render_pane tab_pane, opts={}
+    render_opts = {
+      partial: 'show_' + tab_pane.downcase,
+      locals: {
+        comparable: self.respond_to?(:compare),
+        objects: @objects,
+        tab_pane: tab_pane
+      }.merge(opts[:locals] || {})
+    }
+    if opts[:to_string]
+      render_to_string render_opts
+    else
+      render render_opts
+    end
+  end
+
   def index
     find_objects_for_index if !@objects
     render_index
@@ -177,9 +205,7 @@ class ApplicationController < ActionController::Base
       f.json { render json: @object.attributes.merge(href: url_for(action: :show, id: @object)) }
       f.html {
         if params['tab_pane']
-          comparable = self.respond_to? :compare
-          render(partial: 'show_' + params['tab_pane'].downcase,
-                 locals: { comparable: comparable, objects: @objects })
+          render_pane params['tab_pane']
         elsif request.method.in? ['GET', 'HEAD']
           render
         else
@@ -393,9 +419,6 @@ class ApplicationController < ActionController::Base
     Thread.current[:arvados_api_token] = new_token
     if new_token.nil?
       Thread.current[:user] = nil
-    elsif (new_token == session[:arvados_api_token]) and
-        session[:user].andand[:is_active]
-      Thread.current[:user] = User.new(session[:user])
     else
       Thread.current[:user] = User.current
     end
@@ -422,6 +445,7 @@ class ApplicationController < ActionController::Base
         is_admin: user.is_admin,
         prefs: user.prefs
       }
+
       if !request.format.json? and request.method.in? ['GET', 'HEAD']
         # Repeat this request with api_token in the (new) session
         # cookie instead of the query string.  This prevents API
@@ -466,7 +490,7 @@ class ApplicationController < ActionController::Base
     end
   end
 
-  # Reroute this request if an API token is unavailable.
+  # Redirect to login/welcome if client provided expired API token (or none at all)
   def require_thread_api_token
     if Thread.current[:arvados_api_token]
       yield
@@ -477,7 +501,7 @@ class ApplicationController < ActionController::Base
       session.delete :arvados_api_token
       redirect_to_login
     else
-      render 'users/welcome'
+      redirect_to welcome_users_path(return_to: request.fullpath)
     end
   end
 
@@ -488,19 +512,22 @@ class ApplicationController < ActionController::Base
     end
   end
 
+  helper_method :unsigned_user_agreements
+  def unsigned_user_agreements
+    @signed_ua_uuids ||= UserAgreement.signatures.map &:head_uuid
+    @unsigned_user_agreements ||= UserAgreement.all.map do |ua|
+      if not @signed_ua_uuids.index ua.uuid
+        Collection.find(ua.uuid)
+      end
+    end.compact
+  end
+
   def check_user_agreements
     if current_user && !current_user.is_active
       if not current_user.is_invited
-        return render 'users/inactive'
+        return redirect_to inactive_users_path(return_to: request.fullpath)
       end
-      signatures = UserAgreement.signatures
-      @signed_ua_uuids = UserAgreement.signatures.map &:head_uuid
-      @required_user_agreements = UserAgreement.all.map do |ua|
-        if not @signed_ua_uuids.index ua.uuid
-          Collection.find(ua.uuid)
-        end
-      end.compact
-      if @required_user_agreements.empty?
+      if unsigned_user_agreements.empty?
         # No agreements to sign. Perhaps we just need to ask?
         current_user.activate
         if !current_user.is_active
@@ -509,12 +536,47 @@ class ApplicationController < ActionController::Base
         end
       end
       if !current_user.is_active
-        render 'user_agreements/index'
+        redirect_to user_agreements_path(return_to: request.fullpath)
       end
     end
     true
   end
 
+  def check_user_profile
+    if request.method.downcase != 'get' || params[:partial] ||
+       params[:tab_pane] || params[:action_method] ||
+       params[:action] == 'setup_popup'
+      return true
+    end
+
+    if missing_required_profile?
+      redirect_to profile_user_path(current_user.uuid, return_to: request.fullpath)
+    end
+    true
+  end
+
+  helper_method :missing_required_profile?
+  def missing_required_profile?
+    missing_required = false
+
+    profile_config = Rails.configuration.user_profile_form_fields
+    if current_user && profile_config
+      current_user_profile = current_user.prefs[:profile]
+      profile_config.kind_of?(Array) && profile_config.andand.each do |entry|
+        if entry['required']
+          if !current_user_profile ||
+             !current_user_profile[entry['key'].to_sym] ||
+             current_user_profile[entry['key'].to_sym].empty?
+            missing_required = true
+            break
+          end
+        end
+      end
+    end
+
+    missing_required
+  end
+
   def select_theme
     return Rails.configuration.arvados_theme
   end
@@ -530,15 +592,6 @@ class ApplicationController < ActionController::Base
     }
   }
 
-  #@@notification_tests.push lambda { |controller, current_user|
-  #  Job.limit(1).where(created_by: current_user.uuid).each do
-  #    return nil
-  #  end
-  #  return lambda { |view|
-  #    view.render partial: 'notifications/jobs_notification'
-  #  }
-  #}
-
   @@notification_tests.push lambda { |controller, current_user|
     Collection.limit(1).where(created_by: current_user.uuid).each do
       return nil
@@ -563,7 +616,7 @@ class ApplicationController < ActionController::Base
     @notification_count = 0
     @notifications = []
 
-    if current_user
+    if current_user.andand.is_active
       @showallalerts = false
       @@notification_tests.each do |t|
         a = t.call(self, current_user)
index bea72a4274b9d8748bffbbde9a3e7937ef616879..f59cb956b77efcebc8f19e22824fabd0873b6ce9 100644 (file)
@@ -4,7 +4,8 @@ class CollectionsController < ApplicationController
   skip_before_filter(:find_object_by_uuid,
                      only: [:provenance, :show_file, :show_file_links])
   # We depend on show_file to display the user agreement:
-  skip_before_filter :check_user_agreements, only: [:show_file]
+  skip_before_filter :check_user_agreements, only: :show_file
+  skip_before_filter :check_user_profile, only: :show_file
 
   RELATION_LIMIT = 5
 
index 9f4c58877581dfdba308d9167c8610b2f7b4ac31..6287793f765bf82ddd0e15e3037f7a11c7d3c05a 100644 (file)
@@ -128,20 +128,61 @@ class ProjectsController < ApplicationController
     super
   end
 
+  def load_contents_objects kinds=[]
+    kind_filters = @filters.select do |attr,op,val|
+      op == 'is_a' and val.is_a? Array and val.count > 1
+    end
+    if /^created_at\b/ =~ @order[0] and kind_filters.count == 1
+      # If filtering on multiple types and sorting by date: Get the
+      # first page of each type, sort the entire set, truncate to one
+      # page, and use the last item on this page as a filter for
+      # retrieving the next page. Ideally the API would do this for
+      # us, but it doesn't (yet).
+      nextpage_operator = /\bdesc$/i =~ @order[0] ? '<' : '>'
+      @objects = []
+      @name_link_for = {}
+      kind_filters.each do |attr,op,val|
+        (val.is_a?(Array) ? val : [val]).each do |type|
+          objects = @object.contents(order: @order,
+                                     limit: @limit,
+                                     include_linked: true,
+                                     filters: (@filters - kind_filters + [['uuid', 'is_a', type]]),
+                                     offset: @offset)
+          objects.each do |object|
+            @name_link_for[object.andand.uuid] = objects.links_for(object, 'name').first
+          end
+          @objects += objects
+        end
+      end
+      @objects = @objects.to_a.sort_by(&:created_at)
+      @objects.reverse! if nextpage_operator == '<'
+      @objects = @objects[0..@limit-1]
+      @next_page_filters = @filters.reject do |attr,op,val|
+        attr == 'created_at' and op == nextpage_operator
+      end
+      if @objects.any?
+        @next_page_filters += [['created_at',
+                                nextpage_operator,
+                                @objects.last.created_at]]
+        @next_page_href = url_for(partial: :contents_rows,
+                                  filters: @next_page_filters.to_json)
+      else
+        @next_page_href = nil
+      end
+    else
+      @objects = @object.contents(order: @order,
+                                  limit: @limit,
+                                  include_linked: true,
+                                  filters: @filters,
+                                  offset: @offset)
+      @next_page_href = next_page_href(partial: :contents_rows)
+    end
+  end
+
   def show
     if !@object
       return render_not_found("object not found")
     end
-    @objects = @object.contents(limit: 50,
-                                include_linked: true,
-                                filters: params[:filters],
-                                offset: params[:offset] || 0)
-    @logs = Log.limit(10).filter([['object_uuid', '=', @object.uuid]])
-    @users = User.limit(10000).
-      select(["uuid", "is_active", "first_name", "last_name"]).
-      filter([['is_active', '=', 'true']])
-    @groups = Group.limit(10000).
-      select(["uuid", "name", "description"])
 
     @user_is_manager = false
     @share_links = []
@@ -154,24 +195,19 @@ class ProjectsController < ApplicationController
       end
     end
 
-    @objects_and_names = get_objects_and_names @objects
-
     if params[:partial]
+      load_contents_objects
       respond_to do |f|
         f.json {
           render json: {
             content: render_to_string(partial: 'show_contents_rows.html',
-                                      formats: [:html],
-                                      locals: {
-                                        objects_and_names: @objects_and_names,
-                                        project: @object
-                                      }),
-            next_page_href: (next_page_offset and
-                             url_for(offset: next_page_offset, filters: params[:filters], partial: true))
+                                      formats: [:html]),
+            next_page_href: @next_page_href
           }
         }
       end
     else
+      @objects = []
       super
     end
   end
@@ -188,13 +224,17 @@ class ProjectsController < ApplicationController
   end
 
   helper_method :get_objects_and_names
-  def get_objects_and_names(objects)
+  def get_objects_and_names(objects=nil)
+    objects = @objects if objects.nil?
     objects_and_names = []
     objects.each do |object|
-      if !(name_links = objects.links_for(object, 'name')).empty?
+      if objects.respond_to? :links_for and
+          !(name_links = objects.links_for(object, 'name')).empty?
         name_links.each do |name_link|
           objects_and_names << [object, name_link]
         end
+      elsif @name_link_for.andand[object.uuid]
+        objects_and_names << [object, @name_link_for[object.uuid]]
       elsif object.respond_to? :name
         objects_and_names << [object, object]
       else
index 6ab8ae215f488888b04bcb2362f52596e26603f7..924bf44baeee801735d905a431743bddd49e0ff8 100644 (file)
@@ -1,6 +1,7 @@
 class UserAgreementsController < ApplicationController
   skip_before_filter :check_user_agreements
   skip_before_filter :find_object_by_uuid
+  skip_before_filter :check_user_profile
 
   def model_class
     Collection
index d817b32711fe5d7695a642343a442cd4c394c73e..189f295f8fe097c2a2a17e8a5f41342ed254eef0 100644 (file)
@@ -1,5 +1,8 @@
 class UsersController < ApplicationController
-  skip_before_filter :find_object_by_uuid, :only => [:welcome, :activity, :storage]
+  skip_around_filter :require_thread_api_token, only: :welcome
+  skip_before_filter :check_user_agreements, only: [:welcome, :inactive]
+  skip_before_filter :check_user_profile, only: [:welcome, :inactive, :profile]
+  skip_before_filter :find_object_by_uuid, only: [:welcome, :activity, :storage]
   before_filter :ensure_current_user_is_admin, only: [:sudo, :unsetup, :setup]
 
   def show
@@ -12,11 +15,20 @@ class UsersController < ApplicationController
 
   def welcome
     if current_user
-      params[:action] = 'home'
-      home
+      redirect_to (params[:return_to] || '/')
     end
   end
 
+  def inactive
+    if current_user.andand.is_invited
+      redirect_to (params[:return_to] || '/')
+    end
+  end
+
+  def profile
+    params[:offer_return_to] ||= params[:return_to]
+  end
+
   def activity
     @breadcrumb_page_name = nil
     @users = User.limit(params[:limit] || 1000).all
index 11300367b9acaa2520578a1c0acffb17082dad73..428c14f8282961f65e6ad69e9197794f62997d54 100644 (file)
@@ -267,7 +267,7 @@ module ApplicationHelper
       end
       modal_path = choose_collections_path \
       ({ title: chooser_title,
-         filters: [['tail_uuid', '=', object.owner_uuid]].to_json,
+         filters: [['owner_uuid', '=', object.owner_uuid]].to_json,
          action_name: 'OK',
          action_href: pipeline_instance_path(id: object.uuid),
          action_method: 'patch',
index af1922adffa5fea605fca8b83edda1c73d0db45d..87ea5faefa85976b8d11350b291dffc28b2514d7 100644 (file)
@@ -53,4 +53,10 @@ class User < ArvadosBase
     arvados_api_client.api(self, "/setup", params)
   end
 
+  def update_profile params
+    self.private_reload(arvados_api_client.api(self.class,
+                                               "/#{self.uuid}/profile",
+                                               params))
+  end
+
 end
index d8d2032826d5bbdb9f173000590eb8e3117c25f1..68351a94995121f5fbf90860412fbf4f5c2a4ee9 100644 (file)
@@ -9,9 +9,20 @@
       <div class="modal-body">
         <div class="input-group">
           <% if params[:by_project].to_s != "false" %>
+            <% selected_project_name = 'All projects'
+               @filters.andand.each do |attr, op, val|
+                 if attr == 'owner_uuid' and op == '='
+                   if val == current_user.uuid
+                     selected_project_name = "Home"
+                   else
+                     selected_project_name = Group.find(val).name rescue val
+                   end
+                 end
+               end
+               %>
             <div class="input-group-btn" data-filterable-target=".modal.arv-choose .selectable-container">
               <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">
-                All projects <span class="caret"></span>
+                <%= selected_project_name %> <span class="caret"></span>
               </button>
               <ul class="dropdown-menu" role="menu">
                 <li>
index 776787b416e97e4490dcc7171f135ba3cd9b6acd..f7ae90912f51f9facc73b8566d75063eaf3814ee 100644 (file)
@@ -21,8 +21,7 @@
     <div id="<%= pane %>-scroll" style="margin-top:0.5em;">
       <div class="pane-content">
         <% if i == 0 %>
-          <%= render(partial: 'show_' + pane.downcase,
-                     locals: { comparable: comparable, objects: @objects }) %>
+          <%= render_pane pane, to_string: true %>
         <% else %>
           <div class="spinner spinner-32px spinner-h-center"></div>
         <% end %>
index 519e8853f3bd77a10b6e28bc8cb0cafcb3d6bb24..3e50b6eb8c3a97e19fb485909a2dbaf2fcd819a9 100644 (file)
@@ -1,13 +1,6 @@
 <%= content_for :content_top %>
-  <% if @object and @object.is_a?(Group) and @object.group_class == 'project' %>
-  <div class="pull-right">
-    <%= content_for :tab_line_buttons %>
-  </div>
-  <br clear="all" />
-<% else %>
-  <br clear="all" />
-  <div class="pull-right">
-    <%= content_for :tab_line_buttons %>
-  </div>
-<% end %>
+<div class="pull-right">
+  <%= content_for :tab_line_buttons %>
+</div>
+<br clear="all" />
 <%= content_for :tab_panes %>
diff --git a/apps/workbench/app/views/application/_tab_line_buttons.html.erb b/apps/workbench/app/views/application/_tab_line_buttons.html.erb
new file mode 100644 (file)
index 0000000..e69de29
index d6eca3a2155d2e7c50b58e030884fc5c8f323dcc..105e1c356d69f140cc7bcc6cbf0a4069a9be2e61 100644 (file)
@@ -27,4 +27,3 @@
 <% end %>
 
 <%= render partial: 'content', layout: 'content_layout', locals: {pane_list: controller.show_pane_list }%>
-
index 8576d4044069266e971023eb635f333482de14b8..ea55577e7077a28d464975dc49b60a8d50b54900 100644 (file)
@@ -1,8 +1,3 @@
-<% content_for :tab_line_buttons do %>
-<span style="padding-left: 1em">Collection storage status:</span>
-<%= render partial: 'toggle_persist', locals: { uuid: @object.uuid, current_state: (@is_persistent ? 'persistent' : 'cache') } %>
-<% end %>
-
 <% file_tree = @object.andand.files_tree %>
 <% if file_tree.nil? or file_tree.empty? %>
   <p>This collection is empty.</p>
index 2f81073305ad4f7598d61a133ddf8802a0f621e1..d624749a4a1e4a9645ee678e61b3594e0db3f7e5 100644 (file)
@@ -1,16 +1,3 @@
-<% content_for :tab_line_buttons do %>
- <%= form_tag collections_path, method: 'get', remote: true, class: 'form-search' do %>
- <div class="input-group">
-   <%= text_field_tag :search, params[:search], class: 'form-control', placeholder: 'Search collections' %>
-   <span class="input-group-btn">
-     <%= button_tag(class: 'btn btn-info') do %>
-     <span class="glyphicon glyphicon-search"></span>
-     <% end %>
-   </span>
- </div>
- <% end %>
-<% end %>
-
 <%= render partial: "paging", locals: {results: @collections, object: @object} %>
 
 <div style="padding-right: 1em">
diff --git a/apps/workbench/app/views/collections/index.html.erb b/apps/workbench/app/views/collections/index.html.erb
new file mode 100644 (file)
index 0000000..061b05b
--- /dev/null
@@ -0,0 +1,14 @@
+<% content_for :tab_line_buttons do %>
+ <%= form_tag collections_path, method: 'get', remote: true, class: 'form-search' do %>
+ <div class="input-group">
+   <%= text_field_tag :search, params[:search], class: 'form-control', placeholder: 'Search collections' %>
+   <span class="input-group-btn">
+     <%= button_tag(class: 'btn btn-info') do %>
+     <span class="glyphicon glyphicon-search"></span>
+     <% end %>
+   </span>
+ </div>
+ <% end %>
+<% end %>
+
+<%= render file: 'application/index.html.erb', locals: local_assigns %>
index 4585a43664b0dd2fa4acbbd2067b070e215cee6c..0d52b088c47a91f59bc1f46092d4a73e3fc51a34 100644 (file)
   </div>
 </div>
 
-<%= render file: 'application/show.html.erb' %>
+<% content_for :tab_line_buttons do %>
+  <span style="padding-left: 1em">Collection storage status:</span>
+  <%= render partial: 'toggle_persist', locals: { uuid: @object.uuid, current_state: (@is_persistent ? 'persistent' : 'cache') } %>
+<% end %>
+
+<%= render file: 'application/show.html.erb', locals: local_assigns %>
index 0f5cd7aeceaa209f3be7a64ceb981cf13167498d..56177ae6e57cef33c7c9b959a8287eb0f96a5008 100644 (file)
@@ -17,5 +17,7 @@
   <% end %>
 <% end %>
 <%= content_for :content_top %>
-<%= content_for :tab_line_buttons %>
+<div class="pull-right">
+  <%= content_for :tab_line_buttons %>
+</div>
 <%= content_for :tab_panes %>
index a8c45847474432ab597a29c2e8b38163e125779c..4c58daba70e85efeddb34704e89fe6badde9cdab 100644 (file)
               <%= current_user.email %>
             </a>
             <ul class="dropdown-menu" role="menu">
+              <li role="presentation" class="dropdown-header">
+                My account
+              </li>
               <% if current_user.is_active %>
               <li role="presentation"><a href="/manage_account" role="menuitem"><i class="fa fa-key fa-fw"></i> Manage account</a></li>
+              <% if Rails.configuration.user_profile_form_fields %>
+                <li role="presentation"><a href="/users/<%=current_user.uuid%>/profile" role="menuitem"><i class="fa fa-key fa-fw"></i> Manage profile</a></li>
+              <% end %>
               <li role="presentation" class="divider"></li>
               <% end %>
               <li role="presentation"><a href="<%= logout_path %>" role="menuitem"><i class="fa fa-sign-out fa-fw"></i> Log out</a></li>
index 0b78e07d65e875facfee37ee65fba34ad76847e1..08b24f13cb03faa1dcb17c1c1830f90aa5706c64 100644 (file)
@@ -1,10 +1,3 @@
-<%= content_for :tab_line_buttons do %>
-<%= form_tag({action: 'compare', controller: params[:controller], method: 'get'}, {method: 'get', id: 'compare', class: 'pull-right small-form-margin'}) do |f| %>
-  <%= submit_tag 'Compare 2 or 3 selected', {class: 'btn btn-primary', disabled: true, style: 'display: none'} %>
-  &nbsp;
-<% end rescue nil %>
-<% end %>
-
 <%= render partial: "paging", locals: {results: @objects, object: @object} %>
 
 <%= form_tag do |f| %>
diff --git a/apps/workbench/app/views/pipeline_instances/index.html.erb b/apps/workbench/app/views/pipeline_instances/index.html.erb
new file mode 100644 (file)
index 0000000..4b73bd4
--- /dev/null
@@ -0,0 +1,8 @@
+<% content_for :tab_line_buttons do %>
+<%= form_tag({action: 'compare', controller: params[:controller], method: 'get'}, {method: 'get', id: 'compare', class: 'pull-right small-form-margin'}) do |f| %>
+  <%= submit_tag 'Compare 2 or 3 selected', {class: 'btn btn-primary', disabled: true, style: 'display: none'} %>
+  &nbsp;
+<% end rescue nil %>
+<% end %>
+
+<%= render file: 'application/index.html.erb', locals: local_assigns %>
index 1f2c1baaf4930dda3da6a179652d9fe43fdf7fe8..cd03a5c790ef6bbde11590dc18d4acc4f621ef41 100644 (file)
@@ -1,18 +1 @@
-<% content_for :tab_line_buttons do %>
-  <%= button_to(choose_projects_path(id: "run-pipeline-button",
-                                     title: 'Choose project',
-                                     editable: true,
-                                     action_name: 'Choose',
-                                     action_href: pipeline_instances_path,
-                                     action_method: 'post',
-                                     action_data: {selection_param: 'pipeline_instance[owner_uuid]',
-                                                   'pipeline_instance[pipeline_template_uuid]' => @object.uuid,
-                                                   'success' => 'redirect-to-created-object'
-                                                  }.to_json),
-                { class: "btn btn-primary btn-sm", remote: true, method: 'get' }
-               ) do %>
-                   Run this pipeline
-                 <% end %>
-<% end %>
-
 <%= render_pipeline_components("editable", :json, editable: false) %>
diff --git a/apps/workbench/app/views/pipeline_templates/show.html.erb b/apps/workbench/app/views/pipeline_templates/show.html.erb
new file mode 100644 (file)
index 0000000..725d63d
--- /dev/null
@@ -0,0 +1,18 @@
+<% content_for :tab_line_buttons do %>
+  <%= button_to(choose_projects_path(id: "run-pipeline-button",
+                                     title: 'Choose project',
+                                     editable: true,
+                                     action_name: 'Choose',
+                                     action_href: pipeline_instances_path,
+                                     action_method: 'post',
+                                     action_data: {selection_param: 'pipeline_instance[owner_uuid]',
+                                                   'pipeline_instance[pipeline_template_uuid]' => @object.uuid,
+                                                   'success' => 'redirect-to-created-object'
+                                                  }.to_json),
+                { class: "btn btn-primary btn-sm", remote: true, method: 'get' }
+               ) do %>
+                   Run this pipeline
+                 <% end %>
+<% end %>
+
+<%= render file: 'application/show.html.erb', locals: local_assigns %>
index b690a1bf8f621b53556bf5480f3c536eba6e55be..e1996a7f406f2700dbfb291b07cd1d7962d6164e 100644 (file)
@@ -1,24 +1,34 @@
-<% objects_and_names.each do |object, name_link| %>
+<% get_objects_and_names.each do |object, name_link| %>
   <% name_object = (object.respond_to?(:name) || !name_link) ? object : name_link %>
   <tr class="filterable"
       data-object-uuid="<%= name_object.uuid %>"
       data-kind="<%= object.kind %>"
+      data-object-created-at="<%= object.created_at %>"
       >
     <td>
-      <%= render partial: 'selection_checkbox', locals: {object: name_object, friendly_name: ((name_object.name rescue '') || '')} %>
+      <div style="width:1em; display:inline-block;">
+        <%= render partial: 'selection_checkbox', locals: {object: name_object, friendly_name: ((name_object.name rescue '') || '')} %>
+      </div>
+    </td>
 
-      <% if project.editable? %>
-        <%= link_to({action: 'remove_item', id: project.uuid, item_uuid: ((name_link && name_link.uuid) || object.uuid)}, method: :delete, remote: true, data: {confirm: "Remove #{object.class_for_display.downcase} #{name_object.name rescue object.uuid} from this project?", toggle: 'tooltip', placement: 'top'}, class: 'btn btn-sm btn-default btn-nodecorate', title: 'remove') do %>
+    <td>
+      <% if @object.editable? %>
+        <%= link_to({action: 'remove_item', id: @object.uuid, item_uuid: ((name_link && name_link.uuid) || object.uuid)}, method: :delete, remote: true, data: {confirm: "Remove #{object.class_for_display.downcase} #{name_object.name rescue object.uuid} from this project?", toggle: 'tooltip', placement: 'top'}, class: 'btn btn-sm btn-default btn-nodecorate', title: 'remove') do %>
           <i class="fa fa-fw fa-trash-o"></i>
         <% end %>
       <% else %>
         <i class="fa fa-fw"></i><%# placeholder %>
       <% end %>
+    </td>
 
+    <td>
       <%= render :partial => "show_object_button", :locals => {object: object, size: 'sm', name_link: name_link} %>
+    </td>
 
+    <td>
       <%= render_editable_attribute (name_link || object), 'name', nil, {tiptitle: 'rename'} %>
     </td>
+
     <td class="arv-description-in-table">
       <%= render_controller_partial(
           'show_object_description_cell.html',
index 337629a4c812a1084d29dad136a35b84fb5ef194..e56321dde8a55b4137e082882b8afe77244d7e80 100644 (file)
@@ -1,69 +1,3 @@
-<% if @object.uuid != current_user.uuid # Not the "Home" project %>
-<% content_for :content_top do %>
-
-<h2>
-  <%= render_editable_attribute @object, 'name', nil, { 'data-emptytext' => "New project" } %>
-</h2>
-
-<div class="arv-description-as-subtitle">
-  <%= render_editable_attribute @object, 'description', nil, { 'data-emptytext' => "(No description provided)", 'data-toggle' => 'manual' } %>
-</div>
-
-<% end %>
-<% end %>
-
-<% content_for :tab_line_buttons do %>
-  <% if @object.editable? %>
-    <%= link_to(
-         choose_collections_path(
-           title: 'Add data to project:',
-           multiple: true,
-           action_name: 'Add',
-           action_href: actions_path(id: @object.uuid),
-           action_method: 'post',
-           action_data: {selection_param: 'selection[]', copy_selections_into_project: @object.uuid, success: 'page-refresh'}.to_json),
-         { class: "btn btn-primary btn-sm", remote: true, method: 'get', data: {'event-after-select' => 'page-refresh'} }) do %>
-      <i class="fa fa-fw fa-plus"></i> Add data...
-    <% end %>
-    <%= link_to(
-         choose_pipeline_templates_path(
-           title: 'Choose a pipeline to run:',
-           action_name: 'Next: choose inputs <i class="fa fa-fw fa-arrow-circle-right"></i>',
-           action_href: pipeline_instances_path,
-           action_method: 'post',
-           action_data: {'selection_param' => 'pipeline_instance[pipeline_template_uuid]', 'pipeline_instance[owner_uuid]' => @object.uuid, 'success' => 'redirect-to-created-object'}.to_json),
-         { class: "btn btn-primary btn-sm", remote: true, method: 'get' }) do %>
-      <i class="fa fa-fw fa-gear"></i> Run a pipeline...
-    <% end %>
-    <%= link_to projects_path('project[owner_uuid]' => @object.uuid), method: 'post', class: 'btn btn-sm btn-primary' do %>
-      <i class="fa fa-fw fa-plus"></i>
-      Add a subproject
-    <% end %>
-    <% if @object.uuid != current_user.uuid # Not the "Home" project %>
-      <%= link_to(
-          choose_projects_path(
-           title: 'Move this project to...',
-           editable: true,
-           my_root_selectable: true,
-           action_name: 'Move',
-           action_href: project_path(@object.uuid),
-           action_method: 'put',
-           action_data: {selection_param: 'project[owner_uuid]', success: 'page-refresh'}.to_json),
-          { class: "btn btn-sm btn-primary arv-move-to-project", remote: true, method: 'get' }) do %>
-        <i class="fa fa-fw fa-truck"></i> Move project...
-      <% end %>
-      <%= link_to(project_path(id: @object.uuid), method: 'delete', class: 'btn btn-sm btn-primary', data: {confirm: "Really delete project '#{@object.name}'?"}) do %>
-        <i class="fa fa-fw fa-trash-o"></i> Delete project
-      <% end %>
-    <% end %>
-  <% end %>
-<% end %>
-
-<% 
-  filters = [['uuid', 'is_a', "arvados#collection"]]
-  @objects = @object.contents({limit: 50, include_linked: true, :filters => filters})
-  objects_and_names = get_objects_and_names @objects
-  page_offset = next_page_offset @objects
-%>
-
-<%= render partial: 'show_tab_contents', locals: {project: @object, objects_and_names: objects_and_names, filters: filters, page_offset: page_offset, tab_name: 'Data_collections'} %>
+<%= render_pane 'tab_contents', to_string: true, locals: {
+    filters: [['uuid', 'is_a', "arvados#collection"]]
+    }.merge(local_assigns) %>
index f9e6306a9d44f01130b473442698563f35f34b56..df31fec8ee0bf19df7058fa358d0330636d06798 100644 (file)
@@ -1,8 +1,3 @@
-<%
-  filters = [['uuid', 'is_a', ["arvados#pipelineInstance","arvados#job"]]]
-  @objects = @object.contents({limit: 50, include_linked: true, :filters => filters})
-  objects_and_names = get_objects_and_names @objects
-  page_offset = next_page_offset @objects
-%>
-
-<%= render partial: 'show_tab_contents', locals: {project: @object, objects_and_names: objects_and_names, filters: filters, page_offset: page_offset, tab_name: 'Jobs_and_pipelines'} %>
+<%= render_pane 'tab_contents', to_string: true, locals: {
+    filters: [['uuid', 'is_a', ["arvados#job", "arvados#pipelineInstance"]]]
+    }.merge(local_assigns) %>
index 308034ed5429559fdab0bc73b0745ca4e5c23e45..af6fbd1a92ab9b5049e64d635741f4295e7a46a5 100644 (file)
@@ -1,8 +1,3 @@
-<%
-  filters = [['uuid', 'is_a', ["arvados#human","arvados#specimen","arvados#trait"]]]
-  @objects = @object.contents({limit: 50, include_linked: true, :filters => filters})
-  objects_and_names = get_objects_and_names @objects
-  page_offset = next_page_offset @objects
-%>
-
-<%= render partial: 'show_tab_contents', locals: {project: @object, objects_and_names: objects_and_names, filters: filters, page_offset: page_offset, tab_name: 'Other_objects'} %>
+<%= render_pane 'tab_contents', to_string: true, locals: {
+    filters: [['uuid', 'is_a', ["arvados#human", "arvados#specimen", "arvados#trait"]]]
+    }.merge(local_assigns) %>
index c54e28dfebdb9170da31e6b5913ad1c86f49d7ad..b875b086ec3e1528a1a29fb35df40ba1b239dd7c 100644 (file)
@@ -1,8 +1,3 @@
-<%
-  filters = [['uuid', 'is_a', "arvados#pipelineTemplate"]]
-  @objects = @object.contents({limit: 50, include_linked: true, :filters => filters})
-  objects_and_names = get_objects_and_names @objects
-  page_offset = next_page_offset @objects
-%>
-
-<%= render partial: 'show_tab_contents', locals: {project: @object, objects_and_names: objects_and_names, filters: filters, page_offset: page_offset, tab_name: 'Pipeline_templates'} %>
+<%= render_pane 'tab_contents', to_string: true, locals: {
+    filters: [['uuid', 'is_a', ["arvados#pipelineTemplate"]]]
+    }.merge(local_assigns) %>
index ad23065876cfea66acc6caa31eb05b46770ab92b..ff0062c24bb15b44c72e13628d44f659f2e3dff2 100644 (file)
@@ -1,7 +1,13 @@
 <%
    uuid_map = {}
-   [@users, @groups].each do |obj_list|
-     obj_list.each { |o| uuid_map[o.uuid] = o }
+   if @share_links
+     [User, Group].each do |type|
+       type.limit(10000)
+         .filter([['uuid','in',@share_links.collect(&:tail_uuid)]])
+         .each do |o|
+         uuid_map[o.uuid] = o
+       end
+     end
    end
    perm_name_desc_map = {}
    perm_desc_name_map = {}
index 4497ca4ea739a9dfc180f3cc2f37697b4313a20f..2c0ba6017831b5cab5c2361a98aa5a49ed9f1e1a 100644 (file)
@@ -1,8 +1,3 @@
-<%
-  filters = [['uuid', 'is_a', "arvados#group"]]
-  @objects = @object.contents({limit: 50, include_linked: true, :filters => filters})
-  objects_and_names = get_objects_and_names @objects
-  page_offset = next_page_offset @objects
-%>
-
-<%= render partial: 'show_tab_contents', locals: {project: @object, objects_and_names: objects_and_names, filters: filters, page_offset: page_offset, tab_name: 'Subprojects'} %>
+<%= render_pane 'tab_contents', to_string: true, locals: {
+    filters: [['uuid', 'is_a', ["arvados#group"]]]
+    }.merge(local_assigns) %>
index df37d3e1c7051ad6b1da1fcb0af7fb77d6aecc91..e3884b629287584581fffcf57cd85e15cecc486d 100644 (file)
@@ -2,7 +2,7 @@
   <div class="row">
     <div class="col-sm-5">
       <div class="btn-group btn-group-sm">
-        <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Selection... <i class="fa fa-fw fa-long-arrow-down "></i></button>
+        <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Selection... <span class="caret"></span></button>
         <ul class="dropdown-menu" role="menu">
           <li><%= link_to "Compare selected", '#',
                   'data-href' => compare_pipeline_instances_path,
       </div>
     </div>
     <div class="col-sm-4 pull-right">
-      <input type="text" class="form-control filterable-control" placeholder="Search project contents" data-filterable-target="table.arv-index.arv-project-<%= tab_name %> tbody"/>
+      <input type="text" class="form-control filterable-control" placeholder="Search project contents" data-filterable-target="table.arv-index.arv-project-<%= tab_pane %> tbody"/>
     </div>
   </div>
 
-  <table class="table table-condensed table-fixedlayout arv-index arv-project-<%= tab_name %>" style="overflow-x: hidden">
+  <table class="table table-condensed arv-index arv-project-<%= tab_pane %>">
     <colgroup>
-      <col width="40%" />
-      <col width="60%" />
+      <col width="0*" style="max-width: fit-content;" />
+      <col width="0*" style="max-width: fit-content;" />
+      <col width="0*" style="max-width: fit-content;" />
+      <col width="60%" style="width: 60%;" />
+      <col width="40%" style="width: 40%;" />
     </colgroup>
-    <tbody data-infinite-scroller="#<%= tab_name %>-scroll" data-infinite-content-href="<%= url_for(format: :json, partial: :contents_rows, offset: page_offset, filters: "#{filters}") if page_offset %>">
-      <%= render partial: 'show_contents_rows', locals: {project: @object, objects_and_names: objects_and_names} %>
+    <tbody data-infinite-scroller="#<%= tab_pane %>-scroll" data-infinite-content-href="<%= url_for partial: :contents_rows, filters: filters.to_json %>">
     </tbody>
     <thead>
       <tr>
-        <th>
-        </th>
-        <th>
-          description
-        </th>
+        <th></th>
+        <th></th>
+        <th></th>
+        <th>name</th>
+        <th>description</th>
       </tr>
     </thead>
   </table>
-
 </div>
diff --git a/apps/workbench/app/views/projects/show.html.erb b/apps/workbench/app/views/projects/show.html.erb
new file mode 100644 (file)
index 0000000..c221ca1
--- /dev/null
@@ -0,0 +1,62 @@
+<% if @object.uuid != current_user.uuid # Not the "Home" project %>
+<% content_for :content_top do %>
+
+<h2>
+  <%= render_editable_attribute @object, 'name', nil, { 'data-emptytext' => "New project" } %>
+</h2>
+
+<div class="arv-description-as-subtitle">
+  <%= render_editable_attribute @object, 'description', nil, { 'data-emptytext' => "(No description provided)", 'data-toggle' => 'manual' } %>
+</div>
+
+<% end %>
+<% end %>
+
+<% content_for :tab_line_buttons do %>
+  <% if @object.editable? %>
+    <%= link_to(
+         choose_collections_path(
+           title: 'Add data to project:',
+           multiple: true,
+           action_name: 'Add',
+           action_href: actions_path(id: @object.uuid),
+           action_method: 'post',
+           action_data: {selection_param: 'selection[]', copy_selections_into_project: @object.uuid, success: 'page-refresh'}.to_json),
+         { class: "btn btn-primary btn-sm", remote: true, method: 'get', data: {'event-after-select' => 'page-refresh'} }) do %>
+      <i class="fa fa-fw fa-plus"></i> Add data...
+    <% end %>
+    <%= link_to(
+         choose_pipeline_templates_path(
+           title: 'Choose a pipeline to run:',
+           action_name: 'Next: choose inputs <i class="fa fa-fw fa-arrow-circle-right"></i>',
+           action_href: pipeline_instances_path,
+           action_method: 'post',
+           action_data: {'selection_param' => 'pipeline_instance[pipeline_template_uuid]', 'pipeline_instance[owner_uuid]' => @object.uuid, 'success' => 'redirect-to-created-object'}.to_json),
+         { class: "btn btn-primary btn-sm", remote: true, method: 'get' }) do %>
+      <i class="fa fa-fw fa-gear"></i> Run a pipeline...
+    <% end %>
+    <%= link_to projects_path('project[owner_uuid]' => @object.uuid), method: 'post', class: 'btn btn-sm btn-primary' do %>
+      <i class="fa fa-fw fa-plus"></i>
+      Add a subproject
+    <% end %>
+    <% if @object.uuid != current_user.uuid # Not the "Home" project %>
+      <%= link_to(
+          choose_projects_path(
+           title: 'Move this project to...',
+           editable: true,
+           my_root_selectable: true,
+           action_name: 'Move',
+           action_href: project_path(@object.uuid),
+           action_method: 'put',
+           action_data: {selection_param: 'project[owner_uuid]', success: 'page-refresh'}.to_json),
+          { class: "btn btn-sm btn-primary arv-move-to-project", remote: true, method: 'get' }) do %>
+        <i class="fa fa-fw fa-truck"></i> Move project...
+      <% end %>
+      <%= link_to(project_path(id: @object.uuid), method: 'delete', class: 'btn btn-sm btn-primary', data: {confirm: "Really delete project '#{@object.name}'?"}) do %>
+        <i class="fa fa-fw fa-trash-o"></i> Delete project
+      <% end %>
+    <% end %>
+  <% end %>
+<% end %>
+
+<%= render file: 'application/show.html.erb', locals: local_assigns %>
index 49516eb67d48b646b46b743d5df178fa409e0f0c..d37360188dfee5a9f6fc366214ef35db7e6dddaf 100644 (file)
@@ -1,27 +1,27 @@
 <% content_for :breadcrumbs do raw '<!-- -->' end %>
 
-<% n_files = @required_user_agreements.collect(&:files).flatten(1).count %>
+<% n_files = unsigned_user_agreements.collect(&:files).flatten(1).count %>
 <% content_for :page_title do %>
 <% if n_files == 1 %>
-<%= @required_user_agreements.first.files.first[1].sub(/\.[a-z]{3,4}$/,'') %>
+<%= unsigned_user_agreements.first.files.first[1].sub(/\.[a-z]{3,4}$/,'') %>
 <% else %>
 User agreements
 <% end %>
 <% end %>
 
-<%= form_for(@required_user_agreements.first, {url: {action: 'sign', controller: 'user_agreements'}, method: 'post'}) do |f| %>
+<%= form_for(unsigned_user_agreements.first, {url: {action: 'sign', controller: 'user_agreements'}, method: 'post'}) do |f| %>
 <%= hidden_field_tag :return_to, request.url %>
 <div id="open_user_agreement">
   <div class="alert alert-info">
     <strong>Please check <%= n_files > 1 ? 'each' : 'the' %> box below</strong> to indicate that you have read and accepted the user agreement<%= 's' if n_files > 1 %>.
   </div>
   <% if n_files == 1 and (Rails.configuration.show_user_agreement_inline rescue false) %>
-  <% ua = @required_user_agreements.first; file = ua.files.first %>
+  <% ua = unsigned_user_agreements.first; file = ua.files.first %>
   <object data="<%= url_for(controller: 'collections', action: 'show_file', uuid: ua.uuid, file: "#{file[0]}/#{file[1]}") %>" type="<%= Rack::Mime::MIME_TYPES[file[1].match(/\.\w+$/)[0]] rescue '' %>" width="100%" height="400px">
   </object>
   <% end %>
   <div>
-    <% @required_user_agreements.each do |ua| %>
+    <% unsigned_user_agreements.each do |ua| %>
     <% ua.files.each do |file| %>
     <%= f.label 'checked[]', class: 'checkbox inline' do %>
     <%= check_box_tag 'checked[]', "#{ua.uuid}/#{file[0]}/#{file[1]}", false %>
@@ -37,15 +37,3 @@ User agreements
   </div>
 </div>
 <% end %>
-
-<% content_for :footer_js do %>
-$('#open_user_agreement input[name="checked[]"]').on('click', function() {
-    var dialog = $('#open_user_agreement')[0]
-    $('input[type=submit]', dialog).prop('disabled',false);
-    $('input[name="checked[]"]', dialog).each(function(){
-        if(!this.checked) {
-            $('input[type=submit]', dialog).prop('disabled',true);
-        }
-    });
-});
-<% end %>
index 5f825e47e6f9f2649ea555c804dad475b8e6cf22..832b5809673b175346032fe1df87351fea411fcf 100644 (file)
         An administrator must activate your account before you can get
         any further.
 
+      </p><p>
+
+        <%= link_to 'Retry', (params[:return_to] || '/'), class: 'btn btn-primary' %>
+
       </p>
     </div>
   </div>
diff --git a/apps/workbench/app/views/users/profile.html.erb b/apps/workbench/app/views/users/profile.html.erb
new file mode 100644 (file)
index 0000000..aab8843
--- /dev/null
@@ -0,0 +1,95 @@
+<%
+    profile_config = Rails.configuration.user_profile_form_fields
+    current_user_profile = current_user.prefs[:profile]
+    show_save_button = false
+
+    profile_message = Rails.configuration.user_profile_form_message
+%>
+
+<div>
+    <div class="panel panel-default">
+        <div class="panel-heading">
+          <h4 class="panel-title">
+            Profile
+          </h4>
+        </div>
+        <div class="panel-body">
+          <% if !missing_required_profile? && params[:offer_return_to] %>
+            <div class="alert alert-success">
+              <p>Thank you for filling in your profile. <%= link_to 'Back to work!', params[:offer_return_to], class: 'btn btn-sm btn-primary' %></p>
+            </div>
+          <% else %>
+            <div class="alert alert-info">
+              <p><%=raw(profile_message)%></p>
+            </div>
+          <% end %>
+
+            <%= form_for current_user, html: {id: 'save_profile_form', name: 'save_profile_form', class: 'form-horizontal'} do %>
+              <%= hidden_field_tag :offer_return_to, params[:offer_return_to] %>
+              <%= hidden_field_tag :return_to, profile_user_path(current_user.uuid, offer_return_to: params[:offer_return_to]) %>
+              <div class="form-group">
+                  <label for="email" class="col-sm-3 control-label"> Email </label>
+                  <div class="col-sm-8">
+                    <p class="form-control-static" id="email" name="email"><%=current_user.email%></p>
+                  </div>
+              </div>
+              <div class="form-group">
+                  <label for="first_name" class="col-sm-3 control-label"> First name </label>
+                  <div class="col-sm-8">
+                    <p class="form-control-static" id="first_name" name="first_name"><%=current_user.first_name%></p>
+                  </div>
+              </div>
+              <div class="form-group">
+                  <label for="last_name" class="col-sm-3 control-label"> Last name </label>
+                  <div class="col-sm-8">
+                    <p class="form-control-static" id="last_name" name="last_name"><%=current_user.last_name%></p>
+                  </div>
+              </div>
+              <div class="form-group">
+                  <label for="identity_url" class="col-sm-3 control-label"> Identity URL </label>
+                  <div class="col-sm-8">
+                    <p class="form-control-static" id="identity_url" name="identity_url"><%=current_user.andand.identity_url%></p>
+                  </div>
+              </div>
+
+              <% profile_config.kind_of?(Array) && profile_config.andand.each do |entry| %>
+                <% if entry['key'] %>
+                  <%
+                      show_save_button = true
+                      label = entry['required'] ? '* ' : ''
+                      label += entry['form_field_title']
+                      value = current_user_profile[entry['key'].to_sym] if current_user_profile
+                  %>
+                  <div class="form-group">
+                    <label for="<%=entry['key']%>"
+                           class="col-sm-3 control-label"
+                           style=<%="color:red" if entry['required']&&(!value||value.empty?)%>> <%=label%>
+                    </label>
+                    <% if entry['type'] == 'select' %>
+                      <div class="col-sm-8">
+                        <select class="form-control" name="user[prefs][:profile][:<%=entry['key']%>]">
+                          <% entry['options'].each do |option| %>
+                            <option value="<%=option%>" <%='selected' if option==value%>><%=option%></option>
+                          <% end %>
+                        </select>
+                      </div>
+                    <% else %>
+                      <div class="col-sm-8">
+                        <input type="text" class="form-control" name="user[prefs][:profile][:<%=entry['key']%>]" placeholder="<%=entry['form_field_description']%>" value="<%=value%>" ></input>
+                      </div>
+                    <% end %>
+                  </div>
+                <% end %>
+              <% end %>
+
+              <% if show_save_button %>
+                <div class="form-group">
+                  <div class="col-sm-offset-3 col-sm-8">
+                    <button type="submit" class="btn btn-primary">Save profile</button>
+                  </div>
+                </div>
+              <% end %>
+            <% end %>
+        </div>
+    </div>
+</div>
index 2fe701afcb6d5efdd9b92abba0ef94f571f23058..9443520b1443ade6b8bbad19bed8363016f33097 100644 (file)
@@ -58,6 +58,25 @@ test:
 
   site_name: Workbench:test
 
+  # Enable user profile with one required field
+  user_profile_form_fields:
+    - key: organization
+      type: text
+      form_field_title: Institution
+      form_field_description: Your organization
+      required: true
+    - key: role
+      type: select
+      form_field_title: Your role
+      form_field_description: Choose the category that best describes your role in your organization.
+      options:
+        - Bio-informatician
+        - Computational biologist
+        - Biologist or geneticist
+        - Software developer
+        - IT
+        - Other
+
 common:
   assets.js_compressor: false
   assets.css_compressor: false
@@ -74,3 +93,41 @@ common:
   secret_key_base: false
   default_openid_prefix: https://www.google.com/accounts/o8/id
   send_user_setup_notification_email: true
+
+  # Set user_profile_form_fields to enable and configure the user profile page.
+  # Default is set to false. A commented setting with full description is provided below.
+  user_profile_form_fields: false
+
+  # Below is a sample setting of user_profile_form_fields config parameter.
+  # This configuration parameter should be set to either false (to disable) or
+  # to an array as shown below.
+  # Configure the list of input fields to be displayed in the profile page
+  # using the attribute "key" for each of the input fields.
+  # This sample shows configuration with one required and one optional form fields.
+  # For each of these input fields:
+  #   You can specify "type" as "text" or "select".
+  #   List the "options" to be displayed for each "select" menu.
+  #   Set "required" as "true" for any of these fields to make them required.
+  # If any required field is missing from the user's profile, the user will be
+  # redirected to the profile page before they can access any Workbench features.
+  #user_profile_form_fields:
+  #  - key: organization
+  #    type: text
+  #    form_field_title: Institution/Company
+  #    form_field_description: Your organization
+  #    required: true
+  #  - key: role
+  #    type: select
+  #    form_field_title: Your role
+  #    form_field_description: Choose the category that best describes your role in your organization.
+  #    options:
+  #      - Bio-informatician
+  #      - Computational biologist
+  #      - Biologist or geneticist
+  #      - Software developer
+  #      - IT
+  #      - Other
+
+  # Use "user_profile_form_message" to configure the message you want to display on
+  # the profile page.
+  user_profile_form_message: Welcome to Arvados. All <span style="color:red">required fields</span> must be completed before you can proceed.
index 091a06987174697ea5f81b0d36b293173ab44801..dfd682119ce699a0bb9f9ab5fc8d4a34dc12f01c 100644 (file)
@@ -27,11 +27,13 @@ ArvadosWorkbench::Application.routes.draw do
     get 'choose', :on => :collection
     get 'home', :on => :member
     get 'welcome', :on => :collection
+    get 'inactive', :on => :collection
     get 'activity', :on => :collection
     get 'storage', :on => :collection
     post 'sudo', :on => :member
     post 'unsetup', :on => :member
     get 'setup_popup', :on => :member
+    get 'profile', :on => :member
   end
   get '/manage_account' => 'users#manage_account'
   get "/add_ssh_key_popup" => 'users#add_ssh_key_popup', :as => :add_ssh_key_popup
index 50f990aa0bca1119c11aba9b60a5365fdd6f8811..c2828020bdca966400301c68b486b505a20c20ff 100644 (file)
@@ -303,4 +303,22 @@ class ApplicationControllerTest < ActionController::TestCase
     get(:show, {id: "zzzzz-zzzzz-zzzzzzzzzzzzzzz"}, session_for(:admin))
     assert_response 404
   end
+
+  test "Workbench returns 4xx when API server is unreachable" do
+    # We're really testing ApplicationController's render_exception.
+    # Our primary concern is that it doesn't raise an error and
+    # return 500.
+    orig_api_server = Rails.configuration.arvados_v1_base
+    begin
+      # The URL should look valid in all respects, and avoid talking over a
+      # network.  100::/64 is the IPv6 discard prefix, so it's perfect.
+      Rails.configuration.arvados_v1_base = "https://[100::f]:1/"
+      @controller = NodesController.new
+      get(:index, {}, session_for(:active))
+      assert_includes(405..422, @response.code.to_i,
+                      "bad response code when API server is unreachable")
+    ensure
+      Rails.configuration.arvados_v1_base = orig_api_server
+    end
+  end
 end
index 4745d1b9ac7a8f7860e3c10b5b86299f404c7d07..f86c50a26788a92a352700808afe752b635ab25d 100644 (file)
@@ -170,7 +170,7 @@ class CollectionsControllerTest < ActionController::TestCase
       uuid: ua_collection['uuid'],
       file: ua_collection['manifest_text'].match(/ \d+:\d+:(\S+)/)[1]
     }, session_for(:inactive)
-    assert_nil(assigns(:required_user_agreements),
+    assert_nil(assigns(:unsigned_user_agreements),
                "Did not skip check_user_agreements filter " +
                "when showing the user agreement.")
     assert_response :success
index 39c32ac4ac3d959bc31dc3b91bdffd87af1b440f..d76430cfdf523f246e4a60d03abf3f85e2b9a85c 100644 (file)
@@ -1,13 +1,20 @@
 require 'test_helper'
 
 class ProjectsControllerTest < ActionController::TestCase
-  test "inactive user is asked to sign user agreements on front page" do
+  test "invited user is asked to sign user agreements on front page" do
     get :index, {}, session_for(:inactive)
-    assert_response :success
-    assert_not_empty assigns(:required_user_agreements),
-    "Inactive user did not have required_user_agreements"
-    assert_template 'user_agreements/index',
-    "Inactive user was not presented with a user agreement at the front page"
+    assert_response :redirect
+    assert_match(/^#{Regexp.escape(user_agreements_url)}\b/,
+                 @response.redirect_url,
+                 "Inactive user was not redirected to user_agreements page")
+  end
+
+  test "uninvited user is asked to wait for activation" do
+    get :index, {}, session_for(:inactive_uninvited)
+    assert_response :redirect
+    assert_match(/^#{Regexp.escape(inactive_users_url)}\b/,
+                 @response.redirect_url,
+                 "Uninvited user was not redirected to inactive user page")
   end
 
   [[:active, true],
@@ -38,7 +45,6 @@ class ProjectsControllerTest < ActionController::TestCase
            format: "json"},
          session_for(:active))
     assert_response :success
-    json_response = Oj.load(@response.body)
     assert_equal(uuid_list, json_response["success"])
   end
 
@@ -50,7 +56,6 @@ class ProjectsControllerTest < ActionController::TestCase
            format: "json"},
          session_for(:project_viewer))
     assert_response 422
-    json_response = Oj.load(@response.body)
     assert(json_response["errors"].andand.
              any? { |msg| msg.start_with?("#{share_uuid}: ") },
            "JSON response missing properly formatted sharing error")
@@ -87,4 +92,74 @@ class ProjectsControllerTest < ActionController::TestCase
   test "viewer can't manage asubproject" do
     refute user_can_manage(:project_viewer, "asubproject")
   end
+
+  test 'projects#show tab infinite scroll partial obeys limit' do
+    get_contents_rows(limit: 1, filters: [['uuid','is_a',['arvados#job']]])
+    assert_response :success
+    assert_equal(1, json_response['content'].scan('<tr').count,
+                 "Did not get exactly one row")
+  end
+
+  ['', ' asc', ' desc'].each do |direction|
+    test "projects#show tab partial orders correctly by #{direction}" do
+      _test_tab_content_order direction
+    end
+  end
+
+  def _test_tab_content_order direction
+    get_contents_rows(limit: 100,
+                      order: "created_at#{direction}",
+                      filters: [['uuid','is_a',['arvados#job',
+                                                'arvados#pipelineInstance']]])
+    assert_response :success
+    not_grouped_by_kind = nil
+    last_timestamp = nil
+    last_kind = nil
+    found_kind = {}
+    json_response['content'].scan /<tr[^>]+>/ do |tr_tag|
+      found_timestamps = 0
+      tr_tag.scan(/\ data-object-created-at=\"(.*?)\"/).each do |t,|
+        if last_timestamp
+          correct_operator = / desc$/ =~ direction ? :>= : :<=
+          assert_operator(last_timestamp, correct_operator, t,
+                          "Rows are not sorted by created_at#{direction}")
+        end
+        last_timestamp = t
+        found_timestamps += 1
+      end
+      assert_equal(1, found_timestamps,
+                   "Content row did not have exactly one timestamp")
+
+      # Confirm that the test for timestamp ordering couldn't have
+      # passed merely because the test fixtures have convenient
+      # timestamps (e.g., there is only one pipeline and one job in
+      # the project being tested, or there are no pipelines at all in
+      # the project being tested):
+      tr_tag.scan /\ data-kind=\"(.*?)\"/ do |kind|
+        if last_kind and last_kind != kind and found_kind[kind]
+          # We saw this kind before, then a different kind, then
+          # this kind again. That means objects are not grouped by
+          # kind.
+          not_grouped_by_kind = true
+        end
+        found_kind[kind] ||= 0
+        found_kind[kind] += 1
+        last_kind = kind
+      end
+    end
+    assert_equal(true, not_grouped_by_kind,
+                 "Could not confirm that results are not grouped by kind")
+  end
+
+  def get_contents_rows params
+    params = {
+      id: api_fixture('users')['active']['uuid'],
+      partial: :contents_rows,
+      format: :json,
+    }.merge(params)
+    encoded_params = Hash[params.map { |k,v|
+                            [k, (v.is_a?(Array) || v.is_a?(Hash)) ? v.to_json : v]
+                          }]
+    get :show, encoded_params, session_for(:active)
+  end
 end
index 86778df03607d3bdec23e81dabc0c804c6874457..e8ee10fcbf3362f28e610690ba37d19698af7483 100644 (file)
@@ -23,4 +23,10 @@ class UsersControllerTest < ActionController::TestCase
     assert_nil assigns(:my_jobs)
     assert_nil assigns(:my_ssh_keys)
   end
+
+  test "show welcome page if no token provided" do
+    get :index, {}
+    assert_response :redirect
+    assert_match /\/users\/welcome/, @response.redirect_url
+  end
 end
diff --git a/apps/workbench/test/integration/application_layout_test.rb b/apps/workbench/test/integration/application_layout_test.rb
new file mode 100644 (file)
index 0000000..78842a4
--- /dev/null
@@ -0,0 +1,317 @@
+require 'integration_helper'
+require 'selenium-webdriver'
+require 'headless'
+
+class ApplicationLayoutTest < ActionDispatch::IntegrationTest
+  setup do
+    headless = Headless.new
+    headless.start
+    Capybara.current_driver = :selenium
+
+    @user_profile_form_fields = Rails.configuration.user_profile_form_fields
+  end
+
+  teardown do
+    Rails.configuration.user_profile_form_fields = @user_profile_form_fields
+  end
+
+  def verify_homepage_with_profile user, invited, has_profile
+    profile_config = Rails.configuration.user_profile_form_fields
+
+    if !user
+      assert page.has_text? 'Please log in'
+      assert page.has_text? 'The "Log in" button below will show you a Google sign-in page'
+      assert page.has_no_text? 'My projects'
+      assert page.has_link? "Log in to #{Rails.configuration.site_name}"
+    elsif profile_config && !has_profile && user['is_active']
+      add_profile user
+    elsif user['is_active']
+      assert page.has_text? 'My projects'
+      assert page.has_text? 'Projects shared with me'
+      assert page.has_no_text? 'Save profile'
+    elsif invited
+      assert page.has_text? 'Please check the box below to indicate that you have read and accepted the user agreement'
+      assert page.has_no_text? 'Save profile'
+    else
+      assert page.has_text? 'Your account is inactive'
+      assert page.has_no_text? 'Save profile'
+    end
+
+    within('.navbar-fixed-top') do
+      if !user
+        assert page.has_link? 'Log in'
+      else
+        # my account menu
+        assert page.has_link? "#{user['email']}"
+        find('a', text: "#{user['email']}").click
+        within('.dropdown-menu') do
+          if user['is_active']
+            assert page.has_no_link? ('Not active')
+            assert page.has_no_link? ('Sign agreements')
+
+            assert page.has_link? ('Manage account')
+
+            if profile_config
+              assert page.has_link? ('Manage profile')
+            else
+              assert page.has_no_link? ('Manage profile')
+            end
+          end
+          assert page.has_link? ('Log out')
+        end
+      end
+    end
+  end
+
+  # test the help menu
+  def check_help_menu
+    within('.navbar-fixed-top') do
+      page.find("#arv-help").click
+      within('.dropdown-menu') do
+        assert page.has_link? 'Tutorials and User guide'
+        assert page.has_link? 'API Reference'
+        assert page.has_link? 'SDK Reference'
+      end
+    end
+  end
+
+  def verify_system_menu user
+    if user && user['is_active']
+      look_for_add_new = nil
+      within('.navbar-fixed-top') do
+        page.find("#system-menu").click
+        if user['is_admin']
+          within('.dropdown-menu') do
+            assert page.has_text? 'Groups'
+            assert page.has_link? 'Repositories'
+            assert page.has_link? 'Virtual machines'
+            assert page.has_link? 'SSH keys'
+            assert page.has_link? 'API tokens'
+            find('a', text: 'Users').click
+            look_for_add_new = 'Add a new user'
+          end
+        else
+          within('.dropdown-menu') do
+            assert page.has_no_text? 'Users'
+            assert page.has_no_link? 'Repositories'
+            assert page.has_no_link? 'Virtual machines'
+            assert page.has_no_link? 'SSH keys'
+            assert page.has_no_link? 'API tokens'
+
+            find('a', text: 'Groups').click
+            look_for_add_new = 'Add a new group'
+          end
+        end
+      end
+      if look_for_add_new
+        assert page.has_text? look_for_add_new
+      end
+    else
+      assert page.has_no_link? '#system-menu'
+    end
+  end
+
+  # test manage_account page
+  def verify_manage_account user
+    if user && user['is_active']
+      within('.navbar-fixed-top') do
+        find('a', text: "#{user['email']}").click
+        within('.dropdown-menu') do
+          find('a', text: 'Manage account').click
+        end
+      end
+
+      # now in manage account page
+      assert page.has_text? 'Virtual Machines'
+      assert page.has_text? 'Repositories'
+      assert page.has_text? 'SSH Keys'
+      assert page.has_text? 'Current Token'
+
+      assert page.has_text? 'The Arvados API token is a secret key that enables the Arvados SDKs to access Arvados'
+
+      click_link 'Add new SSH key'
+
+      within '.modal-content' do
+        assert page.has_text? 'Public Key'
+        assert page.has_button? 'Cancel'
+        assert page.has_button? 'Submit'
+
+        page.find_field('public_key').set 'first test with an incorrect ssh key value'
+        click_button 'Submit'
+        assert page.has_text? 'Public key does not appear to be a valid ssh-rsa or dsa public key'
+
+        public_key_str = api_fixture('authorized_keys')['active']['public_key']
+        page.find_field('public_key').set public_key_str
+        page.find_field('name').set 'added_in_test'
+        click_button 'Submit'
+        assert page.has_text? 'Public key already exists in the database, use a different key.'
+
+        new_key = SSHKey.generate
+        page.find_field('public_key').set new_key.ssh_public_key
+        page.find_field('name').set 'added_in_test'
+        click_button 'Submit'
+      end
+
+      # key must be added. look for it in the refreshed page
+      assert page.has_text? 'added_in_test'
+    end
+  end
+
+  # Check manage profile page and add missing profile to the user
+  def add_profile user
+    assert page.has_no_text? 'My projects'
+    assert page.has_no_text? 'Projects shared with me'
+
+    assert page.has_text? 'Profile'
+    assert page.has_text? 'First name'
+    assert page.has_text? 'Last name'
+    assert page.has_text? 'Identity URL'
+    assert page.has_text? 'Email'
+    assert page.has_text? user['email']
+
+    # Using the default profile which has message and one required field
+
+    # Save profile without filling in the required field. Expect to be back in this profile page again
+    click_button "Save profile"
+    assert page.has_text? 'Profile'
+    assert page.has_text? 'First name'
+    assert page.has_text? 'Last name'
+    assert page.has_text? 'Save profile'
+
+    # This time fill in required field and then save. Expect to go to requested page after that.
+    profile_message = Rails.configuration.user_profile_form_message
+    required_field_title = ''
+    required_field_key = ''
+    profile_config = Rails.configuration.user_profile_form_fields
+    profile_config.andand.each do |entry|
+      if entry['required']
+        required_field_key = entry['key']
+        required_field_title = entry['form_field_title']
+      end
+    end
+
+    assert page.has_text? profile_message.gsub(/<.*?>/,'')
+    assert page.has_text? required_field_title
+    page.find_field('user[prefs][:profile][:'+required_field_key+']').set 'value to fill required field'
+
+    click_button "Save profile"
+    # profile saved and in profile page now with success
+    assert page.has_text? 'Thank you for filling in your profile'
+    click_link 'Back to work'
+
+    # profile saved and in home page now
+    assert page.has_text? 'My projects'
+    assert page.has_text? 'Projects shared with me'
+  end
+
+  # test the search box
+  def verify_search_box user
+    if user && user['is_active']
+      # let's search for a valid uuid
+      within('.navbar-fixed-top') do
+        page.find_field('search').set user['uuid']
+        page.find('.glyphicon-search').click
+      end
+
+      # we should now be in the user's page as a result of search
+      assert page.has_text? user['first_name']
+
+      # let's search again for an invalid valid uuid
+      within('.navbar-fixed-top') do
+        search_for = String.new user['uuid']
+        search_for[0]='1'
+        page.find_field('search').set search_for
+        page.find('.glyphicon-search').click
+      end
+
+      # we should see 'not found' error page
+      assert page.has_text? 'Not Found'
+
+      # let's search for the anonymously accessible project
+      publicly_accessible_project = api_fixture('groups')['anonymously_accessible_project']
+
+      within('.navbar-fixed-top') do
+        # search again for the anonymously accessible project
+        page.find_field('search').set publicly_accessible_project['name'][0,10]
+        page.find('.glyphicon-search').click
+      end
+
+      within '.modal-content' do
+        assert page.has_text? 'All projects'
+        assert page.has_text? 'Search'
+        assert page.has_text? 'Cancel'
+        assert_selector('div', text: publicly_accessible_project['name'])
+        find(:xpath, '//div[./span[contains(.,publicly_accessible_project["uuid"])]]').click
+
+        click_button 'Show'
+      end
+
+      # seeing "Unrestricted public data" now
+      assert page.has_text? publicly_accessible_project['name']
+      assert page.has_text? publicly_accessible_project['description']
+    end
+  end
+
+  [
+    [nil, nil, false, false],
+    ['inactive', api_fixture('users')['inactive'], true, false],
+    ['inactive_uninvited', api_fixture('users')['inactive_uninvited'], false, false],
+    ['active', api_fixture('users')['active'], true, true],
+    ['admin', api_fixture('users')['admin'], true, true],
+    ['active_no_prefs', api_fixture('users')['active_no_prefs'], true, false],
+    ['active_no_prefs_profile', api_fixture('users')['active_no_prefs_profile'], true, false],
+  ].each do |token, user, invited, has_profile|
+    test "visit home page when profile is configured for user #{token}" do
+      # Our test config enabled profile by default. So, no need to update config
+      if !token
+        visit ('/')
+      else
+        visit page_with_token(token)
+      end
+
+      verify_homepage_with_profile user, invited, has_profile
+    end
+
+    test "visit home page when profile not configured for user #{token}" do
+      Rails.configuration.user_profile_form_fields = false
+
+      if !token
+        visit ('/')
+      else
+        visit page_with_token(token)
+      end
+
+      verify_homepage_with_profile user, invited, has_profile
+    end
+
+    test "check help for user #{token}" do
+      if !token
+        visit ('/')
+      else
+        visit page_with_token(token)
+      end
+
+      check_help_menu
+    end
+  end
+
+  [
+    ['active', api_fixture('users')['active'], true, true],
+    ['admin', api_fixture('users')['admin'], true, true],
+  ].each do |token, user|
+    test "test system menu for user #{token}" do
+      visit page_with_token(token)
+      verify_system_menu user
+    end
+
+    test "test manage account for user #{token}" do
+      visit page_with_token(token)
+      verify_manage_account user
+    end
+
+    test "test search for user #{token}" do
+      visit page_with_token(token)
+      verify_search_box user
+    end
+  end
+end
index 8657aaa0e492308075f891e89f89aece2df7a6b3..19720cb752cfc83823e77337956d08c29b4227b6 100644 (file)
@@ -13,19 +13,17 @@ class CollectionsTest < ActionDispatch::IntegrationTest
     page.assert_no_selector "div[data-persistent-state='#{oldstate}']"
   end
 
-  ['/collections', '/users/welcome'].each do |path|
-    test "Flip persistent switch at #{path}" do
-      Capybara.current_driver = Capybara.javascript_driver
-      uuid = api_fixture('collections')['foo_file']['uuid']
-      visit page_with_token('active', path)
-      within "tr[data-object-uuid='#{uuid}']" do
-        change_persist 'cache', 'persistent'
-      end
-      # Refresh page and make sure the change was committed.
-      visit current_path
-      within "tr[data-object-uuid='#{uuid}']" do
-        change_persist 'persistent', 'cache'
-      end
+  test "Flip persistent switch at /collections" do
+    Capybara.current_driver = Capybara.javascript_driver
+    uuid = api_fixture('collections')['foo_file']['uuid']
+    visit page_with_token('active', '/collections')
+    within "tr[data-object-uuid='#{uuid}']" do
+      change_persist 'cache', 'persistent'
+    end
+    # Refresh page and make sure the change was committed.
+    visit current_path
+    within "tr[data-object-uuid='#{uuid}']" do
+      change_persist 'persistent', 'cache'
     end
   end
 
index 0a6c3404f1c5c5aa03347deafd2ef702d608e9c7..6385393b24f2c82cbb068e85aea4918d9844d379 100644 (file)
@@ -44,7 +44,9 @@ class PipelineInstancesTest < ActionDispatch::IntegrationTest
     within('.modal-dialog') do
       find('.btn', text: 'Add').click
     end
-    wait_for_ajax
+    using_wait_time(Capybara.default_wait_time * 3) do
+      wait_for_ajax
+    end
 
     click_link 'Jobs and pipelines'
     find('tr[data-kind="arvados#pipelineInstance"]', text: 'New pipeline instance').
@@ -117,6 +119,7 @@ class PipelineInstancesTest < ActionDispatch::IntegrationTest
       click
 
     within('.modal-dialog') do
+      assert_selector 'button.dropdown-toggle', text: 'A Project'
       first('span', text: 'foo_tag').click
       find('button', text: 'OK').click
     end
index 38d4063ce2a422695265ad1721b7683a6e2d493e..b2c9d772d14b652688273023eeeb0d7ee9111017 100644 (file)
@@ -30,18 +30,20 @@ class ProjectsTest < ActionDispatch::IntegrationTest
     specimen_uuid = api_fixture('specimens')['owned_by_aproject_with_no_name_link']['uuid']
     visit page_with_token 'active', '/projects/' + project_uuid
     click_link 'Other objects'
-    within(".selection-action-container") do
-      within (first('tr', text: 'Specimen')) do
+    within '.selection-action-container' do
+      # Wait for the tab to load:
+      assert_selector 'tr[data-kind="arvados#specimen"]'
+      within first('tr', text: 'Specimen') do
         find(".fa-pencil").click
         find('.editable-input input').set('Now I have a name.')
         find('.glyphicon-ok').click
-        find('.editable', text: 'Now I have a name.').click
+        assert_selector '.editable', text: 'Now I have a name.'
         find(".fa-pencil").click
         find('.editable-input input').set('Now I have a new name.')
         find('.glyphicon-ok').click
-        end
+      end
       wait_for_ajax
-      find('.editable', text: 'Now I have a new name.')
+      assert_selector '.editable', text: 'Now I have a new name.'
     end
     visit current_path
     click_link 'Other objects'
index e833f970c02d211905cd504a4d7a25a13a25a016..260bff043c8cfc10cfc08f3d272835c0be214d51 100644 (file)
@@ -70,6 +70,9 @@ class ActiveSupport::TestCase
       arvados_api_token: api_fixture('api_client_authorizations')[api_client_auth_name.to_s]['api_token']
     }
   end
+  def json_response
+    Oj.load(@response.body)
+  end
 end
 
 class ApiServerForTests
index df66cf66e0e9ae7ead91c1622e2411e0fc4c2b5b..c2962483999b04cad29d7e20ef06e508240cdb36 100755 (executable)
@@ -2,7 +2,7 @@
 
 import arvados
 import subprocess
-import subst
+import crunchutil.subst as subst
 import shutil
 import os
 import sys
index f16d62466a1e1853b56266ed58ef9935fd3f974b..ca80a82e3f26e7fc87a4373dbb7e7a529e37802e 100755 (executable)
@@ -1,8 +1,21 @@
 #!/usr/bin/env python
 
+# collection-merge
+#
+# Merge two or more collections together.  Can also be used to extract specific
+# files from a collection to produce a new collection.
+#
+# input:
+# An array of collections or collection/file paths in script_parameter["input"]
+#
+# output:
+# A manifest with the collections merged.  Duplicate file names will
+# have their contents concatenated in the order that they appear in the input
+# array.
+
 import arvados
 import md5
-import subst
+import crunchutil.subst as subst
 import subprocess
 import os
 import hashlib
@@ -30,28 +43,4 @@ for c in p["input"]:
                 if fn in s.files():
                     merged += s.files()[fn].as_manifest()
 
-crm = arvados.CollectionReader(merged)
-
-combined = crm.manifest_text(strip=True)
-
-m = hashlib.new('md5')
-m.update(combined)
-
-uuid = "{}+{}".format(m.hexdigest(), len(combined))
-
-collection = arvados.api().collections().create(
-    body={
-        'uuid': uuid,
-        'manifest_text': crm.manifest_text(),
-    }).execute()
-
-for s in src:
-    l = arvados.api().links().create(body={
-        "link": {
-            "tail_uuid": s,
-            "head_uuid": uuid,
-            "link_class": "provenance",
-            "name": "provided"
-        }}).execute()
-
-arvados.current_task().set_output(uuid)
+arvados.current_task().set_output(merged)
diff --git a/crunch_scripts/crunchutil/__init__.py b/crunch_scripts/crunchutil/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/crunch_scripts/crunchutil/robust_put.py b/crunch_scripts/crunchutil/robust_put.py
new file mode 100644 (file)
index 0000000..158ceb1
--- /dev/null
@@ -0,0 +1,48 @@
+import arvados
+import arvados.commands.put as put
+import os
+import logging
+
+def machine_progress(bytes_written, bytes_expected):
+    return "upload wrote {} total {}\n".format(
+        bytes_written, -1 if (bytes_expected is None) else bytes_expected)
+
+class Args(object):
+    def __init__(self, fn):
+        self.filename = None
+        self.paths = [fn]
+        self.max_manifest_depth = 0
+
+# Upload to Keep with error recovery.
+# Return a uuid or raise an exception if there are too many failures.
+def upload(source_dir):
+    source_dir = os.path.abspath(source_dir)
+    done = False
+    if 'TASK_WORK' in os.environ:
+        resume_cache = put.ResumeCache(os.path.join(arvados.current_task().tmpdir, "upload-output-checkpoint"))
+    else:
+        resume_cache = put.ResumeCache(put.ResumeCache.make_path(Args(source_dir)))
+    reporter = put.progress_writer(machine_progress)
+    bytes_expected = put.expected_bytes_for([source_dir])
+    backoff = 1
+    outuuid = None
+    while not done:
+        try:
+            out = put.ArvPutCollectionWriter.from_cache(resume_cache, reporter, bytes_expected)
+            out.do_queued_work()
+            out.write_directory_tree(source_dir, max_manifest_depth=0)
+            outuuid = out.finish()
+            done = True
+        except KeyboardInterrupt as e:
+            logging.critical("caught interrupt signal 2")
+            raise e
+        except Exception as e:
+            logging.exception("caught exception:")
+            backoff *= 2
+            if backoff > 256:
+                logging.critical("Too many upload failures, giving up")
+                raise e
+            else:
+                logging.warning("Sleeping for %s seconds before trying again" % backoff)
+                time.sleep(backoff)
+    return outuuid
similarity index 91%
rename from crunch_scripts/subst.py
rename to crunch_scripts/crunchutil/subst.py
index 2598e1cc944397324c9bb02b931a8bc7c20ed5a8..b3526883fcf193ff719c249a37343afe45e340e7 100644 (file)
@@ -44,7 +44,11 @@ def sub_basename(v):
     return os.path.splitext(os.path.basename(v))[0]
 
 def sub_glob(v):
-    return glob.glob(v)[0]
+    l = glob.glob(v)
+    if len(l) == 0:
+        raise Exception("$(glob): No match on '%s'" % v)
+    else:
+        return l[0]
 
 default_subs = {"file ": sub_file,
                 "dir ": sub_dir,
@@ -55,7 +59,7 @@ def do_substitution(p, c, subs=default_subs):
     while True:
         #print("c is", c)
         m = search(c)
-        if m != None:
+        if m is not None:
             v = do_substitution(p, c[m[0]+2 : m[1]])
             var = True
             for sub in subs:
diff --git a/crunch_scripts/crunchutil/vwd.py b/crunch_scripts/crunchutil/vwd.py
new file mode 100644 (file)
index 0000000..3d54c9c
--- /dev/null
@@ -0,0 +1,54 @@
+import arvados
+import os
+import robust_put
+import stat
+
+# Implements "Virtual Working Directory"
+# Provides a way of emulating a shared writable directory in Keep based
+# on a "check out, edit, check in, merge" model.
+# At the moment, this only permits adding new files, applications
+# cannot modify or delete existing files.
+
+# Create a symlink tree rooted at target_dir mirroring arv-mounted
+# source_collection.  target_dir must be empty, and will be created if it
+# doesn't exist.
+def checkout(source_collection, target_dir, keepmount=None):
+    # create symlinks
+    if keepmount is None:
+        keepmount = os.environ['TASK_KEEPMOUNT']
+
+    if not os.path.exists(target_dir):
+        os.makedirs(target_dir)
+
+    l = os.listdir(target_dir)
+    if len(l) > 0:
+        raise Exception("target_dir must be empty before checkout, contains %s" % l)
+
+    stem = os.path.join(keepmount, source_collection)
+    for root, dirs, files in os.walk(os.path.join(keepmount, source_collection), topdown=True):
+        rel = root[len(stem)+1:]
+        for d in dirs:
+            os.mkdir(os.path.join(target_dir, rel, d))
+        for f in files:
+            os.symlink(os.path.join(root, f), os.path.join(target_dir, rel, f))
+
+# Delete all symlinks and check in any remaining normal files.
+# If merge == True, merge the manifest with source_collection and return a
+# CollectionReader for the combined collection.
+def checkin(source_collection, target_dir, merge=True):
+    # delete symlinks, commit directory, merge manifests and return combined
+    # collection.
+    for root, dirs, files in os.walk(target_dir):
+        for f in files:
+            s = os.lstat(os.path.join(root, f))
+            if stat.S_ISLNK(s.st_mode):
+                os.unlink(os.path.join(root, f))
+
+    uuid = robust_put.upload(target_dir)
+    if merge:
+        cr1 = arvados.CollectionReader(source_collection)
+        cr2 = arvados.CollectionReader(uuid)
+        combined = arvados.CollectionReader(cr1.manifest_text() + cr2.manifest_text())
+        return combined
+    else:
+        return arvados.CollectionReader(uuid)
diff --git a/crunch_scripts/decompress-all.py b/crunch_scripts/decompress-all.py
new file mode 100755 (executable)
index 0000000..50d11f4
--- /dev/null
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+
+#
+# decompress-all.py
+#
+# Decompress all compressed files in the collection using the "dtrx" tool and
+# produce a new collection with the contents.  Uncompressed files
+# are passed through.
+#
+# input:
+# A collection at script_parameters["input"]
+#
+# output:
+# A manifest of the uncompressed contents of the input collection.
+
+import arvados
+import re
+import subprocess
+import os
+import sys
+import crunchutil.robust_put as robust_put
+
+arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True,
+                                          input_as_path=True)
+
+task = arvados.current_task()
+
+input_file = task['parameters']['input']
+
+infile_parts = re.match(r"(^[a-f0-9]{32}\+\d+)(\+\S+)*(/.*)?(/[^/]+)$", input_file)
+
+outdir = os.path.join(task.tmpdir, "output")
+os.makedirs(outdir)
+os.chdir(outdir)
+
+if infile_parts is None:
+    print >>sys.stderr, "Failed to parse input filename '%s' as a Keep file\n" % input_file
+    sys.exit(1)
+
+cr = arvados.CollectionReader(infile_parts.group(1))
+streamname = infile_parts.group(3)[1:]
+filename = infile_parts.group(4)[1:]
+
+if streamname is not None:
+    subprocess.call(["mkdir", "-p", streamname])
+    os.chdir(streamname)
+else:
+    streamname = '.'
+
+m = re.match(r'.*\.(gz|Z|bz2|tgz|tbz|zip|rar|7z|cab|deb|rpm|cpio|gem)$', arvados.get_task_param_mount('input'), re.IGNORECASE)
+
+if m is not None:
+    rc = subprocess.call(["dtrx", "-r", "-n", "-q", arvados.get_task_param_mount('input')])
+    if rc == 0:
+        task.set_output(robust_put.upload(outdir))
+    else:
+        sys.exit(rc)
+else:
+    streamreader = filter(lambda s: s.name() == streamname, cr.all_streams())[0]
+    filereader = streamreader.files()[filename]
+    task.set_output(streamname + filereader.as_manifest()[1:])
index e6ec889bbcbdbd731b7e6930379b1957062b4178..c624e3cadf7ec7fdaad169f04ffb8f34f8bcdd9f 100755 (executable)
@@ -1,15 +1,25 @@
 #!/usr/bin/env python
 
+import logging
+logging.basicConfig(level=logging.INFO, format="run-command: %(message)s")
+
 import arvados
 import re
 import os
 import subprocess
 import sys
 import shutil
-import subst
+import crunchutil.subst as subst
 import time
 import arvados.commands.put as put
 import signal
+import stat
+import copy
+import traceback
+import pprint
+import multiprocessing
+import crunchutil.robust_put as robust_put
+import crunchutil.vwd as vwd
 
 os.umask(0077)
 
@@ -23,34 +33,39 @@ os.mkdir("output")
 
 os.chdir("output")
 
+outdir = os.getcwd()
+
+taskp = None
+jobp = arvados.current_job()['script_parameters']
 if len(arvados.current_task()['parameters']) > 0:
-    p = arvados.current_task()['parameters']
-else:
-    p = arvados.current_job()['script_parameters']
+    taskp = arvados.current_task()['parameters']
 
 links = []
 
-def sub_link(v):
-    r = os.path.basename(v)
-    os.symlink(os.path.join(os.environ['TASK_KEEPMOUNT'], v) , r)
-    links.append(r)
-    return r
-
 def sub_tmpdir(v):
     return os.path.join(arvados.current_task().tmpdir, 'tmpdir')
 
+def sub_outdir(v):
+    return outdir
+
 def sub_cores(v):
-     return os.environ['CRUNCH_NODE_SLOTS']
+     return str(multiprocessing.cpu_count())
 
-subst.default_subs["link "] = sub_link
-subst.default_subs["tmpdir"] = sub_tmpdir
-subst.default_subs["node.cores"] = sub_cores
+def sub_jobid(v):
+     return os.environ['JOB_UUID']
 
-rcode = 1
+def sub_taskid(v):
+     return os.environ['TASK_UUID']
 
-def machine_progress(bytes_written, bytes_expected):
-    return "run-command: wrote {} total {}\n".format(
-        bytes_written, -1 if (bytes_expected is None) else bytes_expected)
+def sub_jobsrc(v):
+     return os.environ['CRUNCH_SRC']
+
+subst.default_subs["task.tmpdir"] = sub_tmpdir
+subst.default_subs["task.outdir"] = sub_outdir
+subst.default_subs["job.srcdir"] = sub_jobsrc
+subst.default_subs["node.cores"] = sub_cores
+subst.default_subs["job.uuid"] = sub_jobid
+subst.default_subs["task.uuid"] = sub_taskid
 
 class SigHandler(object):
     def __init__(self):
@@ -60,20 +75,106 @@ class SigHandler(object):
         sp.send_signal(signum)
         self.sig = signum
 
+def expand_item(p, c):
+    if isinstance(c, dict):
+        if "foreach" in c and "command" in c:
+            var = c["foreach"]
+            items = get_items(p, p[var])
+            r = []
+            for i in items:
+                params = copy.copy(p)
+                params[var] = i
+                r.extend(expand_list(params, c["command"]))
+            return r
+    elif isinstance(c, list):
+        return expand_list(p, c)
+    elif isinstance(c, str) or isinstance(c, unicode):
+        return [subst.do_substitution(p, c)]
+
+    return []
+
+def expand_list(p, l):
+    return [exp for arg in l for exp in expand_item(p, arg)]
+
+def get_items(p, value):
+    if isinstance(value, list):
+        return expand_list(p, value)
+
+    fn = subst.do_substitution(p, value)
+    mode = os.stat(fn).st_mode
+    prefix = fn[len(os.environ['TASK_KEEPMOUNT'])+1:]
+    if mode is not None:
+        if stat.S_ISDIR(mode):
+            items = ["$(dir %s/%s/)" % (prefix, l) for l in os.listdir(fn)]
+        elif stat.S_ISREG(mode):
+            with open(fn) as f:
+                items = [line for line in f]
+        return items
+    else:
+        return None
+
+stdoutname = None
+stdoutfile = None
+stdinname = None
+stdinfile = None
+rcode = 1
+
 try:
-    cmd = []
-    for c in p["command"]:
-        cmd.append(subst.do_substitution(p, c))
-
-    stdoutname = None
-    stdoutfile = None
-    if "stdout" in p:
-        stdoutname = subst.do_substitution(p, p["stdout"])
+    if "task.foreach" in jobp:
+        if arvados.current_task()['sequence'] == 0:
+            var = jobp["task.foreach"]
+            items = get_items(jobp, jobp[var])
+            logging.info("parallelizing on %s with items %s" % (var, items))
+            if items is not None:
+                for i in items:
+                    params = copy.copy(jobp)
+                    params[var] = i
+                    arvados.api().job_tasks().create(body={
+                        'job_uuid': arvados.current_job()['uuid'],
+                        'created_by_job_task_uuid': arvados.current_task()['uuid'],
+                        'sequence': 1,
+                        'parameters': params
+                        }
+                    ).execute()
+                if "task.vwd" in jobp:
+                    # Base vwd collection will be merged with output fragments from
+                    # the other tasks by crunch.
+                    arvados.current_task().set_output(subst.do_substitution(jobp, jobp["task.vwd"]))
+                else:
+                    arvados.current_task().set_output(None)
+                sys.exit(0)
+            else:
+                sys.exit(1)
+    else:
+        taskp = jobp
+
+    if "task.vwd" in taskp:
+        # Populate output directory with symlinks to files in collection
+        vwd.checkout(subst.do_substitution(taskp, taskp["task.vwd"]), outdir)
+
+    if "task.cwd" in taskp:
+        os.chdir(subst.do_substitution(taskp, taskp["task.cwd"]))
+
+    cmd = expand_list(taskp, taskp["command"])
+
+    if "task.stdin" in taskp:
+        stdinname = subst.do_substitution(taskp, taskp["task.stdin"])
+        stdinfile = open(stdinname, "rb")
+
+    if "task.stdout" in taskp:
+        stdoutname = subst.do_substitution(taskp, taskp["task.stdout"])
         stdoutfile = open(stdoutname, "wb")
 
-    print("run-command: {}{}".format(' '.join(cmd), (" > " + stdoutname) if stdoutname != None else ""))
+    logging.info("{}{}{}".format(' '.join(cmd), (" < " + stdinname) if stdinname is not None else "", (" > " + stdoutname) if stdoutname is not None else ""))
+
+except Exception as e:
+    logging.exception("caught exception")
+    logging.error("task parameters was:")
+    logging.error(pprint.pformat(taskp))
+    sys.exit(1)
 
-    sp = subprocess.Popen(cmd, shell=False, stdout=stdoutfile)
+try:
+    sp = subprocess.Popen(cmd, shell=False, stdin=stdinfile, stdout=stdoutfile)
     sig = SigHandler()
 
     # forward signals to the process.
@@ -84,14 +185,14 @@ try:
     # wait for process to complete.
     rcode = sp.wait()
 
-    if sig.sig != None:
-        print("run-command: terminating on signal %s" % sig.sig)
+    if sig.sig is not None:
+        logging.critical("terminating on signal %s" % sig.sig)
         sys.exit(2)
     else:
-        print("run-command: completed with exit code %i (%s)" % (rcode, "success" if rcode == 0 else "failed"))
+        logging.info("completed with exit code %i (%s)" % (rcode, "success" if rcode == 0 else "failed"))
 
 except Exception as e:
-    print("run-command: caught exception: {}".format(e))
+    logging.exception("caught exception")
 
 # restore default signal handlers.
 signal.signal(signal.SIGINT, signal.SIG_DFL)
@@ -101,34 +202,27 @@ signal.signal(signal.SIGQUIT, signal.SIG_DFL)
 for l in links:
     os.unlink(l)
 
-print("run-command: the follow output files will be saved to keep:")
-
-subprocess.call(["find", ".", "-type", "f", "-printf", "run-command: %12.12s %h/%f\\n"])
-
-print("run-command: start writing output to keep")
-
-done = False
-resume_cache = put.ResumeCache(os.path.join(arvados.current_task().tmpdir, "upload-output-checkpoint"))
-reporter = put.progress_writer(machine_progress)
-bytes_expected = put.expected_bytes_for(".")
-while not done:
-    try:
-        out = put.ArvPutCollectionWriter.from_cache(resume_cache, reporter, bytes_expected)
-        out.do_queued_work()
-        out.write_directory_tree(".", max_manifest_depth=0)
-        outuuid = out.finish()
-        api.job_tasks().update(uuid=arvados.current_task()['uuid'],
-                                             body={
-                                                 'output':outuuid,
-                                                 'success': (rcode == 0),
-                                                 'progress':1.0
-                                             }).execute()
-        done = True
-    except KeyboardInterrupt:
-        print("run-command: terminating on signal 2")
-        sys.exit(2)
-    except Exception as e:
-        print("run-command: caught exception: {}".format(e))
-        time.sleep(5)
+logging.info("the following output files will be saved to keep:")
+
+subprocess.call(["find", ".", "-type", "f", "-printf", "run-command: %12.12s %h/%f\\n"], stdout=sys.stderr)
+
+logging.info("start writing output to keep")
+
+if "task.vwd" in taskp:
+    if "task.foreach" in jobp:
+        # This is a subtask, so don't merge with the original collection, that will happen at the end
+        outcollection = vwd.checkin(subst.do_substitution(taskp, taskp["task.vwd"]), outdir, merge=False).manifest_text()
+    else:
+        # Just a single task, so do merge with the original collection
+        outcollection = vwd.checkin(subst.do_substitution(taskp, taskp["task.vwd"]), outdir, merge=True).manifest_text()
+else:
+    outcollection = robust_put.upload(outdir)
+
+api.job_tasks().update(uuid=arvados.current_task()['uuid'],
+                                     body={
+                                         'output': outcollection,
+                                         'success': (rcode == 0),
+                                         'progress':1.0
+                                     }).execute()
 
 sys.exit(rcode)
diff --git a/crunch_scripts/split-fastq.py b/crunch_scripts/split-fastq.py
new file mode 100755 (executable)
index 0000000..253dd22
--- /dev/null
@@ -0,0 +1,128 @@
+#!/usr/bin/python
+
+import arvados
+import re
+import hashlib
+import string
+
+api = arvados.api('v1')
+
+piece = 0
+manifest_text = ""
+
+# Look for paired reads
+
+inp = arvados.CollectionReader(arvados.getjobparam('reads'))
+
+manifest_list = []
+
+chunking = False #arvados.getjobparam('chunking')
+
+def nextline(reader, start):
+    n = -1
+    while True:
+        r = reader.readfrom(start, 128)
+        if r == '':
+            break
+        n = string.find(r, "\n")
+        if n > -1:
+            break
+        else:
+            start += 128
+    return n
+
+# Chunk a fastq into approximately 64 MiB chunks.  Requires that the input data
+# be decompressed ahead of time, such as using decompress-all.py.  Generates a
+# new manifest, but doesn't actually move any data around.  Handles paired
+# reads by ensuring that each chunk of a pair gets the same number of records.
+#
+# This works, but in practice is so slow that potential gains in alignment
+# performance are lost in the prep time, which is why it is currently disabled.
+#
+# A better algorithm would seek to a file position a bit less than the desired
+# chunk size and then scan ahead for the next record, making sure that record
+# was matched by the read pair.
+def splitfastq(p):
+    for i in xrange(0, len(p)):
+        p[i]["start"] = 0
+        p[i]["end"] = 0
+
+    count = 0
+    recordsize = [0, 0]
+
+    global piece
+    finish = False
+    while not finish:
+        for i in xrange(0, len(p)):
+            recordsize[i] = 0
+
+        # read next 4 lines
+        for i in xrange(0, len(p)):
+            for ln in xrange(0, 4):
+                r = nextline(p[i]["reader"], p[i]["end"]+recordsize[i])
+                if r == -1:
+                    finish = True
+                    break
+                recordsize[i] += (r+1)
+
+        splitnow = finish
+        for i in xrange(0, len(p)):
+            if ((p[i]["end"] - p[i]["start"]) + recordsize[i]) >= (64*1024*1024):
+                splitnow = True
+
+        if splitnow:
+            for i in xrange(0, len(p)):
+                global manifest_list
+                print >>sys.stderr, "Finish piece ./_%s/%s (%s %s)" % (piece, p[i]["reader"].name(), p[i]["start"], p[i]["end"])
+                manifest = []
+                manifest.extend(["./_" + str(piece)])
+                manifest.extend([d[arvados.LOCATOR] for d in p[i]["reader"]._stream._data_locators])
+                manifest.extend(["{}:{}:{}".format(seg[arvados.LOCATOR]+seg[arvados.OFFSET], seg[arvados.SEGMENTSIZE], p[i]["reader"].name().replace(' ', '\\040')) for seg in arvados.locators_and_ranges(p[i]["reader"].segments, p[i]["start"], p[i]["end"] - p[i]["start"])])
+                manifest_list.append(manifest)
+                p[i]["start"] = p[i]["end"]
+            piece += 1
+        else:
+            for i in xrange(0, len(p)):
+                p[i]["end"] += recordsize[i]
+            count += 1
+            if count % 10000 == 0:
+                print >>sys.stderr, "Record %s at %s" % (count, p[i]["end"])
+
+prog = re.compile(r'(.*?)(_[12])?\.fastq(\.gz)?$')
+
+# Look for fastq files
+for s in inp.all_streams():
+    for f in s.all_files():
+        name_pieces = prog.match(f.name())
+        if name_pieces is not None:
+            if s.name() != ".":
+                # The downstream tool (run-command) only iterates over the top
+                # level of directories so if there are fastq files in
+                # directories in the input, the choice is either to forget
+                # there are directories (which might lead to name conflicts) or
+                # just fail.
+                print >>sys.stderr, "fastq must be at the root of the collection"
+                sys.exit(1)
+
+            p = None
+            if name_pieces.group(2) is not None:
+                if name_pieces.group(2) == "_1":
+                    p = [{}, {}]
+                    p[0]["reader"] = s.files()[name_pieces.group(0)]
+                    p[1]["reader"] = s.files()[name_pieces.group(1) + "_2.fastq" + (name_pieces.group(3) if name_pieces.group(3) else '')]
+            else:
+                p = [{}]
+                p[0]["reader"] = s.files()[name_pieces.group(0)]
+
+            if p is not None:
+                if chunking:
+                    splitfastq(p)
+                else:
+                    for i in xrange(0, len(p)):
+                        m = p[i]["reader"].as_manifest()[1:]
+                        manifest_list.append(["./_" + str(piece), m[:-1]])
+                    piece += 1
+
+manifest_text = "\n".join(" ".join(m) for m in manifest_list)
+
+arvados.current_task().set_output(manifest_text)
index d8b0d88a0f8978b0cb3fb844b1018dda088862ff..3301207250710e9d706d4177c2f12caa3ec257f9 100644 (file)
@@ -37,7 +37,7 @@ navbar:
       - user/tutorials/intro-crunch.html.textile.liquid
       - user/tutorials/running-external-program.html.textile.liquid
       - user/tutorials/tutorial-firstscript.html.textile.liquid
-      - user/topics/tutorial-job-debug.html.textile.liquid
+      - user/tutorials/tutorial-submit-job.html.textile.liquid
       - user/topics/tutorial-parallel.html.textile.liquid
       - user/examples/crunch-examples.html.textile.liquid
     - Query the metadata database:
@@ -60,6 +60,8 @@ navbar:
       - sdk/ruby/index.html.textile.liquid
     - Java:
       - sdk/java/index.html.textile.liquid
+    - Go:
+      - sdk/go/index.html.textile.liquid
     - CLI:
       - sdk/cli/index.html.textile.liquid
   api:
index f4eabcfaa8e8227b04e2d8390960cb01de0237cd..8760fe88edce9c41ab21e8518edcd66064b371bf 100644 (file)
@@ -53,6 +53,9 @@ table(table table-bordered table-condensed).
 |limit|integer (default 100)|Maximum number of collections to return.|query||
 |order|string|Order in which to return matching collections.|query||
 |filters|array|Conditions for filtering collections.|query||
+|select|array|Data fields to return in the result list.|query|@["uuid", "manifest_text"]@|
+
+N.B.: Because adding access tokens to manifests can be computationally expensive, the @manifest_text@ field is not included in results by default.  If you need it, pass a @select@ parameter that includes @manifest_text@.
 
 h2. update
 
diff --git a/doc/sdk/go/index.html.textile.liquid b/doc/sdk/go/index.html.textile.liquid
new file mode 100644 (file)
index 0000000..0e26369
--- /dev/null
@@ -0,0 +1,65 @@
+---
+layout: default
+navsection: sdk
+navmenu: Go
+title: "Go SDK"
+
+...
+
+The Go ("Golang":http://golang.org) SDK provides a generic set of wrappers so you can make API calls easily.
+
+h3. Installation
+
+You don't need to install anything. Just import the client like this. The go tools will fetch the relevant code and dependencies for you.
+
+<notextile>
+<pre><code class="userinput">import (
+       "git.curoverse.com/arvados.git/sdk/go/keepclient"
+       "git.curoverse.com/arvados.git/sdk/go/arvadosclient"
+)
+</code></pre>
+</notextile>
+
+h3. Examples
+
+Import the module. (We import the log module here too, so we can use it in the subsequent examples.)
+
+<notextile>
+<pre><code class="userinput">import (
+       "git.curoverse.com/arvados.git/sdk/go/keepclient"
+       "git.curoverse.com/arvados.git/sdk/go/arvadosclient"
+       "log"
+)
+</code></pre>
+</notextile>
+
+Set up an API client user agent:
+
+<notextile>
+<pre><code class="userinput">  arv, err := arvadosclient.MakeArvadosClient()
+       if err != nil {
+               log.Fatalf("Error setting up arvados client %s", err.Error())
+       }
+</code></pre>
+</notextile>
+
+Get the User object for the current user:
+
+<notextile>
+<pre><code class="userinput">  type user struct {
+               Uuid         string `json:"uuid"`
+               FullName     string `json:"full_name"`
+       }
+
+       var u user
+       err := arv.Call("GET", "users", "", "current", nil, &u)
+
+       if err != nil {
+               return err
+       }
+
+       log.Printf("Logged in as %s (uuid %s)", u.FullName, u.Uuid)
+</code></pre>
+</notextile>
+
+A few more usage examples can be found in the services/keepproxy and sdk/go/keepclient directories in the arvados source tree.
index 1b1e18ab9447fbf964e414403003ae980e69926d..db5d6f13b0cbdc08c62f1f156ca70bafdd23e88b 100644 (file)
@@ -10,6 +10,7 @@ This section documents how to access the Arvados API and Keep using various prog
 * "Perl SDK":{{site.baseurl}}/sdk/perl/index.html
 * "Ruby SDK":{{site.baseurl}}/sdk/ruby/index.html
 * "Java SDK":{{site.baseurl}}/sdk/java/index.html
+* "Go SDK":{{site.baseurl}}/sdk/go/index.html
 * "Command line SDK":{{site.baseurl}}/sdk/cli/index.html ("arv")
 
 SDKs not yet implemented:
diff --git a/doc/user/topics/tutorial-job-debug.html.textile.liquid b/doc/user/topics/tutorial-job-debug.html.textile.liquid
deleted file mode 100644 (file)
index 63f152e..0000000
+++ /dev/null
@@ -1,163 +0,0 @@
----
-layout: default
-navsection: userguide
-title: "Debugging a Crunch script"
-...
-
-To test changes to a script by running a job, the change must be pushed to your hosted repository, and the job might have to wait in the queue before it runs. This cycle can be an inefficient way to develop and debug scripts. This tutorial demonstrates an alternative: using @arv-crunch-job@ to run your job in your local VM.  This avoids the job queue and allows you to execute the script directly from your git working tree without committing or pushing.
-
-{% include 'tutorial_expectations' %}
-
-This tutorial uses @$USER@ to denote your username.  Replace @$USER@ with your user name in all the following examples.
-
-h2. Create a new script
-
-Change to your Git working directory and create a new script in @crunch_scripts/@.
-
-<notextile>
-<pre><code>~$ <span class="userinput">cd $USER/crunch_scripts</span>
-~/$USER/crunch_scripts$ <span class="userinput">cat &gt;hello-world.py &lt;&lt;EOF
-#!/usr/bin/env python
-
-print "hello world"
-print "this script will fail, and that is expected!"
-EOF</span>
-~/$USER/crunch_scripts$ <span class="userinput">chmod +x hello-world.py</span>
-</code></pre>
-</notextile>
-
-h2. Using arv-crunch-job to run the job in your VM
-
-Instead of a Git commit hash, we provide the path to the directory in the "script_version" parameter.  The script specified in "script" is expected to be in the @crunch_scripts/@ subdirectory of the directory specified "script_version".  Although we are running the script locally, the script still requires access to the Arvados API server and Keep storage service. The job will be recorded in the Arvados job history, and visible in Workbench.
-
-<notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">cat &gt;~/the_job &lt;&lt;EOF
-{
- "repository":"",
- "script":"hello-world.py",
- "script_version":"$HOME/$USER",
- "script_parameters":{}
-}
-EOF</span>
-</code></pre>
-</notextile>
-
-Your shell should fill in values for @$HOME@ and @$USER@ so that the saved JSON points "script_version" at the directory with your checkout.  Now you can run that job:
-
-<notextile>
-<pre><code>~/$USER/crunch_scripts</span>$ <span class="userinput">arv-crunch-job --job "$(cat ~/the_job)"</span>
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  check slurm allocation
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  node localhost - 1 slots
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  start
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  script hello-world.py
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  script_version /home/$USER/$USER
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  script_parameters {}
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  runtime_constraints {"max_tasks_per_node":0}
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  start level 0
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  status: 0 done, 0 running, 1 todo
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 job_task qr1hi-ot0gb-4zdajby8cjmlguh
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 child 29834 started on localhost.1
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827  status: 0 done, 1 running, 0 todo
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 stderr hello world
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 stderr this script will fail, and that is expected!
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 child 29834 on localhost.1 exit 0 signal 0 success=
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 failure (#1, permanent) after 0 seconds
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 output
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827  Every node has failed -- giving up on this round
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827  wait for last 0 children to finish
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827  status: 0 done, 0 running, 0 todo
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827  Freeze not implemented
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827  collate
-2013-12-12_21:36:43 qr1hi-8i9sb-okzukfzkpbrnhst 29827  output d41d8cd98f00b204e9800998ecf8427e+0
-2013-12-12_21:36:44 qr1hi-8i9sb-okzukfzkpbrnhst 29827  meta key is c00bfbd58e6f58ce3bebdd47f745a70f+1857
-</code></pre>
-</notextile>
-
-These are the lines of interest:
-
-bc. 2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 stderr hello world
-2013-12-12_21:36:42 qr1hi-8i9sb-okzukfzkpbrnhst 29827 0 stderr this script will fail, and that is expected!
-
-The script's output is captured in the log, which is useful for print statement debugging. However, although this script returned a status code of 0 (success), the job failed.  Why?  For a job to complete successfully scripts must explicitly add their output to Keep, and then tell Arvados about it.  Here is a second try:
-
-<notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">cat &gt;hello-world-fixed.py &lt;&lt;EOF
-#!/usr/bin/env python
-
-import arvados
-
-# Create a new collection
-out = arvados.CollectionWriter()
-
-# Set the name of the file in the collection to write to
-out.set_current_file_name('hello.txt')
-
-# Actually output our text
-out.write('hello world')
-
-# Commit the collection to Keep
-out_collection = out.finish()
-
-# Tell Arvados which Keep object is our output
-arvados.current_task().set_output(out_collection)
-
-# Done!
-EOF</span>
-~/$USER/crunch_scripts$ <span class="userinput">chmod +x hello-world-fixed.py</span>
-~/$USER/crunch_scripts$ <span class="userinput">cat &gt;~/the_job &lt;&lt;EOF
-{
- "repository":"",
- "script":"hello-world-fixed.py",
- "script_version":"$HOME/$USER",
- "script_parameters":{}
-}
-EOF</span>
-~/$USER/crunch_scripts$ <span class="userinput">arv-crunch-job --job "$(cat ~/the_job)"</span>
-2013-12-12_21:56:59 qr1hi-8i9sb-79260ykfew5trzl 31578  check slurm allocation
-2013-12-12_21:56:59 qr1hi-8i9sb-79260ykfew5trzl 31578  node localhost - 1 slots
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  start
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  script hello-world-fixed.py
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  script_version /home/$USER/$USER
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  script_parameters {}
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  runtime_constraints {"max_tasks_per_node":0}
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  start level 0
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  status: 0 done, 0 running, 1 todo
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578 0 job_task qr1hi-ot0gb-u8g594ct0wt7f3f
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578 0 child 31585 started on localhost.1
-2013-12-12_21:57:00 qr1hi-8i9sb-79260ykfew5trzl 31578  status: 0 done, 1 running, 0 todo
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578 0 child 31585 on localhost.1 exit 0 signal 0 success=true
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578 0 success in 1 seconds
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578 0 output 576c44d762ba241b0a674aa43152b52a+53
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578  wait for last 0 children to finish
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578  status: 1 done, 0 running, 0 todo
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578  Freeze not implemented
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578  collate
-2013-12-12_21:57:02 qr1hi-8i9sb-79260ykfew5trzl 31578  output 576c44d762ba241b0a674aa43152b52a+53
-WARNING:root:API lookup failed for collection 576c44d762ba241b0a674aa43152b52a+53 (<class 'apiclient.errors.HttpError'>: <HttpError 404 when requesting https://qr1hi.arvadosapi.com/arvados/v1/collections/576c44d762ba241b0a674aa43152b52a%2B53?alt=json returned "Not Found">)
-2013-12-12_21:57:03 qr1hi-8i9sb-79260ykfew5trzl 31578  finish
-</code></pre>
-</notextile>
-
-(The WARNING issued near the end of the script may be safely ignored here; it is the Arvados SDK letting you know that it could not find a collection named @576c44d762ba241b0a674aa43152b52a+53@ and that it is going to try looking up a block by that name instead.)
-
-The job succeeded, with output in Keep object @576c44d762ba241b0a674aa43152b52a+53@.  Let's look at our output:
-
-<notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">arv keep get 576c44d762ba241b0a674aa43152b52a+53/hello.txt</span>
-hello world
-</code></pre>
-</notextile>
-
-h3. Location of temporary files
-
-Crunch job tasks are supplied with @TASK_WORK@ and @JOB_WORK@ environment variables, to be used as scratch space.  When running in local development mode using @arv-crunch-job@, Crunch sets these variables to point to directory called @crunch-job-{USERID}@ in @TMPDIR@ (or @/tmp@ if @TMPDIR@ is not set).
-
-* Set @TMPDIR@ to @/scratch@ to make Crunch use a directory like @/scratch/crunch-job-{USERID}/@ for temporary space.
-
-* Set @CRUNCH_TMP@ to @/scratch/foo@ to make Crunch use @/scratch/foo/@ for temporary space (omitting the default @crunch-job-{USERID}@ leaf name)
-
-h3. Testing job scripts without SDKs and Keep access
-
-Read and write data to @/tmp/@ instead of Keep. This only works with the Python SDK.
-
-notextile. <pre><code>~$ <span class="userinput">export KEEP_LOCAL_STORE=/tmp</span></code></pre>
index 0cbceda6192b36e2eacf189190a0865223d3e609..9be610358bb5f8133c6f7269c390799e04de8e5d 100644 (file)
@@ -1,10 +1,10 @@
 ---
 layout: default
 navsection: userguide
-title: "Parallel Crunch tasks"
+title: "Concurrent Crunch tasks"
 ...
 
-In the previous tutorials, we used @arvados.job_setup.one_task_per_input_file()@ to automatically parallelize our jobs by creating a separate task per file.  For some types of jobs, you may need to split the work up differently, for example creating tasks to process different segments of a single large file.  In this this tutorial will demonstrate how to create Crunch tasks directly.
+In the previous tutorials, we used @arvados.job_setup.one_task_per_input_file()@ to automatically create concurrent jobs by creating a separate task per file.  For some types of jobs, you may need to split the work up differently, for example creating tasks to process different segments of a single large file.  This tutorial will demonstrate how to create Crunch tasks directly.
 
 Start by entering the @crunch_scripts@ directory of your Git repository:
 
@@ -13,33 +13,33 @@ Start by entering the @crunch_scripts@ directory of your Git repository:
 </code></pre>
 </notextile>
 
-Next, using @nano@ or your favorite Unix text editor, create a new file called @parallel-hash.py@ in the @crunch_scripts@ directory.
+Next, using @nano@ or your favorite Unix text editor, create a new file called @concurrent-hash.py@ in the @crunch_scripts@ directory.
 
-notextile. <pre>~/$USER/crunch_scripts$ <code class="userinput">nano parallel-hash.py</code></pre>
+notextile. <pre>~/$USER/crunch_scripts$ <code class="userinput">nano concurrent-hash.py</code></pre>
 
 Add the following code to compute the MD5 hash of each file in a collection:
 
-<notextile> {% code 'parallel_hash_script_py' as python %} </notextile>
+<notextile> {% code 'concurrent_hash_script_py' as python %} </notextile>
 
 Make the file executable:
 
-notextile. <pre><code>~/$USER/crunch_scripts$ <span class="userinput">chmod +x parallel-hash.py</span></code></pre>
+notextile. <pre><code>~/$USER/crunch_scripts$ <span class="userinput">chmod +x concurrent-hash.py</span></code></pre>
 
 Add the file to the Git staging area, commit, and push:
 
 <notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">git add parallel-hash.py</span>
-~/$USER/crunch_scripts$ <span class="userinput">git commit -m"parallel hash"</span>
+<pre><code>~/$USER/crunch_scripts$ <span class="userinput">git add concurrent-hash.py</span>
+~/$USER/crunch_scripts$ <span class="userinput">git commit -m"concurrent hash"</span>
 ~/$USER/crunch_scripts$ <span class="userinput">git push origin master</span>
 </code></pre>
 </notextile>
 
-You should now be able to run your new script using Crunch, with "script" referring to our new "parallel-hash.py" script.  We will use a different input from our previous examples.  We will use @887cd41e9c613463eab2f0d885c6dd96+83@ which consists of three files, "alice.txt", "bob.txt" and "carol.txt" (the example collection used previously in "fetching data from Arvados using Keep":{{site.baseurl}}/user/tutorials/tutorial-keep.html#dir).
+You should now be able to run your new script using Crunch, with "script" referring to our new "concurrent-hash.py" script.  We will use a different input from our previous examples.  We will use @887cd41e9c613463eab2f0d885c6dd96+83@ which consists of three files, "alice.txt", "bob.txt" and "carol.txt" (the example collection used previously in "fetching data from Arvados using Keep":{{site.baseurl}}/user/tutorials/tutorial-keep.html#dir).
 
 <notextile>
 <pre><code>~/$USER/crunch_scripts$ <span class="userinput">cat &gt;~/the_job &lt;&lt;EOF
 {
- "script": "parallel-hash.py",
+ "script": "concurrent-hash.py",
  "repository": "$USER",
  "script_version": "master",
  "script_parameters":
@@ -65,7 +65,7 @@ EOF</span>
 
 (Your shell should automatically fill in @$USER@ with your login name.  The job JSON that gets saved should have @"repository"@ pointed at your personal Git repository.)
 
-Because the job ran in parallel, each instance of parallel-hash creates a separate @md5sum.txt@ as output.  Arvados automatically collates theses files into a single collection, which is the output of the job:
+Because the job's tasks ran concurrently, each instance of concurrent-hash creates a separate @md5sum.txt@ as output.  Arvados automatically collates these files into a single collection, which is the output of the job:
 
 <notextile>
 <pre><code>~/$USER/crunch_scripts$ <span class="userinput">arv keep ls e2ccd204bca37c77c0ba59fc470cd0f7+162</span>
index a95e30ddfd4b8e416bdbf23565b2df65dbe32de9..d1a0e2434c71dff0e318c8c0d8c301ce6ca96258 100644 (file)
@@ -137,13 +137,13 @@ h2. Find Personal Genome Project identifiers from Arvados UUIDs
 
 These PGP IDs let us find public profiles, for example:
 
-* "https://my.personalgenomes.org/profile/huE2E371":https://my.personalgenomes.org/profile/huE2E371
-* "https://my.personalgenomes.org/profile/huDF04CC":https://my.personalgenomes.org/profile/huDF04CC
+* "https://my.pgp-hms.org/profile/huE2E371":https://my.pgp-hms.org/profile/huE2E371
+* "https://my.pgp-hms.org/profile/huDF04CC":https://my.pgp-hms.org/profile/huDF04CC
 * ...
 
 h2. Find genomic data from specific humans
 
-Now we want to find collections in Keep that were provided by these humans.  We search the "links" resource for "provenance" links that point to subjects in list of humans with the non-melanoma skin cancer trait:
+Now we want to find collections in Keep that were provided by these humans.  We search the "links" resource for "provenance" links that point to entries in the list of humans with the non-melanoma skin cancer trait:
 
 <notextile>
 <pre><code>&gt;&gt;&gt; <span class="userinput">provenance_links = arvados.api().links().list(limit=1000, filters=[
@@ -170,26 +170,26 @@ collections = arvados.api('v1').collections().list(filters=[
 # print PGP public profile links with file locators
 for c in collections['items']:
   for f in c['files']:
-    print "https://my.personalgenomes.org/profile/%s %s %s%s" % (pgpid[c['uuid']], c['uuid'], ('' if f[0] == '.' else f[0]+'/'), f[1])
+    print "https://my.pgp-hms.org/profile/%s %s %s%s" % (pgpid[c['uuid']], c['uuid'], ('' if f[0] == '.' else f[0]+'/'), f[1])
 </span>
-https://my.personalgenomes.org/profile/hu43860C a58dca7609fa84c8c38a7e926a97b2fc var-GS00253-DNA_A01_200_37-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/huB1FD55 ea30eb9e46eedf7f05ed6e348c2baf5d var-GS000010320-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/huDF04CC 4ab0df8f22f595d1747a22c476c05873 var-GS000010427-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu7A2F1D 756d0ada29b376140f64e7abfe6aa0e7 var-GS000014566-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu553620 7ed4e425bb1c7cc18387cbd9388181df var-GS000015272-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/huD09534 542112e210daff30dd3cfea4801a9f2f var-GS000016374-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu599905 33a9f3842b01ea3fdf27cc582f5ea2af var-GS000016015-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu43860C a58dca7609fa84c8c38a7e926a97b2fc+302 var-GS00253-DNA_A01_200_37-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/huB1FD55 ea30eb9e46eedf7f05ed6e348c2baf5d+291 var-GS000010320-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/huDF04CC 4ab0df8f22f595d1747a22c476c05873+242 var-GS000010427-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu7A2F1D 756d0ada29b376140f64e7abfe6aa0e7+242 var-GS000014566-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu553620 7ed4e425bb1c7cc18387cbd9388181df+242 var-GS000015272-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/huD09534 542112e210daff30dd3cfea4801a9f2f+242 var-GS000016374-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu599905 33a9f3842b01ea3fdf27cc582f5ea2af+242 var-GS000016015-ASM.tsv.bz2
-https://my.personalgenomes.org/profile/hu599905 d6e2e57cd60ba5979006d0b03e45e726+81 Witch_results.zip
-https://my.personalgenomes.org/profile/hu553620 ea4f2d325592a1272f989d141a917fdd+85 Devenwood_results.zip
-https://my.personalgenomes.org/profile/hu7A2F1D 4580f6620bb15b25b18373766e14e4a7+85 Innkeeper_results.zip
-https://my.personalgenomes.org/profile/huD09534 fee37be9440b912eb90f5e779f272416+82 Hallet_results.zip
+https://my.pgp-hms.org/profile/hu43860C a58dca7609fa84c8c38a7e926a97b2fc var-GS00253-DNA_A01_200_37-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/huB1FD55 ea30eb9e46eedf7f05ed6e348c2baf5d var-GS000010320-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/huDF04CC 4ab0df8f22f595d1747a22c476c05873 var-GS000010427-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu7A2F1D 756d0ada29b376140f64e7abfe6aa0e7 var-GS000014566-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu553620 7ed4e425bb1c7cc18387cbd9388181df var-GS000015272-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/huD09534 542112e210daff30dd3cfea4801a9f2f var-GS000016374-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu599905 33a9f3842b01ea3fdf27cc582f5ea2af var-GS000016015-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu43860C a58dca7609fa84c8c38a7e926a97b2fc+302 var-GS00253-DNA_A01_200_37-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/huB1FD55 ea30eb9e46eedf7f05ed6e348c2baf5d+291 var-GS000010320-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/huDF04CC 4ab0df8f22f595d1747a22c476c05873+242 var-GS000010427-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu7A2F1D 756d0ada29b376140f64e7abfe6aa0e7+242 var-GS000014566-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu553620 7ed4e425bb1c7cc18387cbd9388181df+242 var-GS000015272-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/huD09534 542112e210daff30dd3cfea4801a9f2f+242 var-GS000016374-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu599905 33a9f3842b01ea3fdf27cc582f5ea2af+242 var-GS000016015-ASM.tsv.bz2
+https://my.pgp-hms.org/profile/hu599905 d6e2e57cd60ba5979006d0b03e45e726+81 Witch_results.zip
+https://my.pgp-hms.org/profile/hu553620 ea4f2d325592a1272f989d141a917fdd+85 Devenwood_results.zip
+https://my.pgp-hms.org/profile/hu7A2F1D 4580f6620bb15b25b18373766e14e4a7+85 Innkeeper_results.zip
+https://my.pgp-hms.org/profile/huD09534 fee37be9440b912eb90f5e779f272416+82 Hallet_results.zip
 </code></pre>
 </notextile>
 
index 9c3242fd2a9351c0d5f3f6cb2cfe7b86c39f1910..b045d624b873195b955c69fdf9e812db35ba68fb 100644 (file)
@@ -5,55 +5,22 @@ navmenu: Tutorials
 title: "Writing a Crunch script"
 ...
 
-This tutorial demonstrates how to create a new Arvados pipeline using the Arvados Python SDK.  The Arvados SDK supports access to advanced features not available using the @run-command@ wrapper, such as scheduling parallel tasks across nodes.
+This tutorial demonstrates how to write a script using the Arvados Python SDK.  The Arvados SDK supports access to advanced features not available using the @run-command@ wrapper, such as scheduling concurrent tasks across nodes.
 
 {% include 'tutorial_expectations' %}
 
 This tutorial uses @$USER@ to denote your username.  Replace @$USER@ with your user name in all the following examples.
 
-h2. Setting up Git
-
-All Crunch scripts are managed through the Git revision control system.  Before you start using Git, you should do some basic configuration (you only need to do this the first time):
+Start by creating a directory called @tutorial@.  Next, create a subdirectory called @crunch_scripts@ and change to that directory:
 
 <notextile>
-<pre><code>~$ <span class="userinput">git config --global user.name "Your Name"</span>
-~$ <span class="userinput">git config --global user.email $USER@example.com</span></code></pre>
-</notextile>
-
-On the Arvados Workbench, navigate to "Code repositories":https://{{site.arvados_workbench_host}}/repositories.  You should see a repository with your user name listed in the *name* column.  Next to *name* is the column *push_url*.  Copy the *push_url* value associated with your repository.  This should look like <notextile><code>git@git.{{ site.arvados_api_host }}:$USER.git</code></notextile>.
-
-Next, on the Arvados virtual machine, clone your Git repository:
-
-<notextile>
-<pre><code>~$ <span class="userinput">cd $HOME</span> # (or wherever you want to install)
-~$ <span class="userinput">git clone git@git.{{ site.arvados_api_host }}:$USER.git</span>
-Cloning into '$USER'...</code></pre>
-</notextile>
-
-This will create a Git repository in the directory called @$USER@ in your home directory. Say yes when prompted to continue with connection.
-Ignore any warning that you are cloning an empty repository.
-
-{% include 'notebox_begin' %}
-For more information about using Git, try
-
-notextile. <pre><code>$ <span class="userinput">man gittutorial</span></code></pre>
-
-or *"search Google for Git tutorials":http://google.com/#q=git+tutorial*.
-{% include 'notebox_end' %}
-
-h2. Creating a Crunch script
-
-Start by entering the @$USER@ directory created by @git clone@.  Next create a subdirectory called @crunch_scripts@ and change to that directory:
-
-<notextile>
-<pre><code>~$ <span class="userinput">cd $USER</span>
-~/$USER$ <span class="userinput">mkdir crunch_scripts</span>
-~/$USER$ <span class="userinput">cd crunch_scripts</span></code></pre>
+<pre><code>~$ <span class="userinput">mkdir -p tutorial/crunch_scripts</span>
+~$ <span class="userinput">cd tutorial/crunch_scripts</span></code></pre>
 </notextile>
 
 Next, using @nano@ or your favorite Unix text editor, create a new file called @hash.py@ in the @crunch_scripts@ directory.
 
-notextile. <pre>~/$USER/crunch_scripts$ <code class="userinput">nano hash.py</code></pre>
+notextile. <pre>~/tutorial/crunch_scripts$ <code class="userinput">nano hash.py</code></pre>
 
 Add the following code to compute the MD5 hash of each file in a collection:
 
@@ -61,82 +28,74 @@ Add the following code to compute the MD5 hash of each file in a collection:
 
 Make the file executable:
 
-notextile. <pre><code>~/$USER/crunch_scripts$ <span class="userinput">chmod +x hash.py</span></code></pre>
-
-{% include 'notebox_begin' %}
-The steps below describe how to execute the script after committing changes to Git. To run a single script locally for testing (bypassing the job queue) please see "debugging a crunch script":{{site.baseurl}}/user/topics/tutorial-job-debug.html.
-
-{% include 'notebox_end' %}
+notextile. <pre><code>~/tutorial/crunch_scripts$ <span class="userinput">chmod +x hash.py</span></code></pre>
 
-Next, add the file to the staging area.  This tells @git@ that the file should be included on the next commit.
-
-notextile. <pre><code>~/$USER/crunch_scripts$ <span class="userinput">git add hash.py</span></code></pre>
-
-Next, commit your changes.  All staged changes are recorded into the local git repository:
-
-<notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">git commit -m"my first script"</span>
-[master (root-commit) 27fd88b] my first script
- 1 file changed, 45 insertions(+)
- create mode 100755 crunch_scripts/hash.py</code></pre>
-</notextile>
-
-Finally, upload your changes to the Arvados server:
-
-<notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">git push origin master</span>
-Counting objects: 4, done.
-Compressing objects: 100% (2/2), done.
-Writing objects: 100% (4/4), 682 bytes, done.
-Total 4 (delta 0), reused 0 (delta 0)
-To git@git.qr1hi.arvadosapi.com:$USER.git
- * [new branch]      master -> master</code></pre>
-</notextile>
-
-h2. Create a pipeline template
-
-Next, create a file that contains the pipeline definition:
+Next, create a job submission record.  This describes a specific invocation of your script:
 
 <notextile>
-<pre><code>~/$USER/crunch_scripts$ <span class="userinput">cd ~</span>
-~$ <span class="userinput">cat &gt;the_pipeline &lt;&lt;EOF
+<pre><code>~/tutorial/crunch_scripts$ <span class="userinput">cat &gt;~/the_job &lt;&lt;EOF
 {
-  "name":"My md5 pipeline",
-  "components":{
-    "do_hash":{
-      "script":"hash.py",
-      "script_parameters":{
-        "input":{
-          "required": true,
-          "dataclass": "Collection"
-        }
-      },
-      "repository":"$USER",
-      "script_version":"master",
-      "output_is_persistent":true,
-      "runtime_constraints":{
-        "docker_image":"arvados/jobs-java-bwa-samtools"
-      }
-    }
-  }
+ "repository":"",
+ "script":"hash.py",
+ "script_version":"$HOME/tutorial",
+ "script_parameters":{
+   "input":"c1bad4b39ca5a924e481008009d94e32+210"
+ }
 }
-EOF
-</span></code></pre>
+EOF</span>
+</code></pre>
 </notextile>
 
-* @"repository"@ is the name of a git repository to search for the script version.  You can access a list of available git repositories on the Arvados Workbench under "Code repositories":https://{{site.arvados_workbench_host}}/repositories.
-* @"script_version"@ specifies the version of the script that you wish to run.  This can be in the form of an explicit Git revision hash, a tag, or a branch (in which case it will use the HEAD of the specified branch).  Arvados logs the script version that was used in the run, enabling you to go back and re-run any past job with the guarantee that the exact same code will be used as was used in the previous run.
-* @"script"@ specifies the filename of the script to run.  Crunch expects to find this in the @crunch_scripts/@ subdirectory of the Git repository.
-
-Now, use @arv pipeline_template create@ to register your pipeline template in Arvados:
+You can now run your script on your local workstation or VM using @arv-crunch-job@:
 
 <notextile>
-<pre><code>~$ <span class="userinput">arv pipeline_template create --pipeline-template "$(cat the_pipeline)"</span>
+<pre><code>~/tutorial/crunch_scripts$ <span class="userinput">arv-crunch-job --job "$(cat ~/the_job)"</span>
+2014-08-06_15:16:22 qr1hi-8i9sb-qyrat80ef927lam 14473  check slurm allocation
+2014-08-06_15:16:22 qr1hi-8i9sb-qyrat80ef927lam 14473  node localhost - 1 slots
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  start
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  script hash.py
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  script_version /home/peter/peter
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  script_parameters {"input":"c1bad4b39ca5a924e481008009d94e32+210"}
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  runtime_constraints {"max_tasks_per_node":0}
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  start level 0
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  status: 0 done, 0 running, 1 todo
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473 0 job_task qr1hi-ot0gb-lptn85mwkrn9pqo
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473 0 child 14478 started on localhost.1
+2014-08-06_15:16:23 qr1hi-8i9sb-qyrat80ef927lam 14473  status: 0 done, 1 running, 0 todo
+2014-08-06_15:16:24 qr1hi-8i9sb-qyrat80ef927lam 14473 0 stderr crunchstat: Running [stdbuf --output=0 --error=0 /home/$USER/tutorial/crunch_scripts/hash.py]
+2014-08-06_15:16:24 qr1hi-8i9sb-qyrat80ef927lam 14473 0 child 14478 on localhost.1 exit 0 signal 0 success=true
+2014-08-06_15:16:24 qr1hi-8i9sb-qyrat80ef927lam 14473 0 success in 1 seconds
+2014-08-06_15:16:24 qr1hi-8i9sb-qyrat80ef927lam 14473 0 output
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473  wait for last 0 children to finish
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473  status: 1 done, 0 running, 1 todo
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473  start level 1
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473  status: 1 done, 0 running, 1 todo
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473 1 job_task qr1hi-ot0gb-e3obm0lv6k6p56a
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473 1 child 14504 started on localhost.1
+2014-08-06_15:16:25 qr1hi-8i9sb-qyrat80ef927lam 14473  status: 1 done, 1 running, 0 todo
+2014-08-06_15:16:26 qr1hi-8i9sb-qyrat80ef927lam 14473 1 stderr crunchstat: Running [stdbuf --output=0 --error=0 /home/$USER/tutorial/crunch_scripts/hash.py]
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473 1 child 14504 on localhost.1 exit 0 signal 0 success=true
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473 1 success in 10 seconds
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473 1 output 50cafdb29cc21dd6eaec85ba9e0c6134+56+Aef0f991b80fa0b75f802e58e70b207aa184d24ff@53f4bbd3
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473  wait for last 0 children to finish
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473  status: 2 done, 0 running, 0 todo
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473  Freeze not implemented
+2014-08-06_15:16:35 qr1hi-8i9sb-qyrat80ef927lam 14473  collate
+2014-08-06_15:16:36 qr1hi-8i9sb-qyrat80ef927lam 14473  output d6338df28d6b8e5d14929833b417e20e+107+Adf1ce81222b6992ce5d33d8bfb28a6b5a1497898@53f4bbd4
+2014-08-06_15:16:37 qr1hi-8i9sb-qyrat80ef927lam 14473  finish
+2014-08-06_15:16:38 qr1hi-8i9sb-qyrat80ef927lam 14473  log manifest is 7fe8cf1d45d438a3ca3ac4a184b7aff4+83
 </code></pre>
 </notextile>
 
-h2. Running your pipeline
+Although the job runs locally, the output of the job has been saved to Keep, the Arvados file store.  The "output" line (third from the bottom) provides the "Keep locator":{{site.baseurl}}/user/tutorials/tutorial-keep-get.html to which the script's output has been saved.  Copy the output identifier and use @arv-ls@ to list the contents of your output collection, and @arv-get@ to download it to the current directory:
 
-Your new pipeline template should appear at the top of the Workbench "pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_templates page.  You can run your pipeline "using Workbench":tutorial-pipeline-workbench.html or the "command line.":{{site.baseurl}}/user/topics/running-pipeline-command-line.html
+<notextile>
+<pre><code>~/tutorial/crunch_scripts$ <span class="userinput">arv-ls d6338df28d6b8e5d14929833b417e20e+107+Adf1ce81222b6992ce5d33d8bfb28a6b5a1497898@53f4bbd4</span>
+./md5sum.txt
+~/tutorial/crunch_scripts$ <span class="userinput">arv-get d6338df28d6b8e5d14929833b417e20e+107+Adf1ce81222b6992ce5d33d8bfb28a6b5a1497898@53f4bbd4/ .</span>
+~/tutorial/crunch_scripts$ <span class="userinput">cat md5sum.txt</span>
+44b8ae3fde7a8a88d2f7ebd237625b4f c1bad4b39ca5a924e481008009d94e32+210/./var-GS000016015-ASM.tsv.bz2
+</code></pre>
+</notextile>
 
-For more information and examples for writing pipelines, see the "pipeline template reference":{{site.baseurl}}/api/schema/PipelineTemplate.html
+Running locally is convenient for development and debugging, as it permits a fast iterative development cycle.  Your job run is also recorded by Arvados, and will show up in the "Recent jobs and pipelines" panel on the "Workbench dashboard":https://{{site.arvados_workbench_host}}.  This provides limited provenance, by recording the input parameters, the execution log, and the output.  However, running locally does not allow you to scale out to multiple nodes, and does not store the complete system snapshot required to achieve reproducibility; for that, you need to "submit a job to the Arvados cluster":{{site.baseurl}}/user/tutorials/tutorial-submit-job.html
index 0c1b04758a84705465061648341ea0b8c6c70c06..59ebe84ddbbffd21f219655de601a8b51ef4b8de 100644 (file)
@@ -4,7 +4,7 @@ navsection: userguide
 title: "Uploading data"
 ...
 
-This tutorial describes how to to upload new Arvados data collections using the command line tool @arv-put@.  This example uses a freely available TSV file containing variant annotations from "Personal Genome Project (PGP)":http://www.personalgenomes.org subject "hu599905.":https://my.personalgenomes.org/profile/hu599905
+This tutorial describes how to upload new Arvados data collections using the command line tool @arv-put@.  This example uses a freely available TSV file containing variant annotations from "Personal Genome Project (PGP)":http://www.pgp-hms.org participant "hu599905.":https://my.pgp-hms.org/profile/hu599905
 
 notextile. <div class="spaced-out">
 
@@ -12,7 +12,7 @@ notextile. <div class="spaced-out">
 # On the system from which you will upload data, configure the environment with the Arvados instance host name and authentication token as described in "Getting an API token.":{{site.baseurl}}/user/reference/api-tokens.html  (If you are logged into an Arvados VM, you can skip this step.)
 # Download the following example file.  (If you are uploading your own data, you can skip this step.)
 <notextile>
-<pre><code>~$ <span class="userinput">curl -o var-GS000016015-ASM.tsv.bz2 'https://warehouse.personalgenomes.org/warehouse/f815ec01d5d2f11cb12874ab2ed50daa+234+K@ant/var-GS000016015-ASM.tsv.bz2'</span>
+<pre><code>~$ <span class="userinput">curl -o var-GS000016015-ASM.tsv.bz2 'https://warehouse.pgp-hms.org/warehouse/f815ec01d5d2f11cb12874ab2ed50daa+234+K@ant/var-GS000016015-ASM.tsv.bz2'</span>
   % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                  Dload  Upload   Total   Spent    Left  Speed
 100  216M  100  216M    0     0  10.0M      0  0:00:21  0:00:21 --:--:-- 9361k
diff --git a/doc/user/tutorials/tutorial-submit-job.html.textile.liquid b/doc/user/tutorials/tutorial-submit-job.html.textile.liquid
new file mode 100644 (file)
index 0000000..8be72a2
--- /dev/null
@@ -0,0 +1,137 @@
+---
+layout: default
+navsection: userguide
+navmenu: Tutorials
+title: "Running on an Arvados cluster"
+...
+
+This tutorial demonstrates how to create a pipeline to run your crunch script on an Arvados cluster.  Cluster jobs can scale out to multiple nodes, and use @git@ and @docker@ to store the complete system snapshot required to achieve reproducibility.
+
+{% include 'tutorial_expectations' %}
+
+This tutorial uses @$USER@ to denote your username.  Replace @$USER@ with your user name in all the following examples.
+
+h2. Setting up Git
+
+All Crunch scripts are managed through the Git revision control system.  Before you start using Git, you should do some basic configuration (you only need to do this the first time):
+
+<notextile>
+<pre><code>~$ <span class="userinput">git config --global user.name "Your Name"</span>
+~$ <span class="userinput">git config --global user.email $USER@example.com</span></code></pre>
+</notextile>
+
+On the Arvados Workbench, navigate to "Code repositories":https://{{site.arvados_workbench_host}}/repositories.  You should see a repository with your user name listed in the *name* column.  Next to *name* is the column *push_url*.  Copy the *push_url* value associated with your repository.  This should look like <notextile><code>git@git.{{ site.arvados_api_host }}:$USER.git</code></notextile>.
+
+Next, on the Arvados virtual machine, clone your Git repository:
+
+<notextile>
+<pre><code>~$ <span class="userinput">cd $HOME</span> # (or wherever you want to install)
+~$ <span class="userinput">git clone git@git.{{ site.arvados_api_host }}:$USER.git</span>
+Cloning into '$USER'...</code></pre>
+</notextile>
+
+This will create a Git repository in the directory called @$USER@ in your home directory. Say yes when prompted to continue with connection.
+Ignore any warning that you are cloning an empty repository.
+
+{% include 'notebox_begin' %}
+For more information about using Git, try
+
+notextile. <pre><code>$ <span class="userinput">man gittutorial</span></code></pre>
+
+or *"search Google for Git tutorials":http://google.com/#q=git+tutorial*.
+{% include 'notebox_end' %}
+
+h2. Creating a Crunch script
+
+Start by entering the @$USER@ directory created by @git clone@.  Next create a subdirectory called @crunch_scripts@ and change to that directory:
+
+<notextile>
+<pre><code>~$ <span class="userinput">cd $USER</span>
+~/$USER$ <span class="userinput">mkdir crunch_scripts</span>
+~/$USER$ <span class="userinput">cd crunch_scripts</span></code></pre>
+</notextile>
+
+Next, using @nano@ or your favorite Unix text editor, create a new file called @hash.py@ in the @crunch_scripts@ directory.
+
+notextile. <pre>~/$USER/crunch_scripts$ <code class="userinput">nano hash.py</code></pre>
+
+Add the following code to compute the MD5 hash of each file in a collection (if you already completed "Writing a Crunch script":tutorial-firstscript.html you can just copy the @hash.py@ file you created previously.)
+
+<notextile> {% code 'tutorial_hash_script_py' as python %} </notextile>
+
+Make the file executable:
+
+notextile. <pre><code>~/$USER/crunch_scripts$ <span class="userinput">chmod +x hash.py</span></code></pre>
+
+Next, add the file to the staging area.  This tells @git@ that the file should be included on the next commit.
+
+notextile. <pre><code>~/$USER/crunch_scripts$ <span class="userinput">git add hash.py</span></code></pre>
+
+Next, commit your changes.  All staged changes are recorded into the local git repository:
+
+<notextile>
+<pre><code>~/$USER/crunch_scripts$ <span class="userinput">git commit -m"my first script"</span>
+[master (root-commit) 27fd88b] my first script
+ 1 file changed, 45 insertions(+)
+ create mode 100755 crunch_scripts/hash.py</code></pre>
+</notextile>
+
+Finally, upload your changes to the Arvados server:
+
+<notextile>
+<pre><code>~/$USER/crunch_scripts$ <span class="userinput">git push origin master</span>
+Counting objects: 4, done.
+Compressing objects: 100% (2/2), done.
+Writing objects: 100% (4/4), 682 bytes, done.
+Total 4 (delta 0), reused 0 (delta 0)
+To git@git.qr1hi.arvadosapi.com:$USER.git
+ * [new branch]      master -> master</code></pre>
+</notextile>
+
+h2. Create a pipeline template
+
+Next, create a file that contains the pipeline definition:
+
+<notextile>
+<pre><code>~/$USER/crunch_scripts$ <span class="userinput">cd ~</span>
+~$ <span class="userinput">cat &gt;the_pipeline &lt;&lt;EOF
+{
+  "name":"My md5 pipeline",
+  "components":{
+    "do_hash":{
+      "script":"hash.py",
+      "script_parameters":{
+        "input":{
+          "required": true,
+          "dataclass": "Collection"
+        }
+      },
+      "repository":"$USER",
+      "script_version":"master",
+      "output_is_persistent":true,
+      "runtime_constraints":{
+        "docker_image":"arvados/jobs-java-bwa-samtools"
+      }
+    }
+  }
+}
+EOF
+</span></code></pre>
+</notextile>
+
+* @"repository"@ is the name of a git repository to search for the script version.  You can access a list of available git repositories on the Arvados Workbench under "Code repositories":https://{{site.arvados_workbench_host}}/repositories.
+* @"script_version"@ specifies the version of the script that you wish to run.  This can be in the form of an explicit Git revision hash, a tag, or a branch (in which case it will use the HEAD of the specified branch).  Arvados logs the script version that was used in the run, enabling you to go back and re-run any past job with the guarantee that the exact same code will be used as was used in the previous run.
+* @"script"@ specifies the filename of the script to run.  Crunch expects to find this in the @crunch_scripts/@ subdirectory of the Git repository.
+
+Now, use @arv pipeline_template create@ to register your pipeline template in Arvados:
+
+<notextile>
+<pre><code>~$ <span class="userinput">arv pipeline_template create --pipeline-template "$(cat the_pipeline)"</span>
+</code></pre>
+</notextile>
+
+h2. Running your pipeline
+
+Your new pipeline template should appear at the top of the Workbench "pipeline&nbsp;templates":https://{{ site.arvados_workbench_host }}/pipeline_templates page.  You can run your pipeline "using Workbench":tutorial-pipeline-workbench.html or the "command line.":{{site.baseurl}}/user/topics/running-pipeline-command-line.html
+
+For more information and examples for writing pipelines, see the "pipeline template reference":{{site.baseurl}}/api/schema/PipelineTemplate.html
index eb611eadeb12d2388f771bfc1a0b1cf5e84f1421..f3dd90c6c741db3a1a3dd0ec0e98bfbce0323283 100644 (file)
@@ -7,7 +7,6 @@ endif
 all: skydns-image skydock-image api-image compute-image doc-image workbench-image keep-image sso-image shell-image
 
 IMAGE_FILES := $(shell ls *-image 2>/dev/null |grep -v -E 'debian-arvados-image|skydns-image|skydock-image')
-GENERATED_FILES := $(shell ls */generated/* 2>/dev/null)
 GENERATED_DIRS := $(shell ls */generated 2>/dev/null)
 
 # `make clean' removes the files generated in the build directory
@@ -15,8 +14,8 @@ GENERATED_DIRS := $(shell ls */generated 2>/dev/null)
 clean:
        @echo "make clean"
        -@rm -rf build
-       +@[ "$(IMAGE_FILES)$(GENERATED_FILES)" = "" ] || rm $(IMAGE_FILES) $(GENERATED_FILES) 2>/dev/null
-       +@[ "$(GENERATED_DIRS)" = "" ] || rmdir */generated 2>/dev/null
+       +@[ "$(IMAGE_FILES)" = "" ] || rm -f $(IMAGE_FILES) 2>/dev/null
+       +@[ "$(GENERATED_DIRS)" = "" ] || rm -rf */generated 2>/dev/null
 
 DEBIAN_IMAGE := $(shell $(DOCKER) images -q arvados/debian |head -n1)
 
@@ -104,6 +103,13 @@ WORKBENCH_GENERATED    = workbench/generated/*
 SSO_GENERATED_IN       = sso/*.in
 SSO_GENERATED          = sso/generated/*
 
+KEEP_DEPS += keep/generated/bin/keepproxy
+KEEP_DEPS += keep/generated/bin/keepstore
+keep/generated/bin/%: $(wildcard build/services/%/*.go)
+       mkdir -p keep/generated/src/git.curoverse.com
+       ln -sfn ../../../../.. keep/generated/src/git.curoverse.com/arvados.git
+       GOPATH=$(shell pwd)/keep/generated go get $(@:keep/generated/bin/%=git.curoverse.com/arvados.git/services/%)
+
 $(BUILD):
        mkdir -p build
        rsync -rlp --exclude=docker/ --exclude='**/log/*' --exclude='**/tmp/*' \
@@ -154,21 +160,17 @@ api-image: passenger-image $(BUILD) $(API_DEPS)
        @echo "Building api-image"
        mkdir -p api/generated
        tar -czf api/generated/api.tar.gz -C build/services api
-       chmod 755 api/generated/setup.sh
-       chmod 755 api/generated/setup-gitolite.sh
        $(DOCKER_BUILD) -t arvados/api api
        date >api-image
 
 shell-image: base-image $(BUILD) $(SHELL_DEPS)
        @echo "Building shell-image"
        mkdir -p shell/generated
-       chmod 755 shell/generated/setup.sh
        $(DOCKER_BUILD) -t arvados/shell shell
        date >shell-image
 
 compute-image: slurm-image $(BUILD) $(COMPUTE_DEPS)
        @echo "Building compute-image"
-       chmod 755 compute/generated/setup.sh
        $(DOCKER_BUILD) -t arvados/compute compute
        date >compute-image
 
index 77c721c61f9ea1cca45302e58bad9c33f9e465de..d8bf256124eeeb387f6af39128b47b4ab35f9aac 100755 (executable)
@@ -40,16 +40,15 @@ else
   globdir = '*'
 end
 
-Dir.glob(globdir + '/generated/*') do |stale_file|
-  File.delete(stale_file)
-end
+FileUtils.rm_r Dir.glob(globdir + '/generated/*')
 
 File.umask(022)
 Dir.glob(globdir + '/*.in') do |template_file|
   generated_dir = File.join(File.dirname(template_file), 'generated')
   Dir.mkdir(generated_dir) unless Dir.exists? generated_dir
   output_path = File.join(generated_dir, File.basename(template_file, '.in'))
-  File.open(output_path, "w") do |output|
+  output_mode = (File.stat(template_file).mode & 0100) ? 0755 : 0644
+  File.open(output_path, "w", output_mode) do |output|
     File.open(template_file) do |input|
       input.each_line do |line|
 
index 2cad65c52746ecfdd40cb9236440cfbb9c714e11..1b493e3a30a88c68395dee7f52413be3f9f7b6db 100644 (file)
@@ -4,7 +4,7 @@ MAINTAINER Brett Smith <brett@curoverse.com>
 # Install dependencies and set up system.
 # The FUSE packages help ensure that we can install the Python SDK (arv-mount).
 RUN /usr/bin/apt-get install -q -y python-dev python-llfuse python-pip \
-      libio-socket-ssl-perl libjson-perl liburi-perl libwww-perl \
+      libio-socket-ssl-perl libjson-perl liburi-perl libwww-perl dtrx \
       fuse libattr1-dev libfuse-dev && \
     /usr/sbin/adduser --disabled-password \
       --gecos 'Crunch execution user' crunch && \
@@ -13,7 +13,7 @@ RUN /usr/bin/apt-get install -q -y python-dev python-llfuse python-pip \
 
 # Install Arvados packages.
 RUN (find /usr/src/arvados/sdk -name '*.gem' -print0 | \
-      xargs -0rn 1 gem install) && \
+      xargs -0rn 1 /usr/local/rvm/bin/rvm-exec default gem install) && \
     cd /usr/src/arvados/services/fuse && \
     python setup.py install && \
     cd /usr/src/arvados/sdk/python && \
index 240cb39fc5221f9a5020c48cc6c2a1b5ec653660..cd40a72f5b236d9972c710dd30c656a21eaa1607 100644 (file)
@@ -2,20 +2,11 @@
 FROM arvados/debian:wheezy
 MAINTAINER Ward Vandewege <ward@curoverse.com>
 
-RUN echo 'deb http://apt.arvados.org/ wheezy main' > /etc/apt/sources.list.d/apt.arvados.org.list
-
-RUN /usr/bin/apt-key adv --keyserver pgp.mit.edu --recv 1078ECD7
-
-RUN /usr/bin/apt-get update
-
-RUN /usr/bin/apt-get install keep
-
+ADD generated/bin/keepstore /usr/local/bin/
+ADD generated/bin/keepproxy /usr/local/bin/
 ADD generated/run-keep /usr/local/bin/
-RUN chmod +x /usr/local/bin/run-keep
 
 ADD generated/keep_signing_secret /etc/
 
-RUN /bin/chmod a+x /usr/local/bin/run-keep
-
 # Start keep
 CMD ["/usr/local/bin/run-keep"]
index 9525ed5a4c04f68083aee479e3c810e75f3bb084..a0b4cb0d2cb57101f9b287c83fffb2b9e34a7a20 100755 (executable)
@@ -8,4 +8,4 @@ else
     permission_args=""
 fi
 
-exec keep $permission_args -listen=":25107" -volumes="/keep-data"
+exec keepstore $permission_args -listen=":25107" -volumes="/keep-data"
index e84150a35d9b4064264393cdbab6e2e5312bc8f7..337d9abdefb84f403e8111c189797af9614468ec 100755 (executable)
@@ -53,14 +53,16 @@ end
 class Google::APIClient
  def discovery_document(api, version)
    api = api.to_s
-   return @discovery_documents["#{api}:#{version}"] ||=
+   discovery_uri = self.discovery_uri(api, version)
+   discovery_uri_hash = Digest::MD5.hexdigest(discovery_uri)
+   return @discovery_documents[discovery_uri_hash] ||=
      begin
        # fetch new API discovery doc if stale
-       cached_doc = File.expand_path '~/.cache/arvados/discovery_uri.json' rescue nil
+       cached_doc = File.expand_path "~/.cache/arvados/discovery-#{discovery_uri_hash}.json" rescue nil
 
        if cached_doc.nil? or not File.exist?(cached_doc) or (Time.now - File.mtime(cached_doc)) > 86400
          response = self.execute!(:http_method => :get,
-                                  :uri => self.discovery_uri(api, version),
+                                  :uri => discovery_uri,
                                   :authenticated => false)
 
          begin
index 265d634d58ec33d120bab2e3a6f63daaeacd5c88..980ce83ae3a42c153d0a445d01fac4a3cf0c4dd4 100755 (executable)
@@ -243,8 +243,8 @@ class PipelineInstance
   end
   def self.create(attributes)
     result = $client.execute(:api_method => $arvados.pipeline_instances.create,
-                             :body => {
-                               :pipeline_instance => attributes.to_json
+                             :body_object => {
+                               :pipeline_instance => attributes
                              },
                              :authenticated => false,
                              :headers => {
@@ -262,8 +262,8 @@ class PipelineInstance
                              :parameters => {
                                :uuid => @pi[:uuid]
                              },
-                             :body => {
-                               :pipeline_instance => @attributes_to_update.to_json
+                             :body_object => {
+                               :pipeline_instance => @attributes_to_update
                              },
                              :authenticated => false,
                              :headers => {
@@ -285,6 +285,16 @@ class PipelineInstance
   def [](x)
     @pi[x]
   end
+
+  def log_stderr(msg)
+    $arv.log.create log: {
+      event_type: 'stderr',
+      object_uuid: self[:uuid],
+      owner_uuid: self[:owner_uuid],
+      properties: {"text" => msg},
+    }
+  end
+
   protected
   def initialize(j)
     @attributes_to_update = {}
@@ -328,7 +338,7 @@ class JobCache
     body = {job: no_nil_values(job)}.merge(no_nil_values(create_params))
 
     result = $client.execute(:api_method => $arvados.jobs.create,
-                             :body => body,
+                             :body_object => body,
                              :authenticated => false,
                              :headers => {
                                authorization: 'OAuth2 '+ENV['ARVADOS_API_TOKEN']
@@ -345,19 +355,7 @@ class JobCache
       end
       msg += "Job submission was: #{body.to_json}"
 
-      $client.execute(:api_method => $arvados.logs.create,
-                      :body => {
-                        :log => {
-                          :object_uuid => pipeline[:uuid],
-                          :event_type => 'stderr',
-                          :owner_uuid => pipeline[:owner_uuid],
-                          :properties => {"text" => msg}
-                        }
-                      },
-                      :authenticated => false,
-                      :headers => {
-                        authorization: 'OAuth2 '+ENV['ARVADOS_API_TOKEN']
-                      })
+      pipeline.log_stderr(msg)
       nil
     end
   end
@@ -380,10 +378,6 @@ class WhRunPipelineInstance
     if template.match /[^-0-9a-z]/
       # Doesn't look like a uuid -- use it as a filename.
       @template = JSON.parse File.read(template), :symbolize_names => true
-      if !@template[:components]
-        abort ("#{$0}: Template loaded from #{template} " +
-               "does not have a \"components\" key")
-      end
     else
       result = $client.execute(:api_method => $arvados.pipeline_templates.get,
                                :parameters => {
@@ -422,8 +416,25 @@ class WhRunPipelineInstance
       end
     end
 
+    if not @template[:components].is_a?(Hash)
+      abort "\n#{Time.now} -- pipeline_template #{@template[:uuid]}\nSyntax error: Template missing \"components\" hash"
+    end
     @components = @template[:components].dup
 
+    bad_components = @components.each_pair.select do |cname, cspec|
+      not cspec.is_a?(Hash)
+    end
+    if bad_components.any?
+      abort "\n#{Time.now} -- pipeline_template #{@template[:uuid]}\nSyntax error: Components not specified with hashes: #{bad_components.map(&:first).join(', ')}"
+    end
+
+    bad_components = @components.each_pair.select do |cname, cspec|
+      not cspec[:script_parameters].is_a?(Hash)
+    end
+    if bad_components.any?
+      abort "\n#{Time.now} -- pipeline_template #{@template[:uuid]}\nSyntax error: Components missing \"script_parameters\" hashes: #{bad_components.map(&:first).join(', ')}"
+    end
+
     errors = []
     @components.each do |componentname, component|
       component[:script_parameters].each do |parametername, parameter|
@@ -475,7 +486,7 @@ class WhRunPipelineInstance
                  }
                },
                pipeline_template_uuid: @template[:uuid],
-               state: ($options[:submit] ? 'New' : 'RunningOnClient'))
+               state: ($options[:submit] ? 'RunningOnServer' : 'RunningOnClient'))
     end
     self
   end
@@ -711,6 +722,18 @@ class WhRunPipelineInstance
       end
     end
   end
+
+  def abort(msg)
+    if @instance
+      if ["New", "Ready", "RunningOnClient",
+          "RunningOnServer"].include?(@instance[:state])
+        @instance[:state] = "Failed"
+        @instance.save
+      end
+      @instance.log_stderr(msg)
+    end
+    Kernel::abort(msg)
+  end
 end
 
 runner = WhRunPipelineInstance.new($options)
index 06b3da99a9929823bdc6b91ca4d315bb46878bc0..e7cac1816f890e87d3b2767d0930dd57f3d5562b 100755 (executable)
@@ -84,6 +84,8 @@ use File::Temp;
 use Fcntl ':flock';
 use File::Path qw( make_path );
 
+use constant EX_TEMPFAIL => 75;
+
 $ENV{"TMPDIR"} ||= "/tmp";
 unless (defined $ENV{"CRUNCH_TMP"}) {
   $ENV{"CRUNCH_TMP"} = $ENV{"TMPDIR"} . "/crunch-job";
@@ -150,17 +152,25 @@ if ($job_has_uuid)
 {
   $Job = $arv->{'jobs'}->{'get'}->execute('uuid' => $jobspec);
   if (!$force_unlock) {
+    # If some other crunch-job process has grabbed this job (or we see
+    # other evidence that the job is already underway) we exit
+    # EX_TEMPFAIL so crunch-dispatch (our parent process) doesn't
+    # mark the job as failed.
     if ($Job->{'is_locked_by_uuid'}) {
-      croak("Job is locked: " . $Job->{'is_locked_by_uuid'});
+      Log(undef, "Job is locked by " . $Job->{'is_locked_by_uuid'});
+      exit EX_TEMPFAIL;
     }
     if ($Job->{'success'} ne undef) {
-      croak("Job 'success' flag (" . $Job->{'success'} . ") is not null");
+      Log(undef, "Job 'success' flag (" . $Job->{'success'} . ") is not null");
+      exit EX_TEMPFAIL;
     }
     if ($Job->{'running'}) {
-      croak("Job 'running' flag is already set");
+      Log(undef, "Job 'running' flag is already set");
+      exit EX_TEMPFAIL;
     }
     if ($Job->{'started_at'}) {
-      croak("Job 'started_at' time is already set (" . $Job->{'started_at'} . ")");
+      Log(undef, "Job 'started_at' time is already set (" . $Job->{'started_at'} . ")");
+      exit EX_TEMPFAIL;
     }
   }
 }
@@ -273,7 +283,8 @@ if ($job_has_uuid)
   # Claim this job, and make sure nobody else does
   unless ($Job->update_attributes('is_locked_by_uuid' => $User->{'uuid'}) &&
           $Job->{'is_locked_by_uuid'} == $User->{'uuid'}) {
-    croak("Error while updating / locking job");
+    Log(undef, "Error while updating / locking job, exiting ".EX_TEMPFAIL);
+    exit EX_TEMPFAIL;
   }
   $Job->update_attributes('started_at' => scalar gmtime,
                           'running' => 1,
@@ -688,7 +699,9 @@ for (my $todo_ptr = 0; $todo_ptr <= $#jobstep_todo; $todo_ptr ++)
 
     my @execargs = ('bash', '-c', $command);
     srun (\@srunargs, \@execargs, undef, $build_script_to_send);
-    exit (111);
+    # exec() failed, we assume nothing happened.
+    Log(undef, "srun() failed on build script");
+    die;
   }
   close("writer");
   if (!defined $childpid)
@@ -882,7 +895,7 @@ else {
 Log (undef, "finish");
 
 save_meta();
-exit 0;
+exit ($Job->{'success'} ? 1 : 0);
 
 
 
similarity index 99%
rename from sdk/go/src/arvados.org/sdk/sdk.go
rename to sdk/go/arvadosclient/arvadosclient.go
index 9f1880fab18792f2891f205d84cadd5f668baffc..3d5aff7b2325b4d93e3a0afacd71c44329c42f6f 100644 (file)
@@ -1,6 +1,6 @@
 /* Simple Arvados Go SDK for communicating with API server. */
 
-package sdk
+package arvadosclient
 
 import (
        "bytes"
similarity index 95%
rename from sdk/go/src/arvados.org/sdk/sdk_test.go
rename to sdk/go/arvadosclient/arvadosclient_test.go
index 8af848f8d634142bf6e9cfea949384d95b1302cf..678c53310e359c8e8783b5652c9ad6c7bff1e3c9 100644 (file)
@@ -1,4 +1,4 @@
-package sdk
+package arvadosclient
 
 import (
        "fmt"
@@ -6,7 +6,6 @@ import (
        "net/http"
        "os"
        "os/exec"
-       "strings"
        "testing"
 )
 
@@ -21,8 +20,8 @@ var _ = Suite(&ServerRequiredSuite{})
 type ServerRequiredSuite struct{}
 
 func pythonDir() string {
-       gopath := os.Getenv("GOPATH")
-       return fmt.Sprintf("%s/../python/tests", strings.Split(gopath, ":")[0])
+       cwd, _ := os.Getwd()
+       return fmt.Sprintf("%s/../../python/tests", cwd)
 }
 
 func (s *ServerRequiredSuite) SetUpSuite(c *C) {
diff --git a/sdk/go/go.sh b/sdk/go/go.sh
deleted file mode 100755 (executable)
index 5553567..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#! /bin/sh
-
-# Wraps the 'go' executable with some environment setup.  Sets GOPATH, creates
-# 'pkg' and 'bin' directories, automatically installs dependencies, then runs
-# the underlying 'go' executable with any command line parameters provided to
-# the script.
-
-rootdir=$(readlink -f $(dirname $0))
-GOPATH=$rootdir:$GOPATH
-export GOPATH
-
-mkdir -p $rootdir/pkg
-mkdir -p $rootdir/bin
-
-go get gopkg.in/check.v1
-
-go $*
similarity index 98%
rename from sdk/go/src/arvados.org/keepclient/hashcheck.go
rename to sdk/go/keepclient/hashcheck.go
index a585d0088833d08994b3891361623b3bfeabe78e..1f696d95b64d60f69c772063be7ae5b60e671cca 100644 (file)
@@ -30,9 +30,10 @@ type HashCheckingReader struct {
 // the checksum doesn't match.
 func (this HashCheckingReader) Read(p []byte) (n int, err error) {
        n, err = this.Reader.Read(p)
-       if err == nil {
+       if n > 0 {
                this.Hash.Write(p[:n])
-       } else if err == io.EOF {
+       }
+       if err == io.EOF {
                sum := this.Hash.Sum(make([]byte, 0, this.Hash.Size()))
                if fmt.Sprintf("%x", sum) != this.Check {
                        err = BadChecksum
similarity index 97%
rename from sdk/go/src/arvados.org/keepclient/keepclient.go
rename to sdk/go/keepclient/keepclient.go
index d43a2150f3f0950b692df0344c85983d7f4977e8..e1c25c9e1d82ee5f2f41bbb72182faf5a73b638d 100644 (file)
@@ -2,8 +2,8 @@
 package keepclient
 
 import (
-       "arvados.org/sdk"
-       "arvados.org/streamer"
+       "git.curoverse.com/arvados.git/sdk/go/streamer"
+       "git.curoverse.com/arvados.git/sdk/go/arvadosclient"
        "crypto/md5"
        "errors"
        "fmt"
@@ -33,7 +33,7 @@ const X_Keep_Replicas_Stored = "X-Keep-Replicas-Stored"
 
 // Information about Arvados and Keep servers.
 type KeepClient struct {
-       Arvados       *sdk.ArvadosClient
+       Arvados       *arvadosclient.ArvadosClient
        Want_replicas int
        Using_proxy   bool
        service_roots *[]string
@@ -43,7 +43,7 @@ type KeepClient struct {
 
 // Create a new KeepClient.  This will contact the API server to discover Keep
 // servers.
-func MakeKeepClient(arv *sdk.ArvadosClient) (kc KeepClient, err error) {
+func MakeKeepClient(arv *arvadosclient.ArvadosClient) (kc KeepClient, err error) {
        kc = KeepClient{
                Arvados:       arv,
                Want_replicas: 2,
similarity index 92%
rename from sdk/go/src/arvados.org/keepclient/keepclient_test.go
rename to sdk/go/keepclient/keepclient_test.go
index 5327fb507e9b6de8a323fc9be8738d7d26b6595e..015b24896ebf72ed756509057a7f622f464a4438 100644 (file)
@@ -1,8 +1,8 @@
 package keepclient
 
 import (
-       "arvados.org/sdk"
-       "arvados.org/streamer"
+       "git.curoverse.com/arvados.git/sdk/go/arvadosclient"
+       "git.curoverse.com/arvados.git/sdk/go/streamer"
        "crypto/md5"
        "flag"
        "fmt"
@@ -14,7 +14,6 @@ import (
        "net/http"
        "os"
        "os/exec"
-       "strings"
        "testing"
 )
 
@@ -36,20 +35,36 @@ type ServerRequiredSuite struct{}
 type StandaloneSuite struct{}
 
 func pythonDir() string {
-       gopath := os.Getenv("GOPATH")
-       return fmt.Sprintf("%s/../python/tests", strings.Split(gopath, ":")[0])
+       cwd, _ := os.Getwd()
+       return fmt.Sprintf("%s/../../python/tests", cwd)
 }
 
 func (s *ServerRequiredSuite) SetUpSuite(c *C) {
        if *no_server {
                c.Skip("Skipping tests that require server")
-       } else {
-               os.Chdir(pythonDir())
-               if err := exec.Command("python", "run_test_server.py", "start").Run(); err != nil {
-                       panic("'python run_test_server.py start' returned error")
+               return
+       }
+       os.Chdir(pythonDir())
+       {
+               cmd := exec.Command("python", "run_test_server.py", "start")
+               stderr, err := cmd.StderrPipe()
+               if err != nil {
+                       log.Fatalf("Setting up stderr pipe: %s", err)
+               }
+               go io.Copy(os.Stderr, stderr)
+               if err := cmd.Run(); err != nil {
+                       panic(fmt.Sprintf("'python run_test_server.py start' returned error %s", err))
+               }
+       }
+       {
+               cmd := exec.Command("python", "run_test_server.py", "start_keep")
+               stderr, err := cmd.StderrPipe()
+               if err != nil {
+                       log.Fatalf("Setting up stderr pipe: %s", err)
                }
-               if err := exec.Command("python", "run_test_server.py", "start_keep").Run(); err != nil {
-                       panic("'python run_test_server.py start_keep' returned error")
+               go io.Copy(os.Stderr, stderr)
+               if err := cmd.Run(); err != nil {
+                       panic(fmt.Sprintf("'python run_test_server.py start_keep' returned error %s", err))
                }
        }
 }
@@ -65,7 +80,7 @@ func (s *ServerRequiredSuite) TestMakeKeepClient(c *C) {
        os.Setenv("ARVADOS_API_TOKEN", "4axaw8zxe0qm22wa6urpp5nskcne8z88cvbupv653y1njyi05h")
        os.Setenv("ARVADOS_API_HOST_INSECURE", "true")
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        c.Assert(err, Equals, nil)
 
        kc, err := MakeKeepClient(&arv)
@@ -126,7 +141,7 @@ func UploadToStubHelper(c *C, st http.Handler, f func(KeepClient, string,
        listener, url := RunBogusKeepServer(st, 2990)
        defer listener.Close()
 
-       arv, _ := sdk.MakeArvadosClient()
+       arv, _ := arvadosclient.MakeArvadosClient()
        arv.ApiToken = "abc123"
 
        kc, _ := MakeKeepClient(&arv)
@@ -260,7 +275,7 @@ func (s *StandaloneSuite) TestPutB(c *C) {
                "foo",
                make(chan string, 2)}
 
-       arv, _ := sdk.MakeArvadosClient()
+       arv, _ := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
 
        kc.Want_replicas = 2
@@ -302,7 +317,7 @@ func (s *StandaloneSuite) TestPutHR(c *C) {
                "foo",
                make(chan string, 2)}
 
-       arv, _ := sdk.MakeArvadosClient()
+       arv, _ := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
 
        kc.Want_replicas = 2
@@ -356,7 +371,7 @@ func (s *StandaloneSuite) TestPutWithFail(c *C) {
        fh := FailHandler{
                make(chan string, 1)}
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
 
        kc.Want_replicas = 2
@@ -405,7 +420,7 @@ func (s *StandaloneSuite) TestPutWithTooManyFail(c *C) {
        fh := FailHandler{
                make(chan string, 4)}
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
 
        kc.Want_replicas = 2
@@ -465,7 +480,7 @@ func (s *StandaloneSuite) TestGet(c *C) {
        listener, url := RunBogusKeepServer(st, 2990)
        defer listener.Close()
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
        arv.ApiToken = "abc123"
        kc.SetServiceRoots([]string{url})
@@ -491,7 +506,7 @@ func (s *StandaloneSuite) TestGetFail(c *C) {
        listener, url := RunBogusKeepServer(st, 2990)
        defer listener.Close()
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
        arv.ApiToken = "abc123"
        kc.SetServiceRoots([]string{url})
@@ -521,7 +536,7 @@ func (s *StandaloneSuite) TestChecksum(c *C) {
        listener, url := RunBogusKeepServer(st, 2990)
        defer listener.Close()
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
        arv.ApiToken = "abc123"
        kc.SetServiceRoots([]string{url})
@@ -554,7 +569,7 @@ func (s *StandaloneSuite) TestGetWithFailures(c *C) {
                "abc123",
                []byte("foo")}
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
        arv.ApiToken = "abc123"
        service_roots := make([]string, 5)
@@ -589,7 +604,7 @@ func (s *ServerRequiredSuite) TestPutGetHead(c *C) {
        os.Setenv("ARVADOS_API_TOKEN", "4axaw8zxe0qm22wa6urpp5nskcne8z88cvbupv653y1njyi05h")
        os.Setenv("ARVADOS_API_HOST_INSECURE", "true")
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, err := MakeKeepClient(&arv)
        c.Assert(err, Equals, nil)
 
@@ -638,7 +653,7 @@ func (s *StandaloneSuite) TestPutProxy(c *C) {
 
        st := StubProxyHandler{make(chan string, 1)}
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
 
        kc.Want_replicas = 2
@@ -669,7 +684,7 @@ func (s *StandaloneSuite) TestPutProxyInsufficientReplicas(c *C) {
 
        st := StubProxyHandler{make(chan string, 1)}
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, _ := MakeKeepClient(&arv)
 
        kc.Want_replicas = 3
similarity index 99%
rename from sdk/go/src/arvados.org/keepclient/support.go
rename to sdk/go/keepclient/support.go
index f3e47f92f5864d2fe3d8b1e06de92e2075f4faf9..ce15ce91adbab7926c0eca76ece43b52ab051bb2 100644 (file)
@@ -2,7 +2,7 @@
 package keepclient
 
 import (
-       "arvados.org/streamer"
+       "git.curoverse.com/arvados.git/sdk/go/streamer"
        "errors"
        "fmt"
        "io"
index 090c08e4c22a7a2138ff7778db3293b2c17f0424..105d068d4b60377ef81bcd0f4af620da766bdb68 100644 (file)
@@ -2,4 +2,4 @@
 /dist/
 /*.egg
 /*.egg-info
-/tmp
+/tests/tmp
index 42c4b3aae63b4c460fa4fc19bea3180c41fb5d9f..060ed95d959531917749584176c5f42b8c453f66 100644 (file)
@@ -25,6 +25,17 @@ from stream import *
 import errors
 import util
 
+# Set up Arvados logging based on the user's configuration.
+# All Arvados code should log under the arvados hierarchy.
+log_handler = logging.StreamHandler()
+log_handler.setFormatter(logging.Formatter(
+        '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
+        '%Y-%m-%d %H:%M:%S'))
+logger = logging.getLogger('arvados')
+logger.addHandler(log_handler)
+logger.setLevel(logging.DEBUG if config.get('ARVADOS_DEBUG')
+                else logging.WARNING)
+
 def task_set_output(self,s):
     api('v1').job_tasks().update(uuid=self['uuid'],
                                  body={
index 8a71b90c0dda661ea0ac8bfbcd17e47cb1ee36b9..3df24306df750bc0a4e574218cc86024cfe5b993 100644 (file)
@@ -12,9 +12,13 @@ import config
 import errors
 import util
 
-services = {}
+_logger = logging.getLogger('arvados.api')
+conncache = {}
+
+class CredentialsFromToken(object):
+    def __init__(self, api_token):
+        self.api_token = api_token
 
-class CredentialsFromEnv(object):
     @staticmethod
     def http_request(self, uri, **kwargs):
         from httplib import BadStatusLine
@@ -24,7 +28,7 @@ class CredentialsFromEnv(object):
         if config.get("ARVADOS_EXTERNAL_CLIENT", "") == "true":
             kwargs['headers']['X-External-Client'] = '1'
 
-        kwargs['headers']['Authorization'] = 'OAuth2 %s' % config.get('ARVADOS_API_TOKEN', 'ARVADOS_API_TOKEN_not_set')
+        kwargs['headers']['Authorization'] = 'OAuth2 %s' % self.arvados_api_token
         try:
             return self.orig_http_request(uri, **kwargs)
         except BadStatusLine:
@@ -36,6 +40,7 @@ class CredentialsFromEnv(object):
             # risky.
             return self.orig_http_request(uri, **kwargs)
     def authorize(self, http):
+        http.arvados_api_token = self.api_token
         http.orig_http_request = http.request
         http.request = types.MethodType(self.http_request, http)
         return http
@@ -69,57 +74,88 @@ def http_cache(data_type):
         path = None
     return path
 
-def api(version=None, cache=True, **kwargs):
+def api(version=None, cache=True, host=None, token=None, insecure=False, **kwargs):
     """Return an apiclient Resources object for an Arvados instance.
 
     Arguments:
     * version: A string naming the version of the Arvados API to use (for
       example, 'v1').
-    * cache: If True (default), return an existing resources object, or use
-      a cached discovery document to build one.
+    * cache: If True (default), return an existing Resources object if
+      one already exists with the same endpoint and credentials. If
+      False, create a new one, and do not keep it in the cache (i.e.,
+      do not return it from subsequent api(cache=True) calls with
+      matching endpoint and credentials).
+    * host: The Arvados API server host (and optional :port) to connect to.
+    * token: The authentication token to send with each API call.
+    * insecure: If True, ignore SSL certificate validation errors.
 
     Additional keyword arguments will be passed directly to
-    `apiclient.discovery.build`.  If the `discoveryServiceUrl` or `http`
-    keyword arguments are missing, this function will set default values for
-    them, based on the current Arvados configuration settings."""
-    if config.get('ARVADOS_DEBUG'):
-        logging.basicConfig(level=logging.DEBUG)
-
-    if not cache or not services.get(version):
-        if not version:
-            version = 'v1'
-            logging.info("Using default API version. " +
-                         "Call arvados.api('%s') instead." %
-                         version)
-
-        if 'discoveryServiceUrl' not in kwargs:
-            api_host = config.get('ARVADOS_API_HOST')
-            if not api_host:
-                raise ValueError(
-                    "No discoveryServiceUrl or ARVADOS_API_HOST set.")
-            kwargs['discoveryServiceUrl'] = (
-                'https://%s/discovery/v1/apis/{api}/{apiVersion}/rest' %
-                (api_host,))
-
-        if 'http' not in kwargs:
-            http_kwargs = {}
-            # Prefer system's CA certificates (if available) over httplib2's.
-            certs_path = '/etc/ssl/certs/ca-certificates.crt'
-            if os.path.exists(certs_path):
-                http_kwargs['ca_certs'] = certs_path
-            if cache:
-                http_kwargs['cache'] = http_cache('discovery')
-            if (config.get('ARVADOS_API_HOST_INSECURE', '').lower() in
-                  ('yes', 'true', '1')):
-                http_kwargs['disable_ssl_certificate_validation'] = True
-            kwargs['http'] = httplib2.Http(**http_kwargs)
-
-        kwargs['http'] = CredentialsFromEnv().authorize(kwargs['http'])
-        services[version] = apiclient.discovery.build('arvados', version,
-                                                      **kwargs)
-        kwargs['http'].cache = None
-    return services[version]
-
-def uncache_api(version):
-    if version in services:
-        del services[version]
+    `apiclient.discovery.build` if a new Resource object is created.
+    If the `discoveryServiceUrl` or `http` keyword arguments are
+    missing, this function will set default values for them, based on
+    the current Arvados configuration settings.
+
+    """
+
+    if not version:
+        version = 'v1'
+        logging.info("Using default API version. " +
+                     "Call arvados.api('%s') instead." %
+                     version)
+    if 'discoveryServiceUrl' in kwargs:
+        if host:
+            raise ValueError("both discoveryServiceUrl and host provided")
+        # Here we can't use a token from environment, config file,
+        # etc. Those probably have nothing to do with the host
+        # provided by the caller.
+        if not token:
+            raise ValueError("discoveryServiceUrl provided, but token missing")
+    elif host and token:
+        pass
+    elif not host and not token:
+        # Load from user configuration or environment
+        for x in ['ARVADOS_API_HOST', 'ARVADOS_API_TOKEN']:
+            if x not in config.settings():
+                raise ValueError("%s is not set. Aborting." % x)
+        host = config.get('ARVADOS_API_HOST')
+        token = config.get('ARVADOS_API_TOKEN')
+        insecure = (config.get('ARVADOS_API_HOST_INSECURE', '').lower() in
+                       ('yes', 'true', '1'))
+    else:
+        # Caller provided one but not the other
+        if not host:
+            raise ValueError("token argument provided, but host missing.")
+        else:
+            raise ValueError("host argument provided, but token missing.")
+
+    if host:
+        # Caller wants us to build the discoveryServiceUrl
+        kwargs['discoveryServiceUrl'] = (
+            'https://%s/discovery/v1/apis/{api}/{apiVersion}/rest' % (host,))
+
+    if cache:
+        connprofile = (version, host, token, insecure)
+        svc = conncache.get(connprofile)
+        if svc:
+            return svc
+
+    if 'http' not in kwargs:
+        http_kwargs = {}
+        # Prefer system's CA certificates (if available) over httplib2's.
+        certs_path = '/etc/ssl/certs/ca-certificates.crt'
+        if os.path.exists(certs_path):
+            http_kwargs['ca_certs'] = certs_path
+        if cache:
+            http_kwargs['cache'] = http_cache('discovery')
+        if insecure:
+            http_kwargs['disable_ssl_certificate_validation'] = True
+        kwargs['http'] = httplib2.Http(**http_kwargs)
+
+    credentials = CredentialsFromToken(api_token=token)
+    kwargs['http'] = credentials.authorize(kwargs['http'])
+
+    svc = apiclient.discovery.build('arvados', version, **kwargs)
+    kwargs['http'].cache = None
+    if cache:
+        conncache[connprofile] = svc
+    return svc
index e420a679e246df191939343bcb1bc9533bb9c6f1..7aed681504702dc3d37477b5f1c838cb5e10f7c6 100644 (file)
@@ -27,6 +27,8 @@ import config
 import errors
 import util
 
+_logger = logging.getLogger('arvados.collection')
+
 def normalize_stream(s, stream):
     stream_tokens = [s]
     sortedfiles = list(stream.keys())
@@ -91,10 +93,10 @@ def normalize(collection):
 
 class CollectionReader(object):
     def __init__(self, manifest_locator_or_text):
-        if re.search(r'^[a-f0-9]{32}(\+\d+)?(\+\S+)*$', manifest_locator_or_text):
+        if re.match(r'[a-f0-9]{32}(\+\d+)?(\+\S+)*$', manifest_locator_or_text):
             self._manifest_locator = manifest_locator_or_text
             self._manifest_text = None
-        elif re.search(r'^\S+( [a-f0-9]{32,}(\+\S+)*)*( \d+:\d+:\S+)+\n', manifest_locator_or_text):
+        elif re.match(r'(\S+)( [a-f0-9]{32}(\+\d+)(\+\S+)*)+( \d+:\d+:\S+)+\n', manifest_locator_or_text):
             self._manifest_text = manifest_locator_or_text
             self._manifest_locator = None
         else:
@@ -117,8 +119,8 @@ class CollectionReader(object):
                     uuid=self._manifest_locator).execute()
                 self._manifest_text = c['manifest_text']
             except Exception as e:
-                logging.warning("API lookup failed for collection %s (%s: %s)" %
-                                (self._manifest_locator, type(e), str(e)))
+                _logger.warning("API lookup failed for collection %s (%s: %s)",
+                                self._manifest_locator, type(e), str(e))
                 self._manifest_text = Keep.get(self._manifest_locator)
         self._streams = []
         for stream_line in self._manifest_text.split("\n"):
@@ -312,8 +314,8 @@ class CollectionWriter(object):
                  self._current_file_pos,
                  self._current_stream_name))
         self._current_stream_files += [[self._current_file_pos,
-                                       self._current_stream_length - self._current_file_pos,
-                                       self._current_file_name]]
+                                        self._current_stream_length - self._current_file_pos,
+                                        self._current_file_name]]
         self._current_file_pos = self._current_stream_length
 
     def start_new_stream(self, newstreamname='.'):
@@ -342,8 +344,8 @@ class CollectionWriter(object):
             if len(self._current_stream_locators) == 0:
                 self._current_stream_locators += [config.EMPTY_BLOCK_LOCATOR]
             self._finished_streams += [[self._current_stream_name,
-                                       self._current_stream_locators,
-                                       self._current_stream_files]]
+                                        self._current_stream_locators,
+                                        self._current_stream_files]]
         self._current_stream_files = []
         self._current_stream_length = 0
         self._current_stream_locators = []
@@ -352,9 +354,8 @@ class CollectionWriter(object):
         self._current_file_name = None
 
     def finish(self):
-        # Send the stripped manifest to Keep, to ensure that we use the
-        # same UUID regardless of what hints are used on the collection.
-        return Keep.put(self.stripped_manifest())
+        # Store the manifest in Keep and return its locator.
+        return Keep.put(self.manifest_text())
 
     def stripped_manifest(self):
         """
index d3efa63a9bf4f71b0f60f35e3fac457b8b418fbf..335ef174de6dcb860f6d13ee9ec32e330d54cc31 100644 (file)
@@ -447,7 +447,6 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr):
         # Register the resulting collection in Arvados.
         collection = arvados.api().collections().create(
             body={
-                'uuid': writer.finish(),
                 'manifest_text': writer.manifest_text(),
                 },
             ).execute()
index 06f3b34410531a8a1f76bcf32c660d496f80d6cd..0dbfe6359dd18b93cb95d6154f22ec7dcbe24177 100644 (file)
@@ -8,6 +8,8 @@ import re
 import config
 import logging
 
+_logger = logging.getLogger('arvados.events')
+
 class EventClient(WebSocketClient):
     def __init__(self, url, filters, on_event):
         ssl_options = None
@@ -41,8 +43,8 @@ def subscribe(api, filters, on_event):
         ws = EventClient(url, filters, on_event)
         ws.connect()
         return ws
-    except:
-        logging.exception('')
+    except Exception:
+        _logger.exception('')
         if (ws):
-          ws.close_connection()        
+          ws.close_connection()
         raise
index fdb27f0d313ee57385d5df856516451d5cc57e93..bffb02a147b926d258d65387342dc718eb934f51 100644 (file)
@@ -21,6 +21,7 @@ import timer
 import datetime
 import ssl
 
+_logger = logging.getLogger('arvados.keep')
 global_client_object = None
 
 from api import *
@@ -200,10 +201,10 @@ class KeepClient(object):
                 self.run_with_limiter(limiter)
 
         def run_with_limiter(self, limiter):
-            logging.debug("KeepWriterThread %s proceeding %s %s" %
-                          (str(threading.current_thread()),
-                           self.args['data_hash'],
-                           self.args['service_root']))
+            _logger.debug("KeepWriterThread %s proceeding %s %s",
+                          str(threading.current_thread()),
+                          self.args['data_hash'],
+                          self.args['service_root'])
             h = httplib2.Http(timeout=self.args.get('timeout', None))
             url = self.args['service_root'] + self.args['data_hash']
             api_token = config.get('ARVADOS_API_TOKEN')
@@ -215,7 +216,7 @@ class KeepClient(object):
                 headers['X-Keep-Desired-Replication'] = str(self.args['want_copies'])
 
             try:
-                logging.debug("Uploading to {}".format(url))
+                _logger.debug("Uploading to {}".format(url))
                 resp, content = h.request(url.encode('utf-8'), 'PUT',
                                           headers=headers,
                                           body=self.args['data'])
@@ -230,10 +231,10 @@ class KeepClient(object):
                                               body=body)
                 if re.match(r'^2\d\d$', resp['status']):
                     self._success = True
-                    logging.debug("KeepWriterThread %s succeeded %s %s" %
-                                  (str(threading.current_thread()),
-                                   self.args['data_hash'],
-                                   self.args['service_root']))
+                    _logger.debug("KeepWriterThread %s succeeded %s %s",
+                                  str(threading.current_thread()),
+                                  self.args['data_hash'],
+                                  self.args['service_root'])
                     replicas_stored = 1
                     if 'x-keep-replicas-stored' in resp:
                         # Tick the 'done' counter for the number of replica
@@ -246,15 +247,15 @@ class KeepClient(object):
                             pass
                     limiter.save_response(content.strip(), replicas_stored)
                 else:
-                    logging.warning("Request fail: PUT %s => %s %s" %
-                                    (url, resp['status'], content))
+                    _logger.debug("Request fail: PUT %s => %s %s",
+                                    url, resp['status'], content)
             except (httplib2.HttpLib2Error,
                     httplib.HTTPException,
                     ssl.SSLError) as e:
                 # When using https, timeouts look like ssl.SSLError from here.
                 # "SSLError: The write operation timed out"
-                logging.warning("Request fail: PUT %s => %s: %s" %
-                                (url, type(e), str(e)))
+                _logger.debug("Request fail: PUT %s => %s: %s",
+                                url, type(e), str(e))
 
     def __init__(self, **kwargs):
         self.lock = threading.Lock()
@@ -298,7 +299,7 @@ class KeepClient(object):
                                f['service_port']))
                              for f in keep_services)
                     self.service_roots = sorted(set(roots))
-                    logging.debug(str(self.service_roots))
+                    _logger.debug(str(self.service_roots))
                 finally:
                     self.lock.release()
 
@@ -343,7 +344,7 @@ class KeepClient(object):
 
             # Remove the digits just used from the seed
             seed = seed[8:]
-        logging.debug(str(pseq))
+        _logger.debug(str(pseq))
         return pseq
 
     class CacheSlot(object):
@@ -401,8 +402,6 @@ class KeepClient(object):
             self._cache_lock.release()
 
     def get(self, locator):
-        #logging.debug("Keep.get %s" % (locator))
-
         if re.search(r',', locator):
             return ''.join(self.get(x) for x in locator.split(','))
         if 'KEEP_LOCAL_STORE' in os.environ:
@@ -410,7 +409,6 @@ class KeepClient(object):
         expect_hash = re.sub(r'\+.*', '', locator)
 
         slot, first = self.reserve_cache(expect_hash)
-        #logging.debug("%s %s %s" % (slot, first, expect_hash))
 
         if not first:
             v = slot.get()
@@ -448,23 +446,23 @@ class KeepClient(object):
     def get_url(self, url, headers, expect_hash):
         h = httplib2.Http()
         try:
-            logging.info("Request: GET %s" % (url))
+            _logger.debug("Request: GET %s", url)
             with timer.Timer() as t:
                 resp, content = h.request(url.encode('utf-8'), 'GET',
                                           headers=headers)
-            logging.info("Received %s bytes in %s msec (%s MiB/sec)" % (len(content),
-                                                                        t.msecs,
-                                                                        (len(content)/(1024*1024))/t.secs))
+            _logger.info("Received %s bytes in %s msec (%s MiB/sec)",
+                         len(content), t.msecs,
+                         (len(content)/(1024*1024))/t.secs)
             if re.match(r'^2\d\d$', resp['status']):
                 m = hashlib.new('md5')
                 m.update(content)
                 md5 = m.hexdigest()
                 if md5 == expect_hash:
                     return content
-                logging.warning("Checksum fail: md5(%s) = %s" % (url, md5))
+                _logger.warning("Checksum fail: md5(%s) = %s", url, md5)
         except Exception as e:
-            logging.info("Request fail: GET %s => %s: %s" %
-                         (url, type(e), str(e)))
+            _logger.debug("Request fail: GET %s => %s: %s",
+                         url, type(e), str(e))
         return None
 
     def put(self, data, **kwargs):
@@ -498,9 +496,9 @@ class KeepClient(object):
             threads_retry = []
             for t in threads:
                 if not t.success():
-                    logging.warning("Retrying: PUT %s %s" % (
-                        t.args['service_root'],
-                        t.args['data_hash']))
+                    _logger.debug("Retrying: PUT %s %s",
+                                    t.args['service_root'],
+                                    t.args['data_hash'])
                     retry_with_args = t.args.copy()
                     t_retry = KeepClient.KeepWriterThread(**retry_with_args)
                     t_retry.start()
index fd844354dde75330c0826ad0ad3c08325392597f..7a29100a398cc1e6b06b0f452845079d23c34a2c 100644 (file)
@@ -1,7 +1,6 @@
 import gflags
 import httplib
 import httplib2
-import logging
 import os
 import pprint
 import sys
index 7f07e22bc09184c233ade533fd07eed03bc00126..7f02cf73df1e6ce242607d202d3c143d8a9c66b2 100755 (executable)
@@ -8,7 +8,13 @@ import string
 import sys
 import logging
 
-logger = logging.getLogger(os.path.basename(sys.argv[0]))
+import arvados
+
+logger = logging.getLogger('arvados.arv-get')
+
+def abort(msg, code=1):
+    print >>sys.stderr, "arv-get:", msg
+    exit(code)
 
 parser = argparse.ArgumentParser(
     description='Copy data from Keep to a local file or pipe.')
@@ -25,8 +31,10 @@ group = parser.add_mutually_exclusive_group()
 group.add_argument('--progress', action='store_true',
                    help="""
 Display human-readable progress on stderr (bytes and, if possible,
-percentage of total data size). This is the default behavior when
-stderr is a tty and stdout is not a tty.
+percentage of total data size). This is the default behavior when it
+is not expected to interfere with the output: specifically, stderr is
+a tty _and_ either stdout is not a tty, or output is being written to
+named files rather than stdout.
 """)
 group.add_argument('--no-progress', action='store_true',
                    help="""
@@ -87,15 +95,9 @@ if not args.r and (os.path.isdir(args.destination) or
                    args.destination[-1] == os.path.sep):
     args.destination = os.path.join(args.destination,
                                     os.path.basename(args.locator))
-    logger.debug("Appended source file name to destination directory: %s" %
+    logger.debug("Appended source file name to destination directory: %s",
                  args.destination)
 
-# Turn on --progress by default if stderr is a tty and stdout isn't.
-if (not (args.batch_progress or args.no_progress)
-    and os.isatty(sys.stderr.fileno())
-    and not os.isatty(sys.stdout.fileno())):
-    args.progress = True
-
 if args.destination == '-':
     args.destination = '/dev/stdout'
 if args.destination == '/dev/stdout':
@@ -106,8 +108,15 @@ if args.destination == '/dev/stdout':
 else:
     args.destination = args.destination.rstrip(os.sep)
 
+# Turn on --progress by default if stderr is a tty and output is
+# either going to a named file, or going (via stdout) to something
+# that isn't a tty.
+if (not (args.batch_progress or args.no_progress)
+    and sys.stderr.isatty()
+    and (args.destination != '/dev/stdout'
+         or not sys.stdout.isatty())):
+    args.progress = True
 
-import arvados
 
 r = re.search(r'^(.*?)(/.*)?$', args.locator)
 collection = r.group(1)
@@ -121,23 +130,22 @@ if not get_prefix:
     try:
         if not args.n:
             if not args.f and os.path.exists(args.destination):
-                logger.error('Local file %s already exists' % args.destination)
-                sys.exit(1)
+                abort('Local file %s already exists.' % (args.destination,))
             with open(args.destination, 'wb') as f:
                 try:
                     c = arvados.api('v1').collections().get(
                         uuid=collection).execute()
                     manifest = c['manifest_text']
                 except Exception as e:
-                    logging.warning(
-                        "API lookup failed for collection %s (%s: %s)" %
-                        (collection, type(e), str(e)))
+                    logger.warning(
+                        "Collection %s not found. " +
+                        "Trying to fetch directly from Keep (deprecated).",
+                        collection)
                     manifest = arvados.Keep.get(collection)
                 f.write(manifest)
         sys.exit(0)
     except arvados.errors.NotFoundError as e:
-        logger.error(e)
-        sys.exit(1)
+        abort(e)
 
 reader = arvados.CollectionReader(collection)
 
@@ -156,8 +164,7 @@ try:
                     os.path.join(s.name(), f.name())[len(get_prefix)+1:])
                 if (not (args.n or args.f or args.skip_existing) and
                     os.path.exists(dest_path)):
-                    logger.error('Local file %s already exists' % dest_path)
-                    sys.exit(1)
+                    abort('Local file %s already exists.' % (dest_path,))
             else:
                 if os.path.join(s.name(), f.name()) != '.' + get_prefix:
                     continue
@@ -165,8 +172,7 @@ try:
             todo += [(s, f, dest_path)]
             todo_bytes += f.size()
 except arvados.errors.NotFoundError as e:
-    logger.error(e)
-    sys.exit(1)
+    abort(e)
 
 # Read data, and (if not -n) write to local file(s) or pipe.
 
@@ -176,21 +182,19 @@ for s,f,outfilename in todo:
     digestor = None
     if not args.n:
         if args.skip_existing and os.path.exists(outfilename):
-            logger.debug('Local file %s exists. Skipping.' % outfilename)
+            logger.debug('Local file %s exists. Skipping.', outfilename)
             continue
         elif not args.f and (os.path.isfile(outfilename) or
                            os.path.isdir(outfilename)):
             # Good thing we looked again: apparently this file wasn't
             # here yet when we checked earlier.
-            logger.error('Local file %s already exists' % outfilename)
-            sys.exit(1)
+            abort('Local file %s already exists.' % (outfilename,))
         if args.r:
             arvados.util.mkdir_dash_p(os.path.dirname(outfilename))
         try:
             outfile = open(outfilename, 'wb')
         except Exception as e:
-            logger.error('Open(%s) failed: %s' % (outfilename, e))
-            sys.exit(1)
+            abort('Open(%s) failed: %s' % (outfilename, e))
     if args.hash:
         digestor = hashlib.new(args.hash)
     try:
index 143227499f1e41a95d9409cf1b780d4255691bfd..ee74bb8215aa627a779c9071c9dee7abd9183d2b 100755 (executable)
@@ -6,9 +6,6 @@ import os
 import re
 import string
 import sys
-import logging
-
-logger = logging.getLogger(os.path.basename(sys.argv[0]))
 
 parser = argparse.ArgumentParser(
     description='List contents of a manifest')
index 0506381272c856bbc4e1b933aef6f474752d3667..478a74c22f7a1d04ccb83493bc04cd25a307ecff 100755 (executable)
@@ -6,9 +6,6 @@ import os
 import re
 import string
 import sys
-import logging
-
-logger = logging.getLogger(os.path.basename(sys.argv[0]))
 
 parser = argparse.ArgumentParser(
     description='Read manifest on standard input and put normalized manifest on standard output.')
index 58b628145ff4fe0689fc85a9833c5b7cb093d751..ce7f066ec7682ff8b4fe3cdeaa59e0656db68716 100755 (executable)
@@ -6,6 +6,8 @@ import argparse
 import arvados
 from arvados.events import subscribe
 
+logger = logging.getLogger('arvados.arv-ws')
+
 parser = argparse.ArgumentParser()
 parser.add_argument('-u', '--uuid', type=str, default="")
 args = parser.parse_args()
@@ -22,9 +24,7 @@ ws = None
 try:
   ws = subscribe(api, filters, lambda ev: on_message(ev))
   ws.run_forever()
-except KeyboardInterrupt:
-  print '' # don't log it
-except:
-  logging.exception('')
+except Exception:
+  logger.exception('')
   if (ws):
     ws.close_connection()
index dc95d8a9a5ac3f0781ed4aec9fa10d713f66479b..8ac34b8ce2b3b90a334bb08a04aa2a5c6147e541 100644 (file)
@@ -20,10 +20,13 @@ if __name__ == '__main__' and os.path.exists(
 import arvados.api
 import arvados.config
 
-ARV_API_SERVER_DIR = '../../../services/api'
-KEEP_SERVER_DIR = '../../../services/keep'
+SERVICES_SRC_DIR = os.path.join(MY_DIRNAME, '../../../services')
 SERVER_PID_PATH = 'tmp/pids/webrick-test.pid'
 WEBSOCKETS_SERVER_PID_PATH = 'tmp/pids/passenger-test.pid'
+if 'GOPATH' in os.environ:
+    gopaths = os.environ['GOPATH'].split(':')
+    gobins = [os.path.join(path, 'bin') for path in gopaths]
+    os.environ['PATH'] = ':'.join(gobins) + ':' + os.environ['PATH']
 
 def find_server_pid(PID_PATH, wait=10):
     now = time.time()
@@ -64,7 +67,7 @@ def kill_server_pid(PID_PATH, wait=10):
 
 def run(websockets=False, reuse_server=False):
     cwd = os.getcwd()
-    os.chdir(os.path.join(MY_DIRNAME, ARV_API_SERVER_DIR))
+    os.chdir(os.path.join(SERVICES_SRC_DIR, 'api'))
 
     if websockets:
         pid_file = WEBSOCKETS_SERVER_PID_PATH
@@ -116,7 +119,7 @@ def run(websockets=False, reuse_server=False):
 
 def stop():
     cwd = os.getcwd()
-    os.chdir(os.path.join(MY_DIRNAME, ARV_API_SERVER_DIR))
+    os.chdir(os.path.join(SERVICES_SRC_DIR, 'api'))
 
     kill_server_pid(WEBSOCKETS_SERVER_PID_PATH, 0)
     kill_server_pid(SERVER_PID_PATH, 0)
@@ -135,7 +138,7 @@ def stop():
 
 def _start_keep(n, keep_args):
     keep0 = tempfile.mkdtemp()
-    keep_cmd = ["bin/keep",
+    keep_cmd = ["keepstore",
                 "-volumes={}".format(keep0),
                 "-listen=:{}".format(25107+n),
                 "-pid={}".format("tmp/keep{}.pid".format(n))]
@@ -153,15 +156,6 @@ def _start_keep(n, keep_args):
 def run_keep(blob_signing_key=None, enforce_permissions=False):
     stop_keep()
 
-    cwd = os.getcwd()
-    os.chdir(os.path.join(MY_DIRNAME, KEEP_SERVER_DIR))
-    if os.environ.get('GOPATH') == None:
-        os.environ["GOPATH"] = os.getcwd()
-    else:
-        os.environ["GOPATH"] = os.getcwd() + ":" + os.environ["GOPATH"]
-
-    subprocess.call(["./go.sh", "install", "keep"])
-
     if not os.path.exists("tmp"):
         os.mkdir("tmp")
 
@@ -191,8 +185,6 @@ def run_keep(blob_signing_key=None, enforce_permissions=False):
     api.keep_disks().create(body={"keep_disk": {"keep_service_uuid": s1["uuid"] } }).execute()
     api.keep_disks().create(body={"keep_disk": {"keep_service_uuid": s2["uuid"] } }).execute()
 
-    os.chdir(cwd)
-
 def _stop_keep(n):
     kill_server_pid("tmp/keep{}.pid".format(n), 0)
     if os.path.exists("tmp/keep{}.volume".format(n)):
@@ -203,26 +195,12 @@ def _stop_keep(n):
         os.remove("tmp/keep.blob_signing_key")
 
 def stop_keep():
-    cwd = os.getcwd()
-    os.chdir(os.path.join(MY_DIRNAME, KEEP_SERVER_DIR))
-
     _stop_keep(0)
     _stop_keep(1)
 
-    os.chdir(cwd)
-
 def run_keep_proxy(auth):
     stop_keep_proxy()
 
-    cwd = os.getcwd()
-    os.chdir(os.path.join(MY_DIRNAME, KEEP_SERVER_DIR))
-    if os.environ.get('GOPATH') == None:
-        os.environ["GOPATH"] = os.getcwd()
-    else:
-        os.environ["GOPATH"] = os.getcwd() + ":" + os.environ["GOPATH"]
-
-    subprocess.call(["./go.sh", "install", "arvados.org/keepproxy"])
-
     if not os.path.exists("tmp"):
         os.mkdir("tmp")
 
@@ -230,7 +208,8 @@ def run_keep_proxy(auth):
     os.environ["ARVADOS_API_HOST_INSECURE"] = "true"
     os.environ["ARVADOS_API_TOKEN"] = fixture("api_client_authorizations")[auth]["api_token"]
 
-    kp0 = subprocess.Popen(["bin/keepproxy", "-pid=tmp/keepproxy.pid", "-listen=:{}".format(25101)])
+    kp0 = subprocess.Popen(["keepproxy",
+                            "-pid=tmp/keepproxy.pid", "-listen=:{}".format(25101)])
 
     authorize_with("admin")
     api = arvados.api('v1', cache=False)
@@ -238,17 +217,12 @@ def run_keep_proxy(auth):
 
     arvados.config.settings()["ARVADOS_KEEP_PROXY"] = "http://localhost:25101"
 
-    os.chdir(cwd)
-
 def stop_keep_proxy():
-    cwd = os.getcwd()
-    os.chdir(os.path.join(MY_DIRNAME, KEEP_SERVER_DIR))
     kill_server_pid("tmp/keepproxy.pid", 0)
-    os.chdir(cwd)
 
 def fixture(fix):
     '''load a fixture yaml file'''
-    with open(os.path.join(MY_DIRNAME, ARV_API_SERVER_DIR, "test", "fixtures",
+    with open(os.path.join(SERVICES_SRC_DIR, 'api', "test", "fixtures",
                            fix + ".yml")) as f:
         return yaml.load(f.read())
 
index 4c485712ec44b38e2a0009bb01f84ed17853b743..2fd709e3ab2dc0eb7f41a57343c7cc5382e5a249 100644 (file)
@@ -5,6 +5,8 @@ import arvados
 import httplib2
 import json
 import mimetypes
+import os
+import run_test_server
 import unittest
 
 from apiclient.http import RequestMockBuilder
@@ -31,10 +33,10 @@ class ArvadosApiClientTest(unittest.TestCase):
     def setUpClass(cls):
         # The apiclient library has support for mocking requests for
         # testing, but it doesn't extend to the discovery document
-        # itself.  Point it at a known stable discovery document for now.
+        # itself. For now, bring up an API server that will serve
+        # a discovery document.
         # FIXME: Figure out a better way to stub this out.
-        cls.orig_api_host = arvados.config.get('ARVADOS_API_HOST')
-        arvados.config.settings()['ARVADOS_API_HOST'] = 'qr1hi.arvadosapi.com'
+        run_test_server.run()
         mock_responses = {
             'arvados.humans.delete': (cls.response_from_code(500), ""),
             'arvados.humans.get': cls.api_error_response(
@@ -43,16 +45,15 @@ class ArvadosApiClientTest(unittest.TestCase):
                     {'items_available': 0, 'items': []})),
             }
         req_builder = RequestMockBuilder(mock_responses)
-        cls.api = arvados.api('v1', False, requestBuilder=req_builder)
+        cls.api = arvados.api('v1', cache=False,
+                              host=os.environ['ARVADOS_API_HOST'],
+                              token='discovery-doc-only-no-token-needed',
+                              insecure=True,
+                              requestBuilder=req_builder)
 
     @classmethod
     def tearDownClass(cls):
-        if cls.orig_api_host is None:
-            del arvados.config.settings()['ARVADOS_API_HOST']
-        else:
-            arvados.config.settings()['ARVADOS_API_HOST'] = cls.orig_api_host
-        # Prevent other tests from using our mocked API client.
-        arvados.uncache_api('v1')
+        run_test_server.stop()
 
     def test_basic_list(self):
         answer = self.api.humans().list(
index 7a827684a33aaf2686a1b947c9797285d4000ca8..9bc385d2e6645cabaf140a7573c13a7705951090 100644 (file)
@@ -466,10 +466,8 @@ class ArvPutIntegrationTest(unittest.TestCase):
         # to provision the Keep server.
         config_blob_signing_key = None
         for config_file in ['application.yml', 'application.default.yml']:
-            with open(os.path.join(os.path.dirname(__file__),
-                                   run_test_server.ARV_API_SERVER_DIR,
-                                   "config",
-                                   config_file)) as f:
+            with open(os.path.join(run_test_server.SERVICES_SRC_DIR,
+                                   "api", "config", config_file)) as f:
                 rails_config = yaml.load(f.read())
                 for config_section in ['test', 'common']:
                     try:
index 429777e73f29f88128f75ae16f8494a6fed77490..6a9a52b1067f97dfa0a05a047fcf35db86b8b9ec 100644 (file)
@@ -108,17 +108,23 @@ class Arvados
   class Google::APIClient
     def discovery_document(api, version)
       api = api.to_s
-      return @discovery_documents["#{api}:#{version}"] ||=
+      discovery_uri = self.discovery_uri(api, version)
+      discovery_uri_hash = Digest::MD5.hexdigest(discovery_uri)
+      return @discovery_documents[discovery_uri_hash] ||=
         begin
           # fetch new API discovery doc if stale
-          cached_doc = File.expand_path '~/.cache/arvados/discovery_uri.json'
-          if not File.exist?(cached_doc) or (Time.now - File.mtime(cached_doc)) > 86400
+          cached_doc = File.expand_path "~/.cache/arvados/discovery-#{discovery_uri_hash}.json" rescue nil
+          if cached_doc.nil? or not File.exist?(cached_doc) or (Time.now - File.mtime(cached_doc)) > 86400
             response = self.execute!(:http_method => :get,
-                                     :uri => self.discovery_uri(api, version),
+                                     :uri => discovery_uri,
                                      :authenticated => false)
-            FileUtils.makedirs(File.dirname cached_doc)
-            File.open(cached_doc, 'w') do |f|
-              f.puts response.body
+            begin
+              FileUtils.makedirs(File.dirname cached_doc)
+              File.open(cached_doc, 'w') do |f|
+                f.puts response.body
+              end
+            rescue
+              return JSON.load response.body
             end
           end
 
index 8fb6a52eed0f3109066fa86df2a5d157641cd46f..18fce168191a762672c379fe184153a60a5641b7 100644 (file)
@@ -35,13 +35,13 @@ GEM
     addressable (2.3.6)
     andand (1.3.3)
     arel (3.0.3)
-    arvados (0.1.20140708213257)
+    arvados (0.1.20140812162850)
       activesupport (>= 3.2.13)
       andand
       google-api-client (~> 0.6.3)
       json (>= 1.7.7)
       jwt (>= 0.1.5, < 1.0.0)
-    arvados-cli (0.1.20140708213257)
+    arvados-cli (0.1.20140812162850)
       activesupport (~> 3.2, >= 3.2.13)
       andand (~> 1.3, >= 1.3.3)
       arvados (~> 0.1.0)
@@ -69,7 +69,7 @@ GEM
       coffee-script-source
       execjs
     coffee-script-source (1.7.0)
-    curb (0.8.5)
+    curb (0.8.6)
     daemon_controller (1.2.0)
     database_cleaner (1.2.0)
     erubis (2.7.0)
@@ -95,7 +95,7 @@ GEM
     highline (1.6.21)
     hike (1.2.3)
     httpauth (0.2.1)
-    i18n (0.6.9)
+    i18n (0.6.11)
     journey (1.0.4)
     jquery-rails (3.1.0)
       railties (>= 3.0, < 5.0)
@@ -110,7 +110,7 @@ GEM
       mime-types (~> 1.16)
       treetop (~> 1.4.8)
     mime-types (1.25.1)
-    multi_json (1.10.0)
+    multi_json (1.10.1)
     multipart-post (1.2.0)
     net-scp (1.2.0)
       net-ssh (>= 2.6.5)
@@ -125,7 +125,7 @@ GEM
       jwt (~> 0.1.4)
       multi_json (~> 1.0)
       rack (~> 1.2)
-    oj (2.9.0)
+    oj (2.10.0)
     omniauth (1.1.1)
       hashie (~> 1.2)
       rack
@@ -209,7 +209,7 @@ GEM
     uglifier (2.5.0)
       execjs (>= 0.3.0)
       json (>= 1.8.0)
-    uuidtools (2.1.4)
+    uuidtools (2.1.5)
     websocket-driver (0.3.2)
 
 PLATFORMS
index 4a1a7d46dba7f1e7e2a765119c398ca96ae74685..d3b5c6b14725d9549deb1423ac443757366b5abe 100644 (file)
@@ -203,7 +203,17 @@ class ApplicationController < ActionController::Base
       end
     end
 
-    @objects = @objects.select(@select.map { |s| "#{table_name}.#{ActiveRecord::Base.connection.quote_column_name s.to_s}" }.join ", ") if @select
+    if @select
+      # Map attribute names in @select to real column names, resolve
+      # those to fully-qualified SQL column names, and pass the
+      # resulting string to the select method.
+      api_column_map = model_class.attributes_required_columns
+      columns_list = @select.
+        flat_map { |attr| api_column_map[attr] }.
+        uniq.
+        map { |s| "#{table_name}.#{ActiveRecord::Base.connection.quote_column_name s}" }
+      @objects = @objects.select(columns_list.join(", "))
+    end
     @objects = @objects.order(@orders.join ", ") if @orders.any?
     @objects = @objects.limit(@limit)
     @objects = @objects.offset(@offset)
index a0c64aa6e6adc4ad30dc4094681a45d3ce597ecb..b65fa5b962ab76b9c26e52a0443cb4586f90a043 100644 (file)
@@ -45,16 +45,9 @@ class Arvados::V1::CollectionsController < ApplicationController
     end
 
     # Remove any permission signatures from the manifest.
-    resource_attrs[:manifest_text]
-      .gsub!(/ [[:xdigit:]]{32}(\+[[:digit:]]+)?(\+\S+)/) { |word|
-      word.strip!
-      loc = Locator.parse(word)
-      if loc
-        " " + loc.without_signature.to_s
-      else
-        " " + word
-      end
-    }
+    munge_manifest_locators(resource_attrs[:manifest_text]) do |loc|
+      loc.without_signature.to_s
+    end
 
     # Save the collection with the stripped manifest.
     act_as_system_user do
@@ -91,24 +84,13 @@ class Arvados::V1::CollectionsController < ApplicationController
   end
 
   def show
-    if current_api_client_authorization
-      signing_opts = {
-        key: Rails.configuration.blob_signing_key,
-        api_token: current_api_client_authorization.api_token,
-        ttl: Rails.configuration.blob_signing_ttl,
-      }
-      @object[:manifest_text]
-        .gsub!(/ [[:xdigit:]]{32}(\+[[:digit:]]+)?(\+\S+)/) { |word|
-        word.strip!
-        loc = Locator.parse(word)
-        if loc
-          " " + Blob.sign_locator(word, signing_opts)
-        else
-          " " + word
-        end
-      }
-    end
-    render json: @object.as_api_response(:with_data)
+    sign_manifests(@object[:manifest_text])
+    super
+  end
+
+  def index
+    sign_manifests(*@objects.map { |c| c[:manifest_text] })
+    super
   end
 
   def collection_uuid(uuid)
@@ -149,7 +131,7 @@ class Arvados::V1::CollectionsController < ApplicationController
 
     logger.debug "visiting #{uuid}"
 
-    if m  
+    if m
       # uuid is a collection
       Collection.readable_by(current_user).where(uuid: uuid).each do |c|
         visited[uuid] = c.as_api_response
@@ -166,7 +148,7 @@ class Arvados::V1::CollectionsController < ApplicationController
       Job.readable_by(current_user).where(log: uuid).each do |job|
         generate_provenance_edges(visited, job.uuid)
       end
-      
+
     else
       # uuid is something else
       rsc = ArvadosModel::resource_class_for_uuid uuid
@@ -208,7 +190,7 @@ class Arvados::V1::CollectionsController < ApplicationController
 
     logger.debug "visiting #{uuid}"
 
-    if m  
+    if m
       # uuid is a collection
       Collection.readable_by(current_user).where(uuid: uuid).each do |c|
         visited[uuid] = c.as_api_response
@@ -226,7 +208,7 @@ class Arvados::V1::CollectionsController < ApplicationController
       Job.readable_by(current_user).where(["jobs.script_parameters like ?", "%#{uuid}%"]).each do |job|
         generate_used_by_edges(visited, job.uuid)
       end
-      
+
     else
       # uuid is something else
       rsc = ArvadosModel::resource_class_for_uuid uuid
@@ -258,7 +240,27 @@ class Arvados::V1::CollectionsController < ApplicationController
     render json: visited
   end
 
+  def self.munge_manifest_locators(manifest)
+    # Given a manifest text and a block, yield each locator,
+    # and replace it with whatever the block returns.
+    manifest.andand.gsub!(/ [[:xdigit:]]{32}(\+[[:digit:]]+)?(\+\S+)/) do |word|
+      if loc = Locator.parse(word.strip)
+        " " + yield(loc)
+      else
+        " " + word
+      end
+    end
+  end
+
   protected
+
+  def find_objects_for_index
+    # Omit manifest_text from index results unless expressly selected.
+    @select ||= model_class.api_accessible_attributes(:user).
+      map { |attr_spec| attr_spec.first.to_s } - ["manifest_text"]
+    super
+  end
+
   def find_object_by_uuid
     super
     if !@object and !params[:uuid].match(/^[0-9a-f]+\+\d+$/)
@@ -277,4 +279,23 @@ class Arvados::V1::CollectionsController < ApplicationController
       end
     end
   end
+
+  def munge_manifest_locators(manifest, &block)
+    self.class.munge_manifest_locators(manifest, &block)
+  end
+
+  def sign_manifests(*manifests)
+    if current_api_client_authorization
+      signing_opts = {
+        key: Rails.configuration.blob_signing_key,
+        api_token: current_api_client_authorization.api_token,
+        ttl: Rails.configuration.blob_signing_ttl,
+      }
+      manifests.each do |text|
+        munge_manifest_locators(text) do |loc|
+          Blob.sign_locator(loc.to_s, signing_opts)
+        end
+      end
+    end
+  end
 end
index 6bd2c4d4912d1504e3dfa1fe2c4e5dd50db00b49..69778293a4096138cba2607599a2c9d47efafe6b 100644 (file)
@@ -162,6 +162,24 @@ class Arvados::V1::JobsController < ApplicationController
     index
   end
 
+  def self._create_requires_parameters
+    (super rescue {}).
+      merge({
+              find_or_create: {
+                type: 'boolean', required: false, default: false
+              },
+              filters: {
+                type: 'array', required: false
+              },
+              minimum_script_version: {
+                type: 'string', required: false
+              },
+              exclude_script_versions: {
+                type: 'array', required: false
+              },
+            })
+  end
+
   def self._queue_requires_parameters
     self._index_requires_parameters
   end
index 94c172da326ae3e4936b01aceea3f62a35ffee22..0452c523bf3a21348264350c8f9deba96bfefedd 100644 (file)
@@ -18,7 +18,9 @@ class Arvados::V1::RepositoriesController < ApplicationController
         if ArvadosModel::resource_class_for_uuid(perm.tail_uuid) == Group
           @users.each do |user_uuid, user|
             user.group_permissions.each do |group_uuid, perm_mask|
-              if perm_mask[:write]
+              if perm_mask[:manage]
+                perms << {name: 'can_manage', user_uuid: user_uuid}
+              elsif perm_mask[:write]
                 perms << {name: 'can_write', user_uuid: user_uuid}
               elsif perm_mask[:read]
                 perms << {name: 'can_read', user_uuid: user_uuid}
@@ -57,7 +59,11 @@ class Arvados::V1::RepositoriesController < ApplicationController
     end
     @repo_info.values.each do |repo_users|
       repo_users[:user_permissions].each do |user_uuid,perms|
-        if perms['can_write']
+        if perms['can_manage']
+          perms[:gitolite_permissions] = 'RW'
+          perms['can_write'] = true
+          perms['can_read'] = true
+        elsif perms['can_write']
           perms[:gitolite_permissions] = 'RW'
           perms['can_read'] = true
         elsif perms['can_read']
index 4470291504b87b0ef436cc99c63a087dfad632f0..a2a5759c739bc1af3e83b586bd8dff96f1268e79 100644 (file)
@@ -332,32 +332,37 @@ class Arvados::V1::SchemaController < ApplicationController
           }.compact.first
           if httpMethod and
               route.defaults[:controller] == 'arvados/v1/' + k.to_s.underscore.pluralize and
-              !d_methods[action.to_sym] and
-              ctl_class.action_methods.include? action and
-              ![:show, :index, :destroy].include?(action.to_sym)
-            method = {
-              id: "arvados.#{k.to_s.underscore.pluralize}.#{action}",
-              path: route.path.spec.to_s.sub('/arvados/v1/','').sub('(.:format)','').sub(/:(uu)?id/,'{uuid}'),
-              httpMethod: httpMethod,
-              description: "#{route.defaults[:action]} #{k.to_s.underscore.pluralize}",
-              parameters: {},
-              response: {
-                "$ref" => (action == 'index' ? "#{k.to_s}List" : k.to_s)
-              },
-              scopes: [
-                       "https://api.clinicalfuture.com/auth/arvados"
-                      ]
-            }
-            route.segment_keys.each do |key|
-              if key != :format
-                key = :uuid if key == :id
-                method[:parameters][key] = {
-                  type: "string",
-                  description: "",
-                  required: true,
-                  location: "path"
-                }
+              ctl_class.action_methods.include? action
+            if !d_methods[action.to_sym]
+              method = {
+                id: "arvados.#{k.to_s.underscore.pluralize}.#{action}",
+                path: route.path.spec.to_s.sub('/arvados/v1/','').sub('(.:format)','').sub(/:(uu)?id/,'{uuid}'),
+                httpMethod: httpMethod,
+                description: "#{action} #{k.to_s.underscore.pluralize}",
+                parameters: {},
+                response: {
+                  "$ref" => (action == 'index' ? "#{k.to_s}List" : k.to_s)
+                },
+                scopes: [
+                         "https://api.clinicalfuture.com/auth/arvados"
+                        ]
+              }
+              route.segment_keys.each do |key|
+                if key != :format
+                  key = :uuid if key == :id
+                  method[:parameters][key] = {
+                    type: "string",
+                    description: "",
+                    required: true,
+                    location: "path"
+                  }
+                end
               end
+            else
+              # We already built a generic method description, but we
+              # might find some more required parameters through
+              # introspection.
+              method = d_methods[action.to_sym]
             end
             if ctl_class.respond_to? "_#{action}_requires_parameters".to_sym
               ctl_class.send("_#{action}_requires_parameters".to_sym).each do |k, v|
@@ -378,7 +383,7 @@ class Arvados::V1::SchemaController < ApplicationController
                 end
               end
             end
-            d_methods[route.defaults[:action].to_sym] = method
+            d_methods[action.to_sym] = method
           end
         end
       end
index a044fb7bfd3ff325cf9678375af2e551d52f78ae..271299b6c97608c68b14d0500e86d779ba610b3d 100644 (file)
@@ -118,7 +118,21 @@ class Arvados::V1::UsersController < ApplicationController
 
   def self._setup_requires_parameters
     {
-      send_notification_email: { type: 'boolean', required: true },
+      user: {
+        type: 'object', required: false
+      },
+      openid_prefix: {
+        type: 'string', required: false
+      },
+      repo_name: {
+        type: 'string', required: false
+      },
+      vm_uuid: {
+        type: 'string', required: false
+      },
+      send_notification_email: {
+        type: 'boolean', required: false, default: false
+      },
     }
   end
 
diff --git a/services/api/app/mailers/profile_notifier.rb b/services/api/app/mailers/profile_notifier.rb
new file mode 100644 (file)
index 0000000..13e3b34
--- /dev/null
@@ -0,0 +1,8 @@
+class ProfileNotifier < ActionMailer::Base
+  default from: Rails.configuration.admin_notifier_email_from
+
+  def profile_created(user, address)
+    @user = user
+    mail(to: address, subject: "Profile created by #{@user.email}")
+  end
+end
index 1247e365b1fd5f65e86993a75b412eb6c2743ea9..539f69d6cabd5afac1e0d3e4b19a8337bb231897 100644 (file)
@@ -68,6 +68,28 @@ class ArvadosModel < ActiveRecord::Base
     self.columns.select { |col| col.name == attr.to_s }.first
   end
 
+  def self.attributes_required_columns
+    # This method returns a hash.  Each key is the name of an API attribute,
+    # and it's mapped to a list of database columns that must be fetched
+    # to generate that attribute.
+    # This implementation generates a simple map of attributes to
+    # matching column names.  Subclasses can override this method
+    # to specify that method-backed API attributes need to fetch
+    # specific columns from the database.
+    all_columns = columns.map(&:name)
+    api_column_map = Hash.new { |hash, key| hash[key] = [] }
+    methods.grep(/^api_accessible_\w+$/).each do |method_name|
+      next if method_name == :api_accessible_attributes
+      send(method_name).each_pair do |api_attr_name, col_name|
+        col_name = col_name.to_s
+        if all_columns.include?(col_name)
+          api_column_map[api_attr_name.to_s] |= [col_name]
+        end
+      end
+    end
+    api_column_map
+  end
+
   # Return nil if current user is not allowed to see the list of
   # writers. Otherwise, return a list of user_ and group_uuids with
   # write permission. (If not returning nil, current_user is always in
index 50dd42cd1cce1cccaa4c5fa0145e37eea22d583f..ac845f50adc71e5e95d4eded09779be72ffdca46 100644 (file)
@@ -6,10 +6,13 @@ class Collection < ArvadosModel
   api_accessible :user, extend: :common do |t|
     t.add :data_size
     t.add :files
+    t.add :manifest_text
   end
 
-  api_accessible :with_data, extend: :user do |t|
-    t.add :manifest_text
+  def self.attributes_required_columns
+    super.merge({ "data_size" => ["manifest_text"],
+                  "files" => ["manifest_text"],
+                })
   end
 
   def redundancy_status
index fc445ae24edb5977a2092f7d92af86c300eb21ad..1ef0b797c044574a2b9eba72e1fcbd0aeba77e16 100644 (file)
@@ -191,7 +191,7 @@ class Job < ArvadosModel
       if self.cancelled_at and not self.cancelled_at_was
         self.cancelled_at = Time.now
         self.cancelled_by_user_uuid = current_user.uuid
-        self.cancelled_by_client_uuid = current_api_client.uuid
+        self.cancelled_by_client_uuid = current_api_client.andand.uuid
         @need_crunch_dispatch_trigger = true
       else
         self.cancelled_at = self.cancelled_at_was
index 4fc6204eb504cf941e14abba0d15113dd1935ccc..19e84dca7cf4d37288882cc46037c3494a6e8c34 100644 (file)
@@ -13,6 +13,8 @@ class User < ArvadosModel
   before_create :check_auto_admin
   after_create :add_system_group_permission_link
   after_create :send_admin_notifications
+  after_update :send_profile_created_notification
+
 
   has_many :authorized_keys, :foreign_key => :authorized_user_uuid, :primary_key => :uuid
 
@@ -151,47 +153,35 @@ class User < ArvadosModel
   # delete user signatures, login, repo, and vm perms, and mark as inactive
   def unsetup
     # delete oid_login_perms for this user
-    oid_login_perms = Link.where(tail_uuid: self.email,
-                                 link_class: 'permission',
-                                 name: 'can_login')
-    oid_login_perms.each do |perm|
-      Link.delete perm
-    end
+    Link.destroy_all(tail_uuid: self.email,
+                     link_class: 'permission',
+                     name: 'can_login')
 
     # delete repo_perms for this user
-    repo_perms = Link.where(tail_uuid: self.uuid,
-                            link_class: 'permission',
-                            name: 'can_manage')
-    repo_perms.each do |perm|
-      Link.delete perm
-    end
+    Link.destroy_all(tail_uuid: self.uuid,
+                     link_class: 'permission',
+                     name: 'can_manage')
 
     # delete vm_login_perms for this user
-    vm_login_perms = Link.where(tail_uuid: self.uuid,
-                                link_class: 'permission',
-                                name: 'can_login')
-    vm_login_perms.each do |perm|
-      Link.delete perm
-    end
+    Link.destroy_all(tail_uuid: self.uuid,
+                     link_class: 'permission',
+                     name: 'can_login')
 
-    # delete "All users' group read permissions for this user
+    # delete "All users" group read permissions for this user
     group = Group.where(name: 'All users').select do |g|
       g[:uuid].match /-f+$/
     end.first
-    group_perms = Link.where(tail_uuid: self.uuid,
-                             head_uuid: group[:uuid],
-                             link_class: 'permission',
-                             name: 'can_read')
-    group_perms.each do |perm|
-      Link.delete perm
-    end
+    Link.destroy_all(tail_uuid: self.uuid,
+                     head_uuid: group[:uuid],
+                     link_class: 'permission',
+                     name: 'can_read')
 
     # delete any signatures by this user
-    signed_uuids = Link.where(link_class: 'signature',
-                              tail_uuid: self.uuid)
-    signed_uuids.each do |sign|
-      Link.delete sign
-    end
+    Link.destroy_all(link_class: 'signature',
+                     tail_uuid: self.uuid)
+
+    # delete user preferences (including profile)
+    self.prefs = {}
 
     # mark the user as inactive
     self.is_active = false
@@ -429,4 +419,15 @@ class User < ArvadosModel
       AdminNotifier.new_inactive_user(self).deliver
     end
   end
+
+  # Send notification if the user saved profile for the first time
+  def send_profile_created_notification
+    if self.prefs_changed?
+      if self.prefs_was.andand.empty? || !self.prefs_was.andand['profile']
+        profile_notification_address = Rails.configuration.user_profile_notification_address
+        ProfileNotifier.profile_created(self, profile_notification_address).deliver if profile_notification_address
+      end
+    end
+  end
+
 end
diff --git a/services/api/app/views/profile_notifier/profile_created.text.erb b/services/api/app/views/profile_notifier/profile_created.text.erb
new file mode 100644 (file)
index 0000000..73adf28
--- /dev/null
@@ -0,0 +1,2 @@
+Profile created by user <%=@user.full_name%> <%=@user.email%>
+User's profile: <%=@user.prefs['profile']%>
index c32900cfaf01e73f9ab7cd542547020233528360..ddcaa57302b5ceec5437ab1b20cb09271b2f24ea 100644 (file)
@@ -43,6 +43,9 @@ test:
   secret_token: <%= rand(2**512).to_s(36) %>
   blob_signing_key: zfhgfenhffzltr9dixws36j1yhksjoll2grmku38mi7yxd66h5j4q9w4jzanezacp8s6q0ro3hxakfye02152hncy6zml2ed0uc
 
+  # email address to which mail should be sent when the user creates profile for the first time
+  user_profile_notification_address: arvados@example.com
+
 common:
   uuid_prefix: <%= Digest::MD5.hexdigest(`hostname`).to_i(16).to_s(36)[0..4] %>
 
@@ -173,3 +176,6 @@ common:
   # to sign session tokens. IMPORTANT: This is a site secret. It
   # should be at least 50 characters.
   secret_token: ~
+
+  # email address to which mail should be sent when the user creates profile for the first time
+  user_profile_notification_address: false
index 5a990f0cb41ad30bd8832399a0d40c1839f2460e..ddc0f3a89928643fbc4b448b837b4e122fd0fbc0 100755 (executable)
@@ -68,25 +68,28 @@ class Dispatcher
       begin
         sinfo.split("\n").
           each do |line|
-          re = line.match /(\S+?):+(idle|alloc|down)/
+          re = line.match /(\S+?):+(idle|alloc|down)?/
           next if !re
 
+          _, node_name, node_state = *re
+          node_state = 'down' unless %w(idle alloc down).include? node_state
+
           # sinfo tells us about a node N times if it is shared by N partitions
-          next if node_seen[re[1]]
-          node_seen[re[1]] = true
+          next if node_seen[node_name]
+          node_seen[node_name] = true
 
           # update our database (and cache) when a node's state changes
-          if @node_state[re[1]] != re[2]
-            @node_state[re[1]] = re[2]
-            node = Node.where('hostname=?', re[1]).order(:last_ping_at).last
+          if @node_state[node_name] != node_state
+            @node_state[node_name] = node_state
+            node = Node.where('hostname=?', node_name).order(:last_ping_at).last
             if node
-              $stderr.puts "dispatch: update #{re[1]} state to #{re[2]}"
-              node.info['slurm_state'] = re[2]
+              $stderr.puts "dispatch: update #{node_name} state to #{node_state}"
+              node.info['slurm_state'] = node_state
               if not node.save
                 $stderr.puts "dispatch: failed to update #{node.uuid}: #{node.errors.messages}"
               end
-            elsif re[2] != 'down'
-              $stderr.puts "dispatch: sinfo reports '#{re[1]}' is not down, but no node has that name"
+            elsif node_state != 'down'
+              $stderr.puts "dispatch: sinfo reports '#{node_name}' is not down, but no node has that name"
             end
           end
         end
@@ -375,11 +378,11 @@ class Dispatcher
       $stderr.puts j_done[:stderr_buf] + "\n"
     end
 
-    # Wait the thread
-    j_done[:wait_thr].value
+    # Wait the thread (returns a Process::Status)
+    exit_status = j_done[:wait_thr].value
 
     jobrecord = Job.find_by_uuid(job_done.uuid)
-    if jobrecord.started_at
+    if exit_status.to_i != 75 and jobrecord.started_at
       # Clean up state fields in case crunch-job exited without
       # putting the job in a suitable "finished" state.
       jobrecord.running = false
@@ -392,7 +395,18 @@ class Dispatcher
       # Don't fail the job if crunch-job didn't even get as far as
       # starting it. If the job failed to run due to an infrastructure
       # issue with crunch-job or slurm, we want the job to stay in the
-      # queue.
+      # queue. If crunch-job exited after losing a race to another
+      # crunch-job process, it exits 75 and we should leave the job
+      # record alone so the winner of the race do its thing.
+      #
+      # There is still an unhandled race condition: If our crunch-job
+      # process is about to lose a race with another crunch-job
+      # process, but crashes before getting to its "exit 75" (for
+      # example, "cannot fork" or "cannot reach API server") then we
+      # will assume incorrectly that it's our process's fault
+      # jobrecord.started_at is non-nil, and mark the job as failed
+      # even though the winner of the race is probably still doing
+      # fine.
     end
 
     # Invalidate the per-job auth token
index 4fa41621d7b9b8f300c38b117c0fa345b9e9a40f..c0bce93daa9e8fe83c784ad0d10a9365625e6953 100644 (file)
@@ -162,3 +162,15 @@ job_reader:
   user: job_reader
   api_token: e99512cdc0f3415c2428b9758f33bdfb07bc3561b00e86e7e6
   expires_at: 2038-01-01 00:00:00
+
+active_no_prefs:
+  api_client: untrusted
+  user: active_no_prefs
+  api_token: 3kg612cdc0f3415c2428b9758f33bdfb07bc3561b00e86qdmi
+  expires_at: 2038-01-01 00:00:00
+
+active_no_prefs_profile:
+  api_client: untrusted
+  user: active_no_prefs_profile
+  api_token: 3kg612cdc0f3415c242856758f33bdfb07bc3561b00e86qdmi
+  expires_at: 2038-01-01 00:00:00
index e142faeb5bde344a8385f9b50b30f96a797049c6..3274381b60b020dfac475af00f30950d887d0163 100644 (file)
@@ -4,6 +4,7 @@ running:
   cancelled_at: ~
   cancelled_by_user_uuid: ~
   cancelled_by_client_uuid: ~
+  created_at: <%= 3.minute.ago.to_s(:db) %>
   started_at: <%= 3.minute.ago.to_s(:db) %>
   finished_at: ~
   script_version: 1de84a854e2b440dc53bf42f8548afa4c17da332
@@ -26,6 +27,7 @@ running_cancelled:
   cancelled_at: <%= 1.minute.ago.to_s(:db) %>
   cancelled_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   cancelled_by_client_uuid: zzzzz-ozdt8-obw7foaks3qjyej
+  created_at: <%= 4.minute.ago.to_s(:db) %>
   started_at: <%= 3.minute.ago.to_s(:db) %>
   finished_at: ~
   script_version: 1de84a854e2b440dc53bf42f8548afa4c17da332
@@ -49,6 +51,7 @@ uses_nonexistent_script_version:
   cancelled_by_user_uuid: ~
   cancelled_by_client_uuid: ~
   script_version: 7def43a4d3f20789dda4700f703b5514cc3ed250
+  created_at: <%= 5.minute.ago.to_s(:db) %>
   started_at: <%= 3.minute.ago.to_s(:db) %>
   finished_at: <%= 2.minute.ago.to_s(:db) %>
   running: false
@@ -73,6 +76,7 @@ foobar:
   script_version: 7def43a4d3f20789dda4700f703b5514cc3ed250
   script_parameters:
     input: 1f4b0bc7583c2a7f9102c395f4ffc5e3+45
+  created_at: <%= 4.minute.ago.to_s(:db) %>
   started_at: <%= 3.minute.ago.to_s(:db) %>
   finished_at: <%= 2.minute.ago.to_s(:db) %>
   running: false
@@ -98,6 +102,7 @@ barbaz:
   script_parameters:
     input: fa7aeb5140e2848d39b416daeef4ffc5+45
     an_integer: 1
+  created_at: <%= 4.minute.ago.to_s(:db) %>
   started_at: <%= 3.minute.ago.to_s(:db) %>
   finished_at: <%= 2.minute.ago.to_s(:db) %>
   running: false
@@ -116,6 +121,7 @@ barbaz:
 
 previous_job_run:
   uuid: zzzzz-8i9sb-cjs4pklxxjykqqq
+  created_at: <%= 14.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   repository: foo
   script: hash
@@ -128,6 +134,7 @@ previous_job_run:
 
 previous_docker_job_run:
   uuid: zzzzz-8i9sb-k6emstgk4kw4yhi
+  created_at: <%= 14.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   repository: foo
   script: hash
@@ -141,6 +148,7 @@ previous_docker_job_run:
 
 previous_job_run_no_output:
   uuid: zzzzz-8i9sb-cjs4pklxxjykppp
+  created_at: <%= 14.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   repository: foo
   script: hash
@@ -153,6 +161,7 @@ previous_job_run_no_output:
 
 nondeterminisic_job_run:
   uuid: zzzzz-8i9sb-cjs4pklxxjykyyy
+  created_at: <%= 14.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   repository: foo
   script: hash2
@@ -165,6 +174,7 @@ nondeterminisic_job_run:
 
 nearly_finished_job:
   uuid: zzzzz-8i9sb-2gx6rz0pjl033w3
+  created_at: <%= 14.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   repository: arvados
   script: doesnotexist
@@ -184,6 +194,7 @@ nearly_finished_job:
 
 queued:
   uuid: zzzzz-8i9sb-grx15v5mjnsyxk7
+  created_at: <%= 1.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   cancelled_at: ~
   cancelled_by_user_uuid: ~
index f73558df401a3fd85fdced9cdb944c970dc26d08..7bfc2e1b3fe65d4882e2528eb6efaba07f858d6f 100644 (file)
@@ -2,11 +2,13 @@ new_pipeline:
   state: New
   uuid: zzzzz-d1hrv-f4gneyn6br1xize
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: <%= 1.minute.ago.to_s(:db) %>
 
 has_component_with_no_script_parameters:
   state: Ready
   uuid: zzzzz-d1hrv-1xfj6xkicf2muk2
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: <%= 10.minute.ago.to_s(:db) %>
   components:
    foo:
     script: foo
@@ -17,6 +19,7 @@ has_component_with_empty_script_parameters:
   state: Ready
   uuid: zzzzz-d1hrv-jq16l10gcsnyumo
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: <%= 3.minute.ago.to_s(:db) %>
   components:
    foo:
     script: foo
@@ -27,6 +30,7 @@ has_job:
   state: Ready
   uuid: zzzzz-d1hrv-1yfj6xkidf2muk3
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: <%= 3.1.minute.ago.to_s(:db) %>
   components:
    foo:
     script: foo
@@ -41,6 +45,7 @@ components_is_jobspec:
   # Helps test that clients cope with funny-shaped components.
   # For an example, see #3321.
   uuid: zzzzz-d1hrv-jobspeccomponts
+  created_at: <%= 30.minute.ago.to_s(:db) %>
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   created_at: 2014-04-14 12:35:04 -0400
   updated_at: 2014-04-14 12:35:04 -0400
index 0e02b7d8f1f638d7e4397c67e2b40c18f2566251..5c688363181c2bb19ab740bb5cff6305e53260c8 100644 (file)
@@ -9,7 +9,10 @@ admin:
   identity_url: https://admin.openid.local
   is_active: true
   is_admin: true
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: IT
 
 miniadmin:
   owner_uuid: zzzzz-tpzed-000000000000000
@@ -20,7 +23,10 @@ miniadmin:
   identity_url: https://miniadmin.openid.local
   is_active: true
   is_admin: false
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: IT
 
 rominiadmin:
   owner_uuid: zzzzz-tpzed-000000000000000
@@ -31,7 +37,10 @@ rominiadmin:
   identity_url: https://rominiadmin.openid.local
   is_active: true
   is_admin: false
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: IT
 
 active:
   owner_uuid: zzzzz-tpzed-000000000000000
@@ -42,7 +51,10 @@ active:
   identity_url: https://active-user.openid.local
   is_active: true
   is_admin: false
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: Computational biologist
 
 project_viewer:
   owner_uuid: zzzzz-tpzed-000000000000000
@@ -53,7 +65,10 @@ project_viewer:
   identity_url: https://project-viewer.openid.local
   is_active: true
   is_admin: false
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: Computational biologist
 
 future_project_user:
   # Workbench tests give this user permission on aproject.
@@ -64,7 +79,10 @@ future_project_user:
   identity_url: https://future-project-user.openid.local
   is_active: true
   is_admin: false
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: Computational biologist
 
 spectator:
   owner_uuid: zzzzz-tpzed-000000000000000
@@ -75,7 +93,10 @@ spectator:
   identity_url: https://spectator.openid.local
   is_active: true
   is_admin: false
-  prefs: {}
+  prefs:
+    profile:
+      organization: example.com
+      role: Computational biologist
 
 inactive_uninvited:
   owner_uuid: zzzzz-tpzed-000000000000000
@@ -129,4 +150,30 @@ job_reader:
   identity_url: https://spectator.openid.local
   is_active: true
   is_admin: false
+  prefs:
+    profile:
+      organization: example.com
+      role: Computational biologist
+
+active_no_prefs:
+  owner_uuid: zzzzz-tpzed-000000000000000
+  uuid: zzzzz-tpzed-a46c42d1td4aoj4
+  email: active_no_prefs@arvados.local
+  first_name: NoPrefs
+  last_name: NoProfile
+  identity_url: https://active_no_prefs.openid.local
+  is_active: true
+  is_admin: false
   prefs: {}
+
+active_no_prefs_profile:
+  owner_uuid: zzzzz-tpzed-000000000000000
+  uuid: zzzzz-tpzed-a46c98d1td4aoj4
+  email: active_no_prefs_profile@arvados.local
+  first_name: HasPrefs
+  last_name: NoProfile
+  identity_url: https://active_no_prefs_profile.openid.local
+  is_active: true
+  is_admin: false
+  prefs:
+    test: abc
index e4bbd5cd25d0506af16b21b79d02487627fc852f..9c7f4886affa7eeb559443e6364210ee80254700 100644 (file)
@@ -21,7 +21,52 @@ class Arvados::V1::CollectionsControllerTest < ActionController::TestCase
     authorize_with :active
     get :index
     assert_response :success
-    assert_not_nil assigns(:objects)
+    assert(assigns(:objects).andand.any?, "no Collections returned in index")
+    refute(json_response["items"].any? { |c| c.has_key?("manifest_text") },
+           "basic Collections index included manifest_text")
+  end
+
+  test "can get non-database fields via index select" do
+    authorize_with :active
+    get(:index, filters: [["uuid", "=", collections(:foo_file).uuid]],
+        select: %w(uuid owner_uuid files))
+    assert_response :success
+    assert_equal(1, json_response["items"].andand.size,
+                 "wrong number of items returned for index")
+    assert_equal([[".", "foo", 3]], json_response["items"].first["files"],
+                 "wrong file list in index result")
+  end
+
+  test "can select only non-database fields for index" do
+    authorize_with :active
+    get(:index, select: %w(data_size files))
+    assert_response :success
+    assert(json_response["items"].andand.any?, "no items found in index")
+    json_response["items"].each do |coll|
+      assert_equal(coll["data_size"],
+                   coll["files"].inject(0) { |size, fspec| size + fspec.last },
+                   "mismatch between data size and file list")
+    end
+  end
+
+  test "index with manifest_text selected returns signed locators" do
+    columns = %w(uuid owner_uuid data_size files manifest_text)
+    authorize_with :active
+    get :index, select: columns
+    assert_response :success
+    assert(assigns(:objects).andand.any?,
+           "no Collections returned for index with columns selected")
+    json_response["items"].each do |coll|
+      assert_equal(columns, columns & coll.keys,
+                   "Collections index did not respect selected columns")
+      loc_regexp = / [[:xdigit:]]{32}\+\d+\S+/
+      pos = 0
+      while match = loc_regexp.match(coll["manifest_text"], pos)
+        assert_match(/\+A[[:xdigit:]]+@[[:xdigit:]]{8}\b/, match.to_s,
+                     "Locator in manifest_text was not signed")
+        pos = match.end(0)
+      end
+    end
   end
 
   [0,1,2].each do |limit|
index 28367831e18a48c9b954355626624905fb67eda8..1a8e94fcbe74ac77baa323436099b82508225e2a 100644 (file)
@@ -734,6 +734,14 @@ class Arvados::V1::UsersControllerTest < ActionController::TestCase
 
     verify_link_existence response_user['uuid'], response_user['email'],
           false, false, false, false, false
+
+    active_user = User.find_by_uuid(users(:active).uuid)
+    readable_groups = active_user.groups_i_can(:read)
+    all_users_group = Group.all.collect(&:uuid).select { |g| g.match /-f+$/ }
+    refute_includes(readable_groups, all_users_group,
+                    "active user can read All Users group after being deactivated")
+    assert_equal(false, active_user.is_invited,
+                 "active user is_invited after being deactivated & reloaded")
   end
 
   test "setup user with send notification param false and verify no email" do
@@ -842,6 +850,71 @@ class Arvados::V1::UsersControllerTest < ActionController::TestCase
     check_active_users_index
   end
 
+  test "update active_no_prefs user profile and expect notification email" do
+    authorize_with :admin
+
+    put :update, {
+      id: users(:active_no_prefs).uuid,
+      user: {
+        prefs: {:profile => {'organization' => 'example.com'}}
+      }
+    }
+    assert_response :success
+
+    found_email = false
+    ActionMailer::Base.deliveries.andand.each do |email|
+      if email.subject == "Profile created by #{users(:active_no_prefs).email}"
+        found_email = true
+        break
+      end
+    end
+    assert_equal true, found_email, 'Expected email after creating profile'
+  end
+
+  test "update active_no_prefs_profile user profile and expect notification email" do
+    authorize_with :admin
+
+    user = {}
+    user[:prefs] = users(:active_no_prefs_profile).prefs
+    user[:prefs][:profile] = {:profile => {'organization' => 'example.com'}}
+    put :update, {
+      id: users(:active_no_prefs_profile).uuid,
+      user: user
+    }
+    assert_response :success
+
+    found_email = false
+    ActionMailer::Base.deliveries.andand.each do |email|
+      if email.subject == "Profile created by #{users(:active_no_prefs_profile).email}"
+        found_email = true
+        break
+      end
+    end
+    assert_equal true, found_email, 'Expected email after creating profile'
+  end
+
+  test "update active user profile and expect no notification email" do
+    authorize_with :admin
+
+    put :update, {
+      id: users(:active).uuid,
+      user: {
+        prefs: {:profile => {'organization' => 'anotherexample.com'}}
+      }
+    }
+    assert_response :success
+
+    found_email = false
+    ActionMailer::Base.deliveries.andand.each do |email|
+      if email.subject == "Profile created by #{users(:active).email}"
+        found_email = true
+        break
+      end
+    end
+    assert_equal false, found_email, 'Expected no email after updating profile'
+  end
+
+
   NON_ADMIN_USER_DATA = ["uuid", "kind", "is_active", "email", "first_name",
                          "last_name"].sort
 
diff --git a/services/crunch/crunchstat/go.sh b/services/crunch/crunchstat/go.sh
deleted file mode 100755 (executable)
index 640a0d2..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#! /bin/sh
-
-# Wraps the 'go' executable with some environment setup.  Sets GOPATH, creates
-# 'pkg' and 'bin' directories, automatically installs dependencies, then runs
-# the underlying 'go' executable with any command line parameters provided to
-# the script.
-
-rootdir=$(readlink -f $(dirname $0))
-GOPATH=$rootdir:$rootdir/../../sdk/go:$GOPATH
-export GOPATH
-
-mkdir -p $rootdir/pkg
-mkdir -p $rootdir/bin
-
-go $*
index c0ea03f06b39df2c592e427c0a21b1cfd909038f..ec5e94af57b312ead31f2ffaa1e5ff3c09a15f55 100644 (file)
@@ -17,6 +17,8 @@ import apiclient
 import json
 import logging
 
+_logger = logging.getLogger('arvados.arvados_fuse')
+
 from time import time
 from llfuse import FUSEError
 
@@ -124,7 +126,7 @@ class Directory(FreshBase):
             try:
                 self.update()
             except apiclient.errors.HttpError as e:
-                logging.debug(e)
+                _logger.debug(e)
 
     def __getitem__(self, item):
         self.checkupdate()
@@ -197,7 +199,8 @@ class CollectionDirectory(Directory):
             self.fresh()
             return True
         except Exception as detail:
-            logging.debug("arv-mount %s: error: %s" % (self.collection_locator,detail))
+            _logger.debug("arv-mount %s: error: %s",
+                          self.collection_locator, detail)
             return False
 
 class MagicDirectory(Directory):
@@ -225,7 +228,7 @@ class MagicDirectory(Directory):
             else:
                 return False
         except Exception as e:
-            logging.debug('arv-mount exception keep %s', e)
+            _logger.debug('arv-mount exception keep %s', e)
             return False
 
     def __getitem__(self, item):
@@ -476,7 +479,8 @@ class Operations(llfuse.Operations):
         return entry
 
     def lookup(self, parent_inode, name):
-        logging.debug("arv-mount lookup: parent_inode %i name %s", parent_inode, name)
+        _logger.debug("arv-mount lookup: parent_inode %i name %s",
+                      parent_inode, name)
         inode = None
 
         if name == '.':
@@ -512,7 +516,7 @@ class Operations(llfuse.Operations):
         return fh
 
     def read(self, fh, off, size):
-        logging.debug("arv-mount read %i %i %i", fh, off, size)
+        _logger.debug("arv-mount read %i %i %i", fh, off, size)
         if fh in self._filehandles:
             handle = self._filehandles[fh]
         else:
@@ -529,7 +533,7 @@ class Operations(llfuse.Operations):
             del self._filehandles[fh]
 
     def opendir(self, inode):
-        logging.debug("arv-mount opendir: inode %i", inode)
+        _logger.debug("arv-mount opendir: inode %i", inode)
 
         if inode in self.inodes:
             p = self.inodes[inode]
@@ -550,14 +554,14 @@ class Operations(llfuse.Operations):
         return fh
 
     def readdir(self, fh, off):
-        logging.debug("arv-mount readdir: fh %i off %i", fh, off)
+        _logger.debug("arv-mount readdir: fh %i off %i", fh, off)
 
         if fh in self._filehandles:
             handle = self._filehandles[fh]
         else:
             raise llfuse.FUSEError(errno.EBADF)
 
-        logging.debug("arv-mount handle.entry %s", handle.entry)
+        _logger.debug("arv-mount handle.entry %s", handle.entry)
 
         e = off
         while e < len(handle.entry):
index 0f3e8719b9f683a6bedbed470b280b0b65d2f710..794a468fbc6c064a1685e162caa761c476c5b519 100755 (executable)
@@ -7,10 +7,11 @@ import logging
 import os
 import signal
 import subprocess
-import traceback
 
 from arvados_fuse import *
 
+logger = logging.getLogger('arvados.arv-mount')
+
 if __name__ == '__main__':
     # Handle command line parameters
     parser = argparse.ArgumentParser(
@@ -52,17 +53,30 @@ collections on the server.""")
     else:
         daemon_ctx = None
 
-    # Set up logging.
-    # If we're daemonized without a logfile, there's nowhere to log, so don't.
-    if args.logfile or (daemon_ctx is None):
-        log_conf = {}
-        if args.debug:
-            log_conf['level'] = logging.DEBUG
-            arvados.config.settings()['ARVADOS_DEBUG'] = 'true'
-        if args.logfile:
-            log_conf['filename'] = args.logfile
-        logging.basicConfig(**log_conf)
-        logging.debug("arv-mount debugging enabled")
+    # Configure a logger based on command-line switches.
+    # If we're using a contemporary Python SDK (mid-August 2014),
+    # configure the arvados hierarchy logger.
+    # Otherwise, configure the program root logger.
+    base_logger = getattr(arvados, 'logger', None)
+
+    if args.logfile:
+        log_handler = logging.FileHandler(args.logfile)
+    elif daemon_ctx:
+        log_handler = logging.NullHandler()
+    elif base_logger:
+        log_handler = arvados.log_handler
+    else:
+        log_handler = logging.StreamHandler()
+
+    if base_logger is None:
+        base_logger = logging.getLogger()
+    else:
+        base_logger.removeHandler(arvados.log_handler)
+    base_logger.addHandler(log_handler)
+
+    if args.debug:
+        base_logger.setLevel(logging.DEBUG)
+        logger.debug("arv-mount debugging enabled")
 
     try:
         # Create the request handler
@@ -79,9 +93,8 @@ collections on the server.""")
         else:
             # Set up the request handler with the 'magic directory' at the root
             operations.inodes.add_entry(MagicDirectory(llfuse.ROOT_INODE, operations.inodes))
-    except Exception as ex:
-        logging.error("arv-mount: exception during API setup")
-        logging.error(traceback.format_exc())
+    except Exception:
+        logger.exception("arv-mount: exception during API setup")
         exit(1)
 
     # FUSE options, see mount.fuse(8)
@@ -115,8 +128,8 @@ collections on the server.""")
             signal.signal(signal.SIGTERM, signal.SIG_DFL)
             signal.signal(signal.SIGQUIT, signal.SIG_DFL)
         except Exception as e:
-            logging.error('arv-mount: exception during exec %s' % (args.exec_args,))
-            logging.error(traceback.format_exc())
+            logger.exception('arv-mount: exception during exec %s',
+                             args.exec_args)
             try:
                 rc = e.errno
             except AttributeError:
@@ -130,6 +143,5 @@ collections on the server.""")
             llfuse.init(operations, args.mountpoint, opts)
             llfuse.main()
         except Exception as e:
-            logging.error('arv-mount: exception during mount')
-            logging.error(traceback.format_exc())
+            logger.exception('arv-mount: exception during mount')
             exit(getattr(e, 'errno', 1))
diff --git a/services/keep/go.sh b/services/keep/go.sh
deleted file mode 100755 (executable)
index 156fe90..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#! /bin/sh
-
-# Wraps the 'go' executable with some environment setup.  Sets GOPATH, creates
-# 'pkg' and 'bin' directories, automatically installs dependencies, then runs
-# the underlying 'go' executable with any command line parameters provided to
-# the script.
-
-rootdir=$(readlink -f $(dirname $0))
-GOPATH=$rootdir:$rootdir/../../sdk/go:$GOPATH
-export GOPATH
-
-mkdir -p $rootdir/pkg
-mkdir -p $rootdir/bin
-
-go get github.com/gorilla/mux
-
-go $*
similarity index 98%
rename from services/keep/src/arvados.org/keepproxy/keepproxy.go
rename to services/keepproxy/keepproxy.go
index 367854bed382ec1b17589c825126d5e31a9bf6a6..a927b87ea6575812c1b5fb728e50e946b6b46589 100644 (file)
@@ -1,8 +1,8 @@
 package main
 
 import (
-       "arvados.org/keepclient"
-       "arvados.org/sdk"
+       "git.curoverse.com/arvados.git/sdk/go/keepclient"
+       "git.curoverse.com/arvados.git/sdk/go/arvadosclient"
        "flag"
        "fmt"
        "github.com/gorilla/mux"
@@ -68,7 +68,7 @@ func main() {
 
        flagset.Parse(os.Args[1:])
 
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        if err != nil {
                log.Fatalf("Error setting up arvados client %s", err.Error())
        }
similarity index 85%
rename from services/keep/src/arvados.org/keepproxy/keepproxy_test.go
rename to services/keepproxy/keepproxy_test.go
index 4bf347890eb01edea36af772fed2bbef9f4f91d1..c4e5ae7258f0963093232f78877820ed15c0296c 100644 (file)
@@ -1,8 +1,8 @@
 package main
 
 import (
-       "arvados.org/keepclient"
-       "arvados.org/sdk"
+       "git.curoverse.com/arvados.git/sdk/go/keepclient"
+       "git.curoverse.com/arvados.git/sdk/go/arvadosclient"
        "crypto/md5"
        "crypto/tls"
        "fmt"
@@ -14,7 +14,6 @@ import (
        "net/url"
        "os"
        "os/exec"
-       "strings"
        "testing"
        "time"
 )
@@ -31,8 +30,8 @@ var _ = Suite(&ServerRequiredSuite{})
 type ServerRequiredSuite struct{}
 
 func pythonDir() string {
-       gopath := os.Getenv("GOPATH")
-       return fmt.Sprintf("%s/../../sdk/python/tests", strings.Split(gopath, ":")[0])
+       cwd, _ := os.Getwd()
+       return fmt.Sprintf("%s/../../sdk/python/tests", cwd)
 }
 
 func (s *ServerRequiredSuite) SetUpSuite(c *C) {
@@ -40,12 +39,27 @@ func (s *ServerRequiredSuite) SetUpSuite(c *C) {
        defer os.Chdir(cwd)
 
        os.Chdir(pythonDir())
-
-       if err := exec.Command("python", "run_test_server.py", "start").Run(); err != nil {
-               panic("'python run_test_server.py start' returned error")
+       {
+               cmd := exec.Command("python", "run_test_server.py", "start")
+               stderr, err := cmd.StderrPipe()
+               if err != nil {
+                       log.Fatalf("Setting up stderr pipe: %s", err)
+               }
+               go io.Copy(os.Stderr, stderr)
+               if err := cmd.Run(); err != nil {
+                       panic(fmt.Sprintf("'python run_test_server.py start' returned error %s", err))
+               }
        }
-       if err := exec.Command("python", "run_test_server.py", "start_keep").Run(); err != nil {
-               panic("'python run_test_server.py start_keep' returned error")
+       {
+               cmd := exec.Command("python", "run_test_server.py", "start_keep")
+               stderr, err := cmd.StderrPipe()
+               if err != nil {
+                       log.Fatalf("Setting up stderr pipe: %s", err)
+               }
+               go io.Copy(os.Stderr, stderr)
+               if err := cmd.Run(); err != nil {
+                       panic(fmt.Sprintf("'python run_test_server.py start_keep' returned error %s", err))
+               }
        }
 
        os.Setenv("ARVADOS_API_HOST", "localhost:3001")
@@ -109,7 +123,7 @@ func runProxy(c *C, args []string, token string, port int) keepclient.KeepClient
 
        os.Setenv("ARVADOS_KEEP_PROXY", fmt.Sprintf("http://localhost:%v", port))
        os.Setenv("ARVADOS_API_TOKEN", token)
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, err := keepclient.MakeKeepClient(&arv)
        c.Check(kc.Using_proxy, Equals, true)
        c.Check(len(kc.ServiceRoots()), Equals, 1)
@@ -131,7 +145,7 @@ func (s *ServerRequiredSuite) TestPutAskGet(c *C) {
        setupProxyService()
 
        os.Setenv("ARVADOS_EXTERNAL_CLIENT", "true")
-       arv, err := sdk.MakeArvadosClient()
+       arv, err := arvadosclient.MakeArvadosClient()
        kc, err := keepclient.MakeKeepClient(&arv)
        c.Check(kc.Arvados.External, Equals, true)
        c.Check(kc.Using_proxy, Equals, true)
@@ -156,7 +170,7 @@ func (s *ServerRequiredSuite) TestPutAskGet(c *C) {
                var rep int
                var err error
                hash2, rep, err = kc.PutB([]byte("foo"))
-               c.Check(hash2, Equals, fmt.Sprintf("%s+3", hash))
+               c.Check(hash2, Matches, fmt.Sprintf(`^%s\+3(\+.+)?$`, hash))
                c.Check(rep, Equals, 2)
                c.Check(err, Equals, nil)
                log.Print("PutB")
@@ -238,7 +252,7 @@ func (s *ServerRequiredSuite) TestGetDisabled(c *C) {
 
        {
                hash2, rep, err := kc.PutB([]byte("baz"))
-               c.Check(hash2, Equals, fmt.Sprintf("%s+3", hash))
+               c.Check(hash2, Matches, fmt.Sprintf(`^%s\+3(\+.+)?$`, hash))
                c.Check(rep, Equals, 2)
                c.Check(err, Equals, nil)
                log.Print("PutB")
similarity index 97%
rename from services/keep/src/keep/keep_test.go
rename to services/keepstore/keepstore_test.go
index de189b659c0b10db9cfac8490c689ec87537d48b..b153d6dce27309f3cb692ec0d6a84b724386891a 100644 (file)
@@ -7,6 +7,8 @@ import (
        "os"
        "path"
        "regexp"
+       "sort"
+       "strings"
        "testing"
 )
 
@@ -342,11 +344,14 @@ func TestIndex(t *testing.T) {
        vols[1].Put(TEST_HASH_2+".meta", []byte("metadata"))
 
        index := vols[0].Index("") + vols[1].Index("")
-       expected := `^` + TEST_HASH + `\+\d+ \d+\n` +
+       index_rows := strings.Split(index, "\n")
+       sort.Strings(index_rows)
+       sorted_index := strings.Join(index_rows, "\n")
+       expected := `^\n` + TEST_HASH + `\+\d+ \d+\n` +
                TEST_HASH_3 + `\+\d+ \d+\n` +
-               TEST_HASH_2 + `\+\d+ \d+\n$`
+               TEST_HASH_2 + `\+\d+ \d+$`
 
-       match, err := regexp.MatchString(expected, index)
+       match, err := regexp.MatchString(expected, sorted_index)
        if err == nil {
                if !match {
                        t.Errorf("IndexLocators returned:\n%s", index)