Merge branch '8015-crunch2-mount' closes #8015
authorPeter Amstutz <peter.amstutz@curoverse.com>
Tue, 1 Mar 2016 20:56:37 +0000 (15:56 -0500)
committerPeter Amstutz <peter.amstutz@curoverse.com>
Tue, 1 Mar 2016 20:56:37 +0000 (15:56 -0500)
49 files changed:
apps/workbench/.gitignore
apps/workbench/app/controllers/actions_controller.rb
apps/workbench/app/controllers/application_controller.rb
apps/workbench/app/models/job.rb
apps/workbench/app/views/application/_breadcrumbs.html.erb
apps/workbench/app/views/application/_projects_tree_menu.html.erb
apps/workbench/app/views/application/_show_star.html.erb [new file with mode: 0644]
apps/workbench/app/views/application/star.js.erb [new file with mode: 0644]
apps/workbench/app/views/jobs/_show_log.html.erb
apps/workbench/app/views/layouts/body.html.erb
apps/workbench/app/views/projects/_choose.html.erb
apps/workbench/app/views/projects/show.html.erb
apps/workbench/config/application.default.yml
apps/workbench/config/routes.rb
apps/workbench/test/controllers/projects_controller_test.rb
apps/workbench/test/integration/projects_test.rb
apps/workbench/test/integration/websockets_test.rb
doc/_includes/_run_command_foreach_example.liquid
sdk/cli/bin/crunch-job
sdk/cwl/arvados_cwl/__init__.py
sdk/cwl/setup.py
sdk/perl/.gitignore [new file with mode: 0644]
sdk/python/arvados/api.py
sdk/python/arvados/commands/arv_copy.py
sdk/python/arvados/commands/run.py
sdk/python/arvados/events.py
sdk/python/setup.py
services/api/.gitignore
services/api/app/controllers/arvados/v1/api_client_authorizations_controller.rb
services/api/app/models/api_client_authorization.rb
services/api/db/migrate/20160208210629_add_uuid_to_api_client_authorization.rb [new file with mode: 0644]
services/api/db/migrate/20160209155729_add_uuid_to_api_token_search_index.rb [new file with mode: 0644]
services/api/db/structure.sql
services/api/test/fixtures/api_client_authorizations.yml
services/api/test/fixtures/collections.yml
services/api/test/fixtures/groups.yml
services/api/test/fixtures/links.yml
services/api/test/functional/arvados/v1/api_client_authorizations_controller_test.rb
services/api/test/functional/arvados/v1/groups_controller_test.rb
services/datamanager/collection/collection.go
services/keepstore/volume_generic_test.go
services/nodemanager/arvnodeman/computenode/driver/__init__.py
services/nodemanager/arvnodeman/computenode/driver/ec2.py
services/nodemanager/arvnodeman/computenode/driver/gce.py
services/nodemanager/arvnodeman/config.py
services/nodemanager/arvnodeman/fullstopactor.py [new file with mode: 0644]
services/nodemanager/tests/test_failure.py [new file with mode: 0644]
tools/crunchstat-summary/crunchstat_summary/summarizer.py
tools/crunchstat-summary/tests/test_examples.py

index 9bef02bbfda670595750fd99a4461005ce5b8f12..a27ac31580a1d6b5cc81ab47e60c8deb649a2f85 100644 (file)
@@ -36,3 +36,6 @@
 # Dev/test SSL certificates
 /self-signed.key
 /self-signed.pem
+
+# Generated git-commit.version file
+/git-commit.version
index 58b8cdc54f018e6dae20ba7b9c182bfbaef909c0..28680df33f3cf4f5902d5abdc278c305011018f2 100644 (file)
@@ -238,6 +238,35 @@ You can try recreating the collection to get a copy with full provenance data."
     end
   end
 
+  # star / unstar the current project
+  def star
+    links = Link.where(tail_uuid: current_user.uuid,
+                       head_uuid: @object.uuid,
+                       link_class: 'star')
+
+    if params['status'] == 'create'
+      # create 'star' link if one does not already exist
+      if !links.andand.any?
+        dst = Link.new(owner_uuid: current_user.uuid,
+                       tail_uuid: current_user.uuid,
+                       head_uuid: @object.uuid,
+                       link_class: 'star',
+                       name: @object.uuid)
+        dst.save!
+      end
+    else # delete any existing 'star' links
+      if links.andand.any?
+        links.each do |link|
+          link.destroy
+        end
+      end
+    end
+
+    respond_to do |format|
+      format.js
+    end
+  end
+
   protected
 
   def derive_unique_filename filename, manifest_files
index db3d43040c416bef846a19d8ee0b4009a9e8f622..4c3d3f852eb2a737049f0a734e88de738a6f0b95 100644 (file)
@@ -89,13 +89,14 @@ class ApplicationController < ActionController::Base
     # exception here than in a template.)
     unless current_user.nil?
       begin
-        build_project_trees
+        my_starred_projects current_user
+        build_my_wanted_projects_tree current_user
       rescue ArvadosApiClient::ApiError
         # Fall back to the default-setting code later.
       end
     end
-    @my_project_tree ||= []
-    @shared_project_tree ||= []
+    @starred_projects ||= []
+    @my_wanted_projects_tree ||= []
     render_error(err_opts)
   end
 
@@ -444,6 +445,15 @@ class ApplicationController < ActionController::Base
     end
   end
 
+  helper_method :is_starred
+  def is_starred
+    links = Link.where(tail_uuid: current_user.uuid,
+               head_uuid: @object.uuid,
+               link_class: 'star')
+
+    return links.andand.any?
+  end
+
   protected
 
   helper_method :strip_token_from_path
@@ -833,27 +843,63 @@ class ApplicationController < ActionController::Base
     {collections: c, owners: own}
   end
 
-  helper_method :my_project_tree
-  def my_project_tree
-    build_project_trees
-    @my_project_tree
+  helper_method :my_starred_projects
+  def my_starred_projects user
+    return if @starred_projects
+    links = Link.filter([['tail_uuid', '=', user.uuid],
+                         ['link_class', '=', 'star'],
+                         ['head_uuid', 'is_a', 'arvados#group']]).select(%w(head_uuid))
+    uuids = links.collect { |x| x.head_uuid }
+    starred_projects = Group.filter([['uuid', 'in', uuids]]).order('name')
+    @starred_projects = starred_projects.results
+  end
+
+  # If there are more than 200 projects that are readable by the user,
+  # build the tree using only the top 200+ projects owned by the user,
+  # from the top three levels.
+  # That is: get toplevel projects under home, get subprojects of
+  # these projects, and so on until we hit the limit.
+  def my_wanted_projects user, page_size=100
+    return @my_wanted_projects if @my_wanted_projects
+
+    from_top = []
+    uuids = [user.uuid]
+    depth = 0
+    @too_many_projects = false
+    @reached_level_limit = false
+    while from_top.size <= page_size*2
+      current_level = Group.filter([['group_class','=','project'],
+                                    ['owner_uuid', 'in', uuids]])
+                      .order('name').limit(page_size*2)
+      break if current_level.results.size == 0
+      @too_many_projects = true if current_level.items_available > current_level.results.size
+      from_top.concat current_level.results
+      uuids = current_level.results.collect { |x| x.uuid }
+      depth += 1
+      if depth >= 3
+        @reached_level_limit = true
+        break
+      end
+    end
+    @my_wanted_projects = from_top
   end
 
-  helper_method :shared_project_tree
-  def shared_project_tree
-    build_project_trees
-    @shared_project_tree
+  helper_method :my_wanted_projects_tree
+  def my_wanted_projects_tree user, page_size=100
+    build_my_wanted_projects_tree user, page_size
+    [@my_wanted_projects_tree, @too_many_projects, @reached_level_limit]
   end
 
-  def build_project_trees
-    return if @my_project_tree and @shared_project_tree
-    parent_of = {current_user.uuid => 'me'}
-    all_projects.each do |ob|
+  def build_my_wanted_projects_tree user, page_size=100
+    return @my_wanted_projects_tree if @my_wanted_projects_tree
+
+    parent_of = {user.uuid => 'me'}
+    my_wanted_projects(user, page_size).each do |ob|
       parent_of[ob.uuid] = ob.owner_uuid
     end
-    children_of = {false => [], 'me' => [current_user]}
-    all_projects.each do |ob|
-      if ob.owner_uuid != current_user.uuid and
+    children_of = {false => [], 'me' => [user]}
+    my_wanted_projects(user, page_size).each do |ob|
+      if ob.owner_uuid != user.uuid and
           not parent_of.has_key? ob.owner_uuid
         parent_of[ob.uuid] = false
       end
@@ -877,11 +923,8 @@ class ApplicationController < ActionController::Base
       end
       paths
     end
-    @my_project_tree =
+    @my_wanted_projects_tree =
       sorted_paths.call buildtree.call(children_of, 'me')
-    @shared_project_tree =
-      sorted_paths.call({'Projects shared with me' =>
-                          buildtree.call(children_of, false)})
   end
 
   helper_method :get_object
index 3ece865959f10aabf70acfab6cde1dd9145cf6d5..6566aeb7cd5f82c4aa9e8f16a88b4d287f1afc24 100644 (file)
@@ -41,4 +41,16 @@ class Job < ArvadosBase
   def textile_attributes
     [ 'description' ]
   end
+
+  def stderr_log_query(limit=nil)
+    query = Log.where(event_type: "stderr", object_uuid: self.uuid)
+               .order("id DESC")
+    query = query.limit(limit) if limit
+    query
+  end
+
+  def stderr_log_lines(limit=2000)
+    stderr_log_query(limit).results.reverse.
+      flat_map { |log| log.properties[:text].split("\n") rescue [] }
+  end
 end
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..3ef2aec17c35575013dd0be2ca51afe95953c8ba 100644 (file)
@@ -0,0 +1,69 @@
+      <nav class="navbar navbar-default breadcrumbs" role="navigation">
+        <ul class="nav navbar-nav navbar-left">
+          <li>
+            <a href="/">
+              <i class="fa fa-lg fa-fw fa-dashboard"></i>
+              Dashboard
+            </a>
+          </li>
+          <li class="dropdown">
+            <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="projects-menu">
+              Projects
+              <span class="caret"></span>
+            </a>
+            <ul class="dropdown-menu" style="min-width: 20em" role="menu">
+              <li role="menuitem">
+                  <%= link_to(
+                        url_for(
+                          action: 'choose',
+                          controller: 'search',
+                          filters: [['uuid', 'is_a', 'arvados#group']].to_json,
+                          title: 'Search',
+                          action_name: 'Show',
+                          action_href: url_for(controller: :actions, action: :show),
+                          action_method: 'get',
+                          action_data: {selection_param: 'uuid', success: 'redirect-to-created-object'}.to_json),
+                        { remote: true, method: 'get', title: "Search" }) do %>
+                    <i class="glyphicon fa-fw glyphicon-search"></i> Search all projects ...
+                  <% end %>
+               </li>
+              <% if Rails.configuration.anonymous_user_token and Rails.configuration.enable_public_projects_page %>
+                <li role="menuitem"><a href="/projects/public" role="menuitem"><i class="fa fa-fw fa-list"></i> Browse public projects </a>
+                </li>
+              <% end %>
+              <li role="menuitem">
+                <%= link_to projects_path(options: {ensure_unique_name: true}), role: 'menu-item', method: :post do %>
+                  <i class="fa fa-fw fa-plus"></i> Add a new project
+                <% end %>
+              </li>
+              <li role="presentation" class="divider"></li>
+              <%= render partial: "projects_tree_menu", locals: {
+                  :project_link_to => Proc.new do |pnode, &block|
+                    link_to(project_path(pnode[:object].uuid),
+                      data: { 'object-uuid' => pnode[:object].uuid,
+                              'name' => 'name' },
+                      &block)
+                  end,
+              } %>
+            </ul>
+          </li>
+          <% if @name_link or @object %>
+            <li class="nav-separator">
+              <i class="fa fa-lg fa-angle-double-right"></i>
+            </li>
+            <li>
+              <%= link_to project_path(current_user.uuid) do %>
+                Home
+              <% end %>
+            </li>
+            <% project_breadcrumbs.each do |p| %>
+              <li class="nav-separator">
+                <i class="fa fa-lg fa-angle-double-right"></i>
+              </li>
+              <li>
+                <%= link_to(p.name, project_path(p.uuid), data: {object_uuid: p.uuid, name: 'name'}) %>
+              </li>
+            <% end %>
+          <% end %>
+        </ul>
+      </nav>
index 77b9d45f93587e1d1102582f62a0bda24e32fb0c..a680c69ce314bb4bd02c290506f996faa464f38a 100644 (file)
@@ -1,3 +1,23 @@
+<% starred_projects = my_starred_projects current_user%>
+<% if starred_projects.andand.any? %>
+  <li role="presentation" class="dropdown-header">
+    My favorite projects
+  </li>
+  <li>
+    <%= project_link_to.call({object: current_user, depth: 0}) do %>
+      <span style="padding-left: 0">Home</span>
+    <% end %>
+  </li>
+  <% (starred_projects).each do |pnode| %>
+    <li>
+      <%= project_link_to.call({object: pnode, depth: 0}) do%>
+        <span style="padding-left: 0em"></span><%= pnode[:name] %>
+      <% end %>
+    </li>
+  <% end %>
+  <li role="presentation" class="divider"></li>
+<% end %>
+
 <li role="presentation" class="dropdown-header">
   My projects
 </li>
@@ -6,7 +26,8 @@
     <span style="padding-left: 0">Home</span>
   <% end %>
 </li>
-<% my_project_tree.each do |pnode| %>
+<% my_tree = my_wanted_projects_tree current_user %>
+<% my_tree[0].each do |pnode| %>
   <% next if pnode[:object].class != Group %>
   <li>
     <%= project_link_to.call pnode do %>
     <% end %>
   </li>
 <% end %>
+<% if my_tree[1] or my_tree[0].size > 200 %>
+<li role="presentation" class="dropdown-header">
+  Some projects have been omitted.
+</li>
+<% elsif my_tree[2] %>
+<li role="presentation" class="dropdown-header">
+  Showing top three levels of your projects.
+</li>
+<% end %>
diff --git a/apps/workbench/app/views/application/_show_star.html.erb b/apps/workbench/app/views/application/_show_star.html.erb
new file mode 100644 (file)
index 0000000..b32fd47
--- /dev/null
@@ -0,0 +1,9 @@
+<% if current_user and is_starred %>
+  <%= link_to(star_path(status: 'delete', id:@object.uuid, action_method: 'get'), style: "color:#D00", class: "btn btn-xs star-unstar", title: "Remove from list of favorites", remote: true) do  %>
+            <i class="fa fa-lg fa-star"></i>
+          <% end %>
+<% else %>
+  <%= link_to(star_path(status: 'create', id:@object.uuid, action_method: 'get'), class: "btn btn-xs star-unstar", title: "Add to list of favorites", remote: true) do %>
+            <i class="fa fa-lg fa-star-o"></i>
+          <% end %>
+<% end %>
diff --git a/apps/workbench/app/views/application/star.js.erb b/apps/workbench/app/views/application/star.js.erb
new file mode 100644 (file)
index 0000000..701c673
--- /dev/null
@@ -0,0 +1,2 @@
+$(".star-unstar").html("<%= escape_javascript(render partial: 'show_star') %>");
+$(".breadcrumbs").html("<%= escape_javascript(render partial: 'breadcrumbs') %>");
index 7d67b74210812895347d21d6c08f4ceb15c50db0..02ad2b73986856185e6b66bbaacd6d8a1a5826c7 100644 (file)
@@ -8,7 +8,8 @@
 <div id="event_log_div"
      class="arv-log-event-listener arv-log-event-handler-append-logs arv-job-log-window"
      data-object-uuid="<%= @object.uuid %>"
-     ></div>
+  ><%= @object.stderr_log_lines(Rails.configuration.running_job_log_records_to_fetch).join("\n") %>
+</div>
 
 <%# Applying a long throttle suppresses the auto-refresh of this
     partial that would normally be triggered by arv-log-event. %>
index abb79e932790502c007215b857c822ca0908d160..456f15f218cee86cc76098acb33f2fab9d78cc23 100644 (file)
     </nav>
 
     <% if current_user.andand.is_active %>
-      <nav class="navbar navbar-default breadcrumbs" role="navigation">
-        <ul class="nav navbar-nav navbar-left">
-          <li>
-            <a href="/">
-              <i class="fa fa-lg fa-fw fa-dashboard"></i>
-              Dashboard
-            </a>
-          </li>
-          <li class="dropdown">
-            <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="projects-menu">
-              Projects
-              <span class="caret"></span>
-            </a>
-            <ul class="dropdown-menu" style="min-width: 20em" role="menu">
-              <li role="menuitem">
-                  <%= link_to(
-                        url_for(
-                          action: 'choose',
-                          controller: 'search',
-                          filters: [['uuid', 'is_a', 'arvados#group']].to_json,
-                          title: 'Search',
-                          action_name: 'Show',
-                          action_href: url_for(controller: :actions, action: :show),
-                          action_method: 'get',
-                          action_data: {selection_param: 'uuid', success: 'redirect-to-created-object'}.to_json),
-                        { remote: true, method: 'get', title: "Search" }) do %>
-                    <i class="glyphicon fa-fw glyphicon-search"></i> Search all projects ...
-                  <% end %>
-               </li>
-              <% if Rails.configuration.anonymous_user_token and Rails.configuration.enable_public_projects_page %>
-                <li role="menuitem"><a href="/projects/public" role="menuitem"><i class="fa fa-fw fa-list"></i> Browse public projects </a>
-                </li>
-              <% end %>
-              <li role="menuitem">
-                <%= link_to projects_path(options: {ensure_unique_name: true}), role: 'menu-item', method: :post do %>
-                  <i class="fa fa-fw fa-plus"></i> Add a new project
-                <% end %>
-              </li>
-              <li role="presentation" class="divider"></li>
-              <%= render partial: "projects_tree_menu", locals: {
-                  :project_link_to => Proc.new do |pnode, &block|
-                    link_to(project_path(pnode[:object].uuid),
-                      data: { 'object-uuid' => pnode[:object].uuid,
-                              'name' => 'name' },
-                      &block)
-                  end,
-              } %>
-            </ul>
-          </li>
-          <% if @name_link or @object %>
-            <li class="nav-separator">
-              <i class="fa fa-lg fa-angle-double-right"></i>
-            </li>
-            <li>
-              <%= link_to project_path(current_user.uuid) do %>
-                Home
-              <% end %>
-            </li>
-            <% project_breadcrumbs.each do |p| %>
-              <li class="nav-separator">
-                <i class="fa fa-lg fa-angle-double-right"></i>
-              </li>
-              <li>
-                <%= link_to(p.name, project_path(p.uuid), data: {object_uuid: p.uuid, name: 'name'}) %>
-              </li>
-            <% end %>
-          <% end %>
-        </ul>
-      </nav>
+      <%= render partial: 'breadcrumbs' %>
     <% elsif !current_user %>   <%# anonymous %>
       <% if (@name_link or @object) and (project_breadcrumbs.any?) %>
         <nav class="navbar navbar-default breadcrumbs" role="navigation">
index c0759ed2e3ac1da813acd7afa744f62f543185a7..badaa24983f2640e60a2d2e8397d9842941f74ea 100644 (file)
 
       <div class="modal-body">
         <div class="selectable-container" style="height: 15em; overflow-y: scroll">
-          <% [my_project_tree, shared_project_tree].each do |tree| %>
-            <% tree.each do |projectnode| %>
-              <% if projectnode[:object].is_a? String %>
-                <div class="row" style="padding-left: <%= 1 + projectnode[:depth] %>em; margin-right: 0px">
-                  <i class="fa fa-fw fa-share-alt"></i>
-                  <%= projectnode[:object] %>
-                </div>
-              <% else
-                 row_selectable = !params[:editable] || projectnode[:object].editable?
-                 if projectnode[:object].uuid == current_user.uuid
-                   row_name = "Home"
-                   row_selectable = true
-                 else
-                   row_name = projectnode[:object].friendly_link_name || 'New project'
-                 end %>
-                <div class="<%= 'selectable project' if row_selectable %> row"
-                     style="padding-left: <%= 1 + projectnode[:depth] %>em; margin-right: 0px" data-object-uuid="<%= projectnode[:object].uuid %>">
-                  <i class="fa fa-fw fa-folder-o"></i> <%= row_name %>
-                </div>
-              <% end %>
+          <% starred_projects = my_starred_projects current_user%>
+          <% if starred_projects.andand.any? %>
+            <% writable_projects = starred_projects.select(&:editable?) %>
+            <% writable_projects.each do |projectnode| %>
+              <% row_name = projectnode.friendly_link_name || 'New project' %>
+              <div class="selectable project row"
+                   style="padding-left: 1em; margin-right: 0px"
+                   data-object-uuid="<%= projectnode.uuid %>">
+                <i class="fa fa-fw fa-folder-o"></i> <%= row_name %> <i class="fa fa-fw fa-star"></i>
+              </div>
             <% end %>
           <% end %>
+
+          <% my_projects = my_wanted_projects_tree(current_user) %>
+          <% my_projects[0].each do |projectnode| %>
+            <% if projectnode[:object].uuid == current_user.uuid
+                 row_name = "Home"
+               else
+                 row_name = projectnode[:object].friendly_link_name || 'New project'
+               end %>
+            <div class="selectable project row"
+                 style="padding-left: <%= 1 + projectnode[:depth] %>em; margin-right: 0px"
+                 data-object-uuid="<%= projectnode[:object].uuid %>">
+              <i class="fa fa-fw fa-folder-o"></i> <%= row_name %>
+            </div>
+          <% end %>
         </div>
+
+        <% if my_projects[1] or my_projects[2] or my_projects[0].size > 200 %>
+          <div>Some of your projects are omitted. Add projects of interest to favorites.</div>
+        <% end %>
       </div>
 
       <div class="modal-footer">
index 2a85da83214303fed625725dea1a334067691ee2..6033a3491051d657bfb470eb351f2df710edb90c 100644 (file)
@@ -3,6 +3,7 @@
     <% if @object.uuid == current_user.andand.uuid %>
       Home
     <% else %>
+      <%= render partial: "show_star" %>
       <%= render_editable_attribute @object, 'name', nil, { 'data-emptytext' => "New project" } %>
     <% end %>
   </h2>
index 239ffcd225da24a0c444851c16e2484c1293ba96..5400debbfdaf55e1f64c004adf70f98ca4037cb1 100644 (file)
@@ -272,3 +272,7 @@ common:
   #
   # The default setting (false) is appropriate for a multi-user site.
   trust_all_content: false
+
+  # Maximum number of historic log records of a running job to fetch
+  # and display in the Log tab, while subscribing to web sockets.
+  running_job_log_records_to_fetch: 2000
index 10426099937e2bdda42b6f7fb0976c0ada764a51..fc72ea2222508db8b19d540b089deaf03f128df5 100644 (file)
@@ -12,6 +12,7 @@ ArvadosWorkbench::Application.routes.draw do
   get "users/setup" => 'users#setup', :as => :setup_user
   get "report_issue_popup" => 'actions#report_issue_popup', :as => :report_issue_popup
   post "report_issue" => 'actions#report_issue', :as => :report_issue
+  get "star" => 'actions#star', :as => :star
   resources :nodes
   resources :humans
   resources :traits
index 8fa9fe9a817e7f2e3b6494099b85afc53c05ac57..58914a84ac87b5b0949f07d634a826226a2b64af 100644 (file)
@@ -418,4 +418,83 @@ class ProjectsControllerTest < ActionController::TestCase
     }, session_for(:active)
     assert_select "#projects-menu + ul li.divider ~ li a[href=/projects/#{project_uuid}]"
   end
+
+  [
+    ["active", 5, ["aproject", "asubproject"], "anonymously_accessible_project"],
+    ["user1_with_load", 2, ["project_with_10_collections"], "project_with_2_pipelines_and_60_jobs"],
+    ["admin", 5, ["anonymously_accessible_project", "subproject_in_anonymous_accessible_project"], "aproject"],
+  ].each do |user, page_size, tree_segment, unexpected|
+    test "build my projects tree for #{user} user and verify #{unexpected} is omitted" do
+      use_token user
+      ctrl = ProjectsController.new
+
+      current_user = User.find(api_fixture('users')[user]['uuid'])
+
+      my_tree = ctrl.send :my_wanted_projects_tree, current_user, page_size
+
+      tree_segment_at_depth_1 = api_fixture('groups')[tree_segment[0]]
+      tree_segment_at_depth_2 = api_fixture('groups')[tree_segment[1]] if tree_segment[1]
+
+      tree_nodes = {}
+      my_tree[0].each do |x|
+        tree_nodes[x[:object]['uuid']] = x[:depth]
+      end
+
+      assert_equal(1, tree_nodes[tree_segment_at_depth_1['uuid']])
+      assert_equal(2, tree_nodes[tree_segment_at_depth_2['uuid']]) if tree_segment[1]
+
+      unexpected_project = api_fixture('groups')[unexpected]
+      assert_nil(tree_nodes[unexpected_project['uuid']])
+    end
+  end
+
+  [
+    ["active", 1],
+    ["project_viewer", 1],
+    ["admin", 0],
+  ].each do |user, size|
+    test "starred projects for #{user}" do
+      use_token user
+      ctrl = ProjectsController.new
+      current_user = User.find(api_fixture('users')[user]['uuid'])
+      my_starred_project = ctrl.send :my_starred_projects, current_user
+      assert_equal(size, my_starred_project.andand.size)
+
+      ctrl2 = ProjectsController.new
+      current_user = User.find(api_fixture('users')[user]['uuid'])
+      my_starred_project = ctrl2.send :my_starred_projects, current_user
+      assert_equal(size, my_starred_project.andand.size)
+    end
+  end
+
+  test "unshare project and verify that it is no longer included in shared user's starred projects" do
+    # remove sharing link
+    use_token :system_user
+    Link.find(api_fixture('links')['share_starred_project_with_project_viewer']['uuid']).destroy
+
+    # verify that project is no longer included in starred projects
+    use_token :project_viewer
+    current_user = User.find(api_fixture('users')['project_viewer']['uuid'])
+    ctrl = ProjectsController.new
+    my_starred_project = ctrl.send :my_starred_projects, current_user
+    assert_equal(0, my_starred_project.andand.size)
+
+    # share it again
+    @controller = LinksController.new
+    post :create, {
+      link: {
+        link_class: 'permission',
+        name: 'can_read',
+        head_uuid: api_fixture('groups')['starred_and_shared_active_user_project']['uuid'],
+        tail_uuid: api_fixture('users')['project_viewer']['uuid'],
+      },
+      format: :json
+    }, session_for(:system_user)
+
+    # verify that the project is again included in starred projects
+    use_token :project_viewer
+    ctrl = ProjectsController.new
+    my_starred_project = ctrl.send :my_starred_projects, current_user
+    assert_equal(1, my_starred_project.andand.size)
+  end
 end
index 64a547108bc3e50157a0bebed23392728c83ee45..01e84b1c0219d19551122356006f7081b0d42629 100644 (file)
@@ -39,7 +39,10 @@ class ProjectsTest < ActionDispatch::IntegrationTest
   test 'Create a project and move it into a different project' do
     visit page_with_token 'active', '/projects'
     find("#projects-menu").click
-    find(".dropdown-menu a", text: "Home").click
+    within('.dropdown-menu') do
+      first('li', text: 'Home').click
+    end
+    wait_for_ajax
     find('.btn', text: "Add a subproject").click
 
     within('h2') do
@@ -51,7 +54,10 @@ class ProjectsTest < ActionDispatch::IntegrationTest
 
     visit '/projects'
     find("#projects-menu").click
-    find(".dropdown-menu a", text: "Home").click
+    within('.dropdown-menu') do
+      first('li', text: 'Home').click
+    end
+    wait_for_ajax
     find('.btn', text: "Add a subproject").click
     within('h2') do
       find('.fa-pencil').click
@@ -709,4 +715,26 @@ class ProjectsTest < ActionDispatch::IntegrationTest
      assert page.has_text?('Unrestricted public data'), 'No text - Unrestricted public data'
      assert page.has_text?('An anonymously accessible project'), 'No text - An anonymously accessible project'
   end
+
+  test "test star and unstar project" do
+    visit page_with_token 'active', "/projects/#{api_fixture('groups')['anonymously_accessible_project']['uuid']}"
+
+    # add to favorites
+    find('.fa-star-o').click
+    wait_for_ajax
+
+    find("#projects-menu").click
+    within('.dropdown-menu') do
+      assert_selector 'li', text: 'Unrestricted public data'
+    end
+
+  # remove from favorites
+    find('.fa-star').click
+    wait_for_ajax
+
+    find("#projects-menu").click
+    within('.dropdown-menu') do
+      assert_no_selector 'li', text: 'Unrestricted public data'
+    end
+  end
 end
index 648d59c69000b19cfca089d4aac154da5e2529c7..655ad92c94d1d18f23988990c59cb832a817fb9e 100644 (file)
@@ -211,4 +211,68 @@ class WebsocketTest < ActionDispatch::IntegrationTest
     datum = page.evaluate_script("jobGraphData[jobGraphData.length-1]['#{series}']")
     assert_in_epsilon value, datum.to_f
   end
+
+  test "test running job with just a few previous log records" do
+    Thread.current[:arvados_api_token] = @@API_AUTHS["admin"]['api_token']
+    job = Job.where(uuid: api_fixture("jobs")['running']['uuid']).results.first
+    visit page_with_token("admin", "/jobs/#{job.uuid}")
+
+    api = ArvadosApiClient.new
+
+    # Create just one old log record
+    api.api("logs", "", {log: {
+                object_uuid: job.uuid,
+                event_type: "stderr",
+                properties: {"text" => "Historic log message"}}})
+
+    click_link("Log")
+
+    # Expect all historic log records, because we have fewer than the
+    # default Rails.configuration.running_job_log_records_to_fetch count
+    assert_text 'Historic log message'
+
+    # Create new log record and expect it to show up in log tab
+    api.api("logs", "", {log: {
+                object_uuid: job.uuid,
+                event_type: "stderr",
+                properties: {"text" => "Log message after subscription"}}})
+    assert_text 'Log message after subscription'
+  end
+
+  test "test running job with too many previous log records" do
+    Rails.configuration.running_job_log_records_to_fetch = 5
+
+    Thread.current[:arvados_api_token] = @@API_AUTHS["admin"]['api_token']
+    job = Job.where(uuid: api_fixture("jobs")['running']['uuid']).results.first
+
+    visit page_with_token("admin", "/jobs/#{job.uuid}")
+
+    api = ArvadosApiClient.new
+
+    # Create Rails.configuration.running_job_log_records_to_fetch + 1 log records
+    (0..Rails.configuration.running_job_log_records_to_fetch).each do |count|
+      api.api("logs", "", {log: {
+                object_uuid: job.uuid,
+                event_type: "stderr",
+                properties: {"text" => "Old log message #{count}"}}})
+    end
+
+    # Go to log tab, which results in subscribing to websockets
+    click_link("Log")
+
+    # Expect all but the first historic log record, because we created
+    # one more record than the configured fetch count.
+    (1..Rails.configuration.running_job_log_records_to_fetch).each do |count|
+      assert_text "Old log message #{count}"
+    end
+    assert_no_text 'Old log message 0'
+
+    # Create one more log record after subscription
+    api.api("logs", "", {log: {
+                object_uuid: job.uuid,
+                event_type: "stderr",
+                properties: {"text" => "Life goes on!"}}})
+    # Expect it to show up in log tab
+    assert_text 'Life goes on!'
+  end
 end
index 3fb754f9db139d4a659f5aba0dec9487f1fe488c..20fe6c2c2fd15e69522c9ebd4d5c2ad1c784d1ab 100644 (file)
@@ -27,7 +27,7 @@
                     "required": true,
                     "dataclass": "Collection"
                 },
-                "sample_subdir": "$(dir $(samples))",
+                "sample_subdir": "$(dir $(sample))",
                 "read_pair": {
                     "value": {
                         "group": "sample_subdir",
index 4e5b0826b6bceabf326b07b65660a5ec4c93f946..ae210a6f447e42d69ecd9302f414866bb4da6e23 100755 (executable)
@@ -432,7 +432,7 @@ fi
   if ($docker_pid == 0)
   {
     srun (["srun", "--nodelist=" . join(',', @node)],
-          ["/bin/sh", "-ec", $docker_install_script]);
+          ["/bin/bash", "-o", "pipefail", "-ec", $docker_install_script]);
     exit ($?);
   }
   while (1)
@@ -443,8 +443,8 @@ fi
   }
   if ($? != 0)
   {
-    croak("Installing Docker image from $docker_locator exited "
-          .exit_status_s($?));
+    Log(undef, "Installing Docker image from $docker_locator exited " . exit_status_s($?));
+    exit(EX_RETRY_UNLOCKED);
   }
 
   # Determine whether this version of Docker supports memory+swap limits.
index 4198c34482ccd0f6fa54daa9e5a9d1d143db2ee1..8370e3d5e75a42e68fd73ee770c281b0388dd198 100644 (file)
@@ -8,6 +8,7 @@ import arvados.commands.run
 import cwltool.draft2tool
 import cwltool.workflow
 import cwltool.main
+from cwltool.process import shortname
 import threading
 import cwltool.docker
 import fnmatch
@@ -37,6 +38,7 @@ def arv_docker_get_image(api_client, dockerRequirement, pull_image):
         args = [image_name]
         if image_tag:
             args.append(image_tag)
+        logger.info("Uploading Docker image %s", ":".join(args))
         arvados.commands.keepdocker.main(args)
 
     return dockerRequirement["dockerImageId"]
@@ -144,11 +146,17 @@ class ArvadosJob(object):
                 "script_version": "master",
                 "script_parameters": {"tasks": [script_parameters]},
                 "runtime_constraints": runtime_constraints
-            }, find_or_create=kwargs.get("enable_reuse", True)).execute()
+            }, find_or_create=kwargs.get("enable_reuse", True)).execute(num_retries=self.arvrunner.num_retries)
 
             self.arvrunner.jobs[response["uuid"]] = self
 
-            logger.info("Job %s is %s", response["uuid"], response["state"])
+            self.arvrunner.pipeline["components"][self.name] = {"job": response}
+            self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
+                                                                                     body={
+                                                                                         "components": self.arvrunner.pipeline["components"]
+                                                                                     }).execute(num_retries=self.arvrunner.num_retries)
+
+            logger.info("Job %s (%s) is %s", self.name, response["uuid"], response["state"])
 
             if response["state"] in ("Complete", "Failed", "Cancelled"):
                 self.done(response)
@@ -156,8 +164,19 @@ class ArvadosJob(object):
             logger.error("Got error %s" % str(e))
             self.output_callback({}, "permanentFail")
 
+    def update_pipeline_component(self, record):
+        self.arvrunner.pipeline["components"][self.name] = {"job": record}
+        self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
+                                                                                 body={
+                                                                                    "components": self.arvrunner.pipeline["components"]
+                                                                                 }).execute(num_retries=self.arvrunner.num_retries)
 
     def done(self, record):
+        try:
+            self.update_pipeline_component(record)
+        except:
+            pass
+
         try:
             if record["state"] == "Complete":
                 processStatus = "success"
@@ -166,7 +185,8 @@ class ArvadosJob(object):
 
             try:
                 outputs = {}
-                outputs = self.collect_outputs("keep:" + record["output"])
+                if record["output"]:
+                    outputs = self.collect_outputs("keep:" + record["output"])
             except Exception as e:
                 logger.exception("Got exception while collecting job outputs:")
                 processStatus = "permanentFail"
@@ -188,7 +208,7 @@ class ArvPathMapper(cwltool.pathmapper.PathMapper):
                 self._pathmap[src] = (src, "$(task.keep)/%s" % src[5:])
             if src not in self._pathmap:
                 ab = cwltool.pathmapper.abspath(src, basedir)
-                st = arvados.commands.run.statfile("", ab)
+                st = arvados.commands.run.statfile("", ab, fnPattern="$(task.keep)/%s/%s")
                 if kwargs.get("conformance_test"):
                     self._pathmap[src] = (src, ab)
                 elif isinstance(st, arvados.commands.run.UploadFile):
@@ -231,6 +251,7 @@ class ArvCwlRunner(object):
         self.cond = threading.Condition(self.lock)
         self.final_output = None
         self.uploaded = {}
+        self.num_retries = 4
 
     def arvMakeTool(self, toolpath_object, **kwargs):
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
@@ -241,22 +262,33 @@ class ArvCwlRunner(object):
     def output_callback(self, out, processStatus):
         if processStatus == "success":
             logger.info("Overall job status is %s", processStatus)
+            self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
+                                                 body={"state": "Complete"}).execute(num_retries=self.num_retries)
+
         else:
             logger.warn("Overall job status is %s", processStatus)
+            self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
+                                                 body={"state": "Failed"}).execute(num_retries=self.num_retries)
         self.final_output = out
 
+
     def on_message(self, event):
         if "object_uuid" in event:
                 if event["object_uuid"] in self.jobs and event["event_type"] == "update":
                     if event["properties"]["new_attributes"]["state"] == "Running" and self.jobs[event["object_uuid"]].running is False:
-                        logger.info("Job %s is Running", event["object_uuid"])
+                        uuid = event["object_uuid"]
                         with self.lock:
-                            self.jobs[event["object_uuid"]].running = True
+                            j = self.jobs[uuid]
+                            logger.info("Job %s (%s) is Running", j.name, uuid)
+                            j.running = True
+                            j.update_pipeline_component(event["properties"]["new_attributes"])
                     elif event["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled"):
-                        logger.info("Job %s is %s", event["object_uuid"], event["properties"]["new_attributes"]["state"])
+                        uuid = event["object_uuid"]
                         try:
                             self.cond.acquire()
-                            self.jobs[event["object_uuid"]].done(event["properties"]["new_attributes"])
+                            j = self.jobs[uuid]
+                            logger.info("Job %s (%s) is %s", j.name, uuid, event["properties"]["new_attributes"]["state"])
+                            j.done(event["properties"]["new_attributes"])
                             self.cond.notify()
                         finally:
                             self.cond.release()
@@ -270,6 +302,10 @@ class ArvCwlRunner(object):
     def arvExecutor(self, tool, job_order, input_basedir, args, **kwargs):
         events = arvados.events.subscribe(arvados.api('v1'), [["object_uuid", "is_a", "arvados#job"]], self.on_message)
 
+        self.pipeline = self.api.pipeline_instances().create(body={"name": shortname(tool.tool["id"]),
+                                                                   "components": {},
+                                                                   "state": "RunningOnClient"}).execute(num_retries=self.num_retries)
+
         self.fs_access = CollectionFsAccess(input_basedir)
 
         kwargs["fs_access"] = self.fs_access
index bcf6b963830aca8570545045ab112ee79aa8216d..65ae16b5158aebe388afc7f42e2e247f4e13733f 100644 (file)
@@ -30,8 +30,8 @@ setup(name='arvados-cwl-runner',
           'bin/arvados-cwl-runner'
       ],
       install_requires=[
-          'cwltool>=1.0.20151026181844',
-          'arvados-python-client>=0.1.20151023214338'
+          'cwltool>=1.0.20160129152024',
+          'arvados-python-client>=0.1.20160122132348'
       ],
       zip_safe=True,
       cmdclass={'egg_info': tagger},
diff --git a/sdk/perl/.gitignore b/sdk/perl/.gitignore
new file mode 100644 (file)
index 0000000..7c32f55
--- /dev/null
@@ -0,0 +1 @@
+install
index e2e8ba19b64823af233c25eba1f90880dc9f4067..f24b1ed8142d3e3678ff819bfe4823ebe2add874 100644 (file)
@@ -207,6 +207,7 @@ def api(version=None, cache=True, host=None, token=None, insecure=False, **kwarg
 
     svc = apiclient_discovery.build('arvados', version, **kwargs)
     svc.api_token = token
+    svc.insecure = insecure
     kwargs['http'].max_request_size = svc._rootDesc.get('maxRequestSize', 0)
     kwargs['http'].cache = None
     return svc
index a10eb2b348aff4499648d6738b8042e5a9d4fa11..2ee97b9867036ded89244d1f11c2294efccb9ef3 100755 (executable)
@@ -24,6 +24,7 @@ import shutil
 import sys
 import logging
 import tempfile
+import urlparse
 
 import arvados
 import arvados.config
@@ -87,6 +88,13 @@ def main():
     copy_opts.add_argument(
         '--project-uuid', dest='project_uuid',
         help='The UUID of the project at the destination to which the pipeline should be copied.')
+    copy_opts.add_argument(
+        '--allow-git-http-src', action="store_true",
+        help='Allow cloning git repositories over insecure http')
+    copy_opts.add_argument(
+        '--allow-git-http-dst', action="store_true",
+        help='Allow pushing git repositories over insecure http')
+
     copy_opts.add_argument(
         'object_uuid',
         help='The UUID of the object to be copied.')
@@ -583,6 +591,55 @@ def copy_collection(obj_uuid, src, dst, args):
     c['manifest_text'] = dst_manifest
     return create_collection_from(c, src, dst, args)
 
+def select_git_url(api, repo_name, retries, allow_insecure_http, allow_insecure_http_opt):
+    r = api.repositories().list(
+        filters=[['name', '=', repo_name]]).execute(num_retries=retries)
+    if r['items_available'] != 1:
+        raise Exception('cannot identify repo {}; {} repos found'
+                        .format(repo_name, r['items_available']))
+
+    https_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("https:")]
+    http_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("http:")]
+    other_url = [c for c in r['items'][0]["clone_urls"] if not c.startswith("http")]
+
+    priority = https_url + other_url + http_url
+
+    git_config = []
+    git_url = None
+    for url in priority:
+        if url.startswith("http"):
+            u = urlparse.urlsplit(url)
+            baseurl = urlparse.urlunsplit((u.scheme, u.netloc, "", "", ""))
+            git_config = ["-c", "credential.%s/.username=none" % baseurl,
+                          "-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl]
+        else:
+            git_config = []
+
+        try:
+            logger.debug("trying %s", url)
+            arvados.util.run_command(["git"] + git_config + ["ls-remote", url],
+                                      env={"HOME": os.environ["HOME"],
+                                           "ARVADOS_API_TOKEN": api.api_token,
+                                           "GIT_ASKPASS": "/bin/false"})
+        except arvados.errors.CommandFailedError:
+            pass
+        else:
+            git_url = url
+            break
+
+    if not git_url:
+        raise Exception('Cannot access git repository, tried {}'
+                        .format(priority))
+
+    if git_url.startswith("http:"):
+        if allow_insecure_http:
+            logger.warn("Using insecure git url %s but will allow this because %s", git_url, allow_insecure_http_opt)
+        else:
+            raise Exception("Refusing to use insecure git url %s, use %s if you really want this." % (git_url, allow_insecure_http_opt))
+
+    return (git_url, git_config)
+
+
 # copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args)
 #
 #    Copies commits from git repository 'src_git_repo' on Arvados
@@ -600,21 +657,12 @@ def copy_collection(obj_uuid, src, dst, args):
 #
 def copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args):
     # Identify the fetch and push URLs for the git repositories.
-    r = src.repositories().list(
-        filters=[['name', '=', src_git_repo]]).execute(num_retries=args.retries)
-    if r['items_available'] != 1:
-        raise Exception('cannot identify source repo {}; {} repos found'
-                        .format(src_git_repo, r['items_available']))
-    src_git_url = r['items'][0]['fetch_url']
-    logger.debug('src_git_url: {}'.format(src_git_url))
 
-    r = dst.repositories().list(
-        filters=[['name', '=', dst_git_repo]]).execute(num_retries=args.retries)
-    if r['items_available'] != 1:
-        raise Exception('cannot identify destination repo {}; {} repos found'
-                        .format(dst_git_repo, r['items_available']))
-    dst_git_push_url  = r['items'][0]['push_url']
-    logger.debug('dst_git_push_url: {}'.format(dst_git_push_url))
+    (src_git_url, src_git_config) = select_git_url(src, src_git_repo, args.retries, args.allow_git_http_src, "--allow-git-http-src")
+    (dst_git_url, dst_git_config) = select_git_url(dst, dst_git_repo, args.retries, args.allow_git_http_dst, "--allow-git-http-dst")
+
+    logger.debug('src_git_url: {}'.format(src_git_url))
+    logger.debug('dst_git_url: {}'.format(dst_git_url))
 
     dst_branch = re.sub(r'\W+', '_', "{}_{}".format(src_git_url, script_version))
 
@@ -622,17 +670,23 @@ def copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args):
     if src_git_repo not in local_repo_dir:
         local_repo_dir[src_git_repo] = tempfile.mkdtemp()
         arvados.util.run_command(
-            ["git""clone", "--bare", src_git_url,
+            ["git"] + src_git_config + ["clone", "--bare", src_git_url,
              local_repo_dir[src_git_repo]],
-            cwd=os.path.dirname(local_repo_dir[src_git_repo]))
+            cwd=os.path.dirname(local_repo_dir[src_git_repo]),
+            env={"HOME": os.environ["HOME"],
+                 "ARVADOS_API_TOKEN": src.api_token,
+                 "GIT_ASKPASS": "/bin/false"})
         arvados.util.run_command(
-            ["git", "remote", "add", "dst", dst_git_push_url],
+            ["git", "remote", "add", "dst", dst_git_url],
             cwd=local_repo_dir[src_git_repo])
     arvados.util.run_command(
         ["git", "branch", dst_branch, script_version],
         cwd=local_repo_dir[src_git_repo])
-    arvados.util.run_command(["git", "push", "dst", dst_branch],
-                             cwd=local_repo_dir[src_git_repo])
+    arvados.util.run_command(["git"] + dst_git_config + ["push", "dst", dst_branch],
+                             cwd=local_repo_dir[src_git_repo],
+                             env={"HOME": os.environ["HOME"],
+                                  "ARVADOS_API_TOKEN": dst.api_token,
+                                  "GIT_ASKPASS": "/bin/false"})
 
 def copy_docker_images(pipeline, src, dst, args):
     """Copy any docker images named in the pipeline components'
index 5c8bced513c160dd64e2cdbf3f4433d72ce89fe6..ef39be81a4650cda86e20c6d13a7d23848398ecb 100644 (file)
@@ -81,7 +81,7 @@ def determine_project(root, current_user):
 # ArvFile() (file already exists in a collection), UploadFile() (file needs to
 # be uploaded to a collection), or simply returns prefix+fn (which yields the
 # original parameter string).
-def statfile(prefix, fn):
+def statfile(prefix, fn, fnPattern="$(file %s/%s)", dirPattern="$(dir %s/%s/)"):
     absfn = os.path.abspath(fn)
     if os.path.exists(absfn):
         st = os.stat(absfn)
@@ -89,7 +89,7 @@ def statfile(prefix, fn):
             sp = os.path.split(absfn)
             (pdh, branch) = is_in_collection(sp[0], sp[1])
             if pdh:
-                return ArvFile(prefix, "$(file %s/%s)" % (pdh, branch))
+                return ArvFile(prefix, fnPattern % (pdh, branch))
             else:
                 # trim leading '/' for path prefix test later
                 return UploadFile(prefix, absfn[1:])
@@ -97,7 +97,7 @@ def statfile(prefix, fn):
             sp = os.path.split(absfn)
             (pdh, branch) = is_in_collection(sp[0], sp[1])
             if pdh:
-                return ArvFile(prefix, "$(dir %s/%s/)" % (pdh, branch))
+                return ArvFile(prefix, dirPattern % (pdh, branch))
 
     return prefix+fn
 
index 94b8a9d06cfaec73b718b8514adcd3ba08ab2991..df824a331ea41a2fd702587be9c5d2828884ffb5 100644 (file)
@@ -190,7 +190,10 @@ def subscribe(api, filters, on_event, poll_fallback=15, last_log_id=None):
         return _subscribe_websocket(api, filters, on_event, last_log_id)
 
     try:
-        return _subscribe_websocket(api, filters, on_event, last_log_id)
+        if not config.flag_is_true('ARVADOS_DISABLE_WEBSOCKETS'):
+            return _subscribe_websocket(api, filters, on_event, last_log_id)
+        else:
+            _logger.info("Using polling because ARVADOS_DISABLE_WEBSOCKETS is true")
     except Exception as e:
         _logger.warn("Falling back to polling after websocket error: %s" % e)
     p = PollClient(api, filters, on_event, poll_fallback, last_log_id)
index b6518f95a17495851c72985af0e8b16a97e3f1b6..759e8ff67edf1ec8b99b0de86ee8a3e4602b73b7 100644 (file)
@@ -39,8 +39,9 @@ setup(name='arvados-python-client',
           ('share/doc/arvados-python-client', ['LICENSE-2.0.txt', 'README.rst']),
       ],
       install_requires=[
+          'google-api-python-client==1.4.2',
+          'oauth2client >=1.4.6, <2',
           'ciso8601',
-          'google-api-python-client',
           'httplib2',
           'pycurl >=7.19.5.1, <7.21.5',
           'python-gflags<3.0',
index 4ad5e10faa46b96222d4291596d2f47f686bf18c..29eb939002fa9dab98cb8feff4987d46151d0bc8 100644 (file)
@@ -28,3 +28,6 @@
 # Dev/test SSL certificates
 /self-signed.key
 /self-signed.pem
+
+# Generated git-commit.version file
+/git-commit.version
index f365a7fee8996e7c9ba51cdd11bc4d525c3e9193..2eb79c090dcd69a4e6e4d4157b0ea0f0d2de5afc 100644 (file)
@@ -56,7 +56,7 @@ class Arvados::V1::ApiClientAuthorizationsController < ApplicationController
         ((attr == 'scopes') and (operator == '=')) ? operand : nil
       })
       @filters.select! { |attr, operator, operand|
-        (attr == 'uuid') and (operator == '=')
+        ((attr == 'uuid') and (operator == '=')) || ((attr == 'api_token') and (operator == '='))
       }
     end
     if @where
@@ -74,14 +74,23 @@ class Arvados::V1::ApiClientAuthorizationsController < ApplicationController
   end
 
   def find_object_by_uuid
-    # Again, to make things easier for the client and our own routing,
-    # here we look for the api_token key in a "uuid" (POST) or "id"
-    # (GET) parameter.
-    @object = model_class.where('api_token=?', params[:uuid] || params[:id]).first
+    @object = model_class.where(uuid: (params[:uuid] || params[:id])).first
   end
 
   def current_api_client_is_trusted
     unless Thread.current[:api_client].andand.is_trusted
+      if params["action"] == "show"
+        if @object and @object['api_token'] == current_api_client_authorization.andand.api_token
+          return true
+        end
+      elsif params["action"] == "index" and @objects.andand.size == 1
+        filters = @filters.map{|f|f.first}.uniq
+        if ['uuid'] == filters
+          return true if @objects.first['api_token'] == current_api_client_authorization.andand.api_token
+        elsif ['api_token'] == filters
+          return true if @objects.first[:user_id] = current_user.id
+        end
+      end
       send_error('Forbidden: this API client cannot manipulate other clients\' access tokens.',
                  status: 403)
     end
index b9442d64e78bf888741e9e47d532f53d73059e22..c587e5830af41549c5bd637c7ffa9472bbf51017 100644 (file)
@@ -1,4 +1,5 @@
 class ApiClientAuthorization < ArvadosModel
+  include HasUuid
   include KindAndEtag
   include CommonApiTemplate
 
@@ -36,17 +37,6 @@ class ApiClientAuthorization < ArvadosModel
     self.user_id_changed?
   end
 
-  def uuid
-    self.api_token
-  end
-  def uuid=(x) end
-  def uuid_was
-    self.api_token_was
-  end
-  def uuid_changed?
-    self.api_token_changed?
-  end
-
   def modified_by_client_uuid
     nil
   end
diff --git a/services/api/db/migrate/20160208210629_add_uuid_to_api_client_authorization.rb b/services/api/db/migrate/20160208210629_add_uuid_to_api_client_authorization.rb
new file mode 100644 (file)
index 0000000..69da34c
--- /dev/null
@@ -0,0 +1,28 @@
+require 'has_uuid'
+
+class AddUuidToApiClientAuthorization < ActiveRecord::Migration
+  extend HasUuid::ClassMethods
+
+  def up
+    add_column :api_client_authorizations, :uuid, :string
+    add_index :api_client_authorizations, :uuid, :unique => true
+
+    prefix = Server::Application.config.uuid_prefix + '-' +
+             Digest::MD5.hexdigest('ApiClientAuthorization'.to_s).to_i(16).to_s(36)[-5..-1] + '-'
+
+    update_sql <<-EOS
+update api_client_authorizations set uuid = (select concat('#{prefix}',
+array_to_string(ARRAY (SELECT substring(api_token FROM (ceil(random()*36))::int FOR 1) FROM generate_series(1, 15)), '')
+));
+EOS
+
+    change_column_null :api_client_authorizations, :uuid, false
+  end
+
+  def down
+    if column_exists?(:api_client_authorizations, :uuid)
+      remove_index :api_client_authorizations, :uuid
+      remove_column :api_client_authorizations, :uuid
+    end
+  end
+end
diff --git a/services/api/db/migrate/20160209155729_add_uuid_to_api_token_search_index.rb b/services/api/db/migrate/20160209155729_add_uuid_to_api_token_search_index.rb
new file mode 100644 (file)
index 0000000..1bbc16a
--- /dev/null
@@ -0,0 +1,21 @@
+class AddUuidToApiTokenSearchIndex < ActiveRecord::Migration
+  def up
+    begin
+      remove_index :api_client_authorizations, :name => 'api_client_authorizations_search_index'
+    rescue
+    end
+    add_index :api_client_authorizations,
+              ["api_token", "created_by_ip_address", "last_used_by_ip_address", "default_owner_uuid", "uuid"],
+              name: "api_client_authorizations_search_index"
+  end
+
+  def down
+    begin
+      remove_index :api_client_authorizations, :name => 'api_client_authorizations_search_index'
+    rescue
+    end
+         add_index :api_client_authorizations,
+              ["api_token", "created_by_ip_address", "last_used_by_ip_address", "default_owner_uuid"],
+              name: "api_client_authorizations_search_index"
+  end
+end
index 0492c87e1a859df21063834741b7a0ff9c7701a5..e482e6e607b4141bbbc00f9b70352852be62ac90 100644 (file)
@@ -46,7 +46,8 @@ CREATE TABLE api_client_authorizations (
     default_owner_uuid character varying(255),
     scopes text DEFAULT '---
 - all
-'::text NOT NULL
+'::text NOT NULL,
+    uuid character varying(255) NOT NULL
 );
 
 
@@ -1414,7 +1415,7 @@ ALTER TABLE ONLY virtual_machines
 -- Name: api_client_authorizations_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace: 
 --
 
-CREATE INDEX api_client_authorizations_search_index ON api_client_authorizations USING btree (api_token, created_by_ip_address, last_used_by_ip_address, default_owner_uuid);
+CREATE INDEX api_client_authorizations_search_index ON api_client_authorizations USING btree (api_token, created_by_ip_address, last_used_by_ip_address, default_owner_uuid, uuid);
 
 
 --
@@ -1529,6 +1530,13 @@ CREATE INDEX index_api_client_authorizations_on_expires_at ON api_client_authori
 CREATE INDEX index_api_client_authorizations_on_user_id ON api_client_authorizations USING btree (user_id);
 
 
+--
+-- Name: index_api_client_authorizations_on_uuid; Type: INDEX; Schema: public; Owner: -; Tablespace: 
+--
+
+CREATE UNIQUE INDEX index_api_client_authorizations_on_uuid ON api_client_authorizations USING btree (uuid);
+
+
 --
 -- Name: index_api_clients_on_created_at; Type: INDEX; Schema: public; Owner: -; Tablespace: 
 --
@@ -2568,4 +2576,8 @@ INSERT INTO schema_migrations (version) VALUES ('20151202151426');
 
 INSERT INTO schema_migrations (version) VALUES ('20151215134304');
 
-INSERT INTO schema_migrations (version) VALUES ('20151229214707');
\ No newline at end of file
+INSERT INTO schema_migrations (version) VALUES ('20151229214707');
+
+INSERT INTO schema_migrations (version) VALUES ('20160208210629');
+
+INSERT INTO schema_migrations (version) VALUES ('20160209155729');
\ No newline at end of file
index 7169ebdc8a3699b84df58d6f090c32f289546582..f99a9fb941f1b26f44d2d4b4035a28afd84fbc08 100644 (file)
@@ -1,24 +1,28 @@
 # Read about fixtures at http://api.rubyonrails.org/classes/ActiveRecord/Fixtures.html
 
 system_user:
+  uuid: zzzzz-gj3su-017z32aux8dg2s1
   api_client: untrusted
   user: system_user
   api_token: systemusertesttoken1234567890aoeuidhtnsqjkxbmwvzpy
   expires_at: 2038-01-01 00:00:00
 
 admin:
+  uuid: zzzzz-gj3su-027z32aux8dg2s1
   api_client: untrusted
   user: admin
   api_token: 4axaw8zxe0qm22wa6urpp5nskcne8z88cvbupv653y1njyi05h
   expires_at: 2038-01-01 00:00:00
 
 admin_trustedclient:
+  uuid: zzzzz-gj3su-037z32aux8dg2s1
   api_client: trusted_workbench
   user: admin
   api_token: 1a9ffdcga2o7cw8q12dndskomgs1ygli3ns9k2o9hgzgmktc78
   expires_at: 2038-01-01 00:00:00
 
 data_manager:
+  uuid: zzzzz-gj3su-047z32aux8dg2s1
   api_client: untrusted
   user: system_user
   api_token: 320mkve8qkswstz7ff61glpk3mhgghmg67wmic7elw4z41pke1
@@ -31,30 +35,35 @@ data_manager:
     - POST /arvados/v1/logs
 
 miniadmin:
+  uuid: zzzzz-gj3su-057z32aux8dg2s1
   api_client: untrusted
   user: miniadmin
   api_token: 2zb2y9pw3e70270te7oe3ewaantea3adyxjascvkz0zob7q7xb
   expires_at: 2038-01-01 00:00:00
 
 rominiadmin:
+  uuid: zzzzz-gj3su-067z32aux8dg2s1
   api_client: untrusted
   user: rominiadmin
   api_token: 5tsb2pc3zlatn1ortl98s2tqsehpby88wmmnzmpsjmzwa6payh
   expires_at: 2038-01-01 00:00:00
 
 active:
+  uuid: zzzzz-gj3su-077z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: 3kg6k6lzmp9kj5cpkcoxie963cmvjahbt2fod9zru30k1jqdmi
   expires_at: 2038-01-01 00:00:00
 
 active_trustedclient:
+  uuid: zzzzz-gj3su-087z32aux8dg2s1
   api_client: trusted_workbench
   user: active
   api_token: 27bnddk6x2nmq00a1e3gq43n9tsl5v87a3faqar2ijj8tud5en
   expires_at: 2038-01-01 00:00:00
 
 active_noscope:
+  uuid: zzzzz-gj3su-097z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: activenoscopeabcdefghijklmnopqrstuvwxyz12345678901
@@ -62,24 +71,28 @@ active_noscope:
   scopes: []
 
 project_viewer:
+  uuid: zzzzz-gj3su-107z32aux8dg2s1
   api_client: untrusted
   user: project_viewer
   api_token: projectviewertoken1234567890abcdefghijklmnopqrstuv
   expires_at: 2038-01-01 00:00:00
 
 project_viewer_trustedclient:
+  uuid: zzzzz-gj3su-117z32aux8dg2s1
   api_client: trusted_workbench
   user: project_viewer
   api_token: projectviewertrustedtoken1234567890abcdefghijklmno
   expires_at: 2038-01-01 00:00:00
 
 subproject_admin:
+  uuid: zzzzz-gj3su-127z32aux8dg2s1
   api_client: untrusted
   user: subproject_admin
   api_token: subprojectadmintoken1234567890abcdefghijklmnopqrst
   expires_at: 2038-01-01 00:00:00
 
 admin_vm:
+  uuid: zzzzz-gj3su-137z32aux8dg2s1
   api_client: untrusted
   user: admin
   api_token: adminvirtualmachineabcdefghijklmnopqrstuvwxyz12345
@@ -88,6 +101,7 @@ admin_vm:
   scopes: ["GET /arvados/v1/virtual_machines/zzzzz-2x53u-382brsig8rp3064/logins"]
 
 admin_noscope:
+  uuid: zzzzz-gj3su-147z32aux8dg2s1
   api_client: untrusted
   user: admin
   api_token: adminnoscopeabcdefghijklmnopqrstuvwxyz123456789012
@@ -95,6 +109,7 @@ admin_noscope:
   scopes: []
 
 active_all_collections:
+  uuid: zzzzz-gj3su-157z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: activecollectionsabcdefghijklmnopqrstuvwxyz1234567
@@ -102,6 +117,7 @@ active_all_collections:
   scopes: ["GET /arvados/v1/collections/", "GET /arvados/v1/keep_services/accessible"]
 
 active_userlist:
+  uuid: zzzzz-gj3su-167z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: activeuserlistabcdefghijklmnopqrstuvwxyz1234568900
@@ -109,6 +125,7 @@ active_userlist:
   scopes: ["GET /arvados/v1/users"]
 
 active_specimens:
+  uuid: zzzzz-gj3su-177z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: activespecimensabcdefghijklmnopqrstuvwxyz123456890
@@ -116,6 +133,7 @@ active_specimens:
   scopes: ["GET /arvados/v1/specimens/"]
 
 active_apitokens:
+  uuid: zzzzz-gj3su-187z32aux8dg2s1
   api_client: trusted_workbench
   user: active
   api_token: activeapitokensabcdefghijklmnopqrstuvwxyz123456789
@@ -124,6 +142,7 @@ active_apitokens:
            "POST /arvados/v1/api_client_authorizations"]
 
 active_readonly:
+  uuid: zzzzz-gj3su-197z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: activereadonlyabcdefghijklmnopqrstuvwxyz1234568790
@@ -131,12 +150,14 @@ active_readonly:
   scopes: ["GET /"]
 
 spectator:
+  uuid: zzzzz-gj3su-207z32aux8dg2s1
   api_client: untrusted
   user: spectator
   api_token: zw2f4gwx8hw8cjre7yp6v1zylhrhn3m5gvjq73rtpwhmknrybu
   expires_at: 2038-01-01 00:00:00
 
 spectator_specimens:
+  uuid: zzzzz-gj3su-217z32aux8dg2s1
   api_client: untrusted
   user: spectator
   api_token: spectatorspecimensabcdefghijklmnopqrstuvwxyz123245
@@ -145,42 +166,49 @@ spectator_specimens:
            "POST /arvados/v1/specimens"]
 
 inactive:
+  uuid: zzzzz-gj3su-227z32aux8dg2s1
   api_client: untrusted
   user: inactive
   api_token: 5s29oj2hzmcmpq80hx9cta0rl5wuf3xfd6r7disusaptz7h9m0
   expires_at: 2038-01-01 00:00:00
 
 inactive_uninvited:
+  uuid: zzzzz-gj3su-237z32aux8dg2s1
   api_client: untrusted
   user: inactive_uninvited
   api_token: 62mhllc0otp78v08e3rpa3nsmf8q8ogk47f7u5z4erp5gpj9al
   expires_at: 2038-01-01 00:00:00
 
 inactive_but_signed_user_agreement:
+  uuid: zzzzz-gj3su-247z32aux8dg2s1
   api_client: untrusted
   user: inactive_but_signed_user_agreement
   api_token: 64k3bzw37iwpdlexczj02rw3m333rrb8ydvn2qq99ohv68so5k
   expires_at: 2038-01-01 00:00:00
 
 expired:
+  uuid: zzzzz-gj3su-257z32aux8dg2s1
   api_client: untrusted
   user: active
   api_token: 2ym314ysp27sk7h943q6vtc378srb06se3pq6ghurylyf3pdmx
   expires_at: 1970-01-01 00:00:00
 
 expired_trustedclient:
+  uuid: zzzzz-gj3su-267z32aux8dg2s1
   api_client: trusted_workbench
   user: active
   api_token: 5hpni7izokzcatku2896xxwqdbt5ptomn04r6auc7fohnli82v
   expires_at: 1970-01-01 00:00:00
 
 valid_token_deleted_user:
+  uuid: zzzzz-gj3su-277z32aux8dg2s1
   api_client: trusted_workbench
   user_id: 1234567
   api_token: tewfa58099sndckyqhlgd37za6e47o6h03r9l1vpll23hudm8b
   expires_at: 2038-01-01 00:00:00
 
 anonymous:
+  uuid: zzzzz-gj3su-287z32aux8dg2s1
   api_client: untrusted
   user: anonymous
   api_token: 4kg6k6lzmp9kj4cpkcoxie964cmvjahbt4fod9zru44k4jqdmi
@@ -188,48 +216,56 @@ anonymous:
   scopes: ["GET /"]
 
 job_reader:
+  uuid: zzzzz-gj3su-297z32aux8dg2s1
   api_client: untrusted
   user: job_reader
   api_token: e99512cdc0f3415c2428b9758f33bdfb07bc3561b00e86e7e6
   expires_at: 2038-01-01 00:00:00
 
 active_no_prefs:
+  uuid: zzzzz-gj3su-307z32aux8dg2s1
   api_client: untrusted
   user: active_no_prefs
   api_token: 3kg612cdc0f3415c2428b9758f33bdfb07bc3561b00e86qdmi
   expires_at: 2038-01-01 00:00:00
 
 active_no_prefs_profile_no_getting_started_shown:
+  uuid: zzzzz-gj3su-317z32aux8dg2s1
   api_client: untrusted
   user: active_no_prefs_profile_no_getting_started_shown
   api_token: 3kg612cdc0f3415c242856758f33bdfb07bc3561b00e86qdmi
   expires_at: 2038-01-01 00:00:00
 
 active_no_prefs_profile_with_getting_started_shown:
+  uuid: zzzzz-gj3su-327z32aux8dg2s1
   api_client: untrusted
   user: active_no_prefs_profile_with_getting_started_shown
   api_token: 3kg612cdc0f3415c245786758f33bdfb07babcd1b00e86qdmi
   expires_at: 2038-01-01 00:00:00
 
 active_with_prefs_profile_no_getting_started_shown:
+  uuid: zzzzz-gj3su-337z32aux8dg2s1
   api_client: untrusted
   user: active_with_prefs_profile_no_getting_started_shown
   api_token: 3kg612cdc0f3415c245786758f33bdfb07befgh1b00e86qdmi
   expires_at: 2038-01-01 00:00:00
 
 user_foo_in_sharing_group:
+  uuid: zzzzz-gj3su-347z32aux8dg2s1
   api_client: untrusted
   user: user_foo_in_sharing_group
   api_token: 2p1pou8p4ls208mcbedeewlotghppenobcyrmyhq8pyf51xd8u
   expires_at: 2038-01-01 00:00:00
 
 user1_with_load:
+  uuid: zzzzz-gj3su-357z32aux8dg2s1
   api_client: untrusted
   user: user1_with_load
   api_token: 1234k6lzmp9kj5cpkcoxie963cmvjahbt2fod9zru30k1jqdmi
   expires_at: 2038-01-01 00:00:00
 
 fuse:
+  uuid: zzzzz-gj3su-367z32aux8dg2s1
   api_client: untrusted
   user: fuse
   api_token: 4nagbkv8eap0uok7pxm72nossq5asihls3yn5p4xmvqx5t5e7p
index 556a5c047120923aa557a5fe38eab002313aaad3..b1154a8399e478d9fd2147fbb5fdb6cc8d46e016 100644 (file)
@@ -244,6 +244,7 @@ real_log_collection:
   uuid: zzzzz-4zz18-op4e2lbej01tcvu
   owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
   created_at: 2014-09-01 12:00:00
+  modified_at: 2014-09-01 12:00:00
   portable_data_hash: 0b9a7787660e1fce4a93f33e01376ba6+81
   manifest_text: ". cdd549ae79fe6640fa3d5c6261d8303c+195 0:195:zzzzz-8i9sb-0vsrcqi7whchuil.log.txt\n"
   name: real_log_collection
@@ -278,6 +279,7 @@ collection_owned_by_foo:
   manifest_text: ". 73feffa4b7f6bb68e44cf984c85f6e88+3 0:3:baz\n"
   owner_uuid: zzzzz-tpzed-81hsbo6mk8nl05c
   created_at: 2014-02-03T17:22:54Z
+  modified_at: 2014-02-03T17:22:54Z
   name: collection_owned_by_foo
 
 collection_to_remove_from_subproject:
@@ -287,6 +289,7 @@ collection_to_remove_from_subproject:
   manifest_text: ". 8258b505536a9ab47baa2f4281cb932a+9 0:9:missingno\n"
   owner_uuid: zzzzz-j7d0g-axqo7eu9pwvna1x
   created_at: 2014-10-15T10:45:00
+  modified_at: 2014-10-15T10:45:00
   name: Collection to remove from subproject
 
 collection_with_files_in_subdir:
@@ -307,6 +310,8 @@ graph_test_collection1:
   portable_data_hash: fa7aeb5140e2848d39b416daeef4ffc5+45
   manifest_text: ". 37b51d194a7513e45b56f6524f2d51f2+3 0:3:bar\n"
   name: bar_file
+  created_at: 2014-02-03T17:22:54Z
+  modified_at: 2014-02-03T17:22:54Z
 
 graph_test_collection2:
   uuid: zzzzz-4zz18-uukreo9rbgwsujx
@@ -314,6 +319,8 @@ graph_test_collection2:
   portable_data_hash: 65b17c95fdbc9800fc48acda4e9dcd0b+93
   manifest_text: ". 6a4ff0499484c6c79c95cd8c566bd25f+249025 0:249025:FOO_General_Public_License,_version_3.pdf\n"
   name: "FOO General Public License, version 3"
+  created_at: 2014-02-03T17:22:54Z
+  modified_at: 2014-02-03T17:22:54Z
 
 graph_test_collection3:
   uuid: zzzzz-4zz18-uukreo9rbgwsujj
@@ -321,6 +328,8 @@ graph_test_collection3:
   portable_data_hash: ea10d51bcf88862dbcc36eb292017dfd+45
   manifest_text: ". 73feffa4b7f6bb68e44cf984c85f6e88+3 0:3:baz\n"
   name: "baz file"
+  created_at: 2014-02-03T17:22:54Z
+  modified_at: 2014-02-03T17:22:54Z
 
 collection_1_owned_by_fuse:
   uuid: zzzzz-4zz18-ovx05bfzormx3bg
@@ -557,6 +566,7 @@ collection_<%=i%>_of_10:
   uuid: zzzzz-4zz18-10gneyn6brkx<%= i.to_s.rjust(3, '0') %>
   owner_uuid: zzzzz-j7d0g-0010collections
   created_at: <%= i.minute.ago.to_s(:db) %>
+  modified_at: <%= i.minute.ago.to_s(:db) %>
 <% end %>
 
 # collections in project_with_201_collections
@@ -568,6 +578,7 @@ collection_<%=i%>_of_201:
   uuid: zzzzz-4zz18-201gneyn6brd<%= i.to_s.rjust(3, '0') %>
   owner_uuid: zzzzz-j7d0g-0201collections
   created_at: <%= i.minute.ago.to_s(:db) %>
+  modified_at: <%= i.minute.ago.to_s(:db) %>
 <% end %>
 
 # Do not add your fixtures below this line as the rest of this file will be trimmed by test_helper
index f6b99a06617a860d9d4c6681b60c2861b426d4e1..4029846484d41a79acc2443246bae76a8c526fa3 100644 (file)
@@ -282,3 +282,15 @@ subproject_in_asubproject_with_same_name_as_one_in_active_user_home:
   name: Subproject to test owner uuid and name unique key violation upon removal
   description: "Removing this will result in name conflict with 'A project' in Home project and hence get renamed."
   group_class: project
+
+starred_and_shared_active_user_project:
+  uuid: zzzzz-j7d0g-starredshared01
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2014-04-21 15:37:48 -0400
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  modified_at: 2014-04-21 15:37:48 -0400
+  updated_at: 2014-04-21 15:37:48 -0400
+  name: Starred and shared active user project
+  description: Starred and shared active user project
+  group_class: project
index 925e4661248279b1543052fd8f8cc563e5efc8d8..7ed7f6bcf35636ad4ccc572b2da2e3044f6a277a 100644 (file)
@@ -920,3 +920,38 @@ empty_collection_name_in_fuse_user_home_project:
   properties: {}
   updated_at: 2014-08-06 22:11:51.242010312 Z
 
+star_project_for_active_user:
+  uuid: zzzzz-o0j2j-starredbyactive
+  owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  created_at: 2014-01-24 20:42:26 -0800
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-000000000000000
+  modified_at: 2014-01-24 20:42:26 -0800
+  updated_at: 2014-01-24 20:42:26 -0800
+  tail_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+  link_class: star
+  name: zzzzz-j7d0g-starredshared01
+  head_uuid: zzzzz-j7d0g-starredshared01
+  properties: {}
+
+share_starred_project_with_project_viewer:
+  uuid: zzzzz-o0j2j-sharewithviewer
+  owner_uuid: zzzzz-tpzed-000000000000000
+  tail_uuid: zzzzz-tpzed-projectviewer1a
+  link_class: permission
+  name: can_read
+  head_uuid: zzzzz-j7d0g-starredshared01
+
+star_shared_project_for_project_viewer:
+  uuid: zzzzz-o0j2j-starredbyviewer
+  owner_uuid: zzzzz-tpzed-projectviewer1a
+  created_at: 2014-01-24 20:42:26 -0800
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-000000000000000
+  modified_at: 2014-01-24 20:42:26 -0800
+  updated_at: 2014-01-24 20:42:26 -0800
+  tail_uuid: zzzzz-tpzed-projectviewer1a
+  link_class: star
+  name: zzzzz-j7d0g-starredshared01
+  head_uuid: zzzzz-j7d0g-starredshared01
+  properties: {}
index 8877719b5bd613673581a9b118c5cf3ae9e41f9e..5da9145a81e052b3ef5a471f672c7568399e428b 100644 (file)
@@ -23,6 +23,7 @@ class Arvados::V1::ApiClientAuthorizationsControllerTest < ActionController::Tes
     authorize_with :admin_trustedclient
     post :create_system_auth, scopes: '["test"]'
     assert_response :success
+    assert_not_nil JSON.parse(@response.body)['uuid']
   end
 
   test "prohibit create system auth with token from non-trusted client" do
@@ -66,4 +67,47 @@ class Arvados::V1::ApiClientAuthorizationsControllerTest < ActionController::Tes
       assert_found_tokens(auth, {filters: [['scopes', '=', scopes]]}, *expected)
     end
   end
+
+  [
+    [:admin, :admin, 200],
+    [:admin, :active, 403],
+    [:admin, :admin_vm, 403], # this belongs to the user of current session, but we can't get it by uuid
+    [:admin_trustedclient, :active, 200],
+  ].each do |user, token, status|
+    test "as user #{user} get #{token} token and expect #{status}" do
+      authorize_with user
+      get :show, {id: api_client_authorizations(token).uuid}
+      assert_response status
+    end
+  end
+
+  [
+    [:admin, :admin, 200],
+    [:admin, :active, 403],
+    [:admin, :admin_vm, 403], # this belongs to the user of current session, but we can't list it by uuid
+    [:admin_trustedclient, :active, 200],
+  ].each do |user, token, status|
+    test "as user #{user} list #{token} token using uuid and expect #{status}" do
+      authorize_with user
+      get :index, {
+        filters: [['uuid','=',api_client_authorizations(token).uuid]]
+      }
+      assert_response status
+    end
+  end
+
+  [
+    [:admin, :admin, 200],
+    [:admin, :active, 403],
+    [:admin, :admin_vm, 200], # this belongs to the user of current session, and can be listed by token
+    [:admin_trustedclient, :active, 200],
+  ].each do |user, token, status|
+    test "as user #{user} list #{token} token using token and expect #{status}" do
+      authorize_with user
+      get :index, {
+        filters: [['api_token','=',api_client_authorizations(token).api_token]]
+      }
+      assert_response status
+    end
+  end
 end
index 1d7bb77725222ed98c4272308a2717edaf86b6cd..6623c726df01923b7227d33f17e6f2098cab649e 100644 (file)
@@ -380,4 +380,47 @@ class Arvados::V1::GroupsControllerTest < ActionController::TestCase
                  'A Project (2)',
                  "new project name '#{new_project['name']}' was expected to be 'A Project (2)'")
   end
+
+  test "unsharing a project results in hiding it from previously shared user" do
+    # remove sharing link for project
+    @controller = Arvados::V1::LinksController.new
+    authorize_with :admin
+    post :destroy, id: links(:share_starred_project_with_project_viewer).uuid
+    assert_response :success
+
+    # verify that the user can no longer see the project
+    @counter = 0  # Reset executed action counter
+    @controller = Arvados::V1::GroupsController.new
+    authorize_with :project_viewer
+    get :index, filters: [['group_class', '=', 'project']], format: :json
+    assert_response :success
+    found_projects = {}
+    json_response['items'].each do |g|
+      found_projects[g['uuid']] = g
+    end
+    assert_equal false, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
+
+    # share the project
+    @counter = 0
+    @controller = Arvados::V1::LinksController.new
+    authorize_with :system_user
+    post :create, link: {
+      link_class: "permission",
+      name: "can_read",
+      head_uuid: groups(:starred_and_shared_active_user_project).uuid,
+      tail_uuid: users(:project_viewer).uuid,
+    }
+
+    # verify that project_viewer user can now see shared project again
+    @counter = 0
+    @controller = Arvados::V1::GroupsController.new
+    authorize_with :project_viewer
+    get :index, filters: [['group_class', '=', 'project']], format: :json
+    assert_response :success
+    found_projects = {}
+    json_response['items'].each do |g|
+      found_projects[g['uuid']] = g
+    end
+    assert_equal true, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
+  end
 end
index 1229f2917e21f9b17d897db8fe85e7adb4e43429..9b7eb7543a4ebed086aba2d409f44fcc789ef222 100644 (file)
@@ -130,8 +130,9 @@ func GetCollections(params GetCollectionsParams) (results ReadCollections, err e
 
        sdkParams := arvadosclient.Dict{
                "select":  fieldsWanted,
-               "order":   []string{"modified_at ASC"},
-               "filters": [][]string{[]string{"modified_at", ">=", "1900-01-01T00:00:00Z"}}}
+               "order":   []string{"modified_at ASC", "uuid ASC"},
+               "filters": [][]string{[]string{"modified_at", ">=", "1900-01-01T00:00:00Z"}},
+               "offset": 0}
 
        if params.BatchSize > 0 {
                sdkParams["limit"] = params.BatchSize
@@ -176,9 +177,10 @@ func GetCollections(params GetCollectionsParams) (results ReadCollections, err e
 
        // These values are just for getting the loop to run the first time,
        // afterwards they'll be set to real values.
-       previousTotalCollections := -1
-       totalCollections := 0
-       for totalCollections > previousTotalCollections {
+       remainingCollections := 1
+       var totalCollections int
+       var previousTotalCollections int
+       for remainingCollections > 0 {
                // We're still finding new collections
 
                // Write the heap profile for examining memory usage
@@ -193,6 +195,16 @@ func GetCollections(params GetCollectionsParams) (results ReadCollections, err e
                if err != nil {
                        return
                }
+               batchCollections := len(collections.Items)
+
+               // We must always have at least one collection in the batch
+               if batchCollections < 1 {
+                       err = fmt.Errorf("API query returned no collections for %+v", sdkParams)
+                       return
+               }
+
+               // Update count of remaining collections
+               remainingCollections = collections.ItemsAvailable - sdkParams["offset"].(int) - batchCollections
 
                // Process collection and update our date filter.
                latestModificationDate, maxManifestSize, totalManifestSize, err := ProcessCollections(params.Logger,
@@ -202,16 +214,24 @@ func GetCollections(params GetCollectionsParams) (results ReadCollections, err e
                if err != nil {
                        return results, err
                }
-               sdkParams["filters"].([][]string)[0][2] = latestModificationDate.Format(time.RFC3339)
+               if sdkParams["filters"].([][]string)[0][2] != latestModificationDate.Format(time.RFC3339) {
+                       sdkParams["filters"].([][]string)[0][2] = latestModificationDate.Format(time.RFC3339)
+                       sdkParams["offset"] = 0
+               } else {
+                       sdkParams["offset"] = sdkParams["offset"].(int) + batchCollections
+               }
 
                // update counts
                previousTotalCollections = totalCollections
                totalCollections = len(results.UUIDToCollection)
 
-               log.Printf("%d collections read, %d new in last batch, "+
+               log.Printf("%d collections read, %d (%d new) in last batch, "+
+                       "%d remaining, "+
                        "%s latest modified date, %.0f %d %d avg,max,total manifest size",
                        totalCollections,
+                       batchCollections,
                        totalCollections-previousTotalCollections,
+                       remainingCollections,
                        sdkParams["filters"].([][]string)[0][2],
                        float32(totalManifestSize)/float32(totalCollections),
                        maxManifestSize, totalManifestSize)
@@ -227,6 +247,30 @@ func GetCollections(params GetCollectionsParams) (results ReadCollections, err e
                }
        }
 
+       // Make one final API request to verify that we have processed all collections available up to the latest modification date
+       var collections SdkCollectionList
+       sdkParams["filters"].([][]string)[0][1] = "<="
+       sdkParams["limit"] = 0
+       err = params.Client.List("collections", sdkParams, &collections)
+       if err != nil {
+               return
+       }
+       finalNumberOfCollectionsAvailable, err :=
+               util.NumberItemsAvailable(params.Client, "collections")
+       if err != nil {
+               return
+       }
+       if totalCollections < finalNumberOfCollectionsAvailable {
+               err = fmt.Errorf("API server indicates a total of %d collections "+
+                               "available up to %v, but we only retrieved %d. "+
+                               "Refusing to continue as this could indicate an "+
+                               "otherwise undetected failure.",
+                               finalNumberOfCollectionsAvailable, 
+                               sdkParams["filters"].([][]string)[0][2],
+                               totalCollections)
+               return
+       }
+
        // Write the heap profile for examining memory usage
        err = WriteHeapProfile()
 
index e168940fdd660f9c3487fea26e1decdec8ba9098..5810411c89bca1d3a31b3d03f65748456fec78db 100644 (file)
@@ -76,6 +76,8 @@ func DoGenericVolumeTests(t TB, factory TestableVolumeFactory) {
        testPutConcurrent(t, factory)
 
        testPutFullBlock(t, factory)
+
+       testTrashUntrash(t, factory)
 }
 
 // Put a test block, get it and verify content
@@ -696,3 +698,63 @@ func testPutFullBlock(t TB, factory TestableVolumeFactory) {
                t.Error("rdata != wdata")
        }
 }
+
+// With trashLifetime != 0, perform:
+// Trash an old block - which either raises ErrNotImplemented or succeeds
+// Untrash -  which either raises ErrNotImplemented or succeeds
+// Get - which must succeed
+func testTrashUntrash(t TB, factory TestableVolumeFactory) {
+       v := factory(t)
+       defer v.Teardown()
+       defer func() {
+               trashLifetime = 0
+       }()
+
+       trashLifetime = 3600 * time.Second
+
+       // put block and backdate it
+       v.PutRaw(TestHash, TestBlock)
+       v.TouchWithDate(TestHash, time.Now().Add(-2*blobSignatureTTL))
+
+       buf, err := v.Get(TestHash)
+       if err != nil {
+               t.Fatal(err)
+       }
+       if bytes.Compare(buf, TestBlock) != 0 {
+               t.Errorf("Got data %+q, expected %+q", buf, TestBlock)
+       }
+       bufs.Put(buf)
+
+       // Trash
+       err = v.Trash(TestHash)
+       if v.Writable() == false {
+               if err != MethodDisabledError {
+                       t.Error(err)
+               }
+       } else if err != nil {
+               if err != ErrNotImplemented {
+                       t.Error(err)
+               }
+       } else {
+               _, err = v.Get(TestHash)
+               if err == nil || !os.IsNotExist(err) {
+                       t.Errorf("os.IsNotExist(%v) should have been true", err)
+               }
+
+               // Untrash
+               err = v.Untrash(TestHash)
+               if err != nil {
+                       t.Fatal(err)
+               }
+       }
+
+       // Get the block - after trash and untrash sequence
+       buf, err = v.Get(TestHash)
+       if err != nil {
+               t.Fatal(err)
+       }
+       if bytes.Compare(buf, TestBlock) != 0 {
+               t.Errorf("Got data %+q, expected %+q", buf, TestBlock)
+       }
+       bufs.Put(buf)
+}
index c98c95af66d89b257be6f7e79d4fd4371138281c..9bdcc5f7a123e5a0d15dc237e2144bfe5358fdb4 100644 (file)
@@ -131,12 +131,9 @@ class BaseComputeNodeDriver(RetryMixin):
             self.ping_host, arvados_node['uuid'],
             arvados_node['info']['ping_secret'])
 
-    def find_node(self, name):
-        node = [n for n in self.list_nodes() if n.name == name]
-        if node:
-            return node[0]
-        else:
-            return None
+    @staticmethod
+    def _name_key(cloud_object):
+        return cloud_object.name
 
     def create_node(self, size, arvados_node):
         try:
@@ -151,15 +148,12 @@ class BaseComputeNodeDriver(RetryMixin):
             # loop forever because subsequent create_node attempts will fail
             # due to node name collision.  So check if the node we intended to
             # create shows up in the cloud node list and return it if found.
-            try:
-                node = self.find_node(kwargs['name'])
-                if node:
-                    return node
-            except:
-                # Ignore possible exception from find_node in favor of
-                # re-raising the original create_node exception.
-                pass
-            raise
+            node = self.search_for(kwargs['name'], 'list_nodes', self._name_key)
+            if node:
+                return node
+            else:
+                # something else went wrong, re-raise the exception
+                raise
 
     def post_create_node(self, cloud_node):
         # ComputeNodeSetupActor calls this method after the cloud node is
index d89c48e270bcc119638c70fc3d5f2928fbe1f8e3..991a2983c7217f1a29368293513587d117d01d59 100644 (file)
@@ -75,9 +75,6 @@ class ComputeNodeDriver(BaseComputeNodeDriver):
         self.real.ex_create_tags(cloud_node,
                                  {'Name': arvados_node_fqdn(arvados_node)})
 
-    def find_node(self, name):
-        raise NotImplementedError("ec2.ComputeNodeDriver.find_node")
-
     def list_nodes(self):
         # Need to populate Node.size
         nodes = super(ComputeNodeDriver, self).list_nodes()
index c5bf0b8cda42d211adcfbb61ffb3d73f460a7830..bbabdd4c761b5a0e3809449878227adea7db0a5a 100644 (file)
@@ -38,10 +38,6 @@ class ComputeNodeDriver(BaseComputeNodeDriver):
         self._disktype_links = {dt.name: self._object_link(dt)
                                 for dt in self.real.ex_list_disktypes()}
 
-    @staticmethod
-    def _name_key(cloud_object):
-        return cloud_object.name
-
     @staticmethod
     def _object_link(cloud_object):
         return cloud_object.extra.get('selfLink')
index dd45165deaa1514789c42428dacbb4bcf862b5a5..dcfe1ceb133e671527e70967dd25a783d64210a6 100644 (file)
@@ -12,13 +12,15 @@ import httplib2
 import pykka
 from apiclient import errors as apierror
 
+from .fullstopactor import FullStopActor
+
 # IOError is the base class for socket.error, ssl.SSLError, and friends.
 # It seems like it hits the sweet spot for operations we want to retry:
 # it's low-level, but unlikely to catch code bugs.
 NETWORK_ERRORS = (IOError,)
 ARVADOS_ERRORS = NETWORK_ERRORS + (apierror.Error,)
 
-actor_class = pykka.ThreadingActor
+actor_class = FullStopActor
 
 class NodeManagerConfig(ConfigParser.SafeConfigParser):
     """Node Manager Configuration class.
diff --git a/services/nodemanager/arvnodeman/fullstopactor.py b/services/nodemanager/arvnodeman/fullstopactor.py
new file mode 100644 (file)
index 0000000..07e0625
--- /dev/null
@@ -0,0 +1,17 @@
+from __future__ import absolute_import, print_function
+
+import errno
+import logging
+import os
+import threading
+import traceback
+
+import pykka
+
+class FullStopActor(pykka.ThreadingActor):
+    def on_failure(self, exception_type, exception_value, tb):
+        lg = getattr(self, "_logger", logging)
+        if (exception_type in (threading.ThreadError, MemoryError) or
+            exception_type is OSError and exception_value.errno == errno.ENOMEM):
+            lg.critical("Unhandled exception is a fatal error, killing Node Manager")
+            os.killpg(os.getpgid(0), 9)
diff --git a/services/nodemanager/tests/test_failure.py b/services/nodemanager/tests/test_failure.py
new file mode 100644 (file)
index 0000000..afebb9c
--- /dev/null
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+from __future__ import absolute_import, print_function
+
+import errno
+import logging
+import threading
+import unittest
+
+import mock
+import pykka
+
+from . import testutil
+
+import arvnodeman.fullstopactor
+
+class BogusActor(arvnodeman.fullstopactor.FullStopActor):
+    def __init__(self, e):
+        super(BogusActor, self).__init__()
+        self.exp = e
+
+    def doStuff(self):
+        raise self.exp
+
+class ActorUnhandledExceptionTest(unittest.TestCase):
+    def test1(self):
+        for e in (MemoryError(), threading.ThreadError(), OSError(errno.ENOMEM, "")):
+            with mock.patch('os.killpg') as killpg_mock:
+                act = BogusActor.start(e)
+                act.tell({
+                    'command': 'pykka_call',
+                    'attr_path': ("doStuff",),
+                    'args': [],
+                    'kwargs': {}
+                })
+                act.stop(block=True)
+                self.assertTrue(killpg_mock.called)
+
+        with mock.patch('os.killpg') as killpg_mock:
+            act = BogusActor.start(OSError(errno.ENOENT, ""))
+            act.tell({
+                'command': 'pykka_call',
+                'attr_path': ("doStuff",),
+                'args': [],
+                'kwargs': {}
+            })
+            act.stop(block=True)
+            self.assertFalse(killpg_mock.called)
index 0d5d6a246cdb084205137b84f1607141146336c2..2ac12abcba23e381073589cf209915b88a9d8cef 100644 (file)
@@ -21,6 +21,11 @@ from crunchstat_summary import logger
 AVAILABLE_RAM_RATIO = 0.95
 
 
+# Workaround datetime.datetime.strptime() thread-safety bug by calling
+# it once before starting threads.  https://bugs.python.org/issue7980
+datetime.datetime.strptime('1999-12-31_23:59:59', '%Y-%m-%d_%H:%M:%S')
+
+
 class Task(object):
     def __init__(self):
         self.starttime = None
@@ -197,6 +202,8 @@ class Summarizer(object):
         return label
 
     def text_report(self):
+        if not self.tasks:
+            return "(no report generated)\n"
         return "\n".join(itertools.chain(
             self._text_report_gen(),
             self._recommend_gen())) + "\n"
@@ -226,7 +233,8 @@ class Summarizer(object):
                  lambda x: x * 100),
                 ('Overall CPU usage: {}%',
                  self.job_tot['cpu']['user+sys'] /
-                 self.job_tot['time']['elapsed'],
+                 self.job_tot['time']['elapsed']
+                 if self.job_tot['time']['elapsed'] > 0 else 0,
                  lambda x: x * 100),
                 ('Max memory used by a single task: {}GB',
                  self.stats_max['mem']['rss'],
@@ -333,19 +341,20 @@ class Summarizer(object):
                 int(math.ceil(nearlygibs(used_mib))*AVAILABLE_RAM_RATIO*1024))
 
     def _recommend_keep_cache(self):
-        """Recommend increasing keep cache if miss rate is above 0.2%"""
-        if self.job_tot['keepcalls']['get'] == 0:
+        """Recommend increasing keep cache if utilization < 80%"""
+        if self.job_tot['net:keep0']['rx'] == 0:
             return
-        miss_rate = float(self.job_tot['keepcache']['miss']) / float(self.job_tot['keepcalls']['get']) * 100.0
+        utilization = (float(self.job_tot['blkio:0:0']['read']) /
+                       float(self.job_tot['net:keep0']['rx']))
         asked_mib = self.existing_constraints.get('keep_cache_mb_per_task', 256)
 
-        if miss_rate > 0.2:
+        if utilization < 0.8:
             yield (
-                '#!! {} Keep cache miss rate was {:.2f}% -- '
-                'try runtime_constraints "keep_cache_mb_per_task":{}'
+                '#!! {} Keep cache utilization was {:.2f}% -- '
+                'try runtime_constraints "keep_cache_mb_per_task":{} (or more)'
             ).format(
                 self.label,
-                miss_rate,
+                utilization * 100.0,
                 asked_mib*2)
 
 
@@ -374,7 +383,7 @@ class JobSummarizer(Summarizer):
         else:
             self.job = job
         rdr = None
-        if self.job['log']:
+        if self.job.get('log'):
             try:
                 rdr = crunchstat_summary.reader.CollectionReader(self.job['log'])
             except arvados.errors.NotFoundError as e:
@@ -400,15 +409,12 @@ class PipelineSummarizer(object):
             if 'job' not in component:
                 logger.warning(
                     "%s: skipping component with no job assigned", cname)
-            elif component['job'].get('log') is None:
-                logger.warning(
-                    "%s: skipping job %s with no log available",
-                    cname, component['job'].get('uuid'))
             else:
                 logger.info(
-                    "%s: logdata %s", cname, component['job']['log'])
+                    "%s: job %s", cname, component['job']['uuid'])
                 summarizer = JobSummarizer(component['job'], **kwargs)
-                summarizer.label = cname
+                summarizer.label = '{} {}'.format(
+                    cname, component['job']['uuid'])
                 self.summarizers[cname] = summarizer
         self.label = pipeline_instance_uuid
 
index 6c1443733c35ec9ea76bd35e49fb3c69d6f83906..b1e5fed81d7b4481023de037d68656abe6fe4406 100644 (file)
@@ -144,6 +144,9 @@ class SummarizePipeline(ReportDiff):
             job_report + ['\n'] +
             ['### Summary for bar (zzzzz-8i9sb-000000000000001)\n'] +
             job_report + ['\n'] +
+            ['### Summary for unfinished-job (zzzzz-8i9sb-xxxxxxxxxxxxxxx)\n',
+             '(no report generated)\n',
+             '\n'] +
             ['### Summary for baz (zzzzz-8i9sb-000000000000002)\n'] +
             job_report)
         self.diff_report(cmd, expect)