source 'https://rubygems.org'
gem 'rails', '~> 4.1.0'
-gem 'minitest', '>= 5.0.0'
-
-gem 'arvados', '>= 0.1.20141114230720'
-
-# Bundle edge Rails instead:
-# gem 'rails', :git => 'git://github.com/rails/rails.git'
+gem 'arvados', '>= 0.1.20150116063758'
gem 'sqlite3'
# Gems used only for assets and not required
# in production environments by default.
group :assets do
- gem 'sass-rails', '~> 4.0.4'
+ gem 'sass-rails'
+ gem 'uglifier', '>= 1.0.3'
# See https://github.com/sstephenson/execjs#readme for more supported runtimes
gem 'therubyracer', :platforms => :ruby
-
- gem 'uglifier', '>= 1.0.3'
end
group :development do
gem 'byebug'
gem 'ruby-debug-passenger'
+ gem 'rack-mini-profiler', require: false
+ gem 'flamegraph', require: false
end
group :test, :diagnostics do
+ gem 'minitest', '>= 5.0.0'
gem 'selenium-webdriver'
gem 'capybara'
gem 'poltergeist'
gem 'headless'
end
-group :test, :performance do
+group :test do
gem 'rails-perftest'
gem 'ruby-prof'
-end
-
-group :test do
gem 'rvm-capistrano'
# Note: "require: false" here tells bunder not to automatically
# 'require' the packages during application startup. Installation is
# still mandatory.
- gem 'simplecov', '~> 0.7.1', require: false
+ gem 'simplecov', '~> 0.7', require: false
gem 'simplecov-rcov', require: false
gem 'mocha', require: false
end
gem 'RedCloth'
gem 'piwik_analytics'
-gem 'httpclient', '~> 2.5.0'
+gem 'httpclient', '~> 2.5'
# This fork has Rails 4 compatible routes
gem 'themes_for_rails', git: 'https://github.com/holtkampw/themes_for_rails', ref: '1fd2d7897d75ae0d6375f4c390df87b8e91ad417'
remote: https://rubygems.org/
specs:
RedCloth (4.2.9)
- actionmailer (4.1.8)
- actionpack (= 4.1.8)
- actionview (= 4.1.8)
+ actionmailer (4.1.9)
+ actionpack (= 4.1.9)
+ actionview (= 4.1.9)
mail (~> 2.5, >= 2.5.4)
- actionpack (4.1.8)
- actionview (= 4.1.8)
- activesupport (= 4.1.8)
+ actionpack (4.1.9)
+ actionview (= 4.1.9)
+ activesupport (= 4.1.9)
rack (~> 1.5.2)
rack-test (~> 0.6.2)
- actionview (4.1.8)
- activesupport (= 4.1.8)
+ actionview (4.1.9)
+ activesupport (= 4.1.9)
builder (~> 3.1)
erubis (~> 2.7.0)
- activemodel (4.1.8)
- activesupport (= 4.1.8)
+ activemodel (4.1.9)
+ activesupport (= 4.1.9)
builder (~> 3.1)
- activerecord (4.1.8)
- activemodel (= 4.1.8)
- activesupport (= 4.1.8)
+ activerecord (4.1.9)
+ activemodel (= 4.1.9)
+ activesupport (= 4.1.9)
arel (~> 5.0.0)
- activesupport (4.1.8)
+ activesupport (4.1.9)
i18n (~> 0.6, >= 0.6.9)
json (~> 1.7, >= 1.7.7)
minitest (~> 5.1)
tzinfo (~> 1.1)
addressable (2.3.6)
andand (1.3.3)
- angularjs-rails (1.3.3)
+ angularjs-rails (1.3.8)
arel (5.0.1.20140414130214)
- arvados (0.1.20141114230720)
+ arvados (0.1.20150116063758)
activesupport (>= 3.2.13)
andand (~> 1.3, >= 1.3.3)
google-api-client (~> 0.6.3, >= 0.6.3)
coffee-script-source
execjs
coffee-script-source (1.8.0)
- columnize (0.8.9)
+ columnize (0.9.0)
commonjs (0.2.7)
daemon_controller (1.2.0)
debugger-linecache (1.2.0)
deep_merge (1.0.1)
+ docile (1.1.5)
erubis (2.7.0)
execjs (2.2.2)
extlib (0.9.16)
faraday (0.8.9)
multipart-post (~> 1.2.0)
+ fast_stack (0.1.0)
+ rake
+ rake-compiler
ffi (1.9.6)
+ flamegraph (0.1.0)
+ fast_stack
google-api-client (0.6.4)
addressable (>= 2.3.2)
autoparse (>= 0.3.3)
headless (1.0.2)
highline (1.6.21)
hike (1.2.3)
- httpclient (2.5.3.3)
- i18n (0.6.11)
+ httpclient (2.6.0.1)
+ i18n (0.7.0)
jquery-rails (3.1.2)
railties (>= 3.0, < 5.0)
thor (>= 0.14, < 2.0)
- json (1.8.1)
+ json (1.8.2)
jwt (0.1.13)
multi_json (>= 1.5)
launchy (2.4.3)
mime-types (>= 1.16, < 3)
metaclass (0.0.4)
mime-types (2.4.3)
- mini_portile (0.6.1)
- minitest (5.4.3)
+ mini_portile (0.6.2)
+ minitest (5.5.1)
mocha (1.1.0)
metaclass (~> 0.0.1)
morrisjs-rails (0.5.1)
net-ssh (>= 2.6.5)
net-sftp (2.1.2)
net-ssh (>= 2.6.5)
- net-ssh (2.9.1)
+ net-ssh (2.9.2)
net-ssh-gateway (1.2.0)
net-ssh (>= 2.6.5)
- nokogiri (1.6.4.1)
+ nokogiri (1.6.5)
mini_portile (~> 0.6.0)
- oj (2.11.1)
- passenger (4.0.53)
+ oj (2.11.2)
+ passenger (4.0.57)
daemon_controller (>= 1.2.0)
rack
rake (>= 0.8.1)
multi_json (~> 1.0)
websocket-driver (>= 0.2.0)
rack (1.5.2)
- rack-test (0.6.2)
+ rack-mini-profiler (0.9.2)
+ rack (>= 1.1.3)
+ rack-test (0.6.3)
rack (>= 1.0)
- rails (4.1.8)
- actionmailer (= 4.1.8)
- actionpack (= 4.1.8)
- actionview (= 4.1.8)
- activemodel (= 4.1.8)
- activerecord (= 4.1.8)
- activesupport (= 4.1.8)
+ rails (4.1.9)
+ actionmailer (= 4.1.9)
+ actionpack (= 4.1.9)
+ actionview (= 4.1.9)
+ activemodel (= 4.1.9)
+ activerecord (= 4.1.9)
+ activesupport (= 4.1.9)
bundler (>= 1.3.0, < 2.0)
- railties (= 4.1.8)
+ railties (= 4.1.9)
sprockets-rails (~> 2.0)
rails-perftest (0.0.5)
- railties (4.1.8)
- actionpack (= 4.1.8)
- activesupport (= 4.1.8)
+ railties (4.1.9)
+ actionpack (= 4.1.9)
+ activesupport (= 4.1.9)
rake (>= 0.8.7)
thor (>= 0.18.1, < 2.0)
- rake (10.4.0)
+ rake (10.4.2)
+ rake-compiler (0.9.5)
+ rake
raphael-rails (2.1.2)
ref (1.0.5)
ruby-debug-passenger (0.2.0)
rubyzip (1.1.6)
rvm-capistrano (1.5.5)
capistrano (~> 2.15.4)
- sass (3.2.19)
- sass-rails (4.0.4)
+ sass (3.4.9)
+ sass-rails (5.0.1)
railties (>= 4.0.0, < 5.0)
- sass (~> 3.2.2)
- sprockets (~> 2.8, < 2.12)
- sprockets-rails (~> 2.0)
+ sass (~> 3.1)
+ sprockets (>= 2.8, < 4.0)
+ sprockets-rails (>= 2.0, < 4.0)
+ tilt (~> 1.1)
selenium-webdriver (2.44.0)
childprocess (~> 0.5)
multi_json (~> 1.0)
faraday (~> 0.8.1)
jwt (>= 0.1.5)
multi_json (>= 1.0.0)
- simplecov (0.7.1)
+ simplecov (0.9.1)
+ docile (~> 1.1.0)
multi_json (~> 1.0)
- simplecov-html (~> 0.7.1)
- simplecov-html (0.7.1)
+ simplecov-html (~> 0.8.0)
+ simplecov-html (0.8.0)
simplecov-rcov (0.2.3)
simplecov (>= 0.4.1)
slop (3.6.0)
- sprockets (2.11.3)
+ sprockets (2.12.3)
hike (~> 1.2)
multi_json (~> 1.0)
rack (~> 1.0)
tilt (~> 1.1, != 1.3.0)
- sprockets-rails (2.2.0)
+ sprockets-rails (2.2.2)
actionpack (>= 3.0)
activesupport (>= 3.0)
sprockets (>= 2.8, < 4.0)
tilt (1.4.1)
tzinfo (1.2.2)
thread_safe (~> 0.1)
- uglifier (2.5.3)
+ uglifier (2.7.0)
execjs (>= 0.3.0)
json (>= 1.8.0)
uuidtools (2.1.5)
websocket (1.2.1)
- websocket-driver (0.4.0)
+ websocket-driver (0.5.1)
+ websocket-extensions (>= 0.1.0)
+ websocket-extensions (0.1.1)
wiselinks (1.2.1)
xpath (2.0.0)
nokogiri (~> 1.3)
RedCloth
andand
angularjs-rails
- arvados (>= 0.1.20141114230720)
+ arvados (>= 0.1.20150116063758)
bootstrap-sass (~> 3.1.0)
bootstrap-tab-history-rails
bootstrap-x-editable-rails
capybara
coffee-rails
deep_merge
+ flamegraph
headless
- httpclient (~> 2.5.0)
+ httpclient (~> 2.5)
jquery-rails
less
less-rails
passenger
piwik_analytics
poltergeist
+ rack-mini-profiler
rails (~> 4.1.0)
rails-perftest
raphael-rails
ruby-prof
rvm-capistrano
sass
- sass-rails (~> 4.0.4)
+ sass-rails
selenium-webdriver
- simplecov (~> 0.7.1)
+ simplecov (~> 0.7)
simplecov-rcov
sqlite3
sshkey
});
function dispatch_selection_action() {
- // Build a new "href" attribute for this link by starting with the
- // "data-href" attribute and appending ?foo[]=bar&foo[]=baz (or
- // &foo=... as appropriate) to reflect the current object
- // selections.
- var data = [];
- var param_name = $(this).attr('data-selection-param-name');
- var href = $(this).attr('data-href');
- if ($(this).closest('.disabled').length > 0) {
+ /* When the user clicks a selection action link, build a form to perform
+ the action on the selected data, and submit it.
+ This is based on handleMethod from rails-ujs, extended to add the
+ selections to the submitted form.
+ Copyright (c) 2007-2010 Contributors at http://github.com/rails/jquery-ujs/contributors
+ */
+ var $container = $(this);
+ if ($container.closest('.disabled').length) {
return false;
}
- $(this).
+ $container.closest('.dropdown-menu').dropdown('toggle');
+
+ var href = $container.data('href'),
+ method = $container.data('method') || 'GET',
+ paramName = $container.data('selection-param-name'),
+ csrfToken = $('meta[name=csrf-token]').attr('content'),
+ csrfParam = $('meta[name=csrf-param]').attr('content'),
+ form = $('<form method="post" action="' + href + '"></form>'),
+ metadataInput = ('<input name="_method" value="' + method +
+ '" type="hidden" />');
+
+ if (csrfParam !== undefined && csrfToken !== undefined) {
+ metadataInput += ('<input type="hidden" name="' + csrfParam +
+ '" value="' + csrfToken + '" />');
+ }
+ $container.
closest('.selection-action-container').
find(':checkbox:checked:visible').
- each(function() {
- data.push({name: param_name, value: $(this).val()});
+ each(function(index, elem) {
+ metadataInput += ('<input type="hidden" name="' + paramName +
+ '" value="' + elem.value + '" />');
});
- if (href.indexOf('?') >= 0)
- href += '&';
- else
- href += '?';
- href += $.param(data, true);
- $(this).attr('href', href);
- return true;
+
+ form.data('remote', $container.data('remote'));
+ form.hide().append(metadataInput).appendTo('body');
+ form.submit();
+ return false;
}
function enable_disable_selection_actions() {
before_filter :accept_uuid_as_id_param, except: ERROR_ACTIONS
before_filter :check_user_agreements, except: ERROR_ACTIONS
before_filter :check_user_profile, except: ERROR_ACTIONS
- before_filter :check_user_notifications, except: ERROR_ACTIONS
before_filter :load_filters_and_paging_params, except: ERROR_ACTIONS
before_filter :find_object_by_uuid, except: [:create, :index, :choose] + ERROR_ACTIONS
theme :select_theme
end
def render_error(opts={})
+ # Helpers can rely on the presence of @errors to know they're
+ # being used in an error page.
+ @errors ||= []
opts[:status] ||= 500
respond_to do |f|
# json must come before html here, so it gets used as the
f.html {
if params['tab_pane']
render_pane(if params['tab_pane'].is_a? Hash then params['tab_pane']["name"] else params['tab_pane'] end)
- elsif request.method.in? ['GET', 'HEAD']
+ elsif request.request_method.in? ['GET', 'HEAD']
render
else
redirect_to (params[:return_to] ||
%w(Attributes Advanced)
end
+ def set_share_links
+ @user_is_manager = false
+ @share_links = []
+
+ if @object.uuid != current_user.uuid
+ begin
+ @share_links = Link.permissions_for(@object)
+ @user_is_manager = true
+ rescue ArvadosApiClient::AccessForbiddenException,
+ ArvadosApiClient::NotFoundException
+ end
+ end
+ end
+
+ def share_with
+ if not params[:uuids].andand.any?
+ @errors = ["No user/group UUIDs specified to share with."]
+ return render_error(status: 422)
+ end
+ results = {"success" => [], "errors" => []}
+ params[:uuids].each do |shared_uuid|
+ begin
+ Link.create(tail_uuid: shared_uuid, link_class: "permission",
+ name: "can_read", head_uuid: @object.uuid)
+ rescue ArvadosApiClient::ApiError => error
+ error_list = error.api_response.andand[:errors]
+ if error_list.andand.any?
+ results["errors"] += error_list.map { |e| "#{shared_uuid}: #{e}" }
+ else
+ error_code = error.api_status || "Bad status"
+ results["errors"] << "#{shared_uuid}: #{error_code} response"
+ end
+ else
+ results["success"] << shared_uuid
+ end
+ end
+ if results["errors"].empty?
+ results.delete("errors")
+ status = 200
+ else
+ status = 422
+ end
+ respond_to do |f|
+ f.json { render(json: results, status: status) }
+ end
+ end
+
protected
def strip_token_from_path(path)
}
}
- def check_user_notifications
- return if params['tab_pane']
-
- @notification_count = 0
- @notifications = []
-
- if current_user.andand.is_active
- @showallalerts = false
- @@notification_tests.each do |t|
- a = t.call(self, current_user)
- if a
- @notification_count += 1
- @notifications.push a
- end
- end
- end
-
- if @notification_count == 0
- @notification_count = ''
- end
+ helper_method :user_notifications
+ def user_notifications
+ return [] if @errors or not current_user.andand.is_active
+ @notifications ||= @@notification_tests.map do |t|
+ t.call(self, current_user)
+ end.compact
end
helper_method :all_projects
end
end
- def set_share_links
- @user_is_manager = false
- @share_links = []
- if @object.uuid != current_user.uuid
- begin
- @share_links = Link.permissions_for(@object)
- @user_is_manager = true
- rescue ArvadosApiClient::AccessForbiddenException,
- ArvadosApiClient::NotFoundException
- end
- end
- end
-
def index_pane_list
%w(Projects)
end
end
objects_and_names
end
-
- def share_with
- if not params[:uuids].andand.any?
- @errors = ["No user/group UUIDs specified to share with."]
- return render_error(status: 422)
- end
- results = {"success" => [], "errors" => []}
- params[:uuids].each do |shared_uuid|
- begin
- Link.create(tail_uuid: shared_uuid, link_class: "permission",
- name: "can_read", head_uuid: @object.uuid)
- rescue ArvadosApiClient::ApiError => error
- error_list = error.api_response.andand[:errors]
- if error_list.andand.any?
- results["errors"] += error_list.map { |e| "#{shared_uuid}: #{e}" }
- else
- error_code = error.api_status || "Bad status"
- results["errors"] << "#{shared_uuid}: #{error_code} response"
- end
- else
- results["success"] << shared_uuid
- end
- end
- if results["errors"].empty?
- results.delete("errors")
- status = 200
- else
- status = 422
- end
- respond_to do |f|
- f.json { render(json: results, status: status) }
- end
- end
end
class RepositoriesController < ApplicationController
+ before_filter :set_share_links, if: -> { defined? @object }
+
def index_pane_list
%w(recent help)
end
+
+ def show_pane_list
+ if @user_is_manager
+ panes = super | %w(Sharing)
+ panes.insert(panes.length-1, panes.delete_at(panes.index('Advanced'))) if panes.index('Advanced')
+ panes
+ else
+ super
+ end
+ end
end
end
def home
- @showallalerts = false
@my_ssh_keys = AuthorizedKey.where(authorized_user_uuid: current_user.uuid)
@my_tag_links = {}
end
input_type = 'text'
- case object.class.attribute_info[attr.to_sym].andand[:type]
- when 'text'
+ attrtype = object.class.attribute_info[attr.to_sym].andand[:type]
+ if attrtype == 'text' or attr == 'description'
input_type = 'textarea'
- when 'datetime'
+ elsif attrtype == 'datetime'
input_type = 'date'
else
input_type = 'text'
</td>
<% end %>
<td>
- <%= render :partial => "show_object_button", :locals => {object: object, size: 'xs'} %>
+ <% if (current_user.is_admin and current_user.uuid != object.uuid) or !current_user.is_admin %>
+ <%= render :partial => "show_object_button", :locals => {object: object, size: 'xs'} %>
+ <% end %>
</td>
<% object.attributes_for_display.each do |attr, attrvalue| %>
else
owner_type = "owning user"
end
+
+ sharing_path = url_for(:controller => params['controller'], :action => 'share_with')
%>
<div class="pull-right">
multiple: true,
filters: choose_filters[share_class].to_json,
action_method: 'post',
- action_href: share_with_project_path,
+ action_href: sharing_path,
action_name: 'Add',
action_data: {selection_param: 'uuids[]', success: 'tab-refresh'}.to_json),
class: "btn btn-primary btn-sm", remote: true) do %>
<% end %>
</div>
-<p>Permissions for this project are inherited from the <%= owner_type %>
+<p>Permissions for this <%=@object.class_for_display.downcase%> are inherited from the <%= owner_type %>
<i class="fa fa-fw <%= owner_icon %>"></i>
<%= link_to_if_arvados_object @object.owner_uuid, friendly_name: true %>.
</p>
-<table id="project_sharing" class="topalign table" style="clear: both; margin-top: 1em;">
+<table id="object_sharing" class="topalign table" style="clear: both; margin-top: 1em;">
<tr>
<th>User/Group Name</th>
<th>Email Address</th>
- <th colspan="2">Project Access</th>
+ <th colspan="2"><%=@object.class_for_display%> Access</th>
</tr>
<% @share_links.andand.each do |link|
<%= link_to(
{action: 'destroy', id: link.uuid, controller: "links"},
{title: 'Revoke', class: 'btn btn-default btn-nodecorate', method: :delete,
- data: {confirm: "Revoke #{link_name}'s access to this project?",
+ data: {confirm: "Revoke #{link_name}'s access to this #{@object.class_for_display.downcase}?",
remote: true}}) do %>
<i class="fa fa-fw fa-trash-o"></i>
<% end %>
<li class="dropdown notification-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" id="notifications-menu">
- <span class="badge badge-alert notification-count"><%= @notification_count %></span>
+ <span class="badge badge-alert notification-count"><%= user_notifications.length if user_notifications.any? %></span>
<%= current_user.email %> <span class="caret"></span>
</a>
<ul class="dropdown-menu" role="menu">
<% end %>
<% end %>
<li role="menuitem"><a href="<%= logout_path %>" role="menuitem"><i class="fa fa-sign-out fa-fw"></i> Log out</a></li>
- <% if current_user.is_active and
- (@notifications || []).length > 0 %>
+ <% if user_notifications.any? %>
<li role="presentation" class="divider"></li>
- <% @notifications.each_with_index do |n, i| %>
+ <% user_notifications.each_with_index do |n, i| %>
<% if i > 0 %><li class="divider"></li><% end %>
<li class="notification"><%= n.call(self) %></li>
<% end %>
>
<td>
<div style="width:1em; display:inline-block;">
- <%= render partial: 'selection_checkbox', locals: {object: name_object, friendly_name: ((name_object.name rescue '') || '')} %>
+ <%= render partial: 'selection_checkbox', locals: {object: object, friendly_name: ((name_object.name rescue '') || '')} %>
</div>
</td>
'data-selection-action' => 'combine-project-contents',
'data-toggle' => 'dropdown'
%></li>
- <li><%= link_to "Compare selected", 'action',
+ <li><%= link_to "Compare selected", '#',
'data-href' => compare_pipeline_instances_path,
'data-selection-param-name' => 'uuids[]',
- 'data-selection-action' => 'compare'
+ 'data-selection-action' => 'compare',
+ 'data-toggle' => 'dropdown'
%></li>
<li><%= link_to "Copy selected...", '#',
'data-href' => choose_projects_path(
success: 'page-refresh'}.to_json),
'data-remote' => true,
'data-selection-param-name' => 'selection[]',
- 'data-selection-action' => 'copy'
+ 'data-selection-action' => 'copy',
+ 'data-toggle' => 'dropdown'
%></li>
<% if @object.editable? %>
<li><%= link_to "Move selected...", '#',
success: 'page-refresh'}.to_json),
'data-remote' => true,
'data-selection-param-name' => 'selection[]',
- 'data-selection-action' => 'move'
+ 'data-selection-action' => 'move',
+ 'data-toggle' => 'dropdown'
%></li>
<li><%= link_to "Remove selected", '#',
method: :delete,
--- /dev/null
+if not Rails.env.production? and ENV['ENABLE_PROFILING']
+ require 'rack-mini-profiler'
+ require 'flamegraph'
+ Rack::MiniProfilerRails.initialize! Rails.application
+end
resources :humans
resources :traits
resources :api_client_authorizations
- resources :repositories
resources :virtual_machines
resources :authorized_keys
resources :job_tasks
post 'cancel', :on => :member
get 'logs', :on => :member
end
+ resources :repositories do
+ post 'share_with', on: :member
+ end
match '/logout' => 'sessions#destroy', via: [:get, :post]
get '/logged_out' => 'sessions#index'
resources :users do
require 'test_helper'
+require 'helpers/share_object_helper'
class ProjectsControllerTest < ActionController::TestCase
+ include ShareObjectHelper
+
test "invited user is asked to sign user agreements on front page" do
get :index, {}, session_for(:inactive)
assert_response :redirect
"JSON response missing properly formatted sharing error")
end
- def user_can_manage(user_sym, group_key)
- get(:show, {id: api_fixture("groups")[group_key]["uuid"]},
- session_for(user_sym))
- is_manager = assigns(:user_is_manager)
- assert_not_nil(is_manager, "user_is_manager flag not set")
- if not is_manager
- assert_empty(assigns(:share_links),
- "non-manager has share links set")
- end
- is_manager
- end
-
test "admin can_manage aproject" do
- assert user_can_manage(:admin, "aproject")
+ assert user_can_manage(:admin, api_fixture("groups")["aproject"])
end
test "owner can_manage aproject" do
- assert user_can_manage(:active, "aproject")
+ assert user_can_manage(:active, api_fixture("groups")["aproject"])
end
test "owner can_manage asubproject" do
- assert user_can_manage(:active, "asubproject")
+ assert user_can_manage(:active, api_fixture("groups")["asubproject"])
end
test "viewer can't manage aproject" do
- refute user_can_manage(:project_viewer, "aproject")
+ refute user_can_manage(:project_viewer, api_fixture("groups")["aproject"])
end
test "viewer can't manage asubproject" do
- refute user_can_manage(:project_viewer, "asubproject")
+ refute user_can_manage(:project_viewer, api_fixture("groups")["asubproject"])
end
test "subproject_admin can_manage asubproject" do
- assert user_can_manage(:subproject_admin, "asubproject")
+ assert user_can_manage(:subproject_admin, api_fixture("groups")["asubproject"])
end
test "detect ownership loop in project breadcrumbs" do
require 'test_helper'
+require 'helpers/share_object_helper'
class RepositoriesControllerTest < ActionController::TestCase
+ include ShareObjectHelper
+
+ [
+ :active, #owner
+ :admin,
+ ].each do |user|
+ test "#{user} shares repository with a user and group" do
+ uuid_list = [api_fixture("groups")["future_project_viewing_group"]["uuid"],
+ api_fixture("users")["future_project_user"]["uuid"]]
+ post(:share_with, {
+ id: api_fixture("repositories")["foo"]["uuid"],
+ uuids: uuid_list,
+ format: "json"},
+ session_for(user))
+ assert_response :success
+ assert_equal(uuid_list, json_response["success"])
+ end
+ end
+
+ test "user with repository read permission cannot add permissions" do
+ share_uuid = api_fixture("users")["project_viewer"]["uuid"]
+ post(:share_with, {
+ id: api_fixture("repositories")["arvados"]["uuid"],
+ uuids: [share_uuid],
+ format: "json"},
+ session_for(:spectator))
+ assert_response 422
+ assert(json_response["errors"].andand.
+ any? { |msg| msg.start_with?("#{share_uuid}: ") },
+ "JSON response missing properly formatted sharing error")
+ end
+
+ test "admin can_manage repository" do
+ assert user_can_manage(:admin, api_fixture("repositories")["foo"])
+ end
+
+ test "owner can_manage repository" do
+ assert user_can_manage(:active, api_fixture("repositories")["foo"])
+ end
+
+ test "viewer cannot manage repository" do
+ refute user_can_manage(:spectator, api_fixture("repositories")["arvados"])
+ end
end
--- /dev/null
+module ShareObjectHelper
+ def show_object_using(auth_key, type, key, expect)
+ obj_uuid = api_fixture(type)[key]['uuid']
+ visit(page_with_token(auth_key, "/#{type}/#{obj_uuid}"))
+ assert(page.has_text?(expect), "expected string not found: #{expect}")
+ end
+
+ def share_rows
+ find('#object_sharing').all('tr')
+ end
+
+ def add_share_and_check(share_type, name, obj=nil)
+ assert(page.has_no_text?(name), "project is already shared with #{name}")
+ start_share_count = share_rows.size
+ click_on("Share with #{share_type}")
+ within(".modal-container") do
+ # Order is important here: we should find something that appears in the
+ # modal before we make any assertions about what's not in the modal.
+ # Otherwise, the not-included assertions might falsely pass because
+ # the modal hasn't loaded yet.
+ find(".selectable", text: name).click
+ assert(has_no_selector?(".modal-dialog-preview-pane"),
+ "preview pane available in sharing dialog")
+ if share_type == 'users' and obj and obj['email']
+ assert(page.has_text?(obj['email']), "Did not find user's email")
+ end
+ assert_raises(Capybara::ElementNotFound,
+ "Projects pulldown available from sharing dialog") do
+ click_on "All projects"
+ end
+ click_on "Add"
+ end
+ using_wait_time(Capybara.default_wait_time * 3) do
+ assert(page.has_link?(name),
+ "new share was not added to sharing table")
+ assert_equal(start_share_count + 1, share_rows.size,
+ "new share did not add row to sharing table")
+ end
+ end
+
+ def modify_share_and_check(name)
+ start_rows = share_rows
+ link_row = start_rows.select { |row| row.has_text?(name) }
+ assert_equal(1, link_row.size, "row with new permission not found")
+ within(link_row.first) do
+ click_on("Read")
+ select("Write", from: "share_change_level")
+ click_on("editable-submit")
+ assert(has_link?("Write"),
+ "failed to change access level on new share")
+ click_on "Revoke"
+ if Capybara.current_driver == :selenium
+ page.driver.browser.switch_to.alert.accept
+ else
+ # poltergeist returns true for confirm(), so we don't need to accept.
+ end
+ end
+ wait_for_ajax
+ using_wait_time(Capybara.default_wait_time * 3) do
+ assert(page.has_no_text?(name),
+ "new share row still exists after being revoked")
+ assert_equal(start_rows.size - 1, share_rows.size,
+ "revoking share did not remove row from sharing table")
+ end
+ end
+
+ def user_can_manage(user_sym, fixture)
+ get(:show, {id: fixture["uuid"]}, session_for(user_sym))
+ is_manager = assigns(:user_is_manager)
+ assert_not_nil(is_manager, "user_is_manager flag not set")
+ if not is_manager
+ assert_empty(assigns(:share_links),
+ "non-manager has share links set")
+ end
+ is_manager
+ end
+
+end
['admin', nil, 40, 200],
['admin', 'FUSE project', 1, 1],
['admin', 'pipeline_10', 2, 2],
- ['active', 'containing at least two', 2, 100], # component description
- ['admin', 'containing at least two', 2, 100],
+ ['active', 'containing at least two', 2, 100],
['active', nil, 10, 100],
['active', 'no such match', 0, 0],
].each do |user, search_filter, expected_min, expected_max|
require 'integration_helper'
+require 'helpers/share_object_helper'
class ProjectsTest < ActionDispatch::IntegrationTest
+ include ShareObjectHelper
+
setup do
need_javascript
end
"Project 5678 should now be inside project 1234")
end
- def show_project_using(auth_key, proj_key='aproject')
- project_uuid = api_fixture('groups')[proj_key]['uuid']
- visit(page_with_token(auth_key, "/projects/#{project_uuid}"))
- assert(page.has_text?("A Project"), "not on expected project page")
- end
-
- def share_rows
- find('#project_sharing').all('tr')
- end
-
- def add_share_and_check(share_type, name, obj=nil)
- assert(page.has_no_text?(name), "project is already shared with #{name}")
- start_share_count = share_rows.size
- click_on("Share with #{share_type}")
- within(".modal-container") do
- # Order is important here: we should find something that appears in the
- # modal before we make any assertions about what's not in the modal.
- # Otherwise, the not-included assertions might falsely pass because
- # the modal hasn't loaded yet.
- find(".selectable", text: name).click
- assert(has_no_selector?(".modal-dialog-preview-pane"),
- "preview pane available in sharing dialog")
- if share_type == 'users' and obj and obj['email']
- assert(page.has_text?(obj['email']), "Did not find user's email")
- end
- assert_raises(Capybara::ElementNotFound,
- "Projects pulldown available from sharing dialog") do
- click_on "All projects"
- end
- click_on "Add"
- end
- using_wait_time(Capybara.default_wait_time * 3) do
- assert(page.has_link?(name),
- "new share was not added to sharing table")
- assert_equal(start_share_count + 1, share_rows.size,
- "new share did not add row to sharing table")
- end
- end
-
- def modify_share_and_check(name)
- start_rows = share_rows
- link_row = start_rows.select { |row| row.has_text?(name) }
- assert_equal(1, link_row.size, "row with new permission not found")
- within(link_row.first) do
- click_on("Read")
- select("Write", from: "share_change_level")
- click_on("editable-submit")
- assert(has_link?("Write"),
- "failed to change access level on new share")
- click_on "Revoke"
- if Capybara.current_driver == :selenium
- page.driver.browser.switch_to.alert.accept
- else
- # poltergeist returns true for confirm(), so we don't need to accept.
- end
- end
- wait_for_ajax
- using_wait_time(Capybara.default_wait_time * 3) do
- assert(page.has_no_text?(name),
- "new share row still exists after being revoked")
- assert_equal(start_rows.size - 1, share_rows.size,
- "revoking share did not remove row from sharing table")
- end
- end
-
test "project viewer can't see project sharing tab" do
- show_project_using("project_viewer")
+ show_object_using('project_viewer', 'groups', 'aproject', 'A Project')
assert(page.has_no_link?("Sharing"),
"read-only project user sees sharing tab")
end
add_user = api_fixture('users')['future_project_user']
new_name = ["first_name", "last_name"].map { |k| add_user[k] }.join(" ")
- show_project_using("active")
+ show_object_using('active', 'groups', 'aproject', 'A Project')
click_on "Sharing"
add_share_and_check("users", new_name, add_user)
modify_share_and_check(new_name)
test "project owner can manage sharing for another group" do
new_name = api_fixture('groups')['future_project_viewing_group']['name']
- show_project_using("active")
+ show_object_using('active', 'groups', 'aproject', 'A Project')
click_on "Sharing"
add_share_and_check("groups", new_name)
modify_share_and_check(new_name)
end
test "'share with group' listing does not offer projects" do
- show_project_using("active")
+ show_object_using('active', 'groups', 'aproject', 'A Project')
click_on "Sharing"
click_on "Share with groups"
good_uuid = api_fixture("groups")["private"]["uuid"]
--- /dev/null
+require 'integration_helper'
+require 'helpers/share_object_helper'
+
+class RepositoriesTest < ActionDispatch::IntegrationTest
+ include ShareObjectHelper
+
+ setup do
+ need_javascript
+ end
+
+ [
+ 'active', #owner
+ 'admin'
+ ].each do |user|
+ test "#{user} can manage sharing for another user" do
+ add_user = api_fixture('users')['future_project_user']
+ new_name = ["first_name", "last_name"].map { |k| add_user[k] }.join(" ")
+ show_object_using(user, 'repositories', 'foo', 'push_url')
+ click_on "Sharing"
+ add_share_and_check("users", new_name, add_user)
+ modify_share_and_check(new_name)
+ end
+ end
+
+ [
+ 'active', #owner
+ 'admin'
+ ].each do |user|
+ test "#{user} can manage sharing for another group" do
+ new_name = api_fixture('groups')['future_project_viewing_group']['name']
+ show_object_using(user, 'repositories', 'foo', 'push_url')
+ click_on "Sharing"
+ add_share_and_check("groups", new_name)
+ modify_share_and_check(new_name)
+ end
+ end
+
+ test "spectator does not see repository sharing tab" do
+ show_object_using("spectator", 'repositories', 'arvados', 'push_url')
+ assert(page.has_no_link?("Sharing"),
+ "read-only repository user sees sharing tab")
+ end
+end
assert page.has_text? 'VirtualMachine: testvm.shell'
end
+ [
+ ['admin', false],
+ ['active', true],
+ ].each do |username, expect_show_button|
+ test "login as #{username} and access show button #{expect_show_button}" do
+ need_javascript
+
+ user = api_fixture('users', username)
+
+ visit page_with_token(username, '/users')
+
+ if expect_show_button
+ within('tr', text: user['uuid']) do
+ assert_text user['email']
+ assert_selector 'a', text: 'Show'
+ find('a', text: 'Show').click
+ end
+ assert_selector 'a', 'Data collections'
+ else
+ # no 'Show' button in the admin user's own row
+ within('tr', text: user['uuid']) do
+ assert_text user['email']
+ assert_no_selector 'a', text: 'Show'
+ end
+
+ # but the admin user can access 'Show' button for other users
+ active_user = api_fixture('users', 'active')
+ within('tr', text: active_user['uuid']) do
+ assert_text active_user['email']
+ assert_selector 'a', text: 'Show'
+ find('a', text: 'Show').click
+ assert_selector 'a', 'Attributes'
+ end
+ end
+ end
+ end
end
test "home page" do
visit_page_with_token
- wait_for_ajax
assert_text 'Dashboard'
assert_selector 'a', text: 'Run a pipeline'
end
test "search for hash" do
visit_page_with_token
- wait_for_ajax
assert_text 'Dashboard'
- within('.navbar-fixed-top') do
- page.find_field('search').set('hash')
- wait_for_ajax
- page.find('.glyphicon-search').click
+ assert_selector '.navbar-fixed-top'
+ assert_triggers_dom_event 'shown.bs.modal' do
+ within '.navbar-fixed-top' do
+ find_field('search').set 'hash'
+ find('.glyphicon-search').click
+ end
end
# In the search dialog now. Expect at least one item in the result display.
within '.modal-content' do
- wait_for_ajax
assert_text 'All projects'
assert_text 'Search'
- assert(page.has_selector?(".selectable[data-object-uuid]"))
+ assert_selector '.selectable[data-object-uuid]'
click_button 'Cancel'
end
end
'--pid-file', @pidfile)
else
make_ssl_cert
- _system('bundle', 'exec', 'rake', 'db:test:load')
- _system('bundle', 'exec', 'rake', 'db:fixtures:load')
+ if ENV['ARVADOS_TEST_API_INSTALLED'].blank?
+ _system('bundle', 'exec', 'rake', 'db:test:load')
+ _system('bundle', 'exec', 'rake', 'db:fixtures:load')
+ end
_system('bundle', 'exec', 'passenger', 'start', '-d', '-p3000',
'--pid-file', @pidfile,
'--ssl',
ApiServerForTests.new.run
ApiServerForTests.new.run ["--websockets"]
end
+
+# Reset fixtures now (i.e., before any tests run).
+ActiveSupport::TestCase.reset_api_fixtures_now
title: "Downloading data"
...
-This tutorial describes how to list and download Arvados data collections using the command line tools @arv-ls@ and @arv-get@. It is also possible to download files from a collection from the Workbench page for the collection, covered in "running a pipeline using Workbench":{{site.baseurl}}/user/tutorials/tutorial-pipeline-workbench.html
+Arvados data collections can be downloaded using either the arv command line tools or Workbench.
+
+# "*Downloading using arv commands*":#download-using-arv
+# "*Downloading using Workbench*":#download-using-workbench
+# "*Downloading a shared collection using Workbench*":#download-shared-collection
+
+h2(#download-using-arv). Downloading using arv commands
{% include 'tutorial_expectations' %}
-You can view the contents of a collection using @arv-ls@:
+You can download Arvados data collections using the command line tools @arv-ls@ and @arv-get@.
+
+Use @arv-ls@ to view the contents of a collection:
<notextile>
<pre><code>~$ <span class="userinput">arv-ls c1bad4b39ca5a924e481008009d94e32+210</span>
<pre><code>~$ <span class="userinput">arv-get 887cd41e9c613463eab2f0d885c6dd96+83/alice.txt .</span>
</code></pre>
</notextile>
+
+h2(#download-using-workbench). Downloading using Workbench
+
+You can also download Arvados data collections using the Workbench.
+
+Visit the Workbench *Dashboard*. Click on the *Projects*<span class="caret"></span> dropdown menu in the top navigation menu and select your *Home* project. You will see the *Data collections* tab, which lists the collections in this project.
+
+You can access the contents of a collection by clicking on the *<i class="fa fa-fw fa-archive"></i> Show* button next to the collection. This will take you to the collection's page. Using this page you can see the collection's contents, download individual files, and set sharing options.
+
+You can now download the collection files by clicking on the <span class="btn btn-sm btn-info"><i class="fa fa-download"></i></span> button(s).
+
+h2(#download-shared-collection). Downloading a shared collection using Workbench
+
+Collections can be shared to allow downloads by anonymous users.
+
+To share a collection with anonymous users, visit the collection page using Workbench as described in the above section. Once on this page, click on the <span class="btn btn-sm btn-primary" >Create sharing link</span> button.
+
+This will create a sharing link for the collection as shown below. You can copy this sharing link and share it with other users.
+
+!{display: block;margin-left: 25px;margin-right: auto;border:1px solid lightgray;}{{ site.baseurl }}/images/shared-collection.png!
+
+Anyone with this URL can download the collection by simply accessing it. The link presents a downloadable version of the collection as shown below.
+
+!{display: block;margin-left: 25px;margin-right: auto;border:1px solid lightgray;}{{ site.baseurl }}/images/download-shared-collection.png!
title: "Uploading data"
...
-This tutorial describes how to to upload new Arvados data collections using the command line tool @arv keep put@.
+Arvados data collections can be uploaded using either the @*arv keep put*@ command line tool or Workbench.
+
+# "*Upload using command line tool*":#upload-using-command
+# "*Upload using Workbench*":#upload-using-workbench
notextile. <div class="spaced-out">
-{% include 'tutorial_expectations' %}
+h2(#upload-using-command). Upload using command line tool
-h3. Upload
+{% include 'tutorial_expectations' %}
To upload a file to Keep using @arv keep put@:
<notextile>
In both examples, the @arv keep put@ command created a collection. The first collection contains the single uploaded file. The second collection contains the entire uploaded directory.
-@arv keep put@ accepts quite a few optional command line arguments, which are described "on the arv subcommands":{{site.baseurl}}/sdk/cli/subcommands.html#arv-keep-put page.
+@arv keep put@ accepts quite a few optional command line arguments, which are described on the "arv subcommands":{{site.baseurl}}/sdk/cli/subcommands.html#arv-keep-put page.
h3. Locate your collection in Workbench
To move the collection to a different project, check the box at the left of the collection row. Pull down the *Selection...*<span class="caret"></span> menu near the top of the page tab, and select *Move selected*. This will open a dialog box where you can select a destination project for the collection. Click a project, then finally the <span class="btn btn-sm btn-primary">Move</span> button.
-!{{ site.baseurl }}/images/workbench-move-selected.png!
+!{display: block;margin-left: 25px;margin-right: auto;}{{ site.baseurl }}/images/workbench-move-selected.png!
Click on the *<i class="fa fa-fw fa-archive"></i> Show* button next to the collection's listing on a project page to go to the Workbench page for your collection. On this page, you can see the collection's contents, download individual files, and set sharing options.
notextile. </div>
+
+h2(#upload-using-workbench). Upload using Workbench
+
+To upload using Workbench, visit the Workbench *Dashboard*. Click on *Projects*<span class="caret"></span> dropdown menu in the top navigation menu and select your *Home* project or any other project of your choosing. You will see the *Data collections* tab for this project, which lists the collections in this project.
+
+To upload files into a new collection, click on *Add data*<span class="caret"></span> dropdown menu and select *Upload files from my computer*.
+
+!{display: block;margin-left: 25px;margin-right: auto;border:1px solid lightgray;}{{ site.baseurl }}/images/upload-using-workbench.png!
+
+<br/>This will create a new empty collection in your chosen project and will take you to the *Upload* tab for that collection.
+
+!{display: block;margin-left: 25px;margin-right: auto;border:1px solid lightgray;}{{ site.baseurl }}/images/upload-tab-in-new-collection.png!
+
+Click on the *Browse...* button and select the files you would like to upload. Selected files will be added to a list of files to be uploaded. After you are done selecting files, click on the *<i class="fa fa-fw fa-play"></i> Start* button to start the upload. Workbench will upload the files to Arvados and display a progress bar; when the upload is complete, you will see an indication to that effect.
+
+!{display: block;margin-left: 25px;margin-right: auto;border:1px solid lightgray;}{{ site.baseurl }}/images/files-uploaded.png!
+
+*Note:* If you leave the collection page during the upload, the upload process will be aborted and you will need to upload the files again.
+
+*Note:* You can also use the Upload tab to add files to an existing collection.
my $cleanpid = fork();
if ($cleanpid == 0)
{
+ # Find FUSE mounts that look like Keep mounts (the mount path has the
+ # word "keep") and unmount them. Then clean up work directories.
+ # TODO: When #5036 is done and widely deployed, we can get rid of the
+ # regular expression and just unmount everything with type fuse.keep.
srun (["srun", "--nodelist=$nodelist", "-D", $ENV{'TMPDIR'}],
- ['bash', '-c', 'if mount | grep -q $JOB_WORK/; then for i in $JOB_WORK/*keep $CRUNCH_TMP/task/*.keep; do /bin/fusermount -z -u $i; done; fi; sleep 1; rm -rf $JOB_WORK $CRUNCH_INSTALL $CRUNCH_TMP/task $CRUNCH_TMP/src*']);
+ ['bash', '-ec', 'mount -t fuse,fuse.keep | awk \'($3 ~ /\ykeep\y/){print $3}\' | xargs -r -n 1 fusermount -u -z; sleep 1; rm -rf $JOB_WORK $CRUNCH_INSTALL $CRUNCH_TMP/task $CRUNCH_TMP/src*']);
exit (1);
}
while (1)
if ((!$venv_built) and (-d $python_src) and can_run("virtualenv")) {
shell_or_die("virtualenv", "--quiet", "--system-site-packages",
"--python=python2.7", $venv_dir);
- shell_or_die("$venv_dir/bin/pip", "--quiet", "install", $python_src);
+ shell_or_die("$venv_dir/bin/pip", "--quiet", "install", "-I", $python_src);
$venv_built = 1;
$Log->("Built Python SDK virtualenv");
}
- my $pkgs;
+ my $pip_bin = "pip";
if ($venv_built) {
$Log->("Running in Python SDK virtualenv");
- $pkgs = `(\Q$venv_dir/bin/pip\E freeze 2>/dev/null | grep arvados) || dpkg-query --show '*arvados*'`;
+ $pip_bin = "$venv_dir/bin/pip";
my $orig_argv = join(" ", map { quotemeta($_); } @ARGV);
@ARGV = ("/bin/sh", "-ec",
". \Q$venv_dir/bin/activate\E; exec $orig_argv");
} elsif (-d $python_src) {
- $Log->("Warning: virtualenv not found inside Docker container default " +
+ $Log->("Warning: virtualenv not found inside Docker container default " .
"\$PATH. Can't install Python SDK.");
- } else {
- $pkgs = `(pip freeze 2>/dev/null | grep arvados) || dpkg-query --show '*arvados*'`;
}
+ my $pkgs = `(\Q$pip_bin\E freeze 2>/dev/null | grep arvados) || dpkg-query --show '*arvados*'`;
if ($pkgs) {
$Log->("Using Arvados SDK:");
foreach my $line (split /\n/, $pkgs) {
# errors.py - Arvados-specific exceptions.
import json
+import requests
+
from apiclient import errors as apiclient_errors
+from collections import OrderedDict
class ApiError(apiclient_errors.HttpError):
def _get_reason(self):
return super(ApiError, self)._get_reason()
+class KeepRequestError(Exception):
+ """Base class for errors accessing Keep services."""
+ def __init__(self, message='', service_errors=()):
+ """KeepRequestError(message='', service_errors=())
+
+ Arguments:
+ * message: A human-readable message describing what Keep operation
+ failed.
+ * service_errors: An iterable that yields 2-tuples of Keep
+ service URLs to the error encountered when talking to
+ it--either an exception, or an HTTP response object. These
+ will be packed into an OrderedDict, available through the
+ service_errors() method.
+ """
+ self._service_errors = OrderedDict(service_errors)
+ if self._service_errors:
+ exc_reports = [self._format_error(*err_pair)
+ for err_pair in self._service_errors.iteritems()]
+ base_msg = "{}: {}".format(message, "; ".join(exc_reports))
+ else:
+ base_msg = message
+ super(KeepRequestError, self).__init__(base_msg)
+ self.message = message
+
+ def _format_error(self, service_root, error):
+ if isinstance(error, requests.Response):
+ err_fmt = "{} responded with {e.status_code} {e.reason}"
+ else:
+ err_fmt = "{} raised {e.__class__.__name__} ({e})"
+ return err_fmt.format(service_root, e=error)
+
+ def service_errors(self):
+ """service_errors() -> OrderedDict
+
+ The keys of the dictionary are Keep service URLs.
+ The corresponding value is the exception raised when sending the
+ request to it."""
+ return self._service_errors
+
+
class ArgumentError(Exception):
pass
class SyntaxError(Exception):
pass
class CommandFailedError(Exception):
pass
-class KeepReadError(Exception):
+class KeepReadError(KeepRequestError):
pass
-class KeepWriteError(Exception):
+class KeepWriteError(KeepRequestError):
pass
class NotFoundError(KeepReadError):
pass
self.stop.wait(self.poll_time)
def run_forever(self):
- self.stop.wait()
+ # Have to poll here, otherwise KeyboardInterrupt will never get processed.
+ while not self.stop.is_set():
+ self.stop.wait(1)
def close(self):
self.stop.set()
if loop.success():
return blob
- # No servers fulfilled the request. Count how many responded
- # "not found;" if the ratio is high enough (currently 75%), report
- # Not Found; otherwise a generic error.
+ try:
+ all_roots = local_roots + hint_roots
+ except NameError:
+ # We never successfully fetched local_roots.
+ all_roots = hint_roots
# Q: Including 403 is necessary for the Keep tests to continue
# passing, but maybe they should expect KeepReadError instead?
- not_founds = sum(1 for ks in roots_map.values()
- if ks.last_status() in set([403, 404, 410]))
- if roots_map and ((float(not_founds) / len(roots_map)) >= .75):
- raise arvados.errors.NotFoundError(loc_s)
+ not_founds = sum(1 for key in all_roots
+ if roots_map[key].last_status() in {403, 404, 410})
+ service_errors = ((key, roots_map[key].last_result)
+ for key in all_roots)
+ if not roots_map:
+ raise arvados.errors.KeepReadError(
+ "failed to read {}: no Keep services available ({})".format(
+ loc_s, loop.last_result()))
+ elif not_founds == len(all_roots):
+ raise arvados.errors.NotFoundError(
+ "{} not found".format(loc_s), service_errors)
else:
- raise arvados.errors.KeepReadError(loc_s)
+ raise arvados.errors.KeepReadError(
+ "failed to read {}".format(loc_s), service_errors)
@retry.retry_method
def put(self, data, copies=2, num_retries=None):
if loop.success():
return thread_limiter.response()
- raise arvados.errors.KeepWriteError(
- "Write fail for %s: wanted %d but wrote %d" %
- (data_hash, copies, thread_limiter.done()))
+ if not roots_map:
+ raise arvados.errors.KeepWriteError(
+ "failed to write {}: no Keep services available ({})".format(
+ data_hash, loop.last_result()))
+ else:
+ service_errors = ((key, roots_map[key].last_result)
+ for key in local_roots
+ if not roots_map[key].success_flag)
+ raise arvados.errors.KeepWriteError(
+ "failed to write {} (wanted {} copies but wrote {})".format(
+ data_hash, copies, thread_limiter.done()), service_errors)
# Local storage methods need no-op num_retries arguments to keep
# integration tests happy. With better isolation they could
--- /dev/null
+#!/usr/bin/env python
+
+import traceback
+import unittest
+
+import arvados.errors as arv_error
+import arvados_testutil as tutil
+
+class KeepRequestErrorTestCase(unittest.TestCase):
+ SERVICE_ERRORS = [
+ ('http://keep1.zzzzz.example.org/', IOError("test IOError")),
+ ('http://keep3.zzzzz.example.org/', MemoryError("test MemoryError")),
+ ('http://keep5.zzzzz.example.org/', tutil.fake_requests_response(
+ 500, "test 500")),
+ ('http://keep7.zzzzz.example.org/', IOError("second test IOError")),
+ ]
+
+ def check_get_message(self, *args):
+ test_exc = arv_error.KeepRequestError("test message", *args)
+ self.assertEqual("test message", test_exc.message)
+
+ def test_get_message_with_service_errors(self):
+ self.check_get_message(self.SERVICE_ERRORS[:])
+
+ def test_get_message_without_service_errors(self):
+ self.check_get_message()
+
+ def check_get_service_errors(self, *args):
+ expected = dict(args[0]) if args else {}
+ test_exc = arv_error.KeepRequestError("test service exceptions", *args)
+ self.assertEqual(expected, test_exc.service_errors())
+
+ def test_get_service_errors(self):
+ self.check_get_service_errors(self.SERVICE_ERRORS[:])
+
+ def test_get_service_errors_none(self):
+ self.check_get_service_errors({})
+
+ def test_empty_exception(self):
+ test_exc = arv_error.KeepRequestError()
+ self.assertFalse(test_exc.message)
+ self.assertEqual({}, test_exc.service_errors())
+
+ def traceback_str(self, exc):
+ return traceback.format_exception_only(type(exc), exc)[-1]
+
+ def test_traceback_str_without_service_errors(self):
+ message = "test plain traceback string"
+ test_exc = arv_error.KeepRequestError(message)
+ exc_report = self.traceback_str(test_exc)
+ self.assertTrue(exc_report.startswith("KeepRequestError: "))
+ self.assertIn(message, exc_report)
+
+ def test_traceback_str_with_service_errors(self):
+ message = "test traceback shows Keep services"
+ test_exc = arv_error.KeepRequestError(message, self.SERVICE_ERRORS[:])
+ exc_report = self.traceback_str(test_exc)
+ self.assertTrue(exc_report.startswith("KeepRequestError: "))
+ for expect_substr in [message, "raised IOError", "raised MemoryError",
+ "test MemoryError", "second test IOError",
+ "responded with 500 Internal Server Error"]:
+ self.assertIn(expect_substr, exc_report)
+ # Assert the report maintains order of listed services.
+ last_index = -1
+ for service_key, _ in self.SERVICE_ERRORS:
+ service_index = exc_report.find(service_key)
+ self.assertGreater(service_index, last_index)
+ last_index = service_index
min_penalty,
max_penalty))
+ def check_64_zeros_error_order(self, verb, exc_class):
+ data = '0' * 64
+ if verb == 'get':
+ data = hashlib.md5(data).hexdigest() + '+1234'
+ api_client = self.mock_n_keep_disks(16)
+ keep_client = arvados.KeepClient(api_client=api_client)
+ with mock.patch('requests.' + verb,
+ side_effect=socket.timeout) as req_mock, \
+ self.assertRaises(exc_class) as err_check:
+ getattr(keep_client, verb)(data)
+ urls = [urlparse.urlparse(url)
+ for url in err_check.exception.service_errors()]
+ self.assertEqual([('keep0x' + c, 80) for c in '3eab2d5fc9681074'],
+ [(url.hostname, url.port) for url in urls])
+
+ def test_get_error_shows_probe_order(self):
+ self.check_64_zeros_error_order('get', arvados.errors.KeepReadError)
+
+ def test_put_error_shows_probe_order(self):
+ self.check_64_zeros_error_order('put', arvados.errors.KeepWriteError)
+
+ def check_no_services_error(self, verb, exc_class):
+ api_client = mock.MagicMock(name='api_client')
+ api_client.keep_services().accessible().execute.side_effect = (
+ arvados.errors.ApiError)
+ keep_client = arvados.KeepClient(api_client=api_client)
+ with self.assertRaises(exc_class) as err_check:
+ getattr(keep_client, verb)('d41d8cd98f00b204e9800998ecf8427e+0')
+ self.assertEqual(0, len(err_check.exception.service_errors()))
+
+ def test_get_error_with_no_services(self):
+ self.check_no_services_error('get', arvados.errors.KeepReadError)
+
+ def test_put_error_with_no_services(self):
+ self.check_no_services_error('put', arvados.errors.KeepWriteError)
+
+ def check_errors_from_last_retry(self, verb, exc_class):
+ api_client = self.mock_n_keep_disks(2)
+ keep_client = arvados.KeepClient(api_client=api_client)
+ req_mock = getattr(tutil, 'mock_{}_responses'.format(verb))(
+ "retry error reporting test", 500, 500, 403, 403)
+ with req_mock, tutil.skip_sleep, \
+ self.assertRaises(exc_class) as err_check:
+ getattr(keep_client, verb)('d41d8cd98f00b204e9800998ecf8427e+0',
+ num_retries=3)
+ self.assertEqual([403, 403], [
+ getattr(error, 'status_code', None)
+ for error in err_check.exception.service_errors().itervalues()])
+
+ def test_get_error_reflects_last_retry(self):
+ self.check_errors_from_last_retry('get', arvados.errors.KeepReadError)
+
+ def test_put_error_reflects_last_retry(self):
+ self.check_errors_from_last_retry('put', arvados.errors.KeepWriteError)
+
+ def test_put_error_does_not_include_successful_puts(self):
+ data = 'partial failure test'
+ data_loc = '{}+{}'.format(hashlib.md5(data).hexdigest(), len(data))
+ api_client = self.mock_n_keep_disks(3)
+ keep_client = arvados.KeepClient(api_client=api_client)
+ with tutil.mock_put_responses(data_loc, 200, 500, 500) as req_mock, \
+ self.assertRaises(arvados.errors.KeepWriteError) as exc_check:
+ keep_client.put(data)
+ self.assertEqual(2, len(exc_check.exception.service_errors()))
+
class KeepClientRetryTestMixin(object):
# Testing with a local Keep store won't exercise the retry behavior.
# sign-timestamp ::= <8 lowercase hex digits>
attr_reader :hash, :hints, :size
+ LOCATOR_REGEXP = /^([[:xdigit:]]{32})(\+([[:digit:]]+))?(\+([[:upper:]][[:alnum:]+@_-]*))?$/
+
def initialize(hasharg, sizearg, hintarg)
@hash = hasharg
@size = sizearg
@hints = hintarg
end
+ def self.valid? tok
+ !!(LOCATOR_REGEXP.match tok)
+ end
+
# Locator.parse returns a Locator object parsed from the string tok.
# Returns nil if tok could not be parsed as a valid locator.
def self.parse(tok)
raise ArgumentError.new "locator is nil or empty"
end
- m = /^([[:xdigit:]]{32})(\+([[:digit:]]+))?(\+([[:upper:]][[:alnum:]+@_-]*))?$/.match(tok.strip)
+ m = LOCATOR_REGEXP.match(tok.strip)
unless m
raise ArgumentError.new "not a valid locator #{tok}"
end
def each_line
return to_enum(__method__) unless block_given?
@text.each_line do |line|
- tokens = line.split
- next if tokens.empty?
- stream_name = unescape(tokens.shift)
- blocks = []
- while loc = Locator.parse(tokens.first)
- blocks << loc
- tokens.shift
+ stream_name = nil
+ block_tokens = []
+ file_tokens = []
+ line.scan /\S+/ do |token|
+ if stream_name.nil?
+ stream_name = unescape token
+ elsif file_tokens.empty? and Locator.valid? token
+ block_tokens << token
+ else
+ file_tokens << unescape(token)
+ end
end
- yield [stream_name, blocks, tokens.map { |s| unescape(s) }]
+ # Ignore blank lines
+ next if stream_name.nil?
+ yield [stream_name, block_tokens, file_tokens]
end
end
end
end
- def each_file_spec(speclist)
- return to_enum(__method__, speclist) unless block_given?
- speclist.each do |filespec|
- start_pos, filesize, filename = filespec.split(':', 3)
- yield [start_pos.to_i, filesize.to_i, filename]
+ def split_file_token token
+ start_pos, filesize, filename = token.split(':', 3)
+ [start_pos.to_i, filesize.to_i, filename]
+ end
+
+ def each_file_spec
+ return to_enum(__method__) unless block_given?
+ @text.each_line do |line|
+ stream_name = nil
+ in_file_tokens = false
+ line.scan /\S+/ do |token|
+ if stream_name.nil?
+ stream_name = unescape token
+ elsif in_file_tokens or not Locator.valid? token
+ in_file_tokens = true
+ yield [stream_name] + split_file_token(token)
+ end
+ end
end
+ true
end
def files
if @files.nil?
file_sizes = Hash.new(0)
- each_line do |streamname, blocklist, filelist|
- each_file_spec(filelist) do |_, filesize, filename|
- file_sizes[[streamname, filename]] += filesize
- end
+ each_file_spec do |streamname, _, filesize, filename|
+ file_sizes[[streamname, filename]] += filesize
end
@files = file_sizes.each_pair.map do |(streamname, filename), size|
[streamname, filename, size]
return files.size
end
seen_files = {}
- each_line do |streamname, blocklist, filelist|
- each_file_spec(filelist) do |_, _, filename|
- seen_files[[streamname, filename]] = true
- return stop_after if (seen_files.size >= stop_after)
- end
+ each_file_spec do |streamname, _, _, filename|
+ seen_files[[streamname, filename]] = true
+ return stop_after if (seen_files.size >= stop_after)
end
seen_files.size
end
if want_file.nil?
want_stream, want_file = File.split(want_stream)
end
- each_line do |stream_name, _, filelist|
- if (stream_name == want_stream) and
- each_file_spec(filelist).any? { |_, _, name| name == want_file }
+ each_file_spec do |streamname, _, _, name|
+ if streamname == want_stream and name == want_file
return true
end
end
group :test, :development do
gem 'factory_girl_rails'
gem 'database_cleaner'
+ gem 'ruby-prof'
# Note: "require: false" here tells bunder not to automatically
# 'require' the packages during application startup. Installation is
# still mandatory.
gem 'themes_for_rails'
gem 'arvados', '>= 0.1.20140919104705'
-gem 'arvados-cli', '>= 0.1.20141202211726'
+gem 'arvados-cli', '>= 0.1.20150121183928'
# pg_power lets us use partial indexes in schema.rb in Rails 3
gem 'pg_power'
google-api-client (~> 0.6.3, >= 0.6.3)
json (~> 1.7, >= 1.7.7)
jwt (>= 0.1.5, < 1.0.0)
- arvados-cli (0.1.20141209151444)
+ arvados-cli (0.1.20150121183928)
activesupport (~> 3.2, >= 3.2.13)
andand (~> 1.3, >= 1.3.3)
arvados (~> 0.1, >= 0.1.0)
rdoc (3.12.2)
json (~> 1.4)
ref (1.0.5)
+ ruby-prof (0.15.2)
rvm-capistrano (1.5.1)
capistrano (~> 2.15.4)
sass (3.3.4)
acts_as_api
andand
arvados (>= 0.1.20140919104705)
- arvados-cli (>= 0.1.20141202211726)
+ arvados-cli (>= 0.1.20150121183928)
coffee-rails (~> 3.2.0)
database_cleaner
factory_girl_rails
pg_power
puma
rails (~> 3.2.0)
+ ruby-prof
rvm-capistrano
sass-rails (>= 3.2.0)
simplecov (~> 0.7.1)
end
def show
- render json: @object.as_api_response(nil, select: @select)
+ send_json @object.as_api_response(nil, select: @select)
end
def create
err[:error_token] = [Time.now.utc.to_i, "%08x" % rand(16 ** 8)].join("+")
status = err.delete(:status) || 422
logger.error "Error #{err[:error_token]}: #{status}"
- render json: err, status: status
+ send_json err, status: status
+ end
+
+ def send_json response, opts={}
+ # The obvious render(json: ...) forces a slow JSON encoder. See
+ # #3021 and commit logs. Might be fixed in Rails 4.1.
+ render({
+ text: Oj.dump(response, mode: :compat).html_safe,
+ content_type: 'application/json'
+ }.merge opts)
end
def find_objects_for_index
except(:limit).except(:offset).
count(:id, distinct: true)
end
- render json: @object_list
+ send_json @object_list
end
def remote_ip
if @object.is_a? Collection
super
else
- render json: @object
+ send_json @object
end
end
visited = {}
search_edges(visited, @object[:portable_data_hash], :search_up)
search_edges(visited, @object[:uuid], :search_up)
- render json: visited
+ send_json visited
end
def used_by
visited = {}
search_edges(visited, @object[:uuid], :search_down)
search_edges(visited, @object[:portable_data_hash], :search_down)
- render json: visited
+ send_json visited
end
protected
:items_available => @items_available,
:items => @objects.as_api_response(nil)
}
- render json: @object_list
+ send_json @object_list
end
protected
# Render the :superuser view (i.e., include the ping_secret) even
# if !current_user.is_admin. This is safe because @object.ping's
# success implies the ping_secret was already known by the client.
- render json: @object.as_api_response(:superuser)
+ send_json @object.as_api_response(:superuser)
end
end
end
@object.ping(ping_data)
if @object.info['ping_secret'] == params[:ping_secret]
- render json: @object.as_api_response(:superuser)
+ send_json @object.as_api_response(:superuser)
else
raise "Invalid ping_secret after ping"
end
end
end
end
- render json: {
- kind: 'arvados#RepositoryPermissionSnapshot',
- repositories: @repo_info.values,
- user_keys: @user_aks
- }
+ send_json(kind: 'arvados#RepositoryPermissionSnapshot',
+ repositories: @repo_info.values,
+ user_keys: @user_aks)
end
end
end
discovery
end
- render json: discovery
+ send_json discovery
end
end
UserNotifier.account_is_setup(@object).deliver
end
- render json: { kind: "arvados#HashList", items: @response.as_api_response(nil) }
+ send_json kind: "arvados#HashList", items: @response.as_api_response(nil)
end
# delete user agreements, vm, repository, login links; set state to inactive
end
end
end
- render json: { kind: "arvados#HashList", items: @response }
+ send_json kind: "arvados#HashList", items: @response
end
end
end
# Done.
- render json: {success: true}
+ send_json success: true
end
end
def self.uuid_prefixes
unless @@prefixes_hash
@@prefixes_hash = {}
+ Rails.application.eager_load!
ActiveRecord::Base.descendants.reject(&:abstract_class?).each do |k|
if k.respond_to?(:uuid_prefix)
@@prefixes_hash[k.uuid_prefix] = k
end
resource_class = nil
- Rails.application.eager_load!
uuid.match HasUuid::UUID_REGEX do |re|
return uuid_prefixes[re[1]] if uuid_prefixes[re[1]]
end
end
def log_start_state
- @old_etag = etag
- @old_attributes = logged_attributes
+ @old_attributes = Marshal.load(Marshal.dump(attributes))
+ @old_logged_attributes = Marshal.load(Marshal.dump(logged_attributes))
end
def log_change(event_type)
def log_update
log_change('update') do |log|
- log.fill_properties('old', @old_etag, @old_attributes)
+ log.fill_properties('old', etag(@old_attributes), @old_logged_attributes)
log.update_to self
end
end
def log_destroy
log_change('destroy') do |log|
- log.fill_properties('old', @old_etag, @old_attributes)
+ log.fill_properties('old', etag(@old_attributes), @old_logged_attributes)
log.update_to nil
end
end
before_validation :strip_manifest_text
before_validation :set_portable_data_hash
validate :ensure_hash_matches_manifest_text
+ before_save :set_file_names
# Query only undeleted collections by default.
default_scope where("expires_at IS NULL or expires_at > CURRENT_TIMESTAMP")
end
end
+ def set_file_names
+ if self.manifest_text_changed?
+ self.file_names = manifest_files
+ end
+ true
+ end
+
+ def manifest_files
+ names = ''
+ if self.manifest_text
+ self.manifest_text.scan(/ \d+:\d+:(\S+)/) do |name|
+ names << name.first.gsub('\040',' ') + "\n"
+ break if names.length > 2**12
+ end
+ end
+
+ if self.manifest_text and names.length < 2**12
+ self.manifest_text.scan(/^\.\/(\S+)/m) do |stream_name|
+ names << stream_name.first.gsub('\040',' ') + "\n"
+ break if names.length > 2**12
+ end
+ end
+
+ names[0,2**12]
+ end
+
def check_encoding
if manifest_text.encoding.name == 'UTF-8' and manifest_text.valid_encoding?
true
find_all_for_docker_image(search_term, search_tag, readers).first
end
+ def self.searchable_columns operator
+ super - ["manifest_text"]
+ end
+
protected
def portable_manifest_text
portable_manifest = self[:manifest_text].dup
class OwnerUuidIndex < ActiveRecord::Migration
def tables_with_owner_uuid
- ActiveRecord::Base.connection.tables.select do |table|
- columns = ActiveRecord::Base.connection.columns(table)
- columns.collect(&:name).include? 'owner_uuid'
- end
+ %w{api_clients authorized_keys collections groups humans
+ job_tasks jobs keep_disks keep_services links logs
+ nodes pipeline_instances pipeline_templates repositories
+ specimens traits users virtual_machines}
end
def up
# Roll back: drop the owner_uuid index from each table, but only when
# such an index actually exists (it may never have been created, or may
# already have been removed).
def down
  tables_with_owner_uuid.each do |table|
    has_owner_uuid_index = ActiveRecord::Base.connection.indexes(table).any? do |idx|
      idx.columns == ['owner_uuid']
    end
    remove_index table.to_sym, :owner_uuid if has_owner_uuid_index
  end
end
end
--- /dev/null
# Convert description columns from unbounded text to a bounded string
# (limit 2**19 = 524288 characters) across the models that expose a
# description field.
class DescriptionsAreStrings < ActiveRecord::Migration
  # Tables whose description column is converted by this migration.
  def tables_with_description_column
    %w{collections groups jobs pipeline_instances pipeline_templates}
  end

  def up
    tables_with_description_column.each do |tbl|
      change_column tbl.to_sym, :description, :string, :limit => 2**19
    end
  end

  def down
    tables_with_description_column.each do |tbl|
      case tbl
      when 'collections'
        # collections originally used a plain string column
        # (implicit 255-character limit); the others were text.
        change_column tbl.to_sym, :description, :string
      else
        change_column tbl.to_sym, :description, :text
      end
    end
  end
end
--- /dev/null
# Add the file_names search-cache column to collections and backfill it
# from each collection's manifest, bypassing validations/callbacks by
# writing with raw SQL.
class CollectionFileNames < ActiveRecord::Migration
  include CurrentApiClient

  def up
    # 2**13 = 8192 chars: double the 2**12 cap used by manifest_files,
    # leaving headroom for multi-byte characters.
    add_column :collections, :file_names, :string, :limit => 2**13

    act_as_system_user do
      Collection.find_each(batch_size: 20) do |c|
        file_names = c.manifest_files
        conn = ActiveRecord::Base.connection
        # Quote BOTH interpolated values. The original only quoted
        # file_names; quoting the uuid as well keeps the raw SQL safe
        # even if a uuid ever contains quote characters.
        conn.execute "UPDATE collections
                    SET file_names = #{conn.quote(file_names)}
                    WHERE uuid = #{conn.quote(c.uuid)}"
      end
    end
  end

  def down
    remove_column :collections, :file_names
  end
end
--- /dev/null
# Add one compound btree "search index" per table, covering the string
# columns that generic any-column filter searches touch. The lists below
# mirror each model's searchable_columns; text and description columns
# are deliberately absent (too large for a btree index — see the
# "Descriptions are not part of search indexes" note in the tests).
class SearchIndex < ActiveRecord::Migration
  # table name => ordered list of columns included in that table's index.
  def tables_with_searchable_columns
    {
      "api_client_authorizations" => ["api_token", "created_by_ip_address", "last_used_by_ip_address", "default_owner_uuid"],
      "api_clients" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name", "url_prefix"],
      "authorized_keys" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name", "key_type", "authorized_user_uuid"],
      "collections" => ["owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "portable_data_hash", "redundancy_confirmed_by_client_uuid", "uuid", "name", "file_names"],
      "groups" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name", "group_class"],
      "humans" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid"],
      "job_tasks" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "job_uuid", "created_by_job_task_uuid"],
      "jobs" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "submit_id", "script", "script_version", "cancelled_by_client_uuid", "cancelled_by_user_uuid", "output", "is_locked_by_uuid", "log", "repository", "supplied_script_version", "docker_image_locator", "state", "arvados_sdk_version"],
      "keep_disks" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "ping_secret", "node_uuid", "filesystem_uuid", "keep_service_uuid"],
      "keep_services" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "service_host", "service_type"],
      "links" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "tail_uuid", "link_class", "name", "head_uuid"],
      "logs" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "object_uuid", "event_type", "object_owner_uuid"],
      "nodes" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "hostname", "domain", "ip_address", "job_uuid"],
      "pipeline_instances" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "pipeline_template_uuid", "name", "state"],
      "pipeline_templates" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name"],
      "repositories" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name", "fetch_url", "push_url"],
      "specimens" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "material"],
      "traits" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name"],
      "users" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "email", "first_name", "last_name", "identity_url", "default_owner_uuid"],
      "virtual_machines" => ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "hostname"],
    }
  end

  # Reversible migration: Rails can auto-revert add_index in `change`.
  def change
    tables_with_searchable_columns.each do |table, columns|
      add_index table.to_sym, columns, name: "#{table}_search_index"
    end
  end
end
uuid character varying(255),
manifest_text text,
name character varying(255),
- description character varying(255),
+ description character varying(524288),
properties text,
- expires_at date
+ expires_at date,
+ file_names character varying(8192)
);
modified_by_user_uuid character varying(255),
modified_at timestamp without time zone,
name character varying(255) NOT NULL,
- description text,
+ description character varying(524288),
updated_at timestamp without time zone NOT NULL,
group_class character varying(255)
);
supplied_script_version character varying(255),
docker_image_locator character varying(255),
priority integer DEFAULT 0 NOT NULL,
- description text,
+ description character varying(524288),
state character varying(255),
arvados_sdk_version character varying(255)
);
components_summary text,
started_at timestamp without time zone,
finished_at timestamp without time zone,
- description text
+ description character varying(524288)
);
name character varying(255),
components text,
updated_at timestamp without time zone NOT NULL,
- description text
+ description character varying(524288)
);
ADD CONSTRAINT virtual_machines_pkey PRIMARY KEY (id);
+--
+-- Name: api_client_authorizations_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX api_client_authorizations_search_index ON api_client_authorizations USING btree (api_token, created_by_ip_address, last_used_by_ip_address, default_owner_uuid);
+
+
+--
+-- Name: api_clients_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX api_clients_search_index ON api_clients USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, name, url_prefix);
+
+
+--
+-- Name: authorized_keys_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX authorized_keys_search_index ON authorized_keys USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, name, key_type, authorized_user_uuid);
+
+
--
-- Name: collection_owner_uuid_name_unique; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX collection_owner_uuid_name_unique ON collections USING btree (owner_uuid, name);
+--
+-- Name: collections_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX collections_search_index ON collections USING btree (owner_uuid, modified_by_client_uuid, modified_by_user_uuid, portable_data_hash, redundancy_confirmed_by_client_uuid, uuid, name, file_names);
+
+
--
-- Name: groups_owner_uuid_name_unique; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX groups_owner_uuid_name_unique ON groups USING btree (owner_uuid, name);
+--
+-- Name: groups_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX groups_search_index ON groups USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, name, group_class);
+
+
+--
+-- Name: humans_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX humans_search_index ON humans USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid);
+
+
--
-- Name: index_api_client_authorizations_on_api_client_id; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX index_virtual_machines_on_uuid ON virtual_machines USING btree (uuid);
+--
+-- Name: job_tasks_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX job_tasks_search_index ON job_tasks USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, job_uuid, created_by_job_task_uuid);
+
+
+--
+-- Name: jobs_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX jobs_search_index ON jobs USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, submit_id, script, script_version, cancelled_by_client_uuid, cancelled_by_user_uuid, output, is_locked_by_uuid, log, repository, supplied_script_version, docker_image_locator, state, arvados_sdk_version);
+
+
+--
+-- Name: keep_disks_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX keep_disks_search_index ON keep_disks USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, ping_secret, node_uuid, filesystem_uuid, keep_service_uuid);
+
+
+--
+-- Name: keep_services_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX keep_services_search_index ON keep_services USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, service_host, service_type);
+
+
+--
+-- Name: links_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX links_search_index ON links USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, tail_uuid, link_class, name, head_uuid);
+
+
--
-- Name: links_tail_name_unique_if_link_class_name; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX links_tail_name_unique_if_link_class_name ON links USING btree (tail_uuid, name) WHERE ((link_class)::text = 'name'::text);
+--
+-- Name: logs_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX logs_search_index ON logs USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, object_uuid, event_type, object_owner_uuid);
+
+
+--
+-- Name: nodes_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX nodes_search_index ON nodes USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, hostname, domain, ip_address, job_uuid);
+
+
+--
+-- Name: pipeline_instances_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX pipeline_instances_search_index ON pipeline_instances USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, pipeline_template_uuid, name, state);
+
+
--
-- Name: pipeline_template_owner_uuid_name_unique; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX pipeline_template_owner_uuid_name_unique ON pipeline_templates USING btree (owner_uuid, name);
+--
+-- Name: pipeline_templates_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX pipeline_templates_search_index ON pipeline_templates USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, name);
+
+
+--
+-- Name: repositories_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX repositories_search_index ON repositories USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, name, fetch_url, push_url);
+
+
+--
+-- Name: specimens_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX specimens_search_index ON specimens USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, material);
+
+
+--
+-- Name: traits_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX traits_search_index ON traits USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, name);
+
+
--
-- Name: unique_schema_migrations; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
CREATE UNIQUE INDEX unique_schema_migrations ON schema_migrations USING btree (version);
+--
+-- Name: users_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX users_search_index ON users USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, email, first_name, last_name, identity_url, default_owner_uuid);
+
+
+--
+-- Name: virtual_machines_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX virtual_machines_search_index ON virtual_machines USING btree (uuid, owner_uuid, modified_by_client_uuid, modified_by_user_uuid, hostname);
+
+
--
-- PostgreSQL database dump complete
--
INSERT INTO schema_migrations (version) VALUES ('20141111133038');
-INSERT INTO schema_migrations (version) VALUES ('20141208164553');
\ No newline at end of file
+INSERT INTO schema_migrations (version) VALUES ('20141208164553');
+
+INSERT INTO schema_migrations (version) VALUES ('20141208174553');
+
+INSERT INTO schema_migrations (version) VALUES ('20141208174653');
+
+INSERT INTO schema_migrations (version) VALUES ('20141208185217');
\ No newline at end of file
end
# Build the OAuth callback URL, carrying the original return_to query
# parameter through. An absent return_to becomes the empty string so
# CGI.escape never receives nil.
def callback_url
  return_to = request.params['return_to'] || ''
  "#{full_host}#{script_name}#{callback_path}?return_to=#{CGI.escape(return_to)}"
end
# Raw provider info for this strategy. Only the provider's kind is
# reported — NOTE(review): presumably there is no userinfo endpoint to
# query for this strategy; confirm against the omniauth strategy docs.
def raw_info
  self.class.kind
end
# Compute an opaque entity tag: the MD5 of the attribute hash's #inspect
# output, re-encoded in base 36 for compactness.
# attrs: optional attribute hash (e.g. a saved @old_attributes snapshot
# when logging a prior state); defaults to the record's current
# attributes.
def etag attrs=nil
  subject = attrs || self.attributes
  digest = Digest::MD5.hexdigest(subject.inspect)
  digest.to_i(16).to_s(36)
end
end
}
assert_response :success
end
+
# file_names is an internal search-cache column: API responses must
# expose manifest_text but never file_names.
test "get collection and verify that file_names is not included" do
  authorize_with :active
  get :show, {id: collections(:foo_file).uuid}
  assert_response :success
  assert_equal collections(:foo_file).uuid, json_response['uuid']
  assert_nil json_response['file_names']
  assert json_response['manifest_text']
end
+
# Table-driven: small descriptions are accepted; oversized ones (beyond
# the 2**19 column limit neighborhood) should be rejected with 422.
# Currently skipped pending full-text search support.
[
  [2**8, :success],
  [2**18, 422],
].each do |description_size, expected_response|
  test "create collection with description size #{description_size}
and expect response #{expected_response}" do
    skip "(Descriptions are not part of search indexes. Skip until full-text search
is implemented, at which point replace with a search in description.)"

    authorize_with :active

    # Double the seed string until it reaches the target size.
    description = 'here is a collection with a very large description'
    while description.length < description_size
      description = description + description
    end

    post :create, collection: {
      manifest_text: ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:foo.txt\n",
      description: description,
    }

    assert_response expected_response
  end
end
end
assert_equal 'a name', json_response['name']
end
# Round-trip a description update and confirm it becomes searchable via
# the generic 'any ilike' filter (and that non-matching text finds
# nothing).
test "update description for a collection, and search for that description" do
  collection = collections(:multilevel_collection_1)

  # update collection's description
  put "/arvados/v1/collections/#{collection['uuid']}", {
    format: :json,
    collection: { description: "something specific" }
  }, auth(:active)
  assert_response :success
  assert_equal 'something specific', json_response['description']

  # get the collection and verify newly added description
  get "/arvados/v1/collections/#{collection['uuid']}", {
    format: :json,
  }, auth(:active)
  assert_response 200
  assert_equal 'something specific', json_response['description']

  # search
  search_using_filter 'specific', 1
  search_using_filter 'not specific enough', 0
end
+
# End-to-end: file names from a collection's manifest are searchable,
# and updating the manifest replaces the old names in the search cache
# (the old filename must stop matching).
test "create collection, update manifest, and search with filename" do
  # create collection
  signed_manifest = Collection.sign_manifest(". bad42fa702ae3ea7d888fef11b46f450+44 0:44:my_test_file.txt\n", api_token(:active))
  post "/arvados/v1/collections", {
    format: :json,
    collection: {manifest_text: signed_manifest}.to_json,
  }, auth(:active)
  assert_response :success
  assert_equal true, json_response['manifest_text'].include?('my_test_file.txt')

  created = json_response

  # search using the filename
  search_using_filter 'my_test_file.txt', 1

  # update the collection's manifest text
  signed_manifest = Collection.sign_manifest(". bad42fa702ae3ea7d888fef11b46f450+44 0:44:my_updated_test_file.txt\n", api_token(:active))
  put "/arvados/v1/collections/#{created['uuid']}", {
    format: :json,
    collection: {manifest_text: signed_manifest}.to_json,
  }, auth(:active)
  assert_response :success
  assert_equal created['uuid'], json_response['uuid']
  assert_equal true, json_response['manifest_text'].include?('my_updated_test_file.txt')
  assert_equal false, json_response['manifest_text'].include?('my_test_file.txt')

  # search using the new filename
  search_using_filter 'my_updated_test_file.txt', 1
  search_using_filter 'my_test_file.txt', 0
  search_using_filter 'there_is_no_such_file.txt', 0
end
+
# Issue an 'any ilike' substring search against the collections index
# and assert on the number of items returned. For zero expected items,
# also check items_available to ensure the count endpoint agrees.
def search_using_filter search_filter, expected_items
  filter_param = [['any', 'ilike', "%#{search_filter}%"]].to_json
  get '/arvados/v1/collections', {:filters => filter_param}, auth(:active)
  assert_response :success
  items = json_response['items']
  assert_not_nil items
  if expected_items.zero?
    assert_equal 0, json_response['items_available']
    assert_equal 0, items.size
  else
    assert_equal expected_items, items.size
    assert_not_nil items.first
  end
end
end
+++ /dev/null
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class BrowsingTest < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { :runs => 5, :metrics => [:wall_time, :memory]
- # :output => 'tmp/performance', :formats => [:flat] }
-
- def test_homepage
- get '/'
- end
-end
--- /dev/null
+require 'test_helper'
+require 'rails/performance_test_help'
+
# Performance tests for hot index endpoints, run under
# rails/performance_test_help to collect timing/memory profiles.
class IndexTest < ActionDispatch::PerformanceTest
  def test_links_index
    get '/arvados/v1/links', {format: :json}, auth(:admin)
  end

  def test_links_index_with_filters
    get '/arvados/v1/links', {format: :json, filters: [%w[head_uuid is_a arvados#collection]].to_json}, auth(:admin)
  end

  def test_collections_index
    get '/arvados/v1/collections', {format: :json}, auth(:admin)
  end
end
module ArvadosTestSupport
# Parse the last HTTP response body as JSON using Oj (a fast C-based
# JSON parser) instead of ActiveSupport::JSON, to speed up test suites.
def json_response
  Oj.load response.body
end
def api_token(api_client_auth_name)
end
end
# Group descriptions were widened to 2**19 chars; verify that a
# 2**16-char string survives a save/reload round trip intact.
test "store long string" do
  set_user_from_auth :active
  longstring = "a"
  while longstring.length < 2**16
    longstring = longstring + longstring
  end
  g = Group.create! name: 'Has a long description', description: longstring
  g = Group.find_by_uuid g.uuid
  assert_equal g.description, longstring
end
+
[['uuid', {unique: true}],
['owner_uuid', {}]].each do |the_column, requires|
test "unique index on all models with #{the_column}" do
"Only #{checked} tables have a #{the_column}?!")
end
end
+
# Every model that exposes searchable_columns must have a matching
# "<table>_search_index" database index covering exactly those columns
# (minus text/description columns, which aren't indexed yet).
test "search index exists on models that go into projects" do
  all_tables = ActiveRecord::Base.connection.tables
  all_tables.delete 'schema_migrations'

  all_tables.each do |table|
    table_class = table.classify.constantize
    if table_class.respond_to?('searchable_columns')
      search_index_columns = table_class.searchable_columns('ilike')
      # Disappointing, but text columns aren't indexed yet.
      search_index_columns -= table_class.columns.select { |c|
        c.type == :text or c.name == 'description'
      }.collect(&:name)

      indexes = ActiveRecord::Base.connection.indexes(table)
      # Match by column list (the real check) and by conventional name
      # (for a helpful failure message when the columns have drifted).
      search_index_by_columns = indexes.select do |index|
        index.columns == search_index_columns
      end
      search_index_by_name = indexes.select do |index|
        index.name == "#{table}_search_index"
      end
      assert !search_index_by_columns.empty?, "#{table} has no search index with columns #{search_index_columns}. Instead found search index with columns #{search_index_by_name.first.andand.columns}"
    end
  end
end
end
assert_match /UTF-8/, c.errors.messages[:manifest_text].first
end
end
+
# file_names should be populated when a collection is created and
# refreshed whenever manifest_text changes (set_file_names hook).
test 'create and update collection and verify file_names' do
  act_as_system_user do
    c = create_collection 'foo', Encoding::US_ASCII
    assert c.valid?
    created_file_names = c.file_names
    assert created_file_names
    assert_match /foo.txt/, c.file_names

    c.update_attribute 'manifest_text', ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:foo2.txt\n"
    assert_not_equal created_file_names, c.file_names
    assert_match /foo2.txt/, c.file_names
  end
end
+
# file_names is capped (2**12 chars collected, 2**13 column limit), so
# very large manifests must truncate gracefully instead of raising.
# When truncated, later stream names (blurfl/subdir1 are scanned in the
# second, stream-name pass) may be cut off.
[
  [2**8, false],
  [2**18, true],
].each do |manifest_size, gets_truncated|
  test "create collection with manifest size #{manifest_size} which gets truncated #{gets_truncated},
and not expect exceptions even on very large manifest texts" do
    # file_names has a max size, hence there will be no errors even on large manifests
    act_as_system_user do
      manifest_text = './blurfl d41d8cd98f00b204e9800998ecf8427e+0'
      index = 0
      while manifest_text.length < manifest_size
        manifest_text += ' ' + "0:0:veryverylongfilename000000000000#{index}.txt\n./subdir1"
        index += 1
      end
      manifest_text += "\n"
      c = Collection.create(manifest_text: manifest_text)

      assert c.valid?
      assert c.file_names
      assert_match /veryverylongfilename0000000000001.txt/, c.file_names
      assert_match /veryverylongfilename0000000000002.txt/, c.file_names
      if !gets_truncated
        assert_match /blurfl/, c.file_names
        assert_match /subdir1/, c.file_names
      end
    end
  end
end
end
end
end
# Audit logs must deep-copy old_attributes: mutating a nested hash
# in-place after a save must not retroactively change the logged 'old'
# values for the next save's log entry.
test "old_attributes preserves values deep inside a hash" do
  set_user_from_auth :active
  it = specimens(:owned_by_active_user)
  it.properties = {'foo' => {'bar' => ['baz', 'qux', {'quux' => 'bleat'}]}}
  it.save!
  @log_count += 1
  it.properties['foo']['bar'][2]['quux'] = 'blert'
  it.save!
  assert_logged it, :update do |props|
    assert_equal 'bleat', props['old_attributes']['properties']['foo']['bar'][2]['quux']
    assert_equal 'blert', props['new_attributes']['properties']['foo']['bar'][2]['quux']
  end
end
+
test "destroying an authorization makes a log" do
set_user_from_auth :admin_trustedclient
auth = api_client_authorizations(:spectator)
entry.st_mode = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
if isinstance(e, Directory):
entry.st_mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH | stat.S_IFDIR
+ elif isinstance(e, StreamReaderFile):
+ entry.st_mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH | stat.S_IFREG
else:
entry.st_mode |= stat.S_IFREG
install_requires=[
'arvados-python-client>=0.1.20141203150737.277b3c7',
'llfuse',
- 'python-daemon'
+ 'python-daemon<2',
],
test_suite='tests',
tests_require=['PyYAML'],
'apache-libcloud',
'arvados-python-client',
'pykka',
- 'python-daemon',
+ 'python-daemon<2',
],
scripts=['bin/arvados-node-manager'],
test_suite='tests',