gem 'rails', '~> 4.1.0'
gem 'minitest', '>= 5.0.0'
-gem 'arvados', '>= 0.1.20140917180103'
+gem 'arvados', '>= 0.1.20141114230720'
# Bundle edge Rails instead:
# gem 'rails', :git => 'git://github.com/rails/rails.git'
addressable (2.3.6)
andand (1.3.3)
arel (5.0.1.20140414130214)
- arvados (0.1.20140917180103)
+ arvados (0.1.20141114230720)
activesupport (>= 3.2.13)
- andand
- google-api-client (~> 0.6.3)
- json (>= 1.7.7)
+ andand (~> 1.3, >= 1.3.3)
+ google-api-client (~> 0.6.3, >= 0.6.3)
+ json (~> 1.7, >= 1.7.7)
jwt (>= 0.1.5, < 1.0.0)
autoparse (0.3.3)
addressable (>= 2.3.1)
json (1.8.1)
jwt (0.1.13)
multi_json (>= 1.5)
- launchy (2.4.2)
+ launchy (2.4.3)
addressable (~> 2.3)
less (2.4.0)
commonjs (~> 0.2.7)
DEPENDENCIES
RedCloth
andand
- arvados (>= 0.1.20140917180103)
+ arvados (>= 0.1.20141114230720)
bootstrap-sass (~> 3.1.0)
bootstrap-tab-history-rails
bootstrap-x-editable-rails
// Combining "select" filterable-controls with infinite-scroll is not
// yet supported.
+// Apply the pending query text (data-filterable-query-new) to $target
+// immediately: install an ['any','ilike','%query%'] server-side filter
+// in the infinite-content params and record the query as current.
+function updateFilterableQueryNow($target) {
+    var newquery = $target.data('filterable-query-new');
+    var params = $target.data('infinite-content-params-filterable') || {};
+    params.filters = [['any', 'ilike', '%' + newquery + '%']];
+    $target.data('infinite-content-params-filterable', params);
+    $target.data('filterable-query', newquery);
+}
+
$(document).
- on('paste keyup input', 'input[type=text].filterable-control', function() {
+ on('ready ajax:success', function() {
+ // Copy any initial input values into
+ // data-filterable-query[-new].
+ $('input[type=text].filterable-control').each(function() {
+ var $this = $(this);
+ var $target = $($this.attr('data-filterable-target'));
+ if ($target.data('filterable-query-new') === undefined) {
+ $target.data('filterable-query', $this.val());
+ $target.data('filterable-query-new', $this.val());
+ updateFilterableQueryNow($target);
+ }
+ });
+ $('[data-infinite-scroller]').on('refresh-content', '[data-filterable-query]', function(e) {
+ // If some other event causes a refresh-content event while there
+ // is a new query waiting to cooloff, we should use the new query
+ // right away -- otherwise we'd launch an extra ajax request that
+ // would have to be reloaded as soon as the cooloff period ends.
+ if (this != e.target)
+ return;
+ if ($(this).data('filterable-query') == $(this).data('filterable-query-new'))
+ return;
+ updateFilterableQueryNow($(this));
+ });
+ }).
+ on('paste keyup input', 'input[type=text].filterable-control', function(e) {
+ if (this != e.target) return;
var $target = $($(this).attr('data-filterable-target'));
var currentquery = $target.data('filterable-query');
if (currentquery === undefined) currentquery = '';
// in the next 1/4 second (like type or erase
// characters in the search box), hide the stale
// content and ask the server for new results.
- var newquery = $target.data('filterable-query-new');
- var params = $target.data('infinite-content-params-filterable') || {};
- params.filters = [['any', 'ilike', '%' + newquery + '%']];
- $target.data('infinite-content-params-filterable', params);
- $target.data('filterable-query', newquery);
+ updateFilterableQueryNow($target);
$target.trigger('refresh-content');
}, 250));
} else {
function maybe_load_more_content(event) {
- var scroller = this; // element with scroll bars
- var $container; // element that receives new content
- var src; // url for retrieving content
+ var scroller = this;
+ var $container = $(event.data.container);
+ var src; // url for retrieving content
var scrollHeight;
var spinner, colspan;
var serial = Date.now();
>
scrollHeight - 50)
{
- $container = $(event.data.container);
if (!$container.attr('data-infinite-content-href0')) {
// Remember the first page source url, so we can refresh
// from page 1 later.
function run_pipeline_button_state() {
- var a = $('a.editable.required.editable-empty,input.form-control.required[value=]');
+ var a = $('a.editable.required.editable-empty,input.form-control.required[value=""]');
if (a.length > 0) {
$(".run-pipeline-button").addClass("disabled");
}
});
}
};
-$('[data-object-uuid*=-d1hrv-] input[name="uuids[]"]').on('click', showhide_compare);
-showhide_compare();
+$(document).on('change', '[data-object-uuid*=-d1hrv-] input[name="uuids[]"]', function(e) {
+ if(e.target == this) {
+ showhide_compare();
+ }
+});
+$(document).on('ready ajax:success', showhide_compare);
# Build @objects for index actions: apply the current filters and
# paging params, and fetch only a single page of API results (the
# client requests further pages itself, e.g. via infinite scroll).
def find_objects_for_index
  @objects ||= model_class
  @objects = @objects.filter(@filters).limit(@limit).offset(@offset)
+  @objects.fetch_multiple_pages(false)
end
def render_index
respond_to do |f|
- f.json { render json: @objects }
+ f.json {
+ if params[:partial]
+ @next_page_href = next_page_href(partial: params[:partial], filters: @filters.to_json)
+ render json: {
+ content: render_to_string(partial: "show_#{params[:partial]}",
+ formats: [:html]),
+ next_page_href: @next_page_href
+ }
+ else
+ render json: @objects
+ end
+ }
f.html {
if params[:tab_pane]
render_pane params[:tab_pane]
if params[:partial]
f.json {
find_objects_for_index if !@objects
- @objects.fetch_multiple_pages(false)
render json: {
content: render_to_string(partial: "choose_rows.html",
formats: [:html]),
%w(Compare Graph)
end
- def index
- @limit = 20
- super
- end
-
protected
def for_comparison v
if v.is_a? Hash or v.is_a? Array
end
end
+  # Use a 20-item page size for this controller (replaces the old
+  # `def index; @limit = 20; super; end` override removed elsewhere
+  # in this change).
+  def load_filters_and_paging_params
+    params[:limit] = 20
+    super
+  end
+
# Load @objects for actions that operate on an explicit list of
# uuids supplied in params[:uuids].
def find_objects_by_uuid
  @objects = model_class.where(uuid: params[:uuids])
end
-
end
include ArvadosApiClientHelper
include Enumerable
+ attr_reader :resource_class
+
# By default a resource list enumerates every page of API results;
# callers can opt out with fetch_multiple_pages(false) to retrieve a
# single page (and then read result_limit/result_offset).
def initialize resource_class=nil
  @resource_class = resource_class
  @fetch_multiple_pages = true
end
def items_available
+ results
@items_available
end
def result_limit
+ results
@result_limit
end
def result_offset
+ results
@result_offset
end
reader_tokens: @reader_tokens)
items = arvados_api_client.unpack_api_response res
- break if items.nil? or not items.any?
-
@items_available = items.items_available if items.respond_to?(:items_available)
@result_limit = items.limit if (@fetch_multiple_pages == false) and items.respond_to?(:limit)
@result_offset = items.offset if (@fetch_multiple_pages == false) and items.respond_to?(:offset)
+ break if items.nil? or not items.any?
+
item_count += items.size
if items.respond_to?(:offset)
offset = items.offset + items.size
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title"><%= params[:title] || "Choose #{@objects.first.andand.class_for_display}" %></h4>
+ <h4 class="modal-title"><%= params[:title] || "Choose #{@objects.resource_class.andand.class_for_display}" %></h4>
</div>
<div class="modal-body">
}
</style>
<link href="//netdna.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.css" rel="stylesheet">
-<%= piwik_tracking_tag %>
+ <%= piwik_tracking_tag if (PiwikAnalytics.configuration.url != 'localhost' rescue false) %>
</head>
<body>
<%= render template: 'layouts/body' %>
-<%= render partial: "paging", locals: {results: @objects, object: @object} %>
-
<%= form_tag do |f| %>
-<table class="table table-condensed table-fixedlayout">
+<table class="table table-condensed table-fixedlayout arv-recent-pipeline-instances">
<colgroup>
<col width="5%" />
<col width="15%" />
</th>
</tr>
</thead>
- <tbody>
-
- <% @objects.sort_by { |ob| ob.created_at }.reverse.each do |ob| %>
-
- <tr data-object-uuid="<%= ob.uuid %>">
- <td>
- <%= check_box_tag 'uuids[]', ob.uuid, false, :class => 'persistent-selection' %>
- </td><td>
- <%= render partial: 'pipeline_status_label', locals: {:p => ob} %>
- </td><td colspan="1">
- <%= link_to_if_arvados_object ob, friendly_name: true %>
- </td><td>
- <%= link_to_if_arvados_object ob.pipeline_template_uuid, friendly_name: true %>
- </td><td>
- <%= link_to_if_arvados_object ob.owner_uuid, friendly_name: true %>
- </td><td>
- <%= ob.created_at.to_s %>
- </td><td>
- <%= render partial: 'delete_object_button', locals: {object:ob} %>
- </td>
- </tr>
- <tr data-object-uuid="<%= ob.uuid %>">
- <td style="border-top: 0;" colspan="2">
- </td>
- <td style="border-top: 0; opacity: 0.5;" colspan="6">
- <% ob.components.each do |cname, c| %>
- <% if c.is_a?(Hash) and c[:job] %>
- <%= render partial: "job_progress", locals: {:j => c[:job], :title => cname.to_s, :show_progress_bar => false } %>
- <% else %>
- <span class="label label-default"><%= cname.to_s %></span>
- <% end %>
- <% end %>
- </td>
- </tr>
- <% end %>
+ <tbody data-infinite-scroller="#recent-pipeline-instances" id="recent-pipeline-instances"
+ data-infinite-content-href="<%= url_for partial: :recent_rows %>" >
</tbody>
+
</table>
<% end %>
-
-<%= render partial: "paging", locals: {results: @objects, object: @object} %>
--- /dev/null
+<%# One page of recent pipeline instances: each instance renders a
+    selectable summary row plus a second row showing the state of its
+    component jobs. Served to the infinite scroller (data-kind on the
+    first row lets tests count instances). %>
+<% @objects.sort_by { |ob| ob.created_at }.reverse.each do |ob| %>
+  <tr data-object-uuid="<%= ob.uuid %>" data-kind="<%= ob.kind %>" >
+    <td>
+      <%= check_box_tag 'uuids[]', ob.uuid, false, :class => 'persistent-selection' %>
+    </td><td>
+      <%= render partial: 'pipeline_status_label', locals: {:p => ob} %>
+    </td><td colspan="1">
+      <%= link_to_if_arvados_object ob, friendly_name: true %>
+    </td><td>
+      <%= link_to_if_arvados_object ob.pipeline_template_uuid, friendly_name: true %>
+    </td><td>
+      <%= link_to_if_arvados_object ob.owner_uuid, friendly_name: true %>
+    </td><td>
+      <%= ob.created_at.to_s %>
+    </td><td>
+      <%= render partial: 'delete_object_button', locals: {object:ob} %>
+    </td>
+  </tr>
+  <tr data-object-uuid="<%= ob.uuid %>">
+    <td style="border-top: 0;" colspan="2">
+    </td>
+    <td style="border-top: 0; opacity: 0.5;" colspan="6">
+      <% ob.components.each do |cname, c| %>
+        <% if c.is_a?(Hash) and c[:job] %>
+          <%= render partial: "job_progress", locals: {:j => c[:job], :title => cname.to_s, :show_progress_bar => false } %>
+        <% else %>
+          <span class="label label-default"><%= cname.to_s %></span>
+        <% end %>
+      <% end %>
+    </td>
+  </tr>
+<% end %>
<% content_for :tab_line_buttons do %>
-<%= form_tag({action: 'compare', controller: params[:controller], method: 'get'}, {method: 'get', id: 'compare', class: 'pull-right small-form-margin'}) do |f| %>
+ <div class="input-group">
+ <input type="text" class="form-control filterable-control recent-pipeline-instances-filterable-control"
+ placeholder="Search pipeline instances"
+ data-filterable-target="#recent-pipeline-instances"
+ <%# Just for the double-load test in FilterableInfiniteScrollTest: %>
+ value="<%= params[:search] %>"
+ />
+ </div>
+
+ <%= form_tag({action: 'compare', controller: params[:controller], method: 'get'}, {method: 'get', id: 'compare', class: 'pull-right small-form-margin'}) do |f| %>
<%= submit_tag 'Compare 2 or 3 selected', {class: 'btn btn-primary', disabled: true, style: 'display: none'} %>
-<% end rescue nil %>
+ <% end rescue nil %>
+
<% end %>
<%= render file: 'application/index.html.erb', locals: local_assigns %>
end
end
- test "component rendering copes with unexpeceted components format" do
+ test "component rendering copes with unexpected components format" do
get(:show,
{id: api_fixture("pipeline_instances")["components_is_jobspec"]["uuid"]},
session_for(:active))
--- /dev/null
+require 'integration_helper'
+
+class FilterableInfiniteScrollTest < ActionDispatch::IntegrationTest
+  setup do
+    headless = Headless.new
+    headless.start
+    Capybara.current_driver = :selenium
+  end
+
+  # Chrome remembers what you had in the text field when you hit
+  # "back". Here, we simulate the same effect by sending an otherwise
+  # unused ?search=foo param to pre-populate the search field.
+  test 'no double-load if text input has a value at page load time' do
+    visit page_with_token('admin', '/pipeline_instances')
+    assert_text 'pipeline_2'
+    visit page_with_token('admin', '/pipeline_instances?search=pipeline_1')
+    # Horrible hack to ensure the search results can't load correctly
+    # on the second attempt.
+    assert_selector '#recent-pipeline-instances'
+    assert page.evaluate_script('$("#recent-pipeline-instances[data-infinite-content-href0]").attr("data-infinite-content-href0","/give-me-an-error").length == 1')
+    # Wait for the first page of results to appear.
+    assert_text 'pipeline_1'
+    # Make sure the results are filtered.
+    assert_no_text 'pipeline_2'
+    # Make sure pipeline_1 didn't disappear merely because the results
+    # were replaced with an error message (which would indicate a
+    # second, redundant ajax load hit the sabotaged href0).
+    assert_text 'pipeline_1'
+  end
+end
"Time difference did not match for start_at #{start_at}, finished_at #{finished_at}, ran_for #{match[2]}")
end
end
+
+  # Data-driven infinite-scroll tests. Each entry is
+  # [user, search filter (nil = no filter), expected_min, expected_max]
+  # where the expected bounds are on the number of instance rows found
+  # after scrolling through all pages.
+  [
+    ['fuse', nil, 2, 20], # has 2 as of 11-07-2014
+    ['fuse', 'FUSE project', 1, 1], # 1 with this name
+    ['user1_with_load', nil, 30, 100], # has 37 as of 11-07-2014
+    ['user1_with_load', 'pipeline_10', 2, 2], # 2 with this name
+    ['user1_with_load', '000010pipelines', 10, 10], # owned_by the project zzzzz-j7d0g-000010pipelines
+    ['user1_with_load', '000025pipelines', 25, 25], # owned_by the project zzzzz-j7d0g-000025pipelines, two pages
+    ['admin', nil, 40, 200],
+    ['admin', 'FUSE project', 1, 1],
+    ['admin', 'pipeline_10', 2, 2],
+    ['active', 'containing at least two', 2, 100], # component description
+    ['admin', 'containing at least two', 2, 100],
+    ['active', nil, 10, 100],
+    ['active', 'no such match', 0, 0],
+  ].each do |user, search_filter, expected_min, expected_max|
+    test "scroll pipeline instances page for #{user} with search filter #{search_filter}
+        and expect more than #{expected_min} and less than #{expected_max}" do
+      visit page_with_token(user, "/pipeline_instances")
+
+      if search_filter
+        find('.recent-pipeline-instances-filterable-control').set(search_filter)
+        wait_for_ajax
+      end
+
+      page_scrolls = expected_max/20 + 2 # scroll num_pages+2 times to test scrolling is disabled when it should be
+      within('.arv-recent-pipeline-instances') do
+        (0..page_scrolls).each do |i|
+          page.execute_script "window.scrollBy(0,999000)"
+          begin
+            # Scrolling past the end raises no ajax event; ignore the
+            # resulting wait_for_ajax timeout.
+            wait_for_ajax
+          rescue
+          end
+        end
+      end
+
+      # Verify that expected number of pipeline instances are found
+      found_items = page.all('tr[data-kind="arvados#pipelineInstance"]')
+      found_count = found_items.count
+      if expected_min == expected_max
+        assert_equal(true, found_count == expected_min,
+          "Not found expected number of items. Expected #{expected_min} and found #{found_count}")
+        assert page.has_no_text? 'request failed'
+      else
+        assert_equal(true, found_count>=expected_min,
+          "Found too few items. Expected at least #{expected_min} and found #{found_count}")
+        assert_equal(true, found_count<=expected_max,
+          "Found too many items. Expected at most #{expected_max} and found #{found_count}")
+      end
+    end
+  end
+
end
assert_equal c.result_limit, a
end
+  # The result_offset/items_available readers must trigger the query
+  # themselves, even when the result set is empty.
+  test 'get empty set' do
+    use_token :admin
+    c = Collection.
+      where(owner_uuid: 'doesn-texis-tdoesntexistdoe').
+      fetch_multiple_pages(false)
+    # Important: check c.result_offset before calling c.results here.
+    assert_equal 0, c.result_offset
+    assert_equal 0, c.items_available
+    assert_empty c.results
+  end
+
end
freeze_if_want_freeze ($installpid);
select (undef, undef, undef, 0.1);
}
- Log (undef, "Install script exited ".exit_status_s($?));
+ my $install_exited = $?;
+ Log (undef, "Install script exited ".exit_status_s($install_exited));
+ exit (1) if $install_exited != 0;
}
if (!$have_slurm)
qw(-n1 -c1 -N1 -D), $ENV{'TMPDIR'},
"--job-name=$job_id.$id.$$",
);
- my $build_script_to_send = "";
my $command =
"if [ -e $ENV{TASK_WORK} ]; then rm -rf $ENV{TASK_WORK}; fi; "
."mkdir -p $ENV{CRUNCH_TMP} $ENV{JOB_WORK} $ENV{TASK_WORK} $ENV{TASK_KEEPMOUNT} "
."&& cd $ENV{CRUNCH_TMP} ";
- if ($build_script)
- {
- $build_script_to_send = $build_script;
- $command .=
- "&& perl -";
- }
$command .= "&& exec arv-mount --by-id --allow-other $ENV{TASK_KEEPMOUNT} --exec ";
if ($docker_hash)
{
$command .= "--volume=\Q$ENV{TASK_KEEPMOUNT}:/keep:ro\E ";
$ENV{TASK_KEEPMOUNT} = "/keep";
- # TASK_WORK is a plain docker data volume: it starts out empty,
- # is writable, and persists until no containers use it any
- # more. We don't use --volumes-from to share it with other
- # containers: it is only accessible to this task, and it goes
- # away when this task stops.
- $command .= "--volume=\Q$ENV{TASK_WORK}\E ";
-
- # JOB_WORK is also a plain docker data volume for now. TODO:
- # Share a single JOB_WORK volume across all task containers on a
- # given worker node, and delete it when the job ends (and, in
- # case that doesn't work, when the next job starts).
- $command .= "--volume=\Q$ENV{JOB_WORK}\E ";
+ # TASK_WORK is almost exactly like a docker data volume: it
+ # starts out empty, is writable, and persists until no
+ # containers use it any more. We don't use --volumes-from to
+ # share it with other containers: it is only accessible to this
+ # task, and it goes away when this task stops.
+ #
+ # However, a docker data volume is writable only by root unless
+ # the mount point already happens to exist in the container with
+ # different permissions. Therefore, we [1] assume /tmp already
+ # exists in the image and is writable by the crunch user; [2]
+ # avoid putting TASK_WORK inside CRUNCH_TMP (which won't be
+ # writable if they are created by docker while setting up the
+ # other --volumes); and [3] create $TASK_WORK inside the
+ # container using $build_script.
+ $command .= "--volume=/tmp ";
+ $ENV{"TASK_WORK"} = "/tmp/crunch-job-task-work/$childslotname";
+ $ENV{"HOME"} = $ENV{"TASK_WORK"};
+ $ENV{"TASK_TMPDIR"} = $ENV{"TASK_WORK"}; # deprecated
+
+ # TODO: Share a single JOB_WORK volume across all task
+ # containers on a given worker node, and delete it when the job
+ # ends (and, in case that doesn't work, when the next job
+ # starts).
+ #
+ # For now, use the same approach as TASK_WORK above.
+ $ENV{"JOB_WORK"} = "/tmp/crunch-job-work";
while (my ($env_key, $env_val) = each %ENV)
{
$command .= "--env=\QHOME=$ENV{HOME}\E ";
$command .= "\Q$docker_hash\E ";
$command .= "stdbuf --output=0 --error=0 ";
- $command .= "$ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
+ $command .= "perl - $ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
} else {
# Non-docker run
$command .= "crunchstat -cgroup-root=/sys/fs/cgroup -poll=10000 ";
$command .= "stdbuf --output=0 --error=0 ";
- $command .= "$ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
+ $command .= "perl - $ENV{CRUNCH_SRC}/crunch_scripts/" . $Job->{"script"};
}
my @execargs = ('bash', '-c', $command);
- srun (\@srunargs, \@execargs, undef, $build_script_to_send);
+ srun (\@srunargs, \@execargs, undef, $build_script);
# exec() failed, we assume nothing happened.
die "srun() failed on build script\n";
}
my $destdir = $ENV{"CRUNCH_SRC"};
my $commit = $ENV{"CRUNCH_SRC_COMMIT"};
my $repo = $ENV{"CRUNCH_SRC_URL"};
+my $job_work = $ENV{"JOB_WORK"};
my $task_work = $ENV{"TASK_WORK"};
-for my $dir ($destdir, $task_work) {
+for my $dir ($destdir, $job_work, $task_work) {
if ($dir) {
make_path $dir;
-e $dir or die "Failed to create temporary directory ($dir): $!";
remove_tree($task_work, {keep_root => 1});
}
+my @git_archive_data = <DATA>;
+if (!@git_archive_data) {
+ # Nothing to extract -> nothing to install.
+ run_argv_and_exit();
+}
open L, ">", "$destdir.lock" or die "$destdir.lock: $!";
flock L, LOCK_EX;
if (readlink ("$destdir.commit") eq $commit && -d $destdir) {
- if (@ARGV) {
- exec(@ARGV);
- die "Cannot exec `@ARGV`: $!";
- } else {
- exit 0;
- }
+ # This version already installed -> nothing to do.
+ run_argv_and_exit();
}
unlink "$destdir.commit";
open STDERR, ">&STDOUT";
mkdir $destdir;
-my @git_archive_data = <DATA>;
-if (@git_archive_data) {
- open TARX, "|-", "tar", "-C", $destdir, "-xf", "-";
- print TARX @git_archive_data;
- if(!close(TARX)) {
- die "'tar -C $destdir -xf -' exited $?: $!";
- }
+open TARX, "|-", "tar", "-C", $destdir, "-xf", "-";
+print TARX @git_archive_data;
+if(!close(TARX)) {
+ die "'tar -C $destdir -xf -' exited $?: $!";
}
my $pwd;
close L;
-if (@ARGV) {
+run_argv_and_exit();
+
+# Hand control to the command given on our command line (if any),
+# otherwise exit successfully. Never returns.
+sub run_argv_and_exit
+{
+  if (@ARGV) {
    exec(@ARGV);
    die "Cannot exec `@ARGV`: $!";
-} else {
+  } else {
    exit 0;
+  }
}
sub shell_or_die
:with => :render_not_found)
end
+  # Use the configured hostname (Rails.configuration.host) when
+  # generating URLs; with host unset/false, fall back to the default
+  # Rails host-detection logic.
+  def default_url_options
+    if Rails.configuration.host
+      {:host => Rails.configuration.host}
+    else
+      {}
+    end
+  end
+
def index
@objects.uniq!(&:id) if @select.nil? or @select.include? "id"
if params[:eager] and params[:eager] != '0' and params[:eager] != 0 and params[:eager] != ''
common:
uuid_prefix: <%= Digest::MD5.hexdigest(`hostname`).to_i(16).to_s(36)[0..4] %>
+ # If not false, this is the hostname that will be used for root_url and
+ # advertised in the discovery document. By default, use the default Rails
+ # logic for deciding on a hostname.
+ host: false
+
# If this is not false, HTML requests at the API server's root URL
# are redirected to this location, and it is provided in the text of
# user activation notification email messages to remind them where
assert_equal unique_uuids.count, resp['items'].count
end
+  # items_available must reflect the filtered count, matching the
+  # number of items actually returned.
+  test "items.count == items_available with filters" do
+    authorize_with :active
+    get :index, {
+      limit: 100,
+      filters: [['uuid','=',collections(:foo_file).uuid]]
+    }
+    assert_response :success
+    assert_equal 1, assigns(:objects).length
+    assert_equal 1, json_response['items_available']
+    assert_equal 1, json_response['items'].count
+  end
+
test "get index with limit=2 offset=99999" do
# Assume there are not that many test fixtures.
authorize_with :active