+ start_time = parse_browser_timestamp start_at
+ if run_time
+ finished_at = match[3]
+ assert_not_nil(finished_at, 'Did not find finished_at time')
+ finished_time = parse_browser_timestamp finished_at
+ assert_equal(run_time, finished_time-start_time,
+ "Time difference did not match for start_at #{start_at}, finished_at #{finished_at}, ran_for #{match[2]}")
+ else
+ match = /\d(.*)/.match match[2]
+ assert_not_nil match, 'Did not find expected match for running component'
+ end
+ end
+ end
+
+  # Exercise infinite scroll on the pipeline-instances index for several
+  # users/search filters, then verify how many rows were loaded.
+  [
+    ['fuse', nil, 2, 20], # has 2 as of 11-07-2014
+    ['user1_with_load', '000025pipelines', 25, 25], # owned_by the project zzzzz-j7d0g-000025pipelines, two pages
+    ['admin', 'pipeline_20', 1, 1],
+    ['active', 'no such match', 0, 0],
+  ].each do |user, search_filter, expected_min, expected_max|
+    test "scroll pipeline instances page for #{user} with search filter #{search_filter}
+        and expect #{expected_min} <= found_items <= #{expected_max}" do
+      visit page_with_token(user, "/pipeline_instances")
+
+      if search_filter
+        find('.recent-pipeline-instances-filterable-control').set(search_filter)
+        # Wait for 250ms debounce timer (see filterable.js)
+        sleep 0.350
+        wait_for_ajax
+      end
+
+      # Scroll num_pages+2 times to test that scrolling is disabled once all
+      # items have been loaded.
+      page_scrolls = expected_max/20 + 2
+      within('.arv-recent-pipeline-instances') do
+        (0..page_scrolls).each do |_i|
+          page.driver.scroll_to 0, 999000
+          begin
+            wait_for_ajax
+          rescue
+            # Best-effort: once scrolling is disabled no ajax request fires,
+            # so a timeout here is expected and harmless.
+          end
+        end
+      end
+
+      # Verify that expected number of pipeline instances are found
+      found_items = page.all('tr[data-kind="arvados#pipelineInstance"]')
+      found_count = found_items.count
+      if expected_min == expected_max
+        # assert_equal(expected, actual) gives a useful diff on failure,
+        # unlike asserting a boolean comparison against true.
+        assert_equal(expected_min, found_count,
+                     "Not found expected number of items. Expected #{expected_min} and found #{found_count}")
+        assert page.has_no_text? 'request failed'
+      else
+        assert_operator(found_count, :>=, expected_min,
+                        "Found too few items. Expected at least #{expected_min} and found #{found_count}")
+        assert_operator(found_count, :<=, expected_max,
+                        "Found too many items. Expected at most #{expected_max} and found #{found_count}")
+      end
+    end
+  end
+
+ test 'render job run time when job record is inaccessible' do
+ pi = api_fixture('pipeline_instances', 'has_component_with_completed_jobs')
+ visit page_with_token 'active', '/pipeline_instances/' + pi['uuid']
+ assert_text 'Queued for '
+ end
+
+ test "job logs linked for running pipeline" do
+ pi = api_fixture("pipeline_instances", "running_pipeline_with_complete_job")
+ visit(page_with_token("active", "/pipeline_instances/#{pi['uuid']}"))
+ find(:xpath, "//a[@href='#Log']").click
+ within "#Log" do
+ assert_text "Log for previous"
+ log_link = find("a", text: "Log for previous")
+ assert_includes(log_link[:href],
+ "/jobs/#{pi["components"]["previous"]["job"]["uuid"]}#Log")
+ assert_selector "#event_log_div"
+ end
+ end
+
+ test "job logs linked for complete pipeline" do
+ pi = api_fixture("pipeline_instances", "complete_pipeline_with_two_jobs")
+ visit(page_with_token("active", "/pipeline_instances/#{pi['uuid']}"))
+ find(:xpath, "//a[@href='#Log']").click
+ within "#Log" do
+ assert_text "Log for previous"
+ pi["components"].each do |cname, cspec|
+ log_link = find("a", text: "Log for #{cname}")
+ assert_includes(log_link[:href], "/jobs/#{cspec["job"]["uuid"]}#Log")
+ end
+ assert_no_selector "#event_log_div"
+ end
+ end
+
+ test "job logs linked for failed pipeline" do
+ pi = api_fixture("pipeline_instances", "failed_pipeline_with_two_jobs")
+ visit(page_with_token("active", "/pipeline_instances/#{pi['uuid']}"))
+ find(:xpath, "//a[@href='#Log']").click
+ within "#Log" do
+ assert_text "Log for previous"
+ pi["components"].each do |cname, cspec|
+ log_link = find("a", text: "Log for #{cname}")
+ assert_includes(log_link[:href], "/jobs/#{cspec["job"]["uuid"]}#Log")
+ end
+ assert_no_selector "#event_log_div"