# Dev/test SSL certificates
/self-signed.key
/self-signed.pem
+
+# Generated git-commit.version file
+/git-commit.version
end
end
+ # star / unstar the current project
+ def star
+ links = Link.where(tail_uuid: current_user.uuid,
+ head_uuid: @object.uuid,
+ link_class: 'star')
+
+ if params['status'] == 'create'
+ # create 'star' link if one does not already exist
+ if !links.andand.any?
+ dst = Link.new(owner_uuid: current_user.uuid,
+ tail_uuid: current_user.uuid,
+ head_uuid: @object.uuid,
+ link_class: 'star',
+ name: @object.uuid)
+ dst.save!
+ end
+ else # delete any existing 'star' links
+ if links.andand.any?
+ links.each do |link|
+ link.destroy
+ end
+ end
+ end
+
+ respond_to do |format|
+ format.js
+ end
+ end
+
protected
def derive_unique_filename filename, manifest_files
# exception here than in a template.)
unless current_user.nil?
begin
- build_project_trees
+ my_starred_projects current_user
+ build_my_wanted_projects_tree current_user
rescue ArvadosApiClient::ApiError
# Fall back to the default-setting code later.
end
end
- @my_project_tree ||= []
- @shared_project_tree ||= []
+ @starred_projects ||= []
+ @my_wanted_projects_tree ||= []
render_error(err_opts)
end
end
end
+ helper_method :is_starred
+ def is_starred
+ links = Link.where(tail_uuid: current_user.uuid,
+ head_uuid: @object.uuid,
+ link_class: 'star')
+
+ return links.andand.any?
+ end
+
protected
helper_method :strip_token_from_path
{collections: c, owners: own}
end
- helper_method :my_project_tree
- def my_project_tree
- build_project_trees
- @my_project_tree
+ helper_method :my_starred_projects
+ def my_starred_projects user
+ return if @starred_projects
+ links = Link.filter([['tail_uuid', '=', user.uuid],
+ ['link_class', '=', 'star'],
+ ['head_uuid', 'is_a', 'arvados#group']]).select(%w(head_uuid))
+ uuids = links.collect { |x| x.head_uuid }
+ starred_projects = Group.filter([['uuid', 'in', uuids]]).order('name')
+ @starred_projects = starred_projects.results
+ end
+
+  # If there are more than 200 projects that are readable by the user,
+  # build the tree using only the top 200+ projects owned by the user,
+  # from the top three levels.
+  # That is: get top-level projects under home, get subprojects of
+  # those projects, and so on until we hit the limit.
+ def my_wanted_projects user, page_size=100
+ return @my_wanted_projects if @my_wanted_projects
+
+ from_top = []
+ uuids = [user.uuid]
+ depth = 0
+ @too_many_projects = false
+ @reached_level_limit = false
+ while from_top.size <= page_size*2
+ current_level = Group.filter([['group_class','=','project'],
+ ['owner_uuid', 'in', uuids]])
+ .order('name').limit(page_size*2)
+ break if current_level.results.size == 0
+ @too_many_projects = true if current_level.items_available > current_level.results.size
+ from_top.concat current_level.results
+ uuids = current_level.results.collect { |x| x.uuid }
+ depth += 1
+ if depth >= 3
+ @reached_level_limit = true
+ break
+ end
+ end
+ @my_wanted_projects = from_top
end
- helper_method :shared_project_tree
- def shared_project_tree
- build_project_trees
- @shared_project_tree
+ helper_method :my_wanted_projects_tree
+ def my_wanted_projects_tree user, page_size=100
+ build_my_wanted_projects_tree user, page_size
+ [@my_wanted_projects_tree, @too_many_projects, @reached_level_limit]
end
- def build_project_trees
- return if @my_project_tree and @shared_project_tree
- parent_of = {current_user.uuid => 'me'}
- all_projects.each do |ob|
+ def build_my_wanted_projects_tree user, page_size=100
+ return @my_wanted_projects_tree if @my_wanted_projects_tree
+
+ parent_of = {user.uuid => 'me'}
+ my_wanted_projects(user, page_size).each do |ob|
parent_of[ob.uuid] = ob.owner_uuid
end
- children_of = {false => [], 'me' => [current_user]}
- all_projects.each do |ob|
- if ob.owner_uuid != current_user.uuid and
+ children_of = {false => [], 'me' => [user]}
+ my_wanted_projects(user, page_size).each do |ob|
+ if ob.owner_uuid != user.uuid and
not parent_of.has_key? ob.owner_uuid
parent_of[ob.uuid] = false
end
end
paths
end
- @my_project_tree =
+ @my_wanted_projects_tree =
sorted_paths.call buildtree.call(children_of, 'me')
- @shared_project_tree =
- sorted_paths.call({'Projects shared with me' =>
- buildtree.call(children_of, false)})
end
helper_method :get_object
def textile_attributes
[ 'description' ]
end
+
+ def stderr_log_query(limit=nil)
+ query = Log.where(event_type: "stderr", object_uuid: self.uuid)
+ .order("id DESC")
+ query = query.limit(limit) if limit
+ query
+ end
+
+ def stderr_log_lines(limit=2000)
+ stderr_log_query(limit).results.reverse.
+ flat_map { |log| log.properties[:text].split("\n") rescue [] }
+ end
end
+ <nav class="navbar navbar-default breadcrumbs" role="navigation">
+ <ul class="nav navbar-nav navbar-left">
+ <li>
+ <a href="/">
+ <i class="fa fa-lg fa-fw fa-dashboard"></i>
+ Dashboard
+ </a>
+ </li>
+ <li class="dropdown">
+ <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="projects-menu">
+ Projects
+ <span class="caret"></span>
+ </a>
+ <ul class="dropdown-menu" style="min-width: 20em" role="menu">
+ <li role="menuitem">
+ <%= link_to(
+ url_for(
+ action: 'choose',
+ controller: 'search',
+ filters: [['uuid', 'is_a', 'arvados#group']].to_json,
+ title: 'Search',
+ action_name: 'Show',
+ action_href: url_for(controller: :actions, action: :show),
+ action_method: 'get',
+ action_data: {selection_param: 'uuid', success: 'redirect-to-created-object'}.to_json),
+ { remote: true, method: 'get', title: "Search" }) do %>
+ <i class="glyphicon fa-fw glyphicon-search"></i> Search all projects ...
+ <% end %>
+ </li>
+ <% if Rails.configuration.anonymous_user_token and Rails.configuration.enable_public_projects_page %>
+ <li role="menuitem"><a href="/projects/public" role="menuitem"><i class="fa fa-fw fa-list"></i> Browse public projects </a>
+ </li>
+ <% end %>
+ <li role="menuitem">
+ <%= link_to projects_path(options: {ensure_unique_name: true}), role: 'menu-item', method: :post do %>
+ <i class="fa fa-fw fa-plus"></i> Add a new project
+ <% end %>
+ </li>
+ <li role="presentation" class="divider"></li>
+ <%= render partial: "projects_tree_menu", locals: {
+ :project_link_to => Proc.new do |pnode, &block|
+ link_to(project_path(pnode[:object].uuid),
+ data: { 'object-uuid' => pnode[:object].uuid,
+ 'name' => 'name' },
+ &block)
+ end,
+ } %>
+ </ul>
+ </li>
+ <% if @name_link or @object %>
+ <li class="nav-separator">
+ <i class="fa fa-lg fa-angle-double-right"></i>
+ </li>
+ <li>
+ <%= link_to project_path(current_user.uuid) do %>
+ Home
+ <% end %>
+ </li>
+ <% project_breadcrumbs.each do |p| %>
+ <li class="nav-separator">
+ <i class="fa fa-lg fa-angle-double-right"></i>
+ </li>
+ <li>
+ <%= link_to(p.name, project_path(p.uuid), data: {object_uuid: p.uuid, name: 'name'}) %>
+ </li>
+ <% end %>
+ <% end %>
+ </ul>
+ </nav>
+<% starred_projects = my_starred_projects current_user%>
+<% if starred_projects.andand.any? %>
+ <li role="presentation" class="dropdown-header">
+ My favorite projects
+ </li>
+ <li>
+ <%= project_link_to.call({object: current_user, depth: 0}) do %>
+ <span style="padding-left: 0">Home</span>
+ <% end %>
+ </li>
+ <% (starred_projects).each do |pnode| %>
+ <li>
+ <%= project_link_to.call({object: pnode, depth: 0}) do%>
+ <span style="padding-left: 0em"></span><%= pnode[:name] %>
+ <% end %>
+ </li>
+ <% end %>
+ <li role="presentation" class="divider"></li>
+<% end %>
+
<li role="presentation" class="dropdown-header">
My projects
</li>
<span style="padding-left: 0">Home</span>
<% end %>
</li>
-<% my_project_tree.each do |pnode| %>
+<% my_tree = my_wanted_projects_tree current_user %>
+<% my_tree[0].each do |pnode| %>
<% next if pnode[:object].class != Group %>
<li>
<%= project_link_to.call pnode do %>
<% end %>
</li>
<% end %>
+<% if my_tree[1] or my_tree[0].size > 200 %>
+<li role="presentation" class="dropdown-header">
+ Some projects have been omitted.
+</li>
+<% elsif my_tree[2] %>
+<li role="presentation" class="dropdown-header">
+ Showing top three levels of your projects.
+</li>
+<% end %>
--- /dev/null
+<% if current_user and is_starred %>
+ <%= link_to(star_path(status: 'delete', id:@object.uuid, action_method: 'get'), style: "color:#D00", class: "btn btn-xs star-unstar", title: "Remove from list of favorites", remote: true) do %>
+ <i class="fa fa-lg fa-star"></i>
+ <% end %>
+<% else %>
+ <%= link_to(star_path(status: 'create', id:@object.uuid, action_method: 'get'), class: "btn btn-xs star-unstar", title: "Add to list of favorites", remote: true) do %>
+ <i class="fa fa-lg fa-star-o"></i>
+ <% end %>
+<% end %>
--- /dev/null
+$(".star-unstar").html("<%= escape_javascript(render partial: 'show_star') %>");
+$(".breadcrumbs").html("<%= escape_javascript(render partial: 'breadcrumbs') %>");
<div id="event_log_div"
class="arv-log-event-listener arv-log-event-handler-append-logs arv-job-log-window"
data-object-uuid="<%= @object.uuid %>"
- ></div>
+ ><%= @object.stderr_log_lines(Rails.configuration.running_job_log_records_to_fetch).join("\n") %>
+</div>
<%# Applying a long throttle suppresses the auto-refresh of this
partial that would normally be triggered by arv-log-event. %>
</nav>
<% if current_user.andand.is_active %>
- <nav class="navbar navbar-default breadcrumbs" role="navigation">
- <ul class="nav navbar-nav navbar-left">
- <li>
- <a href="/">
- <i class="fa fa-lg fa-fw fa-dashboard"></i>
- Dashboard
- </a>
- </li>
- <li class="dropdown">
- <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="projects-menu">
- Projects
- <span class="caret"></span>
- </a>
- <ul class="dropdown-menu" style="min-width: 20em" role="menu">
- <li role="menuitem">
- <%= link_to(
- url_for(
- action: 'choose',
- controller: 'search',
- filters: [['uuid', 'is_a', 'arvados#group']].to_json,
- title: 'Search',
- action_name: 'Show',
- action_href: url_for(controller: :actions, action: :show),
- action_method: 'get',
- action_data: {selection_param: 'uuid', success: 'redirect-to-created-object'}.to_json),
- { remote: true, method: 'get', title: "Search" }) do %>
- <i class="glyphicon fa-fw glyphicon-search"></i> Search all projects ...
- <% end %>
- </li>
- <% if Rails.configuration.anonymous_user_token and Rails.configuration.enable_public_projects_page %>
- <li role="menuitem"><a href="/projects/public" role="menuitem"><i class="fa fa-fw fa-list"></i> Browse public projects </a>
- </li>
- <% end %>
- <li role="menuitem">
- <%= link_to projects_path(options: {ensure_unique_name: true}), role: 'menu-item', method: :post do %>
- <i class="fa fa-fw fa-plus"></i> Add a new project
- <% end %>
- </li>
- <li role="presentation" class="divider"></li>
- <%= render partial: "projects_tree_menu", locals: {
- :project_link_to => Proc.new do |pnode, &block|
- link_to(project_path(pnode[:object].uuid),
- data: { 'object-uuid' => pnode[:object].uuid,
- 'name' => 'name' },
- &block)
- end,
- } %>
- </ul>
- </li>
- <% if @name_link or @object %>
- <li class="nav-separator">
- <i class="fa fa-lg fa-angle-double-right"></i>
- </li>
- <li>
- <%= link_to project_path(current_user.uuid) do %>
- Home
- <% end %>
- </li>
- <% project_breadcrumbs.each do |p| %>
- <li class="nav-separator">
- <i class="fa fa-lg fa-angle-double-right"></i>
- </li>
- <li>
- <%= link_to(p.name, project_path(p.uuid), data: {object_uuid: p.uuid, name: 'name'}) %>
- </li>
- <% end %>
- <% end %>
- </ul>
- </nav>
+ <%= render partial: 'breadcrumbs' %>
<% elsif !current_user %> <%# anonymous %>
<% if (@name_link or @object) and (project_breadcrumbs.any?) %>
<nav class="navbar navbar-default breadcrumbs" role="navigation">
<div class="modal-body">
<div class="selectable-container" style="height: 15em; overflow-y: scroll">
- <% [my_project_tree, shared_project_tree].each do |tree| %>
- <% tree.each do |projectnode| %>
- <% if projectnode[:object].is_a? String %>
- <div class="row" style="padding-left: <%= 1 + projectnode[:depth] %>em; margin-right: 0px">
- <i class="fa fa-fw fa-share-alt"></i>
- <%= projectnode[:object] %>
- </div>
- <% else
- row_selectable = !params[:editable] || projectnode[:object].editable?
- if projectnode[:object].uuid == current_user.uuid
- row_name = "Home"
- row_selectable = true
- else
- row_name = projectnode[:object].friendly_link_name || 'New project'
- end %>
- <div class="<%= 'selectable project' if row_selectable %> row"
- style="padding-left: <%= 1 + projectnode[:depth] %>em; margin-right: 0px" data-object-uuid="<%= projectnode[:object].uuid %>">
- <i class="fa fa-fw fa-folder-o"></i> <%= row_name %>
- </div>
- <% end %>
+ <% starred_projects = my_starred_projects current_user%>
+ <% if starred_projects.andand.any? %>
+ <% writable_projects = starred_projects.select(&:editable?) %>
+ <% writable_projects.each do |projectnode| %>
+ <% row_name = projectnode.friendly_link_name || 'New project' %>
+ <div class="selectable project row"
+ style="padding-left: 1em; margin-right: 0px"
+ data-object-uuid="<%= projectnode.uuid %>">
+ <i class="fa fa-fw fa-folder-o"></i> <%= row_name %> <i class="fa fa-fw fa-star"></i>
+ </div>
<% end %>
<% end %>
+
+ <% my_projects = my_wanted_projects_tree(current_user) %>
+ <% my_projects[0].each do |projectnode| %>
+ <% if projectnode[:object].uuid == current_user.uuid
+ row_name = "Home"
+ else
+ row_name = projectnode[:object].friendly_link_name || 'New project'
+ end %>
+ <div class="selectable project row"
+ style="padding-left: <%= 1 + projectnode[:depth] %>em; margin-right: 0px"
+ data-object-uuid="<%= projectnode[:object].uuid %>">
+ <i class="fa fa-fw fa-folder-o"></i> <%= row_name %>
+ </div>
+ <% end %>
</div>
+
+ <% if my_projects[1] or my_projects[2] or my_projects[0].size > 200 %>
+ <div>Some of your projects are omitted. Add projects of interest to favorites.</div>
+ <% end %>
</div>
<div class="modal-footer">
<% if @object.uuid == current_user.andand.uuid %>
Home
<% else %>
+ <%= render partial: "show_star" %>
<%= render_editable_attribute @object, 'name', nil, { 'data-emptytext' => "New project" } %>
<% end %>
</h2>
#
# The default setting (false) is appropriate for a multi-user site.
trust_all_content: false
+
+ # Maximum number of historic log records of a running job to fetch
+ # and display in the Log tab, while subscribing to web sockets.
+ running_job_log_records_to_fetch: 2000
get "users/setup" => 'users#setup', :as => :setup_user
get "report_issue_popup" => 'actions#report_issue_popup', :as => :report_issue_popup
post "report_issue" => 'actions#report_issue', :as => :report_issue
+ get "star" => 'actions#star', :as => :star
resources :nodes
resources :humans
resources :traits
}, session_for(:active)
assert_select "#projects-menu + ul li.divider ~ li a[href=/projects/#{project_uuid}]"
end
+
+ [
+ ["active", 5, ["aproject", "asubproject"], "anonymously_accessible_project"],
+ ["user1_with_load", 2, ["project_with_10_collections"], "project_with_2_pipelines_and_60_jobs"],
+ ["admin", 5, ["anonymously_accessible_project", "subproject_in_anonymous_accessible_project"], "aproject"],
+ ].each do |user, page_size, tree_segment, unexpected|
+ test "build my projects tree for #{user} user and verify #{unexpected} is omitted" do
+ use_token user
+ ctrl = ProjectsController.new
+
+ current_user = User.find(api_fixture('users')[user]['uuid'])
+
+ my_tree = ctrl.send :my_wanted_projects_tree, current_user, page_size
+
+ tree_segment_at_depth_1 = api_fixture('groups')[tree_segment[0]]
+ tree_segment_at_depth_2 = api_fixture('groups')[tree_segment[1]] if tree_segment[1]
+
+ tree_nodes = {}
+ my_tree[0].each do |x|
+ tree_nodes[x[:object]['uuid']] = x[:depth]
+ end
+
+ assert_equal(1, tree_nodes[tree_segment_at_depth_1['uuid']])
+ assert_equal(2, tree_nodes[tree_segment_at_depth_2['uuid']]) if tree_segment[1]
+
+ unexpected_project = api_fixture('groups')[unexpected]
+ assert_nil(tree_nodes[unexpected_project['uuid']])
+ end
+ end
+
+ [
+ ["active", 1],
+ ["project_viewer", 1],
+ ["admin", 0],
+ ].each do |user, size|
+ test "starred projects for #{user}" do
+ use_token user
+ ctrl = ProjectsController.new
+ current_user = User.find(api_fixture('users')[user]['uuid'])
+ my_starred_project = ctrl.send :my_starred_projects, current_user
+ assert_equal(size, my_starred_project.andand.size)
+
+ ctrl2 = ProjectsController.new
+ current_user = User.find(api_fixture('users')[user]['uuid'])
+ my_starred_project = ctrl2.send :my_starred_projects, current_user
+ assert_equal(size, my_starred_project.andand.size)
+ end
+ end
+
+ test "unshare project and verify that it is no longer included in shared user's starred projects" do
+ # remove sharing link
+ use_token :system_user
+ Link.find(api_fixture('links')['share_starred_project_with_project_viewer']['uuid']).destroy
+
+ # verify that project is no longer included in starred projects
+ use_token :project_viewer
+ current_user = User.find(api_fixture('users')['project_viewer']['uuid'])
+ ctrl = ProjectsController.new
+ my_starred_project = ctrl.send :my_starred_projects, current_user
+ assert_equal(0, my_starred_project.andand.size)
+
+ # share it again
+ @controller = LinksController.new
+ post :create, {
+ link: {
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: api_fixture('groups')['starred_and_shared_active_user_project']['uuid'],
+ tail_uuid: api_fixture('users')['project_viewer']['uuid'],
+ },
+ format: :json
+ }, session_for(:system_user)
+
+ # verify that the project is again included in starred projects
+ use_token :project_viewer
+ ctrl = ProjectsController.new
+ my_starred_project = ctrl.send :my_starred_projects, current_user
+ assert_equal(1, my_starred_project.andand.size)
+ end
end
test 'Create a project and move it into a different project' do
visit page_with_token 'active', '/projects'
find("#projects-menu").click
- find(".dropdown-menu a", text: "Home").click
+ within('.dropdown-menu') do
+ first('li', text: 'Home').click
+ end
+ wait_for_ajax
find('.btn', text: "Add a subproject").click
within('h2') do
visit '/projects'
find("#projects-menu").click
- find(".dropdown-menu a", text: "Home").click
+ within('.dropdown-menu') do
+ first('li', text: 'Home').click
+ end
+ wait_for_ajax
find('.btn', text: "Add a subproject").click
within('h2') do
find('.fa-pencil').click
assert page.has_text?('Unrestricted public data'), 'No text - Unrestricted public data'
assert page.has_text?('An anonymously accessible project'), 'No text - An anonymously accessible project'
end
+
+ test "test star and unstar project" do
+ visit page_with_token 'active', "/projects/#{api_fixture('groups')['anonymously_accessible_project']['uuid']}"
+
+ # add to favorites
+ find('.fa-star-o').click
+ wait_for_ajax
+
+ find("#projects-menu").click
+ within('.dropdown-menu') do
+ assert_selector 'li', text: 'Unrestricted public data'
+ end
+
+    # remove from favorites
+ find('.fa-star').click
+ wait_for_ajax
+
+ find("#projects-menu").click
+ within('.dropdown-menu') do
+ assert_no_selector 'li', text: 'Unrestricted public data'
+ end
+ end
end
datum = page.evaluate_script("jobGraphData[jobGraphData.length-1]['#{series}']")
assert_in_epsilon value, datum.to_f
end
+
+ test "test running job with just a few previous log records" do
+ Thread.current[:arvados_api_token] = @@API_AUTHS["admin"]['api_token']
+ job = Job.where(uuid: api_fixture("jobs")['running']['uuid']).results.first
+ visit page_with_token("admin", "/jobs/#{job.uuid}")
+
+ api = ArvadosApiClient.new
+
+ # Create just one old log record
+ api.api("logs", "", {log: {
+ object_uuid: job.uuid,
+ event_type: "stderr",
+ properties: {"text" => "Historic log message"}}})
+
+ click_link("Log")
+
+    # Expect "all" historic log records because we have fewer than the
+    # default Rails.configuration.running_job_log_records_to_fetch count
+ assert_text 'Historic log message'
+
+ # Create new log record and expect it to show up in log tab
+ api.api("logs", "", {log: {
+ object_uuid: job.uuid,
+ event_type: "stderr",
+ properties: {"text" => "Log message after subscription"}}})
+ assert_text 'Log message after subscription'
+ end
+
+ test "test running job with too many previous log records" do
+ Rails.configuration.running_job_log_records_to_fetch = 5
+
+ Thread.current[:arvados_api_token] = @@API_AUTHS["admin"]['api_token']
+ job = Job.where(uuid: api_fixture("jobs")['running']['uuid']).results.first
+
+ visit page_with_token("admin", "/jobs/#{job.uuid}")
+
+ api = ArvadosApiClient.new
+
+ # Create Rails.configuration.running_job_log_records_to_fetch + 1 log records
+ (0..Rails.configuration.running_job_log_records_to_fetch).each do |count|
+ api.api("logs", "", {log: {
+ object_uuid: job.uuid,
+ event_type: "stderr",
+ properties: {"text" => "Old log message #{count}"}}})
+ end
+
+ # Go to log tab, which results in subscribing to websockets
+ click_link("Log")
+
+    # Expect all but the first historic log record, because there was
+    # one more record than the fetch count allows.
+ (1..Rails.configuration.running_job_log_records_to_fetch).each do |count|
+ assert_text "Old log message #{count}"
+ end
+ assert_no_text 'Old log message 0'
+
+ # Create one more log record after subscription
+ api.api("logs", "", {log: {
+ object_uuid: job.uuid,
+ event_type: "stderr",
+ properties: {"text" => "Life goes on!"}}})
+ # Expect it to show up in log tab
+ assert_text 'Life goes on!'
+ end
end
"required": true,
"dataclass": "Collection"
},
- "sample_subdir": "$(dir $(samples))",
+ "sample_subdir": "$(dir $(sample))",
"read_pair": {
"value": {
"group": "sample_subdir",
if ($docker_pid == 0)
{
srun (["srun", "--nodelist=" . join(',', @node)],
- ["/bin/sh", "-ec", $docker_install_script]);
+ ["/bin/bash", "-o", "pipefail", "-ec", $docker_install_script]);
exit ($?);
}
while (1)
}
if ($? != 0)
{
- croak("Installing Docker image from $docker_locator exited "
- .exit_status_s($?));
+ Log(undef, "Installing Docker image from $docker_locator exited " . exit_status_s($?));
+ exit(EX_RETRY_UNLOCKED);
}
# Determine whether this version of Docker supports memory+swap limits.
import cwltool.draft2tool
import cwltool.workflow
import cwltool.main
+from cwltool.process import shortname
import threading
import cwltool.docker
import fnmatch
args = [image_name]
if image_tag:
args.append(image_tag)
+ logger.info("Uploading Docker image %s", ":".join(args))
arvados.commands.keepdocker.main(args)
return dockerRequirement["dockerImageId"]
"script_version": "master",
"script_parameters": {"tasks": [script_parameters]},
"runtime_constraints": runtime_constraints
- }, find_or_create=kwargs.get("enable_reuse", True)).execute()
+ }, find_or_create=kwargs.get("enable_reuse", True)).execute(num_retries=self.arvrunner.num_retries)
self.arvrunner.jobs[response["uuid"]] = self
- logger.info("Job %s is %s", response["uuid"], response["state"])
+ self.arvrunner.pipeline["components"][self.name] = {"job": response}
+ self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
+ body={
+ "components": self.arvrunner.pipeline["components"]
+ }).execute(num_retries=self.arvrunner.num_retries)
+
+ logger.info("Job %s (%s) is %s", self.name, response["uuid"], response["state"])
if response["state"] in ("Complete", "Failed", "Cancelled"):
self.done(response)
logger.error("Got error %s" % str(e))
self.output_callback({}, "permanentFail")
+ def update_pipeline_component(self, record):
+ self.arvrunner.pipeline["components"][self.name] = {"job": record}
+ self.arvrunner.pipeline = self.arvrunner.api.pipeline_instances().update(uuid=self.arvrunner.pipeline["uuid"],
+ body={
+ "components": self.arvrunner.pipeline["components"]
+ }).execute(num_retries=self.arvrunner.num_retries)
def done(self, record):
+ try:
+ self.update_pipeline_component(record)
+ except:
+ pass
+
try:
if record["state"] == "Complete":
processStatus = "success"
try:
outputs = {}
- outputs = self.collect_outputs("keep:" + record["output"])
+ if record["output"]:
+ outputs = self.collect_outputs("keep:" + record["output"])
except Exception as e:
logger.exception("Got exception while collecting job outputs:")
processStatus = "permanentFail"
self._pathmap[src] = (src, "$(task.keep)/%s" % src[5:])
if src not in self._pathmap:
ab = cwltool.pathmapper.abspath(src, basedir)
- st = arvados.commands.run.statfile("", ab)
+ st = arvados.commands.run.statfile("", ab, fnPattern="$(task.keep)/%s/%s")
if kwargs.get("conformance_test"):
self._pathmap[src] = (src, ab)
elif isinstance(st, arvados.commands.run.UploadFile):
self.cond = threading.Condition(self.lock)
self.final_output = None
self.uploaded = {}
+ self.num_retries = 4
def arvMakeTool(self, toolpath_object, **kwargs):
if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
def output_callback(self, out, processStatus):
if processStatus == "success":
logger.info("Overall job status is %s", processStatus)
+ self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
+ body={"state": "Complete"}).execute(num_retries=self.num_retries)
+
else:
logger.warn("Overall job status is %s", processStatus)
+ self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
+ body={"state": "Failed"}).execute(num_retries=self.num_retries)
self.final_output = out
+
def on_message(self, event):
if "object_uuid" in event:
if event["object_uuid"] in self.jobs and event["event_type"] == "update":
if event["properties"]["new_attributes"]["state"] == "Running" and self.jobs[event["object_uuid"]].running is False:
- logger.info("Job %s is Running", event["object_uuid"])
+ uuid = event["object_uuid"]
with self.lock:
- self.jobs[event["object_uuid"]].running = True
+ j = self.jobs[uuid]
+ logger.info("Job %s (%s) is Running", j.name, uuid)
+ j.running = True
+ j.update_pipeline_component(event["properties"]["new_attributes"])
elif event["properties"]["new_attributes"]["state"] in ("Complete", "Failed", "Cancelled"):
- logger.info("Job %s is %s", event["object_uuid"], event["properties"]["new_attributes"]["state"])
+ uuid = event["object_uuid"]
try:
self.cond.acquire()
- self.jobs[event["object_uuid"]].done(event["properties"]["new_attributes"])
+ j = self.jobs[uuid]
+ logger.info("Job %s (%s) is %s", j.name, uuid, event["properties"]["new_attributes"]["state"])
+ j.done(event["properties"]["new_attributes"])
self.cond.notify()
finally:
self.cond.release()
def arvExecutor(self, tool, job_order, input_basedir, args, **kwargs):
events = arvados.events.subscribe(arvados.api('v1'), [["object_uuid", "is_a", "arvados#job"]], self.on_message)
+ self.pipeline = self.api.pipeline_instances().create(body={"name": shortname(tool.tool["id"]),
+ "components": {},
+ "state": "RunningOnClient"}).execute(num_retries=self.num_retries)
+
self.fs_access = CollectionFsAccess(input_basedir)
kwargs["fs_access"] = self.fs_access
'bin/arvados-cwl-runner'
],
install_requires=[
- 'cwltool>=1.0.20151026181844',
- 'arvados-python-client>=0.1.20151023214338'
+ 'cwltool>=1.0.20160129152024',
+ 'arvados-python-client>=0.1.20160122132348'
],
zip_safe=True,
cmdclass={'egg_info': tagger},
svc = apiclient_discovery.build('arvados', version, **kwargs)
svc.api_token = token
+ svc.insecure = insecure
kwargs['http'].max_request_size = svc._rootDesc.get('maxRequestSize', 0)
kwargs['http'].cache = None
return svc
import sys
import logging
import tempfile
+import urlparse
import arvados
import arvados.config
copy_opts.add_argument(
'--project-uuid', dest='project_uuid',
help='The UUID of the project at the destination to which the pipeline should be copied.')
+ copy_opts.add_argument(
+ '--allow-git-http-src', action="store_true",
+ help='Allow cloning git repositories over insecure http')
+ copy_opts.add_argument(
+ '--allow-git-http-dst', action="store_true",
+ help='Allow pushing git repositories over insecure http')
+
copy_opts.add_argument(
'object_uuid',
help='The UUID of the object to be copied.')
c['manifest_text'] = dst_manifest
return create_collection_from(c, src, dst, args)
+def select_git_url(api, repo_name, retries, allow_insecure_http, allow_insecure_http_opt):
+ r = api.repositories().list(
+ filters=[['name', '=', repo_name]]).execute(num_retries=retries)
+ if r['items_available'] != 1:
+ raise Exception('cannot identify repo {}; {} repos found'
+ .format(repo_name, r['items_available']))
+
+ https_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("https:")]
+ http_url = [c for c in r['items'][0]["clone_urls"] if c.startswith("http:")]
+ other_url = [c for c in r['items'][0]["clone_urls"] if not c.startswith("http")]
+
+ priority = https_url + other_url + http_url
+
+ git_config = []
+ git_url = None
+ for url in priority:
+ if url.startswith("http"):
+ u = urlparse.urlsplit(url)
+ baseurl = urlparse.urlunsplit((u.scheme, u.netloc, "", "", ""))
+ git_config = ["-c", "credential.%s/.username=none" % baseurl,
+ "-c", "credential.%s/.helper=!cred(){ cat >/dev/null; if [ \"$1\" = get ]; then echo password=$ARVADOS_API_TOKEN; fi; };cred" % baseurl]
+ else:
+ git_config = []
+
+ try:
+ logger.debug("trying %s", url)
+ arvados.util.run_command(["git"] + git_config + ["ls-remote", url],
+ env={"HOME": os.environ["HOME"],
+ "ARVADOS_API_TOKEN": api.api_token,
+ "GIT_ASKPASS": "/bin/false"})
+ except arvados.errors.CommandFailedError:
+ pass
+ else:
+ git_url = url
+ break
+
+ if not git_url:
+ raise Exception('Cannot access git repository, tried {}'
+ .format(priority))
+
+ if git_url.startswith("http:"):
+ if allow_insecure_http:
+ logger.warn("Using insecure git url %s but will allow this because %s", git_url, allow_insecure_http_opt)
+ else:
+ raise Exception("Refusing to use insecure git url %s, use %s if you really want this." % (git_url, allow_insecure_http_opt))
+
+ return (git_url, git_config)
+
+
# copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args)
#
# Copies commits from git repository 'src_git_repo' on Arvados
#
def copy_git_repo(src_git_repo, src, dst, dst_git_repo, script_version, args):
# Identify the fetch and push URLs for the git repositories.
- r = src.repositories().list(
- filters=[['name', '=', src_git_repo]]).execute(num_retries=args.retries)
- if r['items_available'] != 1:
- raise Exception('cannot identify source repo {}; {} repos found'
- .format(src_git_repo, r['items_available']))
- src_git_url = r['items'][0]['fetch_url']
- logger.debug('src_git_url: {}'.format(src_git_url))
- r = dst.repositories().list(
- filters=[['name', '=', dst_git_repo]]).execute(num_retries=args.retries)
- if r['items_available'] != 1:
- raise Exception('cannot identify destination repo {}; {} repos found'
- .format(dst_git_repo, r['items_available']))
- dst_git_push_url = r['items'][0]['push_url']
- logger.debug('dst_git_push_url: {}'.format(dst_git_push_url))
+ (src_git_url, src_git_config) = select_git_url(src, src_git_repo, args.retries, args.allow_git_http_src, "--allow-git-http-src")
+ (dst_git_url, dst_git_config) = select_git_url(dst, dst_git_repo, args.retries, args.allow_git_http_dst, "--allow-git-http-dst")
+
+ logger.debug('src_git_url: {}'.format(src_git_url))
+ logger.debug('dst_git_url: {}'.format(dst_git_url))
dst_branch = re.sub(r'\W+', '_', "{}_{}".format(src_git_url, script_version))
if src_git_repo not in local_repo_dir:
local_repo_dir[src_git_repo] = tempfile.mkdtemp()
arvados.util.run_command(
- ["git", "clone", "--bare", src_git_url,
+ ["git"] + src_git_config + ["clone", "--bare", src_git_url,
local_repo_dir[src_git_repo]],
- cwd=os.path.dirname(local_repo_dir[src_git_repo]))
+ cwd=os.path.dirname(local_repo_dir[src_git_repo]),
+ env={"HOME": os.environ["HOME"],
+ "ARVADOS_API_TOKEN": src.api_token,
+ "GIT_ASKPASS": "/bin/false"})
arvados.util.run_command(
- ["git", "remote", "add", "dst", dst_git_push_url],
+ ["git", "remote", "add", "dst", dst_git_url],
cwd=local_repo_dir[src_git_repo])
arvados.util.run_command(
["git", "branch", dst_branch, script_version],
cwd=local_repo_dir[src_git_repo])
- arvados.util.run_command(["git", "push", "dst", dst_branch],
- cwd=local_repo_dir[src_git_repo])
+ arvados.util.run_command(["git"] + dst_git_config + ["push", "dst", dst_branch],
+ cwd=local_repo_dir[src_git_repo],
+ env={"HOME": os.environ["HOME"],
+ "ARVADOS_API_TOKEN": dst.api_token,
+ "GIT_ASKPASS": "/bin/false"})
def copy_docker_images(pipeline, src, dst, args):
"""Copy any docker images named in the pipeline components'
# ArvFile() (file already exists in a collection), UploadFile() (file needs to
# be uploaded to a collection), or simply returns prefix+fn (which yields the
# original parameter string).
-def statfile(prefix, fn):
+def statfile(prefix, fn, fnPattern="$(file %s/%s)", dirPattern="$(dir %s/%s/)"):
absfn = os.path.abspath(fn)
if os.path.exists(absfn):
st = os.stat(absfn)
sp = os.path.split(absfn)
(pdh, branch) = is_in_collection(sp[0], sp[1])
if pdh:
- return ArvFile(prefix, "$(file %s/%s)" % (pdh, branch))
+ return ArvFile(prefix, fnPattern % (pdh, branch))
else:
# trim leading '/' for path prefix test later
return UploadFile(prefix, absfn[1:])
sp = os.path.split(absfn)
(pdh, branch) = is_in_collection(sp[0], sp[1])
if pdh:
- return ArvFile(prefix, "$(dir %s/%s/)" % (pdh, branch))
+ return ArvFile(prefix, dirPattern % (pdh, branch))
return prefix+fn
return _subscribe_websocket(api, filters, on_event, last_log_id)
try:
- return _subscribe_websocket(api, filters, on_event, last_log_id)
+ if not config.flag_is_true('ARVADOS_DISABLE_WEBSOCKETS'):
+ return _subscribe_websocket(api, filters, on_event, last_log_id)
+ else:
+ _logger.info("Using polling because ARVADOS_DISABLE_WEBSOCKETS is true")
except Exception as e:
_logger.warn("Falling back to polling after websocket error: %s" % e)
p = PollClient(api, filters, on_event, poll_fallback, last_log_id)
('share/doc/arvados-python-client', ['LICENSE-2.0.txt', 'README.rst']),
],
install_requires=[
+ 'google-api-python-client==1.4.2',
+ 'oauth2client >=1.4.6, <2',
'ciso8601',
- 'google-api-python-client',
'httplib2',
'pycurl >=7.19.5.1, <7.21.5',
'python-gflags<3.0',
# Dev/test SSL certificates
/self-signed.key
/self-signed.pem
+
+# Generated git-commit.version file
+/git-commit.version
((attr == 'scopes') and (operator == '=')) ? operand : nil
})
@filters.select! { |attr, operator, operand|
- (attr == 'uuid') and (operator == '=')
+ ((attr == 'uuid') and (operator == '=')) || ((attr == 'api_token') and (operator == '='))
}
end
if @where
end
def find_object_by_uuid
- # Again, to make things easier for the client and our own routing,
- # here we look for the api_token key in a "uuid" (POST) or "id"
- # (GET) parameter.
- @object = model_class.where('api_token=?', params[:uuid] || params[:id]).first
+ @object = model_class.where(uuid: (params[:uuid] || params[:id])).first
end
def current_api_client_is_trusted
unless Thread.current[:api_client].andand.is_trusted
+    if params["action"] == "show"
+      if @object and @object['api_token'] == current_api_client_authorization.andand.api_token
+        return true
+      end
+    elsif params["action"] == "index" and @objects.andand.size == 1
+      filters = @filters.map{|f|f.first}.uniq
+      if ['uuid'] == filters
+        return true if @objects.first['api_token'] == current_api_client_authorization.andand.api_token
+      elsif ['api_token'] == filters
+        # The caller already supplied this token's secret in the filter, so
+        # it is safe to show it when it belongs to them. Must be a
+        # comparison (==), not an assignment (=): assignment would be
+        # truthy for any non-nil user id and would also mutate the record.
+        return true if @objects.first[:user_id] == current_user.id
+      end
+    end
send_error('Forbidden: this API client cannot manipulate other clients\' access tokens.',
status: 403)
end
class ApiClientAuthorization < ArvadosModel
+ include HasUuid
include KindAndEtag
include CommonApiTemplate
self.user_id_changed?
end
- def uuid
- self.api_token
- end
- def uuid=(x) end
- def uuid_was
- self.api_token_was
- end
- def uuid_changed?
- self.api_token_changed?
- end
-
def modified_by_client_uuid
nil
end
--- /dev/null
+require 'has_uuid'
+
+class AddUuidToApiClientAuthorization < ActiveRecord::Migration
+ extend HasUuid::ClassMethods
+
+ def up
+ add_column :api_client_authorizations, :uuid, :string
+ add_index :api_client_authorizations, :uuid, :unique => true
+
+ prefix = Server::Application.config.uuid_prefix + '-' +
+ Digest::MD5.hexdigest('ApiClientAuthorization'.to_s).to_i(16).to_s(36)[-5..-1] + '-'
+
+ update_sql <<-EOS
+update api_client_authorizations set uuid = (select concat('#{prefix}',
+array_to_string(ARRAY (SELECT substring(api_token FROM (ceil(random()*36))::int FOR 1) FROM generate_series(1, 15)), '')
+));
+EOS
+
+ change_column_null :api_client_authorizations, :uuid, false
+ end
+
+ def down
+ if column_exists?(:api_client_authorizations, :uuid)
+ remove_index :api_client_authorizations, :uuid
+ remove_column :api_client_authorizations, :uuid
+ end
+ end
+end
--- /dev/null
+class AddUuidToApiTokenSearchIndex < ActiveRecord::Migration
+ def up
+ begin
+ remove_index :api_client_authorizations, :name => 'api_client_authorizations_search_index'
+ rescue
+ end
+ add_index :api_client_authorizations,
+ ["api_token", "created_by_ip_address", "last_used_by_ip_address", "default_owner_uuid", "uuid"],
+ name: "api_client_authorizations_search_index"
+ end
+
+ def down
+ begin
+ remove_index :api_client_authorizations, :name => 'api_client_authorizations_search_index'
+ rescue
+ end
+ add_index :api_client_authorizations,
+ ["api_token", "created_by_ip_address", "last_used_by_ip_address", "default_owner_uuid"],
+ name: "api_client_authorizations_search_index"
+ end
+end
default_owner_uuid character varying(255),
scopes text DEFAULT '---
- all
-'::text NOT NULL
+'::text NOT NULL,
+ uuid character varying(255) NOT NULL
);
-- Name: api_client_authorizations_search_index; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
-CREATE INDEX api_client_authorizations_search_index ON api_client_authorizations USING btree (api_token, created_by_ip_address, last_used_by_ip_address, default_owner_uuid);
+CREATE INDEX api_client_authorizations_search_index ON api_client_authorizations USING btree (api_token, created_by_ip_address, last_used_by_ip_address, default_owner_uuid, uuid);
--
CREATE INDEX index_api_client_authorizations_on_user_id ON api_client_authorizations USING btree (user_id);
+--
+-- Name: index_api_client_authorizations_on_uuid; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE UNIQUE INDEX index_api_client_authorizations_on_uuid ON api_client_authorizations USING btree (uuid);
+
+
--
-- Name: index_api_clients_on_created_at; Type: INDEX; Schema: public; Owner: -; Tablespace:
--
INSERT INTO schema_migrations (version) VALUES ('20151215134304');
-INSERT INTO schema_migrations (version) VALUES ('20151229214707');
\ No newline at end of file
+INSERT INTO schema_migrations (version) VALUES ('20151229214707');
+
+INSERT INTO schema_migrations (version) VALUES ('20160208210629');
+
+INSERT INTO schema_migrations (version) VALUES ('20160209155729');
\ No newline at end of file
# Read about fixtures at http://api.rubyonrails.org/classes/ActiveRecord/Fixtures.html
system_user:
+ uuid: zzzzz-gj3su-017z32aux8dg2s1
api_client: untrusted
user: system_user
api_token: systemusertesttoken1234567890aoeuidhtnsqjkxbmwvzpy
expires_at: 2038-01-01 00:00:00
admin:
+ uuid: zzzzz-gj3su-027z32aux8dg2s1
api_client: untrusted
user: admin
api_token: 4axaw8zxe0qm22wa6urpp5nskcne8z88cvbupv653y1njyi05h
expires_at: 2038-01-01 00:00:00
admin_trustedclient:
+ uuid: zzzzz-gj3su-037z32aux8dg2s1
api_client: trusted_workbench
user: admin
api_token: 1a9ffdcga2o7cw8q12dndskomgs1ygli3ns9k2o9hgzgmktc78
expires_at: 2038-01-01 00:00:00
data_manager:
+ uuid: zzzzz-gj3su-047z32aux8dg2s1
api_client: untrusted
user: system_user
api_token: 320mkve8qkswstz7ff61glpk3mhgghmg67wmic7elw4z41pke1
- POST /arvados/v1/logs
miniadmin:
+ uuid: zzzzz-gj3su-057z32aux8dg2s1
api_client: untrusted
user: miniadmin
api_token: 2zb2y9pw3e70270te7oe3ewaantea3adyxjascvkz0zob7q7xb
expires_at: 2038-01-01 00:00:00
rominiadmin:
+ uuid: zzzzz-gj3su-067z32aux8dg2s1
api_client: untrusted
user: rominiadmin
api_token: 5tsb2pc3zlatn1ortl98s2tqsehpby88wmmnzmpsjmzwa6payh
expires_at: 2038-01-01 00:00:00
active:
+ uuid: zzzzz-gj3su-077z32aux8dg2s1
api_client: untrusted
user: active
api_token: 3kg6k6lzmp9kj5cpkcoxie963cmvjahbt2fod9zru30k1jqdmi
expires_at: 2038-01-01 00:00:00
active_trustedclient:
+ uuid: zzzzz-gj3su-087z32aux8dg2s1
api_client: trusted_workbench
user: active
api_token: 27bnddk6x2nmq00a1e3gq43n9tsl5v87a3faqar2ijj8tud5en
expires_at: 2038-01-01 00:00:00
active_noscope:
+ uuid: zzzzz-gj3su-097z32aux8dg2s1
api_client: untrusted
user: active
api_token: activenoscopeabcdefghijklmnopqrstuvwxyz12345678901
scopes: []
project_viewer:
+ uuid: zzzzz-gj3su-107z32aux8dg2s1
api_client: untrusted
user: project_viewer
api_token: projectviewertoken1234567890abcdefghijklmnopqrstuv
expires_at: 2038-01-01 00:00:00
project_viewer_trustedclient:
+ uuid: zzzzz-gj3su-117z32aux8dg2s1
api_client: trusted_workbench
user: project_viewer
api_token: projectviewertrustedtoken1234567890abcdefghijklmno
expires_at: 2038-01-01 00:00:00
subproject_admin:
+ uuid: zzzzz-gj3su-127z32aux8dg2s1
api_client: untrusted
user: subproject_admin
api_token: subprojectadmintoken1234567890abcdefghijklmnopqrst
expires_at: 2038-01-01 00:00:00
admin_vm:
+ uuid: zzzzz-gj3su-137z32aux8dg2s1
api_client: untrusted
user: admin
api_token: adminvirtualmachineabcdefghijklmnopqrstuvwxyz12345
scopes: ["GET /arvados/v1/virtual_machines/zzzzz-2x53u-382brsig8rp3064/logins"]
admin_noscope:
+ uuid: zzzzz-gj3su-147z32aux8dg2s1
api_client: untrusted
user: admin
api_token: adminnoscopeabcdefghijklmnopqrstuvwxyz123456789012
scopes: []
active_all_collections:
+ uuid: zzzzz-gj3su-157z32aux8dg2s1
api_client: untrusted
user: active
api_token: activecollectionsabcdefghijklmnopqrstuvwxyz1234567
scopes: ["GET /arvados/v1/collections/", "GET /arvados/v1/keep_services/accessible"]
active_userlist:
+ uuid: zzzzz-gj3su-167z32aux8dg2s1
api_client: untrusted
user: active
api_token: activeuserlistabcdefghijklmnopqrstuvwxyz1234568900
scopes: ["GET /arvados/v1/users"]
active_specimens:
+ uuid: zzzzz-gj3su-177z32aux8dg2s1
api_client: untrusted
user: active
api_token: activespecimensabcdefghijklmnopqrstuvwxyz123456890
scopes: ["GET /arvados/v1/specimens/"]
active_apitokens:
+ uuid: zzzzz-gj3su-187z32aux8dg2s1
api_client: trusted_workbench
user: active
api_token: activeapitokensabcdefghijklmnopqrstuvwxyz123456789
"POST /arvados/v1/api_client_authorizations"]
active_readonly:
+ uuid: zzzzz-gj3su-197z32aux8dg2s1
api_client: untrusted
user: active
api_token: activereadonlyabcdefghijklmnopqrstuvwxyz1234568790
scopes: ["GET /"]
spectator:
+ uuid: zzzzz-gj3su-207z32aux8dg2s1
api_client: untrusted
user: spectator
api_token: zw2f4gwx8hw8cjre7yp6v1zylhrhn3m5gvjq73rtpwhmknrybu
expires_at: 2038-01-01 00:00:00
spectator_specimens:
+ uuid: zzzzz-gj3su-217z32aux8dg2s1
api_client: untrusted
user: spectator
api_token: spectatorspecimensabcdefghijklmnopqrstuvwxyz123245
"POST /arvados/v1/specimens"]
inactive:
+ uuid: zzzzz-gj3su-227z32aux8dg2s1
api_client: untrusted
user: inactive
api_token: 5s29oj2hzmcmpq80hx9cta0rl5wuf3xfd6r7disusaptz7h9m0
expires_at: 2038-01-01 00:00:00
inactive_uninvited:
+ uuid: zzzzz-gj3su-237z32aux8dg2s1
api_client: untrusted
user: inactive_uninvited
api_token: 62mhllc0otp78v08e3rpa3nsmf8q8ogk47f7u5z4erp5gpj9al
expires_at: 2038-01-01 00:00:00
inactive_but_signed_user_agreement:
+ uuid: zzzzz-gj3su-247z32aux8dg2s1
api_client: untrusted
user: inactive_but_signed_user_agreement
api_token: 64k3bzw37iwpdlexczj02rw3m333rrb8ydvn2qq99ohv68so5k
expires_at: 2038-01-01 00:00:00
expired:
+ uuid: zzzzz-gj3su-257z32aux8dg2s1
api_client: untrusted
user: active
api_token: 2ym314ysp27sk7h943q6vtc378srb06se3pq6ghurylyf3pdmx
expires_at: 1970-01-01 00:00:00
expired_trustedclient:
+ uuid: zzzzz-gj3su-267z32aux8dg2s1
api_client: trusted_workbench
user: active
api_token: 5hpni7izokzcatku2896xxwqdbt5ptomn04r6auc7fohnli82v
expires_at: 1970-01-01 00:00:00
valid_token_deleted_user:
+ uuid: zzzzz-gj3su-277z32aux8dg2s1
api_client: trusted_workbench
user_id: 1234567
api_token: tewfa58099sndckyqhlgd37za6e47o6h03r9l1vpll23hudm8b
expires_at: 2038-01-01 00:00:00
anonymous:
+ uuid: zzzzz-gj3su-287z32aux8dg2s1
api_client: untrusted
user: anonymous
api_token: 4kg6k6lzmp9kj4cpkcoxie964cmvjahbt4fod9zru44k4jqdmi
scopes: ["GET /"]
job_reader:
+ uuid: zzzzz-gj3su-297z32aux8dg2s1
api_client: untrusted
user: job_reader
api_token: e99512cdc0f3415c2428b9758f33bdfb07bc3561b00e86e7e6
expires_at: 2038-01-01 00:00:00
active_no_prefs:
+ uuid: zzzzz-gj3su-307z32aux8dg2s1
api_client: untrusted
user: active_no_prefs
api_token: 3kg612cdc0f3415c2428b9758f33bdfb07bc3561b00e86qdmi
expires_at: 2038-01-01 00:00:00
active_no_prefs_profile_no_getting_started_shown:
+ uuid: zzzzz-gj3su-317z32aux8dg2s1
api_client: untrusted
user: active_no_prefs_profile_no_getting_started_shown
api_token: 3kg612cdc0f3415c242856758f33bdfb07bc3561b00e86qdmi
expires_at: 2038-01-01 00:00:00
active_no_prefs_profile_with_getting_started_shown:
+ uuid: zzzzz-gj3su-327z32aux8dg2s1
api_client: untrusted
user: active_no_prefs_profile_with_getting_started_shown
api_token: 3kg612cdc0f3415c245786758f33bdfb07babcd1b00e86qdmi
expires_at: 2038-01-01 00:00:00
active_with_prefs_profile_no_getting_started_shown:
+ uuid: zzzzz-gj3su-337z32aux8dg2s1
api_client: untrusted
user: active_with_prefs_profile_no_getting_started_shown
api_token: 3kg612cdc0f3415c245786758f33bdfb07befgh1b00e86qdmi
expires_at: 2038-01-01 00:00:00
user_foo_in_sharing_group:
+ uuid: zzzzz-gj3su-347z32aux8dg2s1
api_client: untrusted
user: user_foo_in_sharing_group
api_token: 2p1pou8p4ls208mcbedeewlotghppenobcyrmyhq8pyf51xd8u
expires_at: 2038-01-01 00:00:00
user1_with_load:
+ uuid: zzzzz-gj3su-357z32aux8dg2s1
api_client: untrusted
user: user1_with_load
api_token: 1234k6lzmp9kj5cpkcoxie963cmvjahbt2fod9zru30k1jqdmi
expires_at: 2038-01-01 00:00:00
fuse:
+ uuid: zzzzz-gj3su-367z32aux8dg2s1
api_client: untrusted
user: fuse
api_token: 4nagbkv8eap0uok7pxm72nossq5asihls3yn5p4xmvqx5t5e7p
uuid: zzzzz-4zz18-op4e2lbej01tcvu
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
created_at: 2014-09-01 12:00:00
+ modified_at: 2014-09-01 12:00:00
portable_data_hash: 0b9a7787660e1fce4a93f33e01376ba6+81
manifest_text: ". cdd549ae79fe6640fa3d5c6261d8303c+195 0:195:zzzzz-8i9sb-0vsrcqi7whchuil.log.txt\n"
name: real_log_collection
manifest_text: ". 73feffa4b7f6bb68e44cf984c85f6e88+3 0:3:baz\n"
owner_uuid: zzzzz-tpzed-81hsbo6mk8nl05c
created_at: 2014-02-03T17:22:54Z
+ modified_at: 2014-02-03T17:22:54Z
name: collection_owned_by_foo
collection_to_remove_from_subproject:
manifest_text: ". 8258b505536a9ab47baa2f4281cb932a+9 0:9:missingno\n"
owner_uuid: zzzzz-j7d0g-axqo7eu9pwvna1x
created_at: 2014-10-15T10:45:00
+ modified_at: 2014-10-15T10:45:00
name: Collection to remove from subproject
collection_with_files_in_subdir:
portable_data_hash: fa7aeb5140e2848d39b416daeef4ffc5+45
manifest_text: ". 37b51d194a7513e45b56f6524f2d51f2+3 0:3:bar\n"
name: bar_file
+ created_at: 2014-02-03T17:22:54Z
+ modified_at: 2014-02-03T17:22:54Z
graph_test_collection2:
uuid: zzzzz-4zz18-uukreo9rbgwsujx
portable_data_hash: 65b17c95fdbc9800fc48acda4e9dcd0b+93
manifest_text: ". 6a4ff0499484c6c79c95cd8c566bd25f+249025 0:249025:FOO_General_Public_License,_version_3.pdf\n"
name: "FOO General Public License, version 3"
+ created_at: 2014-02-03T17:22:54Z
+ modified_at: 2014-02-03T17:22:54Z
graph_test_collection3:
uuid: zzzzz-4zz18-uukreo9rbgwsujj
portable_data_hash: ea10d51bcf88862dbcc36eb292017dfd+45
manifest_text: ". 73feffa4b7f6bb68e44cf984c85f6e88+3 0:3:baz\n"
name: "baz file"
+ created_at: 2014-02-03T17:22:54Z
+ modified_at: 2014-02-03T17:22:54Z
collection_1_owned_by_fuse:
uuid: zzzzz-4zz18-ovx05bfzormx3bg
uuid: zzzzz-4zz18-10gneyn6brkx<%= i.to_s.rjust(3, '0') %>
owner_uuid: zzzzz-j7d0g-0010collections
created_at: <%= i.minute.ago.to_s(:db) %>
+ modified_at: <%= i.minute.ago.to_s(:db) %>
<% end %>
# collections in project_with_201_collections
uuid: zzzzz-4zz18-201gneyn6brd<%= i.to_s.rjust(3, '0') %>
owner_uuid: zzzzz-j7d0g-0201collections
created_at: <%= i.minute.ago.to_s(:db) %>
+ modified_at: <%= i.minute.ago.to_s(:db) %>
<% end %>
# Do not add your fixtures below this line as the rest of this file will be trimmed by test_helper
name: Subproject to test owner uuid and name unique key violation upon removal
description: "Removing this will result in name conflict with 'A project' in Home project and hence get renamed."
group_class: project
+
+starred_and_shared_active_user_project:
+ uuid: zzzzz-j7d0g-starredshared01
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ created_at: 2014-04-21 15:37:48 -0400
+ modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+ modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ modified_at: 2014-04-21 15:37:48 -0400
+ updated_at: 2014-04-21 15:37:48 -0400
+ name: Starred and shared active user project
+ description: Starred and shared active user project
+ group_class: project
properties: {}
updated_at: 2014-08-06 22:11:51.242010312 Z
+star_project_for_active_user:
+ uuid: zzzzz-o0j2j-starredbyactive
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ created_at: 2014-01-24 20:42:26 -0800
+ modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+ modified_by_user_uuid: zzzzz-tpzed-000000000000000
+ modified_at: 2014-01-24 20:42:26 -0800
+ updated_at: 2014-01-24 20:42:26 -0800
+ tail_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ link_class: star
+ name: zzzzz-j7d0g-starredshared01
+ head_uuid: zzzzz-j7d0g-starredshared01
+ properties: {}
+
+share_starred_project_with_project_viewer:
+ uuid: zzzzz-o0j2j-sharewithviewer
+ owner_uuid: zzzzz-tpzed-000000000000000
+ tail_uuid: zzzzz-tpzed-projectviewer1a
+ link_class: permission
+ name: can_read
+ head_uuid: zzzzz-j7d0g-starredshared01
+
+star_shared_project_for_project_viewer:
+ uuid: zzzzz-o0j2j-starredbyviewer
+ owner_uuid: zzzzz-tpzed-projectviewer1a
+ created_at: 2014-01-24 20:42:26 -0800
+ modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+ modified_by_user_uuid: zzzzz-tpzed-000000000000000
+ modified_at: 2014-01-24 20:42:26 -0800
+ updated_at: 2014-01-24 20:42:26 -0800
+ tail_uuid: zzzzz-tpzed-projectviewer1a
+ link_class: star
+ name: zzzzz-j7d0g-starredshared01
+ head_uuid: zzzzz-j7d0g-starredshared01
+ properties: {}
authorize_with :admin_trustedclient
post :create_system_auth, scopes: '["test"]'
assert_response :success
+ assert_not_nil JSON.parse(@response.body)['uuid']
end
test "prohibit create system auth with token from non-trusted client" do
assert_found_tokens(auth, {filters: [['scopes', '=', scopes]]}, *expected)
end
end
+
+ [
+ [:admin, :admin, 200],
+ [:admin, :active, 403],
+ [:admin, :admin_vm, 403], # this belongs to the user of current session, but we can't get it by uuid
+ [:admin_trustedclient, :active, 200],
+ ].each do |user, token, status|
+ test "as user #{user} get #{token} token and expect #{status}" do
+ authorize_with user
+ get :show, {id: api_client_authorizations(token).uuid}
+ assert_response status
+ end
+ end
+
+ [
+ [:admin, :admin, 200],
+ [:admin, :active, 403],
+ [:admin, :admin_vm, 403], # this belongs to the user of current session, but we can't list it by uuid
+ [:admin_trustedclient, :active, 200],
+ ].each do |user, token, status|
+ test "as user #{user} list #{token} token using uuid and expect #{status}" do
+ authorize_with user
+ get :index, {
+ filters: [['uuid','=',api_client_authorizations(token).uuid]]
+ }
+ assert_response status
+ end
+ end
+
+ [
+ [:admin, :admin, 200],
+ [:admin, :active, 403],
+ [:admin, :admin_vm, 200], # this belongs to the user of current session, and can be listed by token
+ [:admin_trustedclient, :active, 200],
+ ].each do |user, token, status|
+ test "as user #{user} list #{token} token using token and expect #{status}" do
+ authorize_with user
+ get :index, {
+ filters: [['api_token','=',api_client_authorizations(token).api_token]]
+ }
+ assert_response status
+ end
+ end
end
'A Project (2)',
"new project name '#{new_project['name']}' was expected to be 'A Project (2)'")
end
+
+ test "unsharing a project results in hiding it from previously shared user" do
+ # remove sharing link for project
+ @controller = Arvados::V1::LinksController.new
+ authorize_with :admin
+ post :destroy, id: links(:share_starred_project_with_project_viewer).uuid
+ assert_response :success
+
+ # verify that the user can no longer see the project
+ @counter = 0 # Reset executed action counter
+ @controller = Arvados::V1::GroupsController.new
+ authorize_with :project_viewer
+ get :index, filters: [['group_class', '=', 'project']], format: :json
+ assert_response :success
+ found_projects = {}
+ json_response['items'].each do |g|
+ found_projects[g['uuid']] = g
+ end
+ assert_equal false, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
+
+ # share the project
+ @counter = 0
+ @controller = Arvados::V1::LinksController.new
+ authorize_with :system_user
+ post :create, link: {
+ link_class: "permission",
+ name: "can_read",
+ head_uuid: groups(:starred_and_shared_active_user_project).uuid,
+ tail_uuid: users(:project_viewer).uuid,
+ }
+
+ # verify that project_viewer user can now see shared project again
+ @counter = 0
+ @controller = Arvados::V1::GroupsController.new
+ authorize_with :project_viewer
+ get :index, filters: [['group_class', '=', 'project']], format: :json
+ assert_response :success
+ found_projects = {}
+ json_response['items'].each do |g|
+ found_projects[g['uuid']] = g
+ end
+ assert_equal true, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
+ end
end
sdkParams := arvadosclient.Dict{
"select": fieldsWanted,
- "order": []string{"modified_at ASC"},
- "filters": [][]string{[]string{"modified_at", ">=", "1900-01-01T00:00:00Z"}}}
+ "order": []string{"modified_at ASC", "uuid ASC"},
+ "filters": [][]string{[]string{"modified_at", ">=", "1900-01-01T00:00:00Z"}},
+ "offset": 0}
if params.BatchSize > 0 {
sdkParams["limit"] = params.BatchSize
// These values are just for getting the loop to run the first time,
// afterwards they'll be set to real values.
- previousTotalCollections := -1
- totalCollections := 0
- for totalCollections > previousTotalCollections {
+ remainingCollections := 1
+ var totalCollections int
+ var previousTotalCollections int
+ for remainingCollections > 0 {
// We're still finding new collections
// Write the heap profile for examining memory usage
if err != nil {
return
}
+ batchCollections := len(collections.Items)
+
+ // We must always have at least one collection in the batch
+ if batchCollections < 1 {
+ err = fmt.Errorf("API query returned no collections for %+v", sdkParams)
+ return
+ }
+
+ // Update count of remaining collections
+ remainingCollections = collections.ItemsAvailable - sdkParams["offset"].(int) - batchCollections
// Process collection and update our date filter.
latestModificationDate, maxManifestSize, totalManifestSize, err := ProcessCollections(params.Logger,
if err != nil {
return results, err
}
- sdkParams["filters"].([][]string)[0][2] = latestModificationDate.Format(time.RFC3339)
+ if sdkParams["filters"].([][]string)[0][2] != latestModificationDate.Format(time.RFC3339) {
+ sdkParams["filters"].([][]string)[0][2] = latestModificationDate.Format(time.RFC3339)
+ sdkParams["offset"] = 0
+ } else {
+ sdkParams["offset"] = sdkParams["offset"].(int) + batchCollections
+ }
// update counts
previousTotalCollections = totalCollections
totalCollections = len(results.UUIDToCollection)
- log.Printf("%d collections read, %d new in last batch, "+
+ log.Printf("%d collections read, %d (%d new) in last batch, "+
+ "%d remaining, "+
"%s latest modified date, %.0f %d %d avg,max,total manifest size",
totalCollections,
+ batchCollections,
totalCollections-previousTotalCollections,
+ remainingCollections,
sdkParams["filters"].([][]string)[0][2],
float32(totalManifestSize)/float32(totalCollections),
maxManifestSize, totalManifestSize)
}
}
+ // Make one final API request to verify that we have processed all collections available up to the latest modification date
+ var collections SdkCollectionList
+ sdkParams["filters"].([][]string)[0][1] = "<="
+ sdkParams["limit"] = 0
+ err = params.Client.List("collections", sdkParams, &collections)
+ if err != nil {
+ return
+ }
+ finalNumberOfCollectionsAvailable, err :=
+ util.NumberItemsAvailable(params.Client, "collections")
+ if err != nil {
+ return
+ }
+ if totalCollections < finalNumberOfCollectionsAvailable {
+ err = fmt.Errorf("API server indicates a total of %d collections "+
+ "available up to %v, but we only retrieved %d. "+
+ "Refusing to continue as this could indicate an "+
+ "otherwise undetected failure.",
+ finalNumberOfCollectionsAvailable,
+ sdkParams["filters"].([][]string)[0][2],
+ totalCollections)
+ return
+ }
+
// Write the heap profile for examining memory usage
err = WriteHeapProfile()
testPutConcurrent(t, factory)
testPutFullBlock(t, factory)
+
+ testTrashUntrash(t, factory)
}
// Put a test block, get it and verify content
t.Error("rdata != wdata")
}
}
+
+// With trashLifetime != 0, perform:
+// Trash an old block - which either raises ErrNotImplemented or succeeds
+// Untrash - which either raises ErrNotImplemented or succeeds
+// Get - which must succeed
+func testTrashUntrash(t TB, factory TestableVolumeFactory) {
+ v := factory(t)
+ defer v.Teardown()
+ defer func() {
+ trashLifetime = 0
+ }()
+
+ trashLifetime = 3600 * time.Second
+
+ // put block and backdate it
+ v.PutRaw(TestHash, TestBlock)
+ v.TouchWithDate(TestHash, time.Now().Add(-2*blobSignatureTTL))
+
+ buf, err := v.Get(TestHash)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if bytes.Compare(buf, TestBlock) != 0 {
+ t.Errorf("Got data %+q, expected %+q", buf, TestBlock)
+ }
+ bufs.Put(buf)
+
+ // Trash
+ err = v.Trash(TestHash)
+ if v.Writable() == false {
+ if err != MethodDisabledError {
+ t.Error(err)
+ }
+ } else if err != nil {
+ if err != ErrNotImplemented {
+ t.Error(err)
+ }
+ } else {
+ _, err = v.Get(TestHash)
+ if err == nil || !os.IsNotExist(err) {
+ t.Errorf("os.IsNotExist(%v) should have been true", err)
+ }
+
+ // Untrash
+ err = v.Untrash(TestHash)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+
+ // Get the block - after trash and untrash sequence
+ buf, err = v.Get(TestHash)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if bytes.Compare(buf, TestBlock) != 0 {
+ t.Errorf("Got data %+q, expected %+q", buf, TestBlock)
+ }
+ bufs.Put(buf)
+}
self.ping_host, arvados_node['uuid'],
arvados_node['info']['ping_secret'])
- def find_node(self, name):
- node = [n for n in self.list_nodes() if n.name == name]
- if node:
- return node[0]
- else:
- return None
+ @staticmethod
+ def _name_key(cloud_object):
+ return cloud_object.name
def create_node(self, size, arvados_node):
try:
# loop forever because subsequent create_node attempts will fail
# due to node name collision. So check if the node we intended to
# create shows up in the cloud node list and return it if found.
- try:
- node = self.find_node(kwargs['name'])
- if node:
- return node
- except:
- # Ignore possible exception from find_node in favor of
- # re-raising the original create_node exception.
- pass
- raise
+ node = self.search_for(kwargs['name'], 'list_nodes', self._name_key)
+ if node:
+ return node
+ else:
+ # something else went wrong, re-raise the exception
+ raise
def post_create_node(self, cloud_node):
# ComputeNodeSetupActor calls this method after the cloud node is
self.real.ex_create_tags(cloud_node,
{'Name': arvados_node_fqdn(arvados_node)})
- def find_node(self, name):
- raise NotImplementedError("ec2.ComputeNodeDriver.find_node")
-
def list_nodes(self):
# Need to populate Node.size
nodes = super(ComputeNodeDriver, self).list_nodes()
self._disktype_links = {dt.name: self._object_link(dt)
for dt in self.real.ex_list_disktypes()}
- @staticmethod
- def _name_key(cloud_object):
- return cloud_object.name
-
@staticmethod
def _object_link(cloud_object):
return cloud_object.extra.get('selfLink')
import pykka
from apiclient import errors as apierror
+from .fullstopactor import FullStopActor
+
# IOError is the base class for socket.error, ssl.SSLError, and friends.
# It seems like it hits the sweet spot for operations we want to retry:
# it's low-level, but unlikely to catch code bugs.
NETWORK_ERRORS = (IOError,)
ARVADOS_ERRORS = NETWORK_ERRORS + (apierror.Error,)
-actor_class = pykka.ThreadingActor
+actor_class = FullStopActor
class NodeManagerConfig(ConfigParser.SafeConfigParser):
"""Node Manager Configuration class.
--- /dev/null
+from __future__ import absolute_import, print_function
+
+import errno
+import logging
+import os
+import threading
+import traceback
+
+import pykka
+
+class FullStopActor(pykka.ThreadingActor):
+ def on_failure(self, exception_type, exception_value, tb):
+ lg = getattr(self, "_logger", logging)
+ if (exception_type in (threading.ThreadError, MemoryError) or
+ exception_type is OSError and exception_value.errno == errno.ENOMEM):
+ lg.critical("Unhandled exception is a fatal error, killing Node Manager")
+ os.killpg(os.getpgid(0), 9)
--- /dev/null
+#!/usr/bin/env python
+
+from __future__ import absolute_import, print_function
+
+import errno
+import logging
+import threading
+import unittest
+
+import mock
+import pykka
+
+from . import testutil
+
+import arvnodeman.fullstopactor
+
+class BogusActor(arvnodeman.fullstopactor.FullStopActor):
+ def __init__(self, e):
+ super(BogusActor, self).__init__()
+ self.exp = e
+
+ def doStuff(self):
+ raise self.exp
+
+class ActorUnhandledExceptionTest(unittest.TestCase):
+ def test1(self):
+ for e in (MemoryError(), threading.ThreadError(), OSError(errno.ENOMEM, "")):
+ with mock.patch('os.killpg') as killpg_mock:
+ act = BogusActor.start(e)
+ act.tell({
+ 'command': 'pykka_call',
+ 'attr_path': ("doStuff",),
+ 'args': [],
+ 'kwargs': {}
+ })
+ act.stop(block=True)
+ self.assertTrue(killpg_mock.called)
+
+ with mock.patch('os.killpg') as killpg_mock:
+ act = BogusActor.start(OSError(errno.ENOENT, ""))
+ act.tell({
+ 'command': 'pykka_call',
+ 'attr_path': ("doStuff",),
+ 'args': [],
+ 'kwargs': {}
+ })
+ act.stop(block=True)
+ self.assertFalse(killpg_mock.called)
AVAILABLE_RAM_RATIO = 0.95
+# Workaround datetime.datetime.strptime() thread-safety bug by calling
+# it once before starting threads. https://bugs.python.org/issue7980
+datetime.datetime.strptime('1999-12-31_23:59:59', '%Y-%m-%d_%H:%M:%S')
+
+
class Task(object):
def __init__(self):
self.starttime = None
return label
def text_report(self):
+ if not self.tasks:
+ return "(no report generated)\n"
return "\n".join(itertools.chain(
self._text_report_gen(),
self._recommend_gen())) + "\n"
lambda x: x * 100),
('Overall CPU usage: {}%',
self.job_tot['cpu']['user+sys'] /
- self.job_tot['time']['elapsed'],
+ self.job_tot['time']['elapsed']
+ if self.job_tot['time']['elapsed'] > 0 else 0,
lambda x: x * 100),
('Max memory used by a single task: {}GB',
self.stats_max['mem']['rss'],
int(math.ceil(nearlygibs(used_mib))*AVAILABLE_RAM_RATIO*1024))
def _recommend_keep_cache(self):
- """Recommend increasing keep cache if miss rate is above 0.2%"""
- if self.job_tot['keepcalls']['get'] == 0:
+ """Recommend increasing keep cache if utilization < 80%"""
+ if self.job_tot['net:keep0']['rx'] == 0:
return
- miss_rate = float(self.job_tot['keepcache']['miss']) / float(self.job_tot['keepcalls']['get']) * 100.0
+ utilization = (float(self.job_tot['blkio:0:0']['read']) /
+ float(self.job_tot['net:keep0']['rx']))
asked_mib = self.existing_constraints.get('keep_cache_mb_per_task', 256)
- if miss_rate > 0.2:
+ if utilization < 0.8:
yield (
- '#!! {} Keep cache miss rate was {:.2f}% -- '
- 'try runtime_constraints "keep_cache_mb_per_task":{}'
+ '#!! {} Keep cache utilization was {:.2f}% -- '
+ 'try runtime_constraints "keep_cache_mb_per_task":{} (or more)'
).format(
self.label,
- miss_rate,
+ utilization * 100.0,
asked_mib*2)
else:
self.job = job
rdr = None
- if self.job['log']:
+ if self.job.get('log'):
try:
rdr = crunchstat_summary.reader.CollectionReader(self.job['log'])
except arvados.errors.NotFoundError as e:
if 'job' not in component:
logger.warning(
"%s: skipping component with no job assigned", cname)
- elif component['job'].get('log') is None:
- logger.warning(
- "%s: skipping job %s with no log available",
- cname, component['job'].get('uuid'))
else:
logger.info(
- "%s: logdata %s", cname, component['job']['log'])
+ "%s: job %s", cname, component['job']['uuid'])
summarizer = JobSummarizer(component['job'], **kwargs)
- summarizer.label = cname
+ summarizer.label = '{} {}'.format(
+ cname, component['job']['uuid'])
self.summarizers[cname] = summarizer
self.label = pipeline_instance_uuid
job_report + ['\n'] +
['### Summary for bar (zzzzz-8i9sb-000000000000001)\n'] +
job_report + ['\n'] +
+ ['### Summary for unfinished-job (zzzzz-8i9sb-xxxxxxxxxxxxxxx)\n',
+ '(no report generated)\n',
+ '\n'] +
['### Summary for baz (zzzzz-8i9sb-000000000000002)\n'] +
job_report)
self.diff_report(cmd, expect)