gem 'uglifier', '>= 1.0.3'
end
+group :test do
+ gem 'rvm-capistrano'
+ gem 'selenium-webdriver'
+ gem 'capybara'
+ gem 'poltergeist'
+end
+
gem 'jquery-rails'
gem 'bootstrap-sass', '~> 3.1.0'
gem 'bootstrap-x-editable-rails'
# To use debugger
#gem 'byebug'
-gem 'rvm-capistrano', :group => :test
-
gem 'passenger', :group => :production
gem 'andand'
gem 'RedCloth'
net-sftp (>= 2.0.0)
net-ssh (>= 2.0.14)
net-ssh-gateway (>= 1.1.0)
+ capybara (2.2.1)
+ mime-types (>= 1.16)
+ nokogiri (>= 1.3.3)
+ rack (>= 1.0.0)
+ rack-test (>= 0.5.4)
+ xpath (~> 2.0)
+ childprocess (0.5.1)
+ ffi (~> 1.0, >= 1.0.11)
+ cliver (0.3.2)
coffee-rails (3.2.2)
coffee-script (>= 2.2.0)
railties (~> 3.2.0)
deep_merge (1.0.1)
erubis (2.7.0)
execjs (2.0.2)
+ ffi (1.9.3)
highline (1.6.20)
hike (1.2.3)
httpclient (2.3.4.1)
mime-types (~> 1.16)
treetop (~> 1.4.8)
mime-types (1.25)
+ mini_portile (0.5.2)
multi_json (1.8.2)
net-scp (1.1.2)
net-ssh (>= 2.6.5)
net-ssh (2.7.0)
net-ssh-gateway (1.2.0)
net-ssh (>= 2.6.5)
+ nokogiri (1.6.1)
+ mini_portile (~> 0.5.0)
oj (2.1.7)
passenger (4.0.23)
daemon_controller (>= 1.1.0)
actionpack
activesupport
rails (>= 3.0.0)
+ poltergeist (1.5.0)
+ capybara (~> 2.1)
+ cliver (~> 0.3.1)
+ multi_json (~> 1.0)
+ websocket-driver (>= 0.2.0)
polyglot (0.3.3)
rack (1.4.5)
rack-cache (1.2)
rdoc (3.12.2)
json (~> 1.4)
ref (1.0.5)
+ rubyzip (1.1.0)
rvm-capistrano (1.5.1)
capistrano (~> 2.15.4)
sass (3.2.12)
railties (~> 3.2.0)
sass (>= 3.1.10)
tilt (~> 1.3)
+ selenium-webdriver (2.40.0)
+ childprocess (>= 0.5.0)
+ multi_json (~> 1.0)
+ rubyzip (~> 1.0)
+ websocket (~> 1.0.4)
sprockets (2.2.2)
hike (~> 1.2)
multi_json (~> 1.0)
uglifier (2.3.1)
execjs (>= 0.3.0)
json (>= 1.8.0)
+ websocket (1.0.7)
+ websocket-driver (0.3.2)
+ xpath (2.0.0)
+ nokogiri (~> 1.3)
PLATFORMS
ruby
andand
bootstrap-sass (~> 3.1.0)
bootstrap-x-editable-rails
+ capybara
coffee-rails (~> 3.2.0)
deep_merge
httpclient
oj
passenger
piwik_analytics
+ poltergeist
rails (~> 3.2.0)
rvm-capistrano
sass
sass-rails (~> 3.2.0)
+ selenium-webdriver
sqlite3
themes_for_rails
therubyracer
+++ /dev/null
-== Welcome to Rails
-
-Rails is a web-application framework that includes everything needed to create
-database-backed web applications according to the Model-View-Control pattern.
-
-This pattern splits the view (also called the presentation) into "dumb"
-templates that are primarily responsible for inserting pre-built data in between
-HTML tags. The model contains the "smart" domain objects (such as Account,
-Product, Person, Post) that holds all the business logic and knows how to
-persist themselves to a database. The controller handles the incoming requests
-(such as Save New Account, Update Product, Show Post) by manipulating the model
-and directing data to the view.
-
-In Rails, the model is handled by what's called an object-relational mapping
-layer entitled Active Record. This layer allows you to present the data from
-database rows as objects and embellish these data objects with business logic
-methods. You can read more about Active Record in
-link:files/vendor/rails/activerecord/README.html.
-
-The controller and view are handled by the Action Pack, which handles both
-layers by its two parts: Action View and Action Controller. These two layers
-are bundled in a single package due to their heavy interdependence. This is
-unlike the relationship between the Active Record and Action Pack that is much
-more separate. Each of these packages can be used independently outside of
-Rails. You can read more about Action Pack in
-link:files/vendor/rails/actionpack/README.html.
-
-
-== Getting Started
-
-1. At the command prompt, create a new Rails application:
- <tt>rails new myapp</tt> (where <tt>myapp</tt> is the application name)
-
-2. Change directory to <tt>myapp</tt> and start the web server:
- <tt>cd myapp; rails server</tt> (run with --help for options)
-
-3. Go to http://localhost:3000/ and you'll see:
- "Welcome aboard: You're riding Ruby on Rails!"
-
-4. Follow the guidelines to start developing your application. You can find
-the following resources handy:
-
-* The Getting Started Guide: http://guides.rubyonrails.org/getting_started.html
-* Ruby on Rails Tutorial Book: http://www.railstutorial.org/
-
-
-== Debugging Rails
-
-Sometimes your application goes wrong. Fortunately there are a lot of tools that
-will help you debug it and get it back on the rails.
-
-First area to check is the application log files. Have "tail -f" commands
-running on the server.log and development.log. Rails will automatically display
-debugging and runtime information to these files. Debugging info will also be
-shown in the browser on requests from 127.0.0.1.
-
-You can also log your own messages directly into the log file from your code
-using the Ruby logger class from inside your controllers. Example:
-
- class WeblogController < ActionController::Base
- def destroy
- @weblog = Weblog.find(params[:id])
- @weblog.destroy
- logger.info("#{Time.now} Destroyed Weblog ID ##{@weblog.id}!")
- end
- end
-
-The result will be a message in your log file along the lines of:
-
- Mon Oct 08 14:22:29 +1000 2007 Destroyed Weblog ID #1!
-
-More information on how to use the logger is at http://www.ruby-doc.org/core/
-
-Also, Ruby documentation can be found at http://www.ruby-lang.org/. There are
-several books available online as well:
-
-* Programming Ruby: http://www.ruby-doc.org/docs/ProgrammingRuby/ (Pickaxe)
-* Learn to Program: http://pine.fm/LearnToProgram/ (a beginners guide)
-
-These two books will bring you up to speed on the Ruby language and also on
-programming in general.
-
-
-== Debugger
-
-Debugger support is available through the debugger command when you start your
-Mongrel or WEBrick server with --debugger. This means that you can break out of
-execution at any point in the code, investigate and change the model, and then,
-resume execution! You need to install ruby-debug to run the server in debugging
-mode. With gems, use <tt>sudo gem install ruby-debug</tt>. Example:
-
- class WeblogController < ActionController::Base
- def index
- @posts = Post.all
- debugger
- end
- end
-
-So the controller will accept the action, run the first line, then present you
-with a IRB prompt in the server window. Here you can do things like:
-
- >> @posts.inspect
- => "[#<Post:0x14a6be8
- @attributes={"title"=>nil, "body"=>nil, "id"=>"1"}>,
- #<Post:0x14a6620
- @attributes={"title"=>"Rails", "body"=>"Only ten..", "id"=>"2"}>]"
- >> @posts.first.title = "hello from a debugger"
- => "hello from a debugger"
-
-...and even better, you can examine how your runtime objects actually work:
-
- >> f = @posts.first
- => #<Post:0x13630c4 @attributes={"title"=>nil, "body"=>nil, "id"=>"1"}>
- >> f.
- Display all 152 possibilities? (y or n)
-
-Finally, when you're ready to resume execution, you can enter "cont".
-
-
-== Console
-
-The console is a Ruby shell, which allows you to interact with your
-application's domain model. Here you'll have all parts of the application
-configured, just like it is when the application is running. You can inspect
-domain models, change values, and save to the database. Starting the script
-without arguments will launch it in the development environment.
-
-To start the console, run <tt>rails console</tt> from the application
-directory.
-
-Options:
-
-* Passing the <tt>-s, --sandbox</tt> argument will rollback any modifications
- made to the database.
-* Passing an environment name as an argument will load the corresponding
- environment. Example: <tt>rails console production</tt>.
-
-To reload your controllers and models after launching the console run
-<tt>reload!</tt>
-
-More information about irb can be found at:
-link:http://www.rubycentral.org/pickaxe/irb.html
-
-
-== dbconsole
-
-You can go to the command line of your database directly through <tt>rails
-dbconsole</tt>. You would be connected to the database with the credentials
-defined in database.yml. Starting the script without arguments will connect you
-to the development database. Passing an argument will connect you to a different
-database, like <tt>rails dbconsole production</tt>. Currently works for MySQL,
-PostgreSQL and SQLite 3.
-
-== Description of Contents
-
-The default directory structure of a generated Ruby on Rails application:
-
- |-- app
- | |-- assets
- | |-- images
- | |-- javascripts
- | `-- stylesheets
- | |-- controllers
- | |-- helpers
- | |-- mailers
- | |-- models
- | `-- views
- | `-- layouts
- |-- config
- | |-- environments
- | |-- initializers
- | `-- locales
- |-- db
- |-- doc
- |-- lib
- | `-- tasks
- |-- log
- |-- public
- |-- script
- |-- test
- | |-- fixtures
- | |-- functional
- | |-- integration
- | |-- performance
- | `-- unit
- |-- tmp
- | |-- cache
- | |-- pids
- | |-- sessions
- | `-- sockets
- `-- vendor
- |-- assets
- `-- stylesheets
- `-- plugins
-
-app
- Holds all the code that's specific to this particular application.
-
-app/assets
- Contains subdirectories for images, stylesheets, and JavaScript files.
-
-app/controllers
- Holds controllers that should be named like weblogs_controller.rb for
- automated URL mapping. All controllers should descend from
- ApplicationController which itself descends from ActionController::Base.
-
-app/models
- Holds models that should be named like post.rb. Models descend from
- ActiveRecord::Base by default.
-
-app/views
- Holds the template files for the view that should be named like
- weblogs/index.html.erb for the WeblogsController#index action. All views use
- eRuby syntax by default.
-
-app/views/layouts
- Holds the template files for layouts to be used with views. This models the
- common header/footer method of wrapping views. In your views, define a layout
- using the <tt>layout :default</tt> and create a file named default.html.erb.
- Inside default.html.erb, call <% yield %> to render the view using this
- layout.
-
-app/helpers
- Holds view helpers that should be named like weblogs_helper.rb. These are
- generated for you automatically when using generators for controllers.
- Helpers can be used to wrap functionality for your views into methods.
-
-config
- Configuration files for the Rails environment, the routing map, the database,
- and other dependencies.
-
-db
- Contains the database schema in schema.rb. db/migrate contains all the
- sequence of Migrations for your schema.
-
-doc
- This directory is where your application documentation will be stored when
- generated using <tt>rake doc:app</tt>
-
-lib
- Application specific libraries. Basically, any kind of custom code that
- doesn't belong under controllers, models, or helpers. This directory is in
- the load path.
-
-public
- The directory available for the web server. Also contains the dispatchers and the
- default HTML files. This should be set as the DOCUMENT_ROOT of your web
- server.
-
-script
- Helper scripts for automation and generation.
-
-test
- Unit and functional tests along with fixtures. When using the rails generate
- command, template test files will be generated for you and placed in this
- directory.
-
-vendor
- External libraries that the application depends on. Also includes the plugins
- subdirectory. If the app has frozen rails, those gems also go here, under
- vendor/rails/. This directory is in the load path.
--- /dev/null
+h1. Developing Workbench
+
+This document includes information to help developers who would like to contribute to Workbench. If you just want to install it, please refer to our "Workbench installation guide":http://doc.arvados.org/install/install-workbench-app.html.
+
+h2. Running tests
+
+The Workbench application includes a series of integration tests. When you run these, it starts the API server in a test environment, with all of its fixtures loaded, then tests Workbench by starting that server and making requests against it.
+
+In addition to bundled gems, running the integration tests requires "PhantomJS":http://phantomjs.org/download.html to test JavaScript elements. The simplest way to get started is to download one of the binary builds provided, and install the executable into one of the directories in your @$PATH@.
+
+If you install the Workbench Bundle in deployment mode, you must also install the API server Bundle in deployment mode, and vice versa. If your Bundle installs have mismatched modes, the integration tests will fail with "Gem not found" errors.
+
+h2. Writing tests
+
+Integration tests are written with Capybara, which drives a fully-featured Web browser to interact with Workbench exactly as a user would.
+
+If your test requires JavaScript support, your test method should start with the line @Capybara.current_driver = Capybara.javascript_driver@. Otherwise, Capybara defaults to a simpler browser for speed.
+
+In most tests, you can directly call "Capybara's Session methods":http://rubydoc.info/github/jnicklas/capybara/Capybara/Session to drive the browser and check its state. If you need finer-grained control, refer to the "full Capybara documentation":http://rubydoc.info/github/jnicklas/capybara/Capybara.
return "Invalid selection";
}
}
+
+$.fn.editabletypes.text.defaults.tpl = '<input type="text" name="editable-text">'
+
+$.fn.editableform.buttons = '\
+<button type="submit" class="btn btn-primary btn-sm editable-submit" \
+ id="editable-submit"><i class="glyphicon glyphicon-ok"></i></button>\
+<button type="button" class="btn btn-default btn-sm editable-cancel" \
+ id="editable-cancel"><i class="glyphicon glyphicon-remove"></i></button>\
+'
class ApiClientAuthorizationsController < ApplicationController
def index
- @objects = model_class.all.to_ary.reject do |x|
+ m = model_class.all
+ items_available = m.items_available
+ offset = m.result_offset
+ limit = m.result_limit
+ filtered = m.to_ary.reject do |x|
x.api_client_id == 0 or (x.expires_at and x.expires_at < Time.now) rescue false
end
+ ArvadosApiClient::patch_paging_vars(filtered, items_available, offset, limit)
+ @objects = ArvadosResourceList.new(ApiClientAuthorization)
+ @objects.results= filtered
super
end
end
def index
- @objects ||= model_class.limit(200).all
+ if params[:limit]
+ limit = params[:limit].to_i
+ else
+ limit = 200
+ end
+
+ if params[:offset]
+ offset = params[:offset].to_i
+ else
+ offset = 0
+ end
+
+ @objects ||= model_class.limit(limit).offset(offset).all
respond_to do |f|
f.json { render json: @objects }
f.html { render }
def breadcrumb_page_name
(@breadcrumb_page_name ||
- (@object.friendly_link_name if @object.respond_to? :friendly_link_name))
+ (@object.friendly_link_name if @object.respond_to? :friendly_link_name) ||
+ action_name)
end
def index_pane_list
Collection.where(any: ['contains', params[:search]])).
uniq { |c| c.uuid }
else
- @collections = Collection.limit(100)
+ if params[:limit]
+ limit = params[:limit].to_i
+ else
+ limit = 100
+ end
+
+ if params[:offset]
+ offset = params[:offset].to_i
+ else
+ offset = 0
+ end
+
+ @collections = Collection.limit(limit).offset(offset)
end
@links = Link.limit(1000).
where(head_uuid: @collections.collect(&:uuid))
class PipelineTemplatesController < ApplicationController
def show
- @objects = []
- PipelineInstance.where(pipeline_template_uuid: @object.uuid).each do |pipeline|
- @objects.push(pipeline)
- end
+ @objects = PipelineInstance.where(pipeline_template_uuid: @object.uuid)
super
end
class UsersController < ApplicationController
- skip_before_filter :find_object_by_uuid, :only => :welcome
+ skip_before_filter :find_object_by_uuid, :only => [:welcome, :activity]
skip_around_filter :thread_with_mandatory_api_token, :only => :welcome
- before_filter :ensure_current_user_is_admin, only: :sudo
+ before_filter :ensure_current_user_is_admin, only: [:sudo, :unsetup]
def welcome
if current_user
end
end
+ def activity
+ @breadcrumb_page_name = nil
+ @users = User.all
+ @user_activity = {}
+ @activity = {
+ logins: {},
+ jobs: {},
+ pipeline_instances: {}
+ }
+ @total_activity = {}
+ @spans = [['This week', Time.now.beginning_of_week, Time.now],
+ ['Last week',
+ Time.now.beginning_of_week.advance(weeks:-1),
+ Time.now.beginning_of_week],
+ ['This month', Time.now.beginning_of_month, Time.now],
+ ['Last month',
+ 1.month.ago.beginning_of_month,
+ Time.now.beginning_of_month]]
+ @spans.each do |span, threshold_start, threshold_end|
+ @activity[:logins][span] = Log.
+ filter([[:event_type, '=', 'login'],
+ [:object_kind, '=', 'arvados#user'],
+ [:created_at, '>=', threshold_start],
+ [:created_at, '<', threshold_end]])
+ @activity[:jobs][span] = Job.
+ filter([[:created_at, '>=', threshold_start],
+ [:created_at, '<', threshold_end]])
+ @activity[:pipeline_instances][span] = PipelineInstance.
+ filter([[:created_at, '>=', threshold_start],
+ [:created_at, '<', threshold_end]])
+ @activity.each do |type, act|
+ records = act[span]
+ @users.each do |u|
+ @user_activity[u.uuid] ||= {}
+ @user_activity[u.uuid][span + ' ' + type.to_s] ||= 0
+ end
+ records.each do |record|
+ @user_activity[record.modified_by_user_uuid] ||= {}
+ @user_activity[record.modified_by_user_uuid][span + ' ' + type.to_s] ||= 0
+ @user_activity[record.modified_by_user_uuid][span + ' ' + type.to_s] += 1
+ @total_activity[span + ' ' + type.to_s] ||= 0
+ @total_activity[span + ' ' + type.to_s] += 1
+ end
+ end
+ end
+ @users = @users.sort_by do |a|
+ [-@user_activity[a.uuid].values.inject(:+), a.full_name]
+ end
+ # Prepend a "Total" pseudo-user to the sorted list
+ @user_activity[nil] = @total_activity
+ @users = [OpenStruct.new(uuid: nil)] + @users
+ end
+
def show_pane_list
if current_user.andand.is_admin
super | %w(Admin)
end
end
+ def index_pane_list
+ if current_user.andand.is_admin
+ super | %w(Activity)
+ else
+ super
+ end
+ end
+
def sudo
resp = $arvados_api_client.api(ApiClientAuthorization, '', {
api_client_authorization: {
f.html { render template: 'users/home' }
end
end
+
+ def unsetup
+ if current_user.andand.is_admin
+ @object.unsetup
+ end
+ show
+ end
+
end
link_name = link_uuid
if opts[:friendly_name]
- begin
- link_name = resource_class.find(link_uuid).friendly_link_name
- rescue RuntimeError
- # If that lookup failed, the link will too. So don't make one.
- return attrvalue
+ if attrvalue.respond_to? :friendly_link_name
+ link_name = attrvalue.friendly_link_name
+ else
+ begin
+ link_name = resource_class.find(link_uuid).friendly_link_name
+ rescue RuntimeError
+ # If that lookup failed, the link will too. So don't make one.
+ return attrvalue
+ end
end
end
if opts[:with_class_name]
resp
end
+ def self.patch_paging_vars(ary, items_available, offset, limit)
+ if items_available
+ (class << ary; self; end).class_eval { attr_accessor :items_available }
+ ary.items_available = items_available
+ end
+ if offset
+ (class << ary; self; end).class_eval { attr_accessor :offset }
+ ary.offset = offset
+ end
+ if limit
+ (class << ary; self; end).class_eval { attr_accessor :limit }
+ ary.limit = limit
+ end
+ ary
+ end
+
def unpack_api_response(j, kind=nil)
if j.is_a? Hash and j[:items].is_a? Array and j[:kind].match(/(_list|List)$/)
ary = j[:items].collect { |x| unpack_api_response x, j[:kind] }
- if j[:items_available]
- (class << ary; self; end).class_eval { attr_accessor :items_available }
- ary.items_available = j[:items_available]
- end
- ary
+ ArvadosApiClient::patch_paging_vars(ary, j[:items_available], j[:offset], j[:limit])
elsif j.is_a? Hash and (kind || j[:kind])
oclass = self.kind_class(kind || j[:kind])
if oclass
Rails.configuration.arvados_v1_base
end
- def arvados_schema
- @arvados_schema ||= api 'schema', ''
- end
-
def discovery
@discovery ||= api '../../discovery/v1/apis/arvados/v1/rest', ''
end
return @columns unless @columns.nil?
@columns = []
@attribute_info ||= {}
- return @columns if $arvados_api_client.arvados_schema[self.to_s.to_sym].nil?
- $arvados_api_client.arvados_schema[self.to_s.to_sym].each do |coldef|
- k = coldef[:name].to_sym
- if coldef[:type] == coldef[:type].downcase
- @columns << column(k, coldef[:type].to_sym)
+ schema = $arvados_api_client.discovery[:schemas][self.to_s.to_sym]
+ return @columns if schema.nil?
+ schema[:properties].each do |k, coldef|
+ case k
+ when :etag, :kind
+ attr_reader k
else
- @columns << column(k, :text)
- serialize k, coldef[:type].constantize
+ if coldef[:type] == coldef[:type].downcase
+ # boolean, integer, etc.
+ @columns << column(k, coldef[:type].to_sym)
+ else
+ # Hash, Array
+ @columns << column(k, :text)
+ serialize k, coldef[:type].constantize
+ end
+ attr_accessible k
+ @attribute_info[k] = coldef
end
- attr_accessible k
- @attribute_info[k] = coldef
end
- attr_reader :etag
- attr_reader :kind
@columns
end
ArvadosResourceList.new(self).order(*args)
end
+ def self.filter(*args)
+ ArvadosResourceList.new(self).filter(*args)
+ end
+
def self.where(*args)
ArvadosResourceList.new(self).where(*args)
end
self
end
+ def offset(skip)
+ @offset = skip
+ self
+ end
+
def order(orderby_spec)
@orderby_spec = orderby_spec
self
end
+ def filter _filters
+ @filters ||= []
+ @filters += _filters
+ self
+ end
+
def where(cond)
cond = cond.dup
cond.keys.each do |uuid_key|
}
api_params[:eager] = '1' if @eager
api_params[:limit] = @limit if @limit
+ api_params[:offset] = @offset if @offset
api_params[:order] = @orderby_spec if @orderby_spec
+ api_params[:filters] = @filters if @filters
res = $arvados_api_client.api @resource_class, '', api_params
@results = $arvados_api_client.unpack_api_response res
self
@results
end
+ def results=(r)
+ @results = r
+ end
+
def all
where({})
end
def items_available
results.items_available if results.respond_to? :items_available
end
+
+ def result_limit
+ results.limit if results.respond_to? :limit
+ end
+
+ def result_offset
+ results.offset if results.respond_to? :offset
+ end
+
end
def friendly_link_name
[self.first_name, self.last_name].compact.join ' '
end
+
+ def unsetup
+ self.private_reload($arvados_api_client.api(self.class,
+ "/#{self.uuid}/unsetup",
+ {}))
+ end
+
end
--- /dev/null
+<% content_for :css do %>
+.index-paging {
+text-align: center;
+padding-left: 1em;
+padding-right: 1em;
+background-color: whitesmoke;
+}
+.paging-number {
+display: inline-block;
+min-width: 1.2em;
+}
+<% end %>
+
+<% if results.respond_to? :result_offset and
+ results.respond_to? :result_limit and
+ results.respond_to? :items_available and
+ results.result_offset != nil and
+ results.result_limit != nil and
+ results.items_available != nil
+%>
+<div class="index-paging">
+ Displaying <%= results.result_offset+1 %> –
+ <%= if results.result_offset + results.result_limit > results.items_available
+ results.items_available
+ else
+ results.result_offset + results.result_limit
+ end %>
+ out of <%= results.items_available %>
+</div>
+
+<% if not (results.result_offset == 0 and results.items_available <= results.result_limit) %>
+
+<div class="index-paging">
+
+<% if results.result_offset > 0 %>
+ <% if results.result_offset > results.result_limit %>
+ <% prev_offset = results.result_offset - results.result_limit %>
+ <% else %>
+ <% prev_offset = 0 %>
+ <% end %>
+<% else %>
+ <% prev_offset = nil %>
+<% end %>
+
+<% this_offset = results.result_offset %>
+
+<% if (results.result_offset + results.result_limit) < results.items_available %>
+ <% next_offset = results.result_offset + results.result_limit %>
+<% else %>
+ <% next_offset = nil %>
+<% end %>
+
+<span class="pull-left">
+<% if results.result_offset > 0 %>
+ <%= link_to raw("<span class='glyphicon glyphicon-fast-backward'></span>"), {:id => object, :offset => 0, :limit => results.result_limit} %>
+<% else %>
+ <span class='glyphicon glyphicon-fast-backward text-muted'></span>
+<% end %>
+
+<% if prev_offset %>
+ <%= link_to raw("<span class='glyphicon glyphicon-step-backward'></span>"), {:id => object, :offset => prev_offset, :limit => results.result_limit} %>
+<% else %>
+<span class='glyphicon glyphicon-step-backward text-muted'></span>
+<% end %>
+</span>
+
+<% first = this_offset - (10 * results.result_limit) %>
+<% last = this_offset + (11 * results.result_limit) %>
+
+<% lastpage_offset = (results.items_available / results.result_limit) * results.result_limit %>
+
+<% if last > results.items_available %>
+ <% first -= (last - lastpage_offset) %>
+ <% last -= (last - results.items_available) %>
+<% end %>
+
+<% if first < 0 %>
+ <% d = -first %>
+ <% first += d %>
+ <% last += d %>
+<% end %>
+
+<% last = results.items_available if last > results.items_available %>
+
+<% i = first %>
+<% n = first / results.result_limit %>
+
+<% if first > 0 %>
+…
+<% end %>
+
+<% while i < last %>
+<% if i != this_offset %>
+ <%= link_to "#{n+1}", {:id => @object, :offset => i, :limit => results.result_limit}, class: 'paging-number' %>
+<% else %>
+ <span class="paging-number" style="font-weight: bold;"><%= n+1 %></span>
+<% end %>
+<% i += results.result_limit %>
+<% n += 1 %>
+<% end %>
+
+<% if last < results.items_available %>
+…
+<% end %>
+
+<span class="pull-right">
+<% if next_offset %>
+ <%= link_to raw("<span class='glyphicon glyphicon-step-forward'></span>"), {:id => @object, :offset => next_offset, :limit => results.result_limit} %>
+<% else %>
<span class='glyphicon glyphicon-step-forward text-muted'></span>
+<% end %>
+
+<% if (results.items_available - results.result_offset) >= results.result_limit %>
+ <%= link_to raw("<span class='glyphicon glyphicon-fast-forward'></span>"), {:id => @object, :offset => results.items_available - (results.items_available % results.result_limit),
+ :limit => results.result_limit} %>
+<% else %>
+ <span class='glyphicon glyphicon-fast-forward text-muted'></span>
+<% end %>
+
+</span>
+
+</div>
+
+<% end %>
+
+<% end %>
<% attr_blacklist = ' created_at modified_at modified_by_user_uuid modified_by_client_uuid updated_at' %>
+<%= render partial: "paging", locals: {results: @objects, object: @object} %>
+
<%= form_tag do |f| %>
<table class="table table-condensed arv-index">
<% end %>
+<%= render partial: "paging", locals: {results: @objects, object: @object} %>
+
<% end %>
</div>
<% end %>
+<%= render partial: "paging", locals: {results: @collections, object: @object} %>
+
<div style="padding-right: 1em">
<%= form_tag do |f| %>
-<table id="collections-index" class="topalign table table-condensed table-fixedlayout table-fixed-header-row">
+<table id="collections-index" class="topalign table table-condensed table-fixedlayout"> <!-- table-fixed-header-row -->
<colgroup>
<col width="4%" />
<col width="10%" />
</div>
+<%= render partial: "paging", locals: {results: @collections, object: @object} %>
+
<% content_for :footer_js do %>
$(document).on('click', 'form[data-remote] input[type=submit]', function() {
$('table#collections-index tbody').fadeTo(200, 0.3);
+<%= render partial: "paging", locals: {results: @groups, object: @object} %>
+
<table class="table table-hover">
<thead>
<tr class="contain-align-left">
</tbody>
</table>
+
+<%= render partial: "paging", locals: {results: @groups, object: @object} %>
-->
<li class="dropdown notification-menu">
- <a href="#" class="dropdown-toggle" data-toggle="dropdown">
+ <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="collections-menu">
<span class="glyphicon glyphicon-paperclip"></span>
<span class="badge" id="persistent-selection-count"></span>
<span class="caret"></span>
<% if current_user.is_active %>
<li class="dropdown notification-menu">
- <a href="#" class="dropdown-toggle" data-toggle="dropdown">
+ <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="notifications-menu">
<span class="glyphicon glyphicon-envelope"></span>
<span class="badge badge-alert notification-count"><%= @notification_count %></span>
<span class="caret"></span>
<% end %>
<li class="dropdown">
- <a href="#" class="dropdown-toggle" data-toggle="dropdown">
+ <a href="#" class="dropdown-toggle" data-toggle="dropdown" id="user-menu">
<span class="glyphicon glyphicon-user"></span><span class="caret"></span>
</a>
<ul class="dropdown-menu" role="menu">
<% content_for :js do %>
setInterval(function(){$('a.refresh').click()}, 15000);
<% end %>
+
+<% content_for :tab_line_buttons do %>
+ <%= form_tag @object, :method => :put do |f| %>
+
+ <%= hidden_field @object.class.to_s.underscore.singularize.to_sym, :active, :value => false %>
+
+ <%= button_tag "Stop pipeline", {class: 'btn btn-primary pull-right', id: "run-pipeline-button"} %>
+ <% end %>
+<% end %>
+
<% end %>
<% else %>
<% end rescue nil %>
<% end %>
+<%= render partial: "paging", locals: {results: @objects, object: @object} %>
+
<%= form_tag do |f| %>
<table class="table table-condensed table-fixedlayout">
<% end %>
+<%= render partial: "paging", locals: {results: @objects, object: @object} %>
+
<% content_for :footer_js do %>
var showhide_compare = function() {
var form = $('form#compare')[0];
}
<% end %>
+<%= render partial: "paging", locals: {results: @objects, object: @object} %>
+
<table class="table table-hover">
<thead>
<tr class="contain-align-left">
</tbody>
</table>
+
+<%= render partial: "paging", locals: {results: @objects, object: @object} %>
--- /dev/null
+<p>
+ As an admin user, you can <%= link_to "view recent user activity", activity_users_url %>.
+</p>
+
<blockquote>
<%= button_to "Log in as #{@object.full_name}", sudo_user_url(id: @object.uuid), class: 'btn btn-primary' %>
</blockquote>
+
+<p>As an admin, you can deactivate and reset this user. This will remove all repository/VM permissions for the user. If you "setup" the user again, the user will have to sign the user agreement again.</p>
+
+<blockquote>
+<%= button_to "Deactivate #{@object.full_name}", unsetup_user_url(id: @object.uuid), class: 'btn btn-primary', confirm: "Are you sure you want to deactivate #{@object.full_name}?"%>
+</blockquote>
<td>
<small>
- <% if j.success %>
+ <% if j.success and j.output %>
<a href="<%= collection_path(j.output) %>">
<% Collection.limit(1).where(uuid: j.output).each do |c| %>
<span class="glyphicon glyphicon-search"></span>
<% end %>
</span>
- </div>
+ </div>
<% end %>
</div>
<% if not current_user.andand.is_active or @my_collections.empty? %>
<p>
Your account must be activated by an Arvados administrator. If this
is your first time accessing Arvados and would like to request
- access, or you believe you are seeing the page in error, please
- <%= link_to "contact us", Rails.configuration.activation_contact_link %>.
+ access, or you believe you are seeing the page in error, please
+ <%= link_to "contact us", Rails.configuration.activation_contact_link %>.
You should receive an email at the address you used to log in when
- your account is activated. In the mean time, you can
+ your account is activated. In the mean time, you can
<%= link_to "learn more about Arvados", "https://arvados.org/projects/arvados/wiki/Introduction_to_Arvados" %>,
and <%= link_to "read the Arvados user guide", "http://doc.arvados.org/user" %>.
</p>
--- /dev/null
+<% content_for :css do %>
+table#users-activity-table th {
+ overflow-x: hidden;
+}
+table#users-activity-table .cell-for-span-This-month,
+table#users-activity-table .cell-for-span-Last-month {
+ background: #eee;
+}
+<% end %>
+<table class="table table-condensed arv-index" id="users-activity-table">
+ <colgroup>
+ <col width="28%" />
+ </colgroup>
+ <% @spans.each do |_| %>
+ <colgroup>
+ <% 3.times do %>
+ <col width="<%= (72 / @spans.count / 3).floor %>%" />
+ <% end %>
+ </colgroup>
+ <% end %>
+
+ <tr>
+ <th rowspan="2">User</th>
+ <% @spans.each do |span, start_at, end_at| %>
+ <th colspan="3" class="cell-for-span-<%= span.gsub ' ','-' %>">
+ <%= span %>
+ <br />
+ <%= start_at.strftime('%b %-d') %>
+ -
+ <%= (end_at-1.second).strftime('%b %-d') %>
+ </th>
+ <% end %>
+ </tr>
+ <tr>
+ <% @spans.each do |span, _| %>
+ <th class="cell-for-span-<%= span.gsub ' ','-' %>">Logins</th>
+ <th class="cell-for-span-<%= span.gsub ' ','-' %>">Jobs</th>
+ <th class="cell-for-span-<%= span.gsub ' ','-' %>">Pipelines</th>
+ <% end %>
+ </tr>
+
+ <% @users.each do |user| %>
+ <tr>
+ <td>
+ <small>
+ <% if user.uuid %>
+ <%= link_to_if_arvados_object user, friendly_name: true %>
+ <% else %>
+ <b>Total</b>
+ <% end %>
+ </small>
+ </td>
+
+ <% @spans.each do |span, _| %>
+ <% ['logins', 'jobs', 'pipeline_instances'].each do |type| %>
+ <td class="cell-for-span-<%= span.gsub ' ','-' %>">
+ <small>
+ <%= @user_activity[user.uuid][span + " " + type].to_s %>
+ </small>
+ </td>
+ <% end %>
+ <% end %>
+ </tr>
+ <% end %>
+</table>
+
+<% content_for :footer_js do %>
+$('#users-activity-table td small').each(function(){
+ if ($(this).html().trim() == '0')
+ $(this).css('opacity', '0.3');
+});
+<% end %>
-# Do not use this file for site configuration. Create config.yml
+# Do not use this file for site configuration. Create application.yml
# instead (see application.yml.example).
development:
assets.compress: false
assets.debug: true
profiling_enabled: true
- site_name: Workbench:dev
+ site_name: Arvados Workbench (dev)
production:
force_ssl: true
profiling_enabled: false
secret_token: <%= rand(2**256).to_s(36) %>
+ # When you run the Workbench's integration tests, it starts the API
+ # server as a dependency. These settings should match the API
+ # server's Rails defaults. If you adjust those, change these
+ # settings in application.yml to match.
+ arvados_login_base: https://localhost:3001/login
+ arvados_v1_base: https://localhost:3001/arvados/v1
+ arvados_insecure_https: true
+
site_name: Workbench:test
common:
# Expands the lines which load the assets
config.assets.debug = true
- # Log timing data for API transactions
- config.profiling_enabled = true
-
- config.arvados_login_base = 'http://arvados.local/login'
- config.arvados_v1_base = 'http://arvados.local/arvados/v1'
- config.arvados_insecure_https = true # true = do not check server certificate
-
- config.data_import_dir = '/tmp/arvados-workbench-upload'
- config.data_export_dir = '/tmp/arvados-workbench-download'
-
- config.secret_token = File.read('config/.secret_token') if File.exist? 'config/.secret_token'
-
- config.site_name = 'Arvados Workbench (dev)'
- config.activation_contact_link = 'mailto:info@arvados.org'
-
- config.arvados_docsite = 'http://doc.arvados.org'
-
- config.arvados_theme = 'default'
-
- config.show_user_agreement_inline = false
end
# Log timing data for API transactions
config.profiling_enabled = false
- config.arvados_login_base = 'https://arvados.local/login'
- config.arvados_v1_base = 'https://arvados.local/arvados/v1'
- config.arvados_insecure_https = false # true = do not check server certificate
-
- config.data_import_dir = '/data/arvados-workbench-upload/data'
- config.data_export_dir = '/data/arvados-workbench-download/data'
-
- # Authentication stub: hard code pre-approved API tokens.
- # config.accept_api_token = { rand(2**256).to_s(36) => true }
- config.accept_api_token = {}
-
- config.site_name = 'Arvados Workbench'
- config.activation_contact_link = 'mailto:info@arvados.org'
-
- config.arvados_docsite = 'http://doc.arvados.org'
-
- config.arvados_theme = 'default'
-
- config.show_user_agreement_inline = false
end
# Log timing data for API transactions
config.profiling_enabled = false
- config.arvados_login_base = 'http://arvados.local/login'
- config.arvados_v1_base = 'https://arvados.local/arvados/v1'
- config.arvados_insecure_https = true # true = do not check server certificate
-
- config.data_import_dir = '/data/arvados-workbench-upload'
- config.data_export_dir = '/data/arvados-workbench-download'
-
- # Authentication stub: hard code pre-approved API tokens.
- # config.accept_api_token = { rand(2**256).to_s(36) => true }
- config.accept_api_token = {}
-
- config.site_name = 'Arvados Workbench (test)'
- config.activation_contact_link = 'mailto:info@arvados.org'
-
- config.arvados_docsite = 'http://doc.arvados.org'
-
- config.arvados_theme = 'default'
-
- config.show_user_agreement_inline = false
end
resources :users do
get 'home', :on => :member
get 'welcome', :on => :collection
+ get 'activity', :on => :collection
post 'sudo', :on => :member
+ post 'unsetup', :on => :member
end
resources :logs
resources :factory_jobs
+++ /dev/null
-Use this README file to introduce your application and point to useful places in the API for learning more.
-Run "rake doc:app" to generate API documentation for your models, controllers, helpers, and libraries.
desc 'Ensure site configuration has all required settings'
task check: :environment do
$application_config.sort.each do |k, v|
- $stderr.puts "%-32s %s" % [k, eval("Rails.configuration.#{k}")]
+ if ENV.has_key?('QUIET') then
+ # Make sure we still check for the variable to exist
+ eval("Rails.configuration.#{k}")
+ else
+ if /(password|secret)/.match(k) then
+ # Make sure we still check for the variable to exist, but don't print the value
+ eval("Rails.configuration.#{k}")
+ $stderr.puts "%-32s %s" % [k, '*********']
+ else
+ $stderr.puts "%-32s %s" % [k, eval("Rails.configuration.#{k}")]
+ end
+ end
end
end
end
--- /dev/null
+require 'test_helper'
+
+class LoginsTest < ActionDispatch::IntegrationTest
+ test "login with api_token works after redirect" do
+ visit page_with_token('active_trustedclient')
+ assert page.has_text?('Recent jobs'), "Missing 'Recent jobs' from page"
+ assert_no_match(/\bapi_token=/, current_path)
+ end
+
+ test "can't use expired token" do
+ visit page_with_token('expired_trustedclient')
+ assert page.has_text? 'Log in'
+ end
+end
--- /dev/null
+require 'integration_helper'
+require 'uri'
+
+class SmokeTest < ActionDispatch::IntegrationTest
+ def assert_visit_success(allowed=[200])
+ assert_includes(allowed, status_code,
+ "#{current_url} returned #{status_code}, not one of " +
+ allowed.inspect)
+ end
+
+ def all_links_in(find_spec, text_regexp=//)
+ find(find_spec).all('a').collect { |tag|
+ if tag[:href].nil? or tag[:href].empty? or (tag.text !~ text_regexp)
+ nil
+ else
+ url = URI(tag[:href])
+ url.host.nil? ? url.path : nil
+ end
+ }.compact
+ end
+
+ test "all first-level links succeed" do
+ visit page_with_token('active_trustedclient', '/')
+ assert_visit_success
+ click_link 'user-menu'
+ urls = [all_links_in('.arvados-nav'),
+ all_links_in('.navbar', /^Manage /)].flatten
+ seen_urls = ['/']
+ while not (url = urls.shift).nil?
+ next if seen_urls.include? url
+ visit url
+ seen_urls << url
+ assert_visit_success
+ # Uncommenting the line below lets you crawl the entire site for a
+ # more thorough test.
+ # urls += all_links_in('body')
+ end
+ end
+end
--- /dev/null
+require 'integration_helper'
+
+class VirtualMachinesTest < ActionDispatch::IntegrationTest
+ test "make and name a new virtual machine" do
+ Capybara.current_driver = Capybara.javascript_driver
+ visit page_with_token('admin_trustedclient')
+ click_link 'Virtual machines'
+ assert page.has_text? 'testvm.shell'
+ click_on 'Add a new virtual machine'
+ assert page.has_text? 'none'
+ click_link 'none'
+ assert page.has_text? 'Update hostname'
+ fill_in 'editable-text', with: 'testname'
+ click_button 'editable-submit'
+ assert page.has_text? 'testname'
+ end
+end
--- /dev/null
+require 'test_helper'
+require 'capybara/rails'
+require 'capybara/poltergeist'
+require 'uri'
+require 'yaml'
+
+$ARV_API_SERVER_DIR = File.expand_path('../../../../services/api', __FILE__)
+SERVER_PID_PATH = 'tmp/pids/server.pid'
+
+class ActionDispatch::IntegrationTest
+ # Make the Capybara DSL available in all integration tests
+ include Capybara::DSL
+
+ def self.api_fixture(name)
+ # Returns the data structure from the named API server test fixture.
+ path = File.join($ARV_API_SERVER_DIR, 'test', 'fixtures', "#{name}.yml")
+ YAML.load(IO.read(path))
+ end
+
+ @@API_AUTHS = api_fixture('api_client_authorizations')
+
+ def page_with_token(token, path='/')
+ # Generate a page path with an embedded API token.
+ # Typical usage: visit page_with_token('token_name', page)
+ # The token can be specified by the name of an api_client_authorizations
+ # fixture, or passed as a raw string.
+ api_token = ((@@API_AUTHS.include? token) ?
+ @@API_AUTHS[token]['api_token'] : token)
+ sep = (path.include? '?') ? '&' : '?'
+ q_string = URI.encode_www_form('api_token' => api_token)
+ "#{path}#{sep}#{q_string}"
+ end
+end
+
+class IntegrationTestRunner < MiniTest::Unit
+ # Make a hash that unsets Bundle's environment variables.
+ # We'll use this environment when we launch Bundle commands in the API
+ # server. Otherwise, those commands will try to use Workbench's gems, etc.
+ @@APIENV = ENV.map { |(key, val)| (key =~ /^BUNDLE_/) ? [key, nil] : nil }.
+ compact.to_h
+
+ def _system(*cmd)
+ if not system(@@APIENV, *cmd)
+ raise RuntimeError, "#{cmd[0]} returned exit code #{$?.exitstatus}"
+ end
+ end
+
+ def _run(args=[])
+ Capybara.javascript_driver = :poltergeist
+ server_pid = Dir.chdir($ARV_API_SERVER_DIR) do |apidir|
+ _system('bundle', 'exec', 'rake', 'db:test:load')
+ _system('bundle', 'exec', 'rake', 'db:fixtures:load')
+ _system('bundle', 'exec', 'rails', 'server', '-d')
+ timeout = Time.now.tv_sec + 10
+ begin
+ sleep 0.2
+ begin
+ server_pid = IO.read(SERVER_PID_PATH).to_i
+        good_pid = (server_pid > 0) && (Process.kill(0, server_pid) rescue false)
+ rescue Errno::ENOENT
+ good_pid = false
+ end
+ end while (not good_pid) and (Time.now.tv_sec < timeout)
+ if not good_pid
+ raise RuntimeError, "could not find API server Rails pid"
+ end
+ server_pid
+ end
+ begin
+ super(args)
+ ensure
+ Process.kill('TERM', server_pid)
+ end
+ end
+end
+
+MiniTest::Unit.runner = IntegrationTestRunner.new
+++ /dev/null
-Arvados Documentation
-
-0. Install dependencies
-
- $ bundle install
-
-
-1. To build or update documentation:
- $ rake generate
-
-
-2. To view documentation:
- $ rake run
-[2014-03-10 09:03:41] INFO WEBrick 1.3.1
-[2014-03-10 09:03:41] INFO ruby 2.1.1 (2014-02-24) [x86_64-linux]
-[2014-03-10 09:03:41] INFO WEBrick::HTTPServer#start: pid=8926 port=8000
-
- Then go to http://localhost:8000
-
-
-2. You can set 'baseurl' (the URL prefix for all internal links),
-'arvados_api_host' and 'arvados_workbench_host' without changing _config.yml:
-
- $ rake generate baseurl=/example arvados_api_host=example.com
-
-
-4. To delete generated files:
- $ rake realclean
--- /dev/null
+h1. Arvados documentation
+
+This is the source code for "doc.arvados.org":http://doc.arvados.org.
+
+Here's how to build the HTML pages locally so you can preview your updates before you commit and push.
+
+Additional information is available on the "'Documentation' page on the Arvados wiki":https://arvados.org/projects/arvados/wiki/Documentation.
+
+h2. 0. Install dependencies
+
+<pre>
+arvados/doc$ bundle install
+</pre>
+
+h2. 1. Generate HTML pages
+
+<pre>
+arvados/doc$ rake
+</pre>
+
+Alternately, to make the documentation browsable on the local filesystem:
+
+<pre>
+arvados/doc$ rake generate baseurl=$PWD/.site
+</pre>
+
+h2. 2. Preview HTML pages
+
+<pre>
+arvados/doc$ rake run
+[2014-03-10 09:03:41] INFO WEBrick 1.3.1
+[2014-03-10 09:03:41] INFO ruby 2.1.1 (2014-02-24) [x86_64-linux]
+[2014-03-10 09:03:41] INFO WEBrick::HTTPServer#start: pid=8926 port=8000
+</pre>
+
+Preview the rendered pages at "http://localhost:8000":http://localhost:8000.
+
+h2. 3. Publish HTML pages inside Workbench
+
+(or some other web site)
+
+You can set @baseurl@ (the URL prefix for all internal links), @arvados_api_host@ and @arvados_workbench_host@ without changing @_config.yml@:
+
+<pre>
+arvados/doc$ rake generate baseurl=/doc arvados_api_host=xyzzy.arvadosapi.com
+</pre>
+
+Make the docs appear at {workbench_host}/doc by creating a symbolic link in Workbench's @public@ directory, pointing to the generated HTML tree.
+
+<pre>
+arvados/doc$ ln -sn ../../../doc/.site ../apps/workbench/public/doc
+</pre>
+
+h2. 4. Delete generated files
+
+<pre>
+arvados/doc$ rake realclean
+</pre>
- Reference:
- user/reference/api-tokens.html.textile.liquid
- user/reference/sdk-cli.html.textile.liquid
+ - user/reference/job-and-pipeline-reference.html.textile.liquid
- Arvados License:
- user/copying/copying.html.textile.liquid
- user/copying/agpl-3.0.html
- sdk/python/sdk-python.html.textile.liquid
- sdk/python/python.html.textile.liquid
- sdk/python/crunch-utility-libraries.html.textile.liquid
+ - Perl:
+ - sdk/perl/index.html.textile.liquid
+ - Ruby:
+ - sdk/ruby/index.html.textile.liquid
+ - CLI:
+ - sdk/cli/index.html.textile.liquid
api:
- Concepts:
- api/index.html.textile.liquid
{% if nx == 1 %}
<hr>
{% if prev != "" %}
- <a href="{{ site.baseurl }}{{ prev.url }}" class="pull-left">Previous: {{ prev.title }}</a></li>
+ <a href="{{ site.baseurl }}{{ prev.url }}" class="pull-left">Previous: {{ prev.title }}</a>
{% endif %}
- <a href="{{ site.baseurl }}{{ p.url }}" class="pull-right">Next: {{ p.title }}</a></li>
+ <a href="{{ site.baseurl }}{{ p.url }}" class="pull-right">Next: {{ p.title }}</a>
{% assign nx = 0 %}
{% assign n = 1 %}
{% endif %}
{% endfor %}
{% if n == 0 && prev != "" %}
<hr>
- <a href="{{ site.baseurl }}{{ prev.url }}" class="pull-left">Previous: {{ prev.title }}</a></li>
+ <a href="{{ site.baseurl }}{{ prev.url }}" class="pull-left">Previous: {{ prev.title }}</a>
{% assign n = 1 %}
{% endif %}
\ No newline at end of file
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/api_clients@
h2. Resources
-Each ApiClient has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each ApiClient has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/api_client_authorizations@
navsection: api
navmenu: Schema
title: AuthorizedKey
-
...
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/authorized_keys@
h2. Resources
-Each AuthorizedKey has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each AuthorizedKey has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
-This resource concerns metadata, usage accounting, and integrity checks for data stored on the cloud. Reading and writing the data _per se_ is achieved by the "Keep":/user/tutorials/tutorial-keep.html storage system.
+This resource concerns metadata, usage accounting, and integrity checks for data stored on the cloud. Reading and writing the data _per se_ is achieved by the "Keep":{{site.baseurl}}/user/tutorials/tutorial-keep.html storage system.
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/links@
h2. Resource
-Each collection has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each collection has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/commits@
h2. Resources
-Each Commit has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Commit has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/commit_ancestors@
h2. Resources
-Each CommitAncestor has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each CommitAncestor has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/groups@
h2. Resources
-Each Group has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Group has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/humans@
h2. Resources
-Each Human has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Human has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/jobs@
→ Job resource list
-This method is equivalent to the "index method":/api/methods.html#index, except that the results are restricted to queued jobs (i.e., jobs that have not yet been started or cancelled) and order defaults to queue priority.
+This method is equivalent to the "index method":{{site.baseurl}}/api/methods.html#index, except that the results are restricted to queued jobs (i.e., jobs that have not yet been started or cancelled) and order defaults to queue priority.
h2. Resource
-Each job has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each job has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Notes|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/job_tasks@
h2. Resources
-Each JobTask has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each JobTask has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/keep_disks@
h2. Resources
-Each KeepDisk has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each KeepDisk has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/links@
h2. Resource
-Each link has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each link has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/logs@
h2. Resources
-Each Log has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Log has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/nodes@
h2. Resources
-Each Node has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Node has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/pipeline_instances@
h2. Resources
-Each PipelineInstance has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each PipelineInstance has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/pipeline_templates@
h2. Resources
-Each PipelineTemplate has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each PipelineTemplate has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/repositories@
h2. Resources
-Each Repository has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Repository has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/specimens@
h2. Resources
-Each Specimen has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Specimen has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/traits@
h2. Resources
-Each Trait has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each Trait has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/users@
h2. Resources
-Each User has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each User has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{site.baseurl}}/api/methods.html
API endpoint base: @https://{{ site.arvados_api_host }}/arvados/v1/virtual_machines@
h2. Resources
-Each VirtualMachine has, in addition to the usual "attributes of Arvados resources":/api/resources.html:
+Each VirtualMachine has, in addition to the usual "attributes of Arvados resources":{{site.baseurl}}/api/resources.html:
table(table table-bordered table-condensed).
|_. Attribute|_. Type|_. Description|_. Example|
navsection: api
navmenu: Schema
title: {resource}
-navorder: {navorder}
---
h1. {resource}
h2. Methods
-See "REST methods for working with Arvados resources":/api/methods.html
+See "REST methods for working with Arvados resources":{{{{site.baseurl}}}}/api/methods.html
API endpoint base: @https://{{{{ site.arvados_api_host }}}}/arvados/v1/{res_api_endpoint}@
<p>manuals, guides, and references</p>
</div>
<div class="col-sm-6">
- <img src="images/dax.png" style="max-height: 10em"></img>
+ <img src="images/dax-reading-book.png" style="max-height: 10em" alt="Dax reading a book" />
</div>
</div>
</div>
</p>
<p>
<a href="{{ site.baseurl }}/install/index.html">Install Guide</a> — How to install Arvados on a cloud platform.
- </p>
+ </p>
</div>
</div>
</div>
irb(main):002:0> <span class="userinput">ApiClient.find(1234).update_attributes is_trusted: true</span>
</code></pre>
</notextile>
-
-
--- /dev/null
+---
+layout: default
+navsection: sdk
+navmenu: CLI
+title: "Command line SDK"
+
+...
+
+The @arv@ CLI tool provides a generic set of wrappers so you can make API calls easily. It performs some validation before connecting to the API server: for example, it refuses to do an API call if a required parameter is missing.
+
+It also provides access to Keep storage services with the @arv keep@ subcommand.
+
+h3. Usage
+
+See the "command line interface":{{site.baseurl}}/user/reference/sdk-cli.html page in the user guide.
+
+h3. Installation
+
+If you are logged in to an Arvados VM, the @arv@ tool should already be installed.
+
+To use @arv@ elsewhere, you can either install the @arvados-cli@ gem via RubyGems or build and install the package using the arvados source tree.
+
+h4. Prerequisites: Ruby >= 2.0.0 and curl libraries
+
+You can use "RVM":http://rvm.io/rvm/install to install and manage Ruby versions.
+
+<notextile>
+<pre>
+$ <code class="userinput">sudo apt-get install curl</code>
+$ <code class="userinput">sudo sh -c 'curl -sSL https://get.rvm.io | bash -s stable'</code>
+$ <code class="userinput">source /etc/profile.d/rvm.sh</code>
+</pre>
+</notextile>
+
+Install curl libraries with your system's package manager. For example, with Debian or Ubuntu:
+
+<notextile>
+<pre>
+$ <code class="userinput">sudo apt-get install libcurl3 libcurl3-gnutls libcurl4-openssl-dev</code>
+</pre>
+</notextile>
+
+h4. Option 1: install with RubyGems
+
+<notextile>
+<pre>
+$ <code class="userinput">sudo gem install arvados-cli</code>
+</pre>
+</notextile>
+
+h4. Option 2: build and install from source
+
+<notextile>
+<pre>
+$ <code class="userinput">git clone https://github.com/curoverse/arvados.git</code>
+$ <code class="userinput">cd arvados/sdk/cli</code>
+$ <code class="userinput">gem build arvados-cli.gemspec</code>
+$ <code class="userinput">sudo gem install arvados-cli-*.gem</code>
+</pre>
+</notextile>
+
This section documents how to access the Arvados API and Keep using various programming languages.
-* "Python SDK":python/sdk-python.html
+* "Python SDK":{{site.baseurl}}/sdk/python/sdk-python.html
+* "Perl SDK":{{site.baseurl}}/sdk/perl/index.html
+* "Ruby SDK":{{site.baseurl}}/sdk/ruby/index.html
+* "Command line SDK":{{site.baseurl}}/sdk/cli/index.html ("arv")
+
+SDKs not yet implemented:
+
+* Rails SDK: Workbench uses an ActiveRecord-like interface to Arvados. This hasn't yet been extracted from Workbench and packaged as a gem.
+* R and Java: We plan to support these, but they have not been implemented yet.
--- /dev/null
+---
+layout: default
+navsection: sdk
+navmenu: Perl
+title: "Perl SDK"
+
+...
+
+The Perl SDK provides a generic set of wrappers so you can make API calls easily.
+
+It should be treated as alpha/experimental. Currently, limitations include:
+* Verbose syntax.
+* No native Keep client.
+* No CPAN package.
+
+h3. Installation
+
+<notextile>
+<pre>
+$ <code class="userinput">sudo apt-get install libjson-perl libio-socket-ssl-perl libwww-perl</code>
+$ <code class="userinput">git clone https://github.com/curoverse/arvados.git</code>
+$ <code class="userinput">cd arvados/sdk/perl</code>
+$ <code class="userinput">perl Makefile.PL</code>
+$ <code class="userinput">sudo make install</code>
+</pre>
+</notextile>
+
+h4. Test installation
+
+If the SDK is installed, @perl -MArvados -e ''@ should produce no errors.
+
+If your @ARVADOS_API_HOST@ and @ARVADOS_API_TOKEN@ environment variables are set up correctly (see "api-tokens":{{site.baseurl}}/user/reference/api-tokens.html for details), the following test script should work:
+
+<notextile>
+<pre>$ <code class="userinput">perl <<'EOF'
+use Arvados;
+my $arv = Arvados->new('apiVersion' => 'v1');
+my $me = $arv->{'users'}->{'current'}->execute;
+print ("arvados.v1.users.current.full_name = '", $me->{'full_name'}, "'\n");
+EOF</code>
+arvados.v1.users.current.full_name = 'Your Name'
+</pre>
+</notextile>
+
+h3. Examples
+
+Set up an API client user agent:
+
+<notextile>
+<pre><code class="userinput">my $arv = Arvados->new('apiVersion' => 'v1');
+</code></pre>
+</notextile>
+
+Get the User object for the current user:
+
+<notextile>
+<pre><code class="userinput">my $current_user = $arv->{'users'}->{'current'}->execute;
+</code></pre>
+</notextile>
+
+Get the UUID of an object that was retrieved using the SDK:
+
+<notextile>
+<pre><code class="userinput">my $current_user_uuid = $current_user->{'uuid'};
+</code></pre>
+</notextile>
+
+Retrieve an object by ID:
+
+<notextile>
+<pre><code class="userinput">my $some_user = $arv->{'users'}->{'get'}->execute('uuid' => $current_user_uuid);
+</code></pre>
+</notextile>
+
+Create an object:
+
+<notextile>
+<pre><code class="userinput">my $test_link = $arv->{'links'}->{'create'}->execute('link' => { 'link_class' => 'test', 'name' => 'test' });
+</code></pre>
+</notextile>
+
+Update an object:
+
+<notextile>
+<pre><code class="userinput">my $test_link = $arv->{'links'}->{'update'}->execute(
+ 'uuid' => $test_link->{'uuid'},
+ 'link' => { 'properties' => { 'foo' => 'bar' } });
+</code></pre>
+</notextile>
+
+Get a list of objects:
+
+<notextile>
+<pre><code class="userinput">my $repos = $arv->{'repositories'}->{'list'}->execute;
+print ("UUID of first repo returned is ", $repos->{'items'}->[0]->{'uuid'}, "\n");
+</code></pre>
+</notextile>
+
+The SDK retrieves the list of API methods from the server at run time. Therefore, the set of available methods is determined by the server version rather than the SDK version.
--- /dev/null
+---
+layout: default
+navsection: sdk
+navmenu: Ruby
+title: "Ruby SDK"
+
+...
+
+The Ruby SDK provides a generic set of wrappers so you can make API calls easily.
+
+h3. Installation
+
+If you are logged in to an Arvados VM, the Ruby SDK should be installed.
+
+To use it elsewhere, you can either install the @arvados@ gem via RubyGems or build and install the package using the arvados source tree.
+
+h4. Prerequisites: Ruby >= 2.0.0
+
+You can use "RVM":http://rvm.io/rvm/install to install and manage Ruby versions.
+
+h4. Option 1: install with RubyGems
+
+<notextile>
+<pre>
+$ <code class="userinput">sudo gem install arvados</code>
+</pre>
+</notextile>
+
+h4. Option 2: build and install from source
+
+<notextile>
+<pre>
+$ <code class="userinput">git clone https://github.com/curoverse/arvados.git</code>
+$ <code class="userinput">cd arvados/sdk/ruby</code>
+$ <code class="userinput">gem build arvados.gemspec</code>
+$ <code class="userinput">sudo gem install arvados-*.gem</code>
+</pre>
+</notextile>
+
+h4. Test installation
+
+If the SDK is installed, @ruby -r arvados -e 'puts "OK!"'@ should produce no errors.
+
+If your @ARVADOS_API_HOST@ and @ARVADOS_API_TOKEN@ environment variables are set up correctly (see "api-tokens":{{site.baseurl}}/user/reference/api-tokens.html for details), the following test script should work:
+
+<notextile>
+<pre>$ <code class="userinput">ruby -r arvados <<'EOF'
+arv = Arvados.new api_version: 'v1'
+my_full_name = arv.user.current[:full_name]
+puts "arvados.v1.users.current.full_name = '#{my_full_name}'"
+EOF</code>
+arvados.v1.users.current.full_name = 'Your Name'
+</pre>
+</notextile>
+
+h3. Examples
+
+Import the module (we skipped this step above by using "ruby -r arvados"):
+
+<notextile>
+<pre><code class="userinput">require 'arvados'
+</code></pre>
+</notextile>
+
+Set up an API client user agent:
+
+<notextile>
+<pre><code class="userinput">arv = Arvados.new(api_version: 'v1')
+</code></pre>
+</notextile>
+
+Get the User object for the current user:
+
+<notextile>
+<pre><code class="userinput">current_user = arv.user.current
+</code></pre>
+</notextile>
+
+Get the UUID of an object that was retrieved using the SDK:
+
+<notextile>
+<pre><code class="userinput">current_user_uuid = current_user[:uuid]
+</code></pre>
+</notextile>
+
+Retrieve an object by ID:
+
+<notextile>
+<pre><code class="userinput">some_user = arv.user.get(uuid: current_user_uuid)
+</code></pre>
+</notextile>
+
+Create an object:
+
+<notextile>
+<pre><code class="userinput">new_link = arv.link.create(link: {link_class: 'test', name: 'test'})
+</code></pre>
+</notextile>
+
+Update an object:
+
+<notextile>
+<pre><code class="userinput">updated_link = arv.link.update(uuid: new_link[:uuid],
+ link: {properties: {foo: 'bar'}})
+</code></pre>
+</notextile>
+
+Delete an object:
+
+<notextile>
+<pre><code class="userinput">arv.link.delete(uuid: new_link[:uuid])
+</code></pre>
+</notextile>
+
+Get a list of objects:
+
+<notextile>
+<pre><code class="userinput">repos = arv.repository.list
+first_repo = repos[:items][0]
+puts "UUID of first repo returned is #{first_repo[:uuid]}"</code>
+UUID of first repo returned is qr1hi-s0uqq-b1bnybpx3u5temz
+</pre>
+</notextile>
+
+The SDK retrieves the list of API methods from the server at run time. Therefore, the set of available methods is determined by the server version rather than the SDK version.
<p style="text-align: center;">Version 3, 19 November 2007</p>
<p>Copyright © 2007 Free Software Foundation,
-Inc. <<a href="http://fsf.org/">http://fsf.org/</a>>
+Inc. <<a href="http://www.fsf.org/">http://fsf.org/</a>>
<br />
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.</p>
--- /dev/null
+---
+layout: default
+navsection: userguide
+title: "Job and Pipeline Reference"
+...
+
+h2. Submitting jobs
+
+table(table table-bordered table-condensed).
+|_. Attribute |_. Type|_. Accepted values |_. Required|_. Description|
+|script |string |filename |yes |The actual script that will be run by crunch. Must be the name of an executable file in the crunch_scripts/ directory at the git revision specified by script_version.|
+|script_version |string |git branch, tag, or version hash |yes |The code version to run, which is available in the specified repository. May be a git hash or tag to specify an exact version, or a branch. If it is a branch, use the branch head.|
+|repository |string |name of git repository hosted by Arvados |yes |The repository to search for script_version.|
+|script_parameters |object |any JSON object |yes |The input parameters for the job, with the parameter names as keys mapping to parameter values.|
+|minimum_script_version |string |git branch, tag, or version hash |no |The minimum acceptable script version when deciding whether to re-use a past job.|
+|exclude_script_versions|array of strings|git branch, tag, or version hash|no |Script versions to exclude when deciding whether to re-use a past job.|
+|nondeterministic |boolean | |no |If true, never re-use a past job, and flag this job so it will never be considered for re-use.|
+|no_reuse |boolean | |no |If true, do not re-use a past job, but this job may be re-used.|
+
+When a job is executed, the 'script_version' field is resolved to an exact git revision and the git hash for that revision is recorded in 'script_version'. If 'script_version' can't be resolved, the job submission will be rejected.
+
+h3. Reusing jobs
+
+Because Arvados records the exact version of the script, input parameters, and runtime environment [1] that was used to run the job, if the script is deterministic (meaning that the same code version is guaranteed to produce the same outputs from the same inputs) then it is possible to re-use the results of past jobs, and avoid re-running the computation to save time. Arvados uses the following algorithm to determine if a past job can be re-used:
+
+notextile. <div class="spaced-out">
+
+# If 'nondeterministic' or 'no_reuse' are true, always create a new job.
+# Find a list of acceptable values for 'script_version'. If 'minimum_script_version' is specified, this is the set of all revisions in the git commit graph between 'minimum_script_version' and 'script_version' (inclusive) [2]. If 'minimum_script_version' is not specified, only 'script_version' is added to the list. If 'exclude_script_versions' is specified, the listed versions are excluded from the list.
+# Select jobs that have the same 'script' and 'script_parameters' attributes, and whose 'script_version' attribute is in the list of acceptable versions. Exclude failed jobs and jobs where 'nondeterministic' is true.
+# If there is more than one candidate job, check that all selected past jobs actually did produce the same output.
+# If everything passed, re-use one of the selected past jobs (if there is more than one match, which job will be returned is undefined). Otherwise create a new job.
+
+fn1. As of this writing, versioning the runtime environment is still under development.
+
+fn2. This may include parallel branches if there is more than one path between 'minimum_script_version' and 'script_version' in the git commit graph. Use 'exclude_script_versions' to blacklist specific versions.
+
+</div>
+
+h3. Examples
+
+Run the script "crunch_scripts/hash.py" in the repository "you" using the "master" branch head. Arvados is allowed to re-use a previous job if the script_version of the past job is the same as the "master" branch head (i.e. there have not been any subsequent commits to "master").
+
+<pre>
+{
+ "script": "hash.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": {
+ "input": "c1bad4b39ca5a924e481008009d94e32+210"
+ }
+}
+</pre>
+
+Run using exactly the version "d00220fb38d4b85ca8fc28a8151702a2b9d1dec5". Arvados is allowed to re-use a previous job if the script_version of that job is also "d00220fb38d4b85ca8fc28a8151702a2b9d1dec5".
+
+<pre>
+{
+ "script": "hash.py",
+ "repository": "you",
+ "script_version": "d00220fb38d4b85ca8fc28a8151702a2b9d1dec5",
+ "script_parameters": {
+ "input": "c1bad4b39ca5a924e481008009d94e32+210"
+ }
+}
+</pre>
+
+Arvados is allowed to re-use a previous job if the script_version of the past job is between "earlier_version_tag" and the head of the "master" branch (inclusive), but not "blacklisted_version_tag". If there are no previous jobs, run the job using the head of the "master" branch as specified in "script_version".
+
+<pre>
+{
+ "script": "hash.py",
+ "repository": "you",
+ "minimum_script_version": "earlier_version_tag",
+ "script_version": "master",
+  "exclude_script_versions": ["blacklisted_version_tag"],
+ "script_parameters": {
+ "input": "c1bad4b39ca5a924e481008009d94e32+210"
+ }
+}
+</pre>
+
+Run the script "crunch_scripts/monte-carlo.py" in the repository "you" using the "master" branch head. Because it is marked as "nondeterministic", never re-use previous jobs, and never re-use this job.
+
+<pre>
+{
+ "script": "monte-carlo.py",
+ "repository": "you",
+ "script_version": "master",
+ "nondeterministic": true,
+ "script_parameters": {
+ "input": "c1bad4b39ca5a924e481008009d94e32+210"
+ }
+}
+</pre>
+
+h2. Pipelines
+
+Pipelines consist of a set of "components". Each component is an Arvados job submission, so when a component job is submitted, Arvados may re-use past jobs based on the rules described above.
+
+table(table table-bordered table-condensed).
+|_. Attribute |_. Type |_. Accepted values |_. Required|_. Description|
+|name |string |any |yes |The human-readable name of the pipeline template.|
+|components |object |JSON object containing job submission objects|yes |The component jobs that make up the pipeline, with the component name as the key. |
+
+h3. Script parameters
+
+When used in a pipeline, each parameter in the 'script_parameters' attribute of a component job can specify that the input parameter must be supplied by the user, or the input parameter should be linked to the output of another component. To do this, the value of the parameter should be a JSON object containing one of the following attributes:
+
+table(table table-bordered table-condensed).
+|_. Attribute |_. Type |_. Accepted values |_. Description|
+|default |any |any |The default value for this parameter.|
+|required |boolean |true or false |Specifies whether the parameter is required to have a value or not.|
+|dataclass |string |One of 'Collection', 'File' [3], 'number', or 'text' |Data type of this parameter.|
+|output_of |string |the name of another component in the pipeline |Specifies that the value of this parameter should be set to the 'output' attribute of the job that corresponds to the specified component.|
+
+The 'output_of' parameter is especially important, as this is how components are actually linked together to form a pipeline. Component jobs that depend on the output of other components do not run until the parent job completes and has produced output. If the parent job fails, the entire pipeline fails.
+
+fn3. The 'File' type refers to a specific file within a Keep collection in the form 'collection_hash/filename', for example '887cd41e9c613463eab2f0d885c6dd96+83/bob.txt'.
+
+h3. Examples
+
+This is a pipeline named "Filter md5 hash values" with two components, "do_hash" and "filter". The "input" script parameter of the "do_hash" component is required to be filled in by the user, and the expected data type is "Collection". This also specifies that the "input" script parameter of the "filter" component is the output of "do_hash", so "filter" will not run until "do_hash" completes successfully. When the pipeline runs, past jobs that meet the criteria described above may be substituted for either or both components to avoid redundant computation.
+
+<pre>
+{
+ "name": "Filter md5 hash values",
+ "components": {
+ "do_hash": {
+ "script": "hash.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": {
+ "input": {
+ "required": true,
+ "dataclass": "Collection"
+ }
+      }
+ },
+ "filter": {
+ "script": "0-filter.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": {
+ "input": {
+ "output_of": "do_hash"
+ }
+      }
+ }
+ }
+}
+</pre>
+
+This pipeline consists of three components. The components "thing1" and "thing2" both depend on "cat_in_the_hat". Once the "cat_in_the_hat" job is complete, both "thing1" and "thing2" can run in parallel, because they do not depend on each other.
+
+<pre>
+{
+ "name": "Wreck the house",
+ "components": {
+ "cat_in_the_hat": {
+ "script": "cat.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": { }
+ },
+ "thing1": {
+ "script": "thing1.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": {
+ "input": {
+ "output_of": "cat_in_the_hat"
+ }
+      }
+ },
+ "thing2": {
+ "script": "thing2.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": {
+ "input": {
+ "output_of": "cat_in_the_hat"
+ }
+      }
+    }
+ }
+}
+</pre>
+
+This pipeline consists of three components. The component "cleanup" depends on "thing1" and "thing2". Both "thing1" and "thing2" are started immediately and can run in parallel, because they do not depend on each other, but "cleanup" cannot begin until both "thing1" and "thing2" have completed.
+
+<pre>
+{
+ "name": "Clean the house",
+ "components": {
+ "thing1": {
+ "script": "thing1.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": { }
+ },
+ "thing2": {
+ "script": "thing2.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": { }
+ },
+ "cleanup": {
+ "script": "cleanup.py",
+ "repository": "you",
+ "script_version": "master",
+ "script_parameters": {
+ "mess1": {
+ "output_of": "thing1"
+ },
+ "mess2": {
+ "output_of": "thing2"
+ }
+ }
+ }
+ }
+}
+</pre>
In order to reassemble the file, Keep stores a *collection* data block which lists in sequence the data blocks that make up the original file. A collection data block may store the information for multiple files, including a directory structure.
-In this example we will use @c1bad4b39ca5a924e481008009d94e32+210@ which we added to Keep in "the first Keep tutorial":{{ site.baseurl }}/users/tutorial/tutorial-keep.html. First let us examine the contents of this collection using @arv keep get@:
+In this example we will use @c1bad4b39ca5a924e481008009d94e32+210@ which we added to Keep in "the first Keep tutorial":{{ site.baseurl }}/user/tutorials/tutorial-keep.html. First let us examine the contents of this collection using @arv keep get@:
<notextile>
<pre><code>~$ <span class="userinput">arv keep get c1bad4b39ca5a924e481008009d94e32+210</span>
"script_parameters":{
"input": "887cd41e9c613463eab2f0d885c6dd96+83"
},
- "script_version":"<b>you</b>:master"
+ "repository":"<b>you</b>",
+ "script_version":"master"
},
"filter":{
"script":"0-filter.py",
"output_of":"do_hash"
}
},
- "script_version":"<b>you</b>:master"
+ "repository":"<b>you</b>",
+ "script_version":"master"
}
}
}
<pre><code>~$ <span class="userinput">arv keep get 880b55fb4470b148a447ff38cacdd952+54/md5sum.txt</span>
44b8ae3fde7a8a88d2f7ebd237625b4f var-GS000016015-ASM.tsv.bz2
~$ <span class="userinput">arv keep get 490cd451c8108824b8a17e3723e1f236+19/0-filter.txt</span>
-~$
</code></pre>
</notextile>
~$ <span class="userinput">cat >the_job <<EOF
{
"script":"GATK2-VariantFiltration",
+ "repository":"arvados",
"script_version":"$src_version",
"script_parameters":
{
<pre><code>~$ <span class="userinput">cat >the_job <<EOF
{
"script": "hash",
- "script_version": "arvados:master",
- "script_parameters":
- {
+ "repository": "arvados",
+ "script_version": "master",
+ "script_parameters": {
"input": "c1bad4b39ca5a924e481008009d94e32+210"
}
}
* @<<EOF@ tells the shell to direct the following lines into the standard input for @cat@ up until it sees the line @EOF@
* @>the_job@ redirects standard output to a file called @the_job@
* @"script"@ specifies the name of the script to run. The script is searched for in the "crunch_scripts/" subdirectory of the @git@ checkout specified by @"script_version"@.
-* @"script_version"@ specifies the version of the script that you wish to run. This can be in the form of an explicit @git@ revision hash, or in the form "repository:branch" (in which case it will take the HEAD of the specified branch). Arvados logs the script version that was used in the run, enabling you to go back and re-run any past job with the guarantee that the exact same code will be used as was used in the previous run. You can access a list of available @git@ repositories on the Arvados workbench under "Compute %(rarr)→% Code repositories":http://{{site.arvados_workbench_host}}/repositories .
+* @"repository"@ is the git repository to search for the script version. You can access a list of available @git@ repositories on the Arvados workbench under "Compute %(rarr)→% Code repositories":https://{{site.arvados_workbench_host}}/repositories .
+* @"script_version"@ specifies the version of the script that you wish to run. This can be in the form of an explicit @git@ revision hash, a tag, or a branch (in which case it will take the HEAD of the specified branch). Arvados logs the script version that was used in the run, enabling you to go back and re-run any past job with the guarantee that the exact same code will be used as was used in the previous run.
* @"script_parameters"@ are provided to the script. In this case, the input is the locator for the collection that we inspected in the previous section.
Use @arv job create@ to actually submit the job. It should print out a JSON object which describes the newly created job:
h2. Monitor job progress
-Go to the "Workbench dashboard":http://{{site.arvados_workbench_host}}. Your job should be at the top of the "Recent jobs" table. This table refreshes automatically. When the job has completed successfully, it will show <span class="label label-success">finished</span> in the *Status* column.
+Go to the "Workbench dashboard":https://{{site.arvados_workbench_host}}. Your job should be at the top of the "Recent jobs" table. This table refreshes automatically. When the job has completed successfully, it will show <span class="label label-success">finished</span> in the *Status* column.
On the command line, you can access log messages while the job runs using @arv job log_tail_follow@:
h2. Inspect the job output
-On the "Workbench dashboard":http://{{site.arvados_workbench_host}}, look for the *Output* column of the *Recent jobs* table. Click on the link under *Output* for your job to go to the files page with the job output. The files page lists all the files that were output by the job. Click on the link under the *files* column to view a file, or click on the download icon <span class="glyphicon glyphicon-download-alt"></span> to download the output file.
+On the "Workbench dashboard":https://{{site.arvados_workbench_host}}, look for the *Output* column of the *Recent jobs* table. Click on the link under *Output* for your job to go to the files page with the job output. The files page lists all the files that were output by the job. Click on the link under the *files* column to view a file, or click on the download icon <span class="glyphicon glyphicon-download-alt"></span> to download the output file.
On the command line, you can use @arv job get@ to access a JSON object describing the output:
2013-12-16_20:44:38 qr1hi-8i9sb-1pm1t02dezhupss 7575 status: 0 done, 1 running, 0 todo
2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 0 child 7681 on compute13.1 exit 0 signal 0 success=true
2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 0 success in 1 seconds
-2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 0 output
+2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 0 output
2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 wait for last 0 children to finish
2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 status: 1 done, 0 running, 1 todo
2013-12-16_20:44:39 qr1hi-8i9sb-1pm1t02dezhupss 7575 start level 1
<pre><code>~/<b>you</b>/crunch_scripts$ <span class="userinput">cat >~/the_job <<EOF
{
"script": "parallel-hash.py",
- "script_version": "<b>you</b>:master",
+ "repository": "<b>you</b>",
+ "script_version": "master",
"script_parameters":
{
"input": "887cd41e9c613463eab2f0d885c6dd96+83"
"dataclass": "Collection"
}
},
- "script_version":"<b>you</b>:master"
+ "repository":"<b>you</b>",
+ "script_version":"master"
}
}
}
</code></pre>
</notextile>
-Your new pipeline template will appear on the "Workbench %(rarr)→% Compute %(rarr)→% Pipeline templates":http://{{ site.arvados_workbench_host }}/pipeline_instances page. You can run the "pipeline using workbench":tutorial-pipeline-workbench.html
+Your new pipeline template will appear on the "Workbench %(rarr)→% Compute %(rarr)→% Pipeline templates":https://{{ site.arvados_workbench_host }}/pipeline_instances page. You can run the "pipeline using workbench":tutorial-pipeline-workbench.html
~$ <span class="userinput">git config --global user.email <b>you</b>@example.com</span></code></pre>
</notextile>
-On the Arvados Workbench, navigate to "Compute %(rarr)→% Code repositories":http://{{site.arvados_workbench_host}}/repositories . You should see a repository with your user name listed in the *name* column. Next to *name* is the column *push_url*. Copy the *push_url* value associated with your repository. This should look like <notextile><code>git@git.{{ site.arvados_api_host }}:<b>you</b>.git</code></notextile>.
+On the Arvados Workbench, navigate to "Compute %(rarr)→% Code repositories":https://{{site.arvados_workbench_host}}/repositories . You should see a repository with your user name listed in the *name* column. Next to *name* is the column *push_url*. Copy the *push_url* value associated with your repository. This should look like <notextile><code>git@git.{{ site.arvados_api_host }}:<b>you</b>.git</code></notextile>.
Next, on the Arvados virtual machine, clone your git repository:
"dataclass": "Collection"
}
},
- "script_version":"<b>you</b>:master"
+ "repository":"<b>you</b>",
+ "script_version":"master"
}
}
}
* @"components"@ is a set of scripts that make up the pipeline
* The component is listed with a human-readable name (@"do_hash"@ in this example)
* @"script"@ specifies the name of the script to run. The script is searched for in the "crunch_scripts/" subdirectory of the @git@ checkout specified by @"script_version"@.
-* @"script_version"@ specifies the version of the script that you wish to run. This can be in the form of an explicit @git@ revision hash, or in the form "repository:branch" (in which case it will take the HEAD of the specified branch). Arvados logs the script version that was used in the run, enabling you to go back and re-run any past job with the guarantee that the exact same code will be used as was used in the previous run. You can access a list of available @git@ repositories on the Arvados workbench under "Compute %(rarr)→% Code repositories":http://{{site.arvados_workbench_host}}//repositories .
+* @"repository"@ is the git repository to search for the script version. You can access a list of available @git@ repositories on the Arvados workbench under "Compute %(rarr)→% Code repositories":https://{{site.arvados_workbench_host}}/repositories .
+* @"script_version"@ specifies the version of the script that you wish to run. This can be in the form of an explicit @git@ revision hash, a tag, or a branch (in which case it will take the HEAD of the specified branch). Arvados logs the script version that was used in the run, enabling you to go back and re-run any past job with the guarantee that the exact same code will be used as was used in the previous run.
* @"script_parameters"@ describes the parameters for the script. In this example, there is one parameter called @input@ which is @required@ and is a @Collection@.
Now, use @arv pipeline_template create@ tell Arvados about your pipeline template:
</code></pre>
</notextile>
-Your new pipeline template will appear on the "Workbench %(rarr)→% Compute %(rarr)→% Pipeline templates":http://{{ site.arvados_workbench_host }}/pipeline_instances page. You can run the "pipeline using workbench":tutorial-pipeline-workbench.html
+Your new pipeline template will appear on the "Workbench %(rarr)→% Compute %(rarr)→% Pipeline templates":https://{{ site.arvados_workbench_host }}/pipeline_instances page. You can run the "pipeline using workbench":tutorial-pipeline-workbench.html
</code></pre>
</notextile>
-You can also download indvidual files:
+You can also download individual files:
<notextile>
<pre><code>/scratch/<b>you</b>$ <span class="userinput">arv keep get 887cd41e9c613463eab2f0d885c6dd96+83/alice.txt .</span>
"dataclass": "Collection"
}
},
- "script_version":"<b>you</b>:master"
+ "repository":"<b>you</b>",
+ "script_version":"master"
},
"filter":{
"script":"0-filter.py",
"output_of":"do_hash"
}
},
- "script_version":"<b>you</b>:master"
+ "repository":"<b>you</b>",
+ "script_version":"master"
}
}
}
</code></pre>
</notextile>
-Your new pipeline template will appear on the "Workbench %(rarr)→% Compute %(rarr)→% Pipeline templates":http://{{ site.arvados_workbench_host }}/pipeline_instances page.
-
+Your new pipeline template will appear on the "Workbench %(rarr)→% Compute %(rarr)→% Pipeline templates":https://{{ site.arvados_workbench_host }}/pipeline_instances page.
notextile. <div class="spaced-out">
-# Go to "Collections":http://{{ site.arvados_workbench_host }}/collections .
+# Go to "Collections":https://{{ site.arvados_workbench_host }}/collections .
# On the collections page, go to the search box <span class="glyphicon glyphicon-search"></span> and search for "tutorial".
# This should yield a collection with the contents "var-GS000016015-ASM.tsv.bz2"
# Click on the check box to the left of "var-GS000016015-ASM.tsv.bz2". This puts the collection in your persistent selection list. Click on the paperclip <span class="glyphicon glyphicon-paperclip"></span> in the upper right to get a dropdown menu listing your current selections.
-# Go to "Pipeline templates":http://{{ site.arvados_workbench_host }}/pipeline_templates .
+# Go to "Pipeline templates":https://{{ site.arvados_workbench_host }}/pipeline_templates .
# Look for a pipeline named "Tutorial pipeline".
# Click on the play button <span class="glyphicon glyphicon-play"></span> to the left of "Tutorial pipeline". This will take you to a new page to configure the pipeline.
# Under *parameter* look for "input". Set the value of "input" by clicking on "none" to get an editing popup. At the top of the selection list in the editing popup will be the collection that you selected in step 4.
rescue LoadError
abort <<-EOS
-Please install all required gems:
+Please install all required gems:
gem install activesupport andand curb google-api-client json oj trollop
end
end
-client = ArvadosClient.new(:host => ENV['ARVADOS_API_HOST'], :application_name => 'arvados-cli', :application_version => '1.0')
-arvados = client.discovered_api('arvados', ENV['ARVADOS_API_VERSION'])
+begin
+ client = ArvadosClient.new(:host => ENV['ARVADOS_API_HOST'], :application_name => 'arvados-cli', :application_version => '1.0')
+ arvados = client.discovered_api('arvados', ENV['ARVADOS_API_VERSION'])
+rescue Exception => e
+ puts "Failed to connect to Arvados API server: #{e}"
+ exit 1
+end
def to_boolean(s)
!!(s =~ /^(true|t|yes|y|1)$/i)
end
banner += "\n"
STDERR.puts banner
-
- if not method.nil? and method != '--help' then
+
+ if not method.nil? and method != '--help' then
Trollop::die ("Unknown method #{method.inspect} " +
"for resource #{resource.inspect}")
end
banner += "\n"
STDERR.puts banner
- if not resource.nil? and resource != '--help' then
+ if not resource.nil? and resource != '--help' then
Trollop::die "Unknown resource type #{resource.inspect}"
end
exit 255
curl.headers['Accept'] = 'text/plain'
curl.headers['Authorization'] = "OAuth2 #{ENV['ARVADOS_API_TOKEN']}"
if ENV['ARVADOS_API_HOST_INSECURE']
- curl.ssl_verify_peer = false
+ curl.ssl_verify_peer = false
curl.ssl_verify_host = false
end
if global_opts[:verbose]
puts results['uuid']
end
end
-
-
# [--no-wait] Make only as much progress as possible without entering
# a sleep/poll loop.
#
-# [--no-reuse-finished] Do not reuse existing outputs to satisfy
-# pipeline components. Always submit a new job
-# or use an existing job which has not yet
-# finished.
-#
# [--no-reuse] Do not reuse existing jobs to satisfy pipeline
# components. Submit a new job for every component.
#
"Do not wait for jobs to finish. Just look up status, submit new jobs if needed, and exit.",
:short => :none,
:type => :boolean)
- opt(:no_reuse_finished,
- "Do not reuse existing outputs to satisfy pipeline components. Always submit a new job or use an existing job which has not yet finished.",
- :short => :none,
- :type => :boolean)
opt(:no_reuse,
"Do not reuse existing jobs to satisfy pipeline components. Submit a new job for every component.",
:short => :none,
if j.is_a? Hash and j[:uuid]
@cache[j[:uuid]] = j
else
- debuglog "create job: #{j[:errors] rescue nil}", 0
+ debuglog "create job: #{j[:errors] rescue nil} with attribute #{attributes}", 0
nil
end
end
moretodo = false
@components.each do |cname, c|
job = nil
- if !c[:job] and
- c[:script_parameters].select { |pname, p| p.is_a? Hash }.empty?
- # Job is fully specified (all parameter values are present) but
- # no particular job has been found.
-
- debuglog "component #{cname} ready to satisfy."
-
- c.delete :wait
- second_place_job = nil # satisfies component, but not finished yet
-
- (@options[:no_reuse] ? [] : JobCache.
- where(script: c[:script],
- script_parameters: c[:script_parameters],
- script_version_descends_from: c[:script_version])
- ).each do |candidate_job|
- candidate_params_downcase = Hash[candidate_job[:script_parameters].
- map { |k,v| [k.downcase,v] }]
- c_params_downcase = Hash[c[:script_parameters].
- map { |k,v| [k.downcase,v] }]
-
- debuglog "component #{cname} considering job #{candidate_job[:uuid]} version #{candidate_job[:script_version]} parameters #{candidate_params_downcase.inspect}", 3
-
- unless candidate_params_downcase == c_params_downcase
- next
- end
-
- if c[:script_version] !=
- candidate_job[:script_version][0,c[:script_version].length]
- debuglog "component #{cname} would be satisfied by job #{candidate_job[:uuid]} if script_version matched.", 2
- next
- end
- unless candidate_job[:success] || candidate_job[:running] ||
- (!candidate_job[:started_at] && !candidate_job[:cancelled_at])
- debuglog "component #{cname} would be satisfied by job #{candidate_job[:uuid]} if it were running or successful.", 2
- next
- end
-
- if candidate_job[:success]
- unless @options[:no_reuse_finished]
- job = candidate_job
- $stderr.puts "using #{job[:uuid]} (finished at #{job[:finished_at]}) for component #{cname}"
- c[:job] = job
- end
- else
- second_place_job ||= candidate_job
- end
- break
- end
- if not c[:job] and second_place_job
- job = second_place_job
- $stderr.puts "using #{job[:uuid]} (running since #{job[:started_at]}) for component #{cname}"
+ if !c[:job] and
+ c[:script_parameters].select { |pname, p| p.is_a? Hash and p[:output_of]}.empty?
+      # No job yet associated with this component, and its inputs
+      # are fully specified (any output_of script_parameters have been
+      # resolved to real values)
+ job = JobCache.create({:script => c[:script],
+ :script_parameters => c[:script_parameters],
+ :script_version => c[:script_version],
+ :repository => c[:repository],
+ :minimum_script_version => c[:minimum_script_version],
+                               :exclude_script_versions => c[:exclude_script_versions],
+ :nondeterministic => c[:nondeterministic],
+ :no_reuse => @options[:no_reuse]})
+ if job
+ debuglog "component #{cname} new job #{job[:uuid]}"
c[:job] = job
+ else
+ debuglog "component #{cname} new job failed"
end
- if not c[:job]
- debuglog "component #{cname} not satisfied by any existing job."
- if !@options[:dry_run]
- debuglog "component #{cname} new job."
- job = JobCache.create(:script => c[:script],
- :script_parameters => c[:script_parameters],
- :runtime_constraints => c[:runtime_constraints] || {},
- :script_version => c[:script_version] || 'master')
- if job
- debuglog "component #{cname} new job #{job[:uuid]}"
- c[:job] = job
- else
- debuglog "component #{cname} new job failed"
- end
- end
- end
- else
- c[:wait] = true
end
+
if c[:job] and c[:job][:uuid]
if (c[:job][:running] or
not (c[:job][:finished_at] or c[:job][:cancelled_at]))
- c[:job] = JobCache.get(c[:job][:uuid])
+ # Job is running so update copy of job record
+ c[:job] = JobCache.get(c[:job][:uuid])
end
+
if c[:job][:success]
# Populate script_parameters of other components waiting for
# this job
end
elsif c[:job][:running] ||
(!c[:job][:started_at] && !c[:job][:cancelled_at])
+ # Job is still running
moretodo = true
elsif c[:job][:cancelled_at]
debuglog "component #{cname} job #{c[:job][:uuid]} cancelled."
ended += 1
if c[:job][:success] == true
succeeded += 1
+ elsif c[:job][:success] == false
+ failed += 1
end
end
end
end
- if ended == @components.length
+ if ended == @components.length or failed > 0
@instance[:active] = false
@instance[:success] = (succeeded == @components.length)
end
Arvados API authorization token to use during the course of the job.
+=item --no-clear-tmp
+
+Do not clear per-job/task temporary directories during initial job
+setup. This can speed up development and debugging when running jobs
+locally.
+
=back
=head1 RUNNING JOBS LOCALLY
use IPC::Open2;
use IO::Select;
use File::Temp;
+use Fcntl ':flock';
$ENV{"TMPDIR"} ||= "/tmp";
unless (defined $ENV{"CRUNCH_TMP"}) {
my $git_dir;
my $jobspec;
my $job_api_token;
+my $no_clear_tmp;
my $resume_stash;
GetOptions('force-unlock' => \$force_unlock,
'git-dir=s' => \$git_dir,
'job=s' => \$jobspec,
'job-api-token=s' => \$job_api_token,
+ 'no-clear-tmp' => \$no_clear_tmp,
'resume-stash=s' => \$resume_stash,
);
}
+if (!$have_slurm)
+{
+ must_lock_now("$ENV{CRUNCH_TMP}/.lock", "a job is already running here.");
+}
+
+
my $build_script;
my $skip_install = ($local_job && $Job->{script_version} =~ m{^/});
if ($skip_install)
{
+ if (!defined $no_clear_tmp) {
+ my $clear_tmp_cmd = 'rm -rf $JOB_WORK $CRUNCH_TMP/opt $CRUNCH_TMP/src*';
+ system($clear_tmp_cmd) == 0
+ or croak ("`$clear_tmp_cmd` failed: ".($?>>8));
+ }
$ENV{"CRUNCH_SRC"} = $Job->{script_version};
for my $src_path ("$ENV{CRUNCH_SRC}/arvados/sdk/python") {
if (-d $src_path) {
Log (undef, "Install revision ".$Job->{script_version});
my $nodelist = join(",", @node);
- # Clean out crunch_tmp/work, crunch_tmp/opt, crunch_tmp/src*
+ if (!defined $no_clear_tmp) {
+ # Clean out crunch_tmp/work, crunch_tmp/opt, crunch_tmp/src*
- my $cleanpid = fork();
- if ($cleanpid == 0)
- {
- srun (["srun", "--nodelist=$nodelist", "-D", $ENV{'TMPDIR'}],
- ['bash', '-c', 'if mount | grep -q $JOB_WORK/; then sudo /bin/umount $JOB_WORK/* 2>/dev/null; fi; sleep 1; rm -rf $JOB_WORK $CRUNCH_TMP/opt $CRUNCH_TMP/src*']);
- exit (1);
- }
- while (1)
- {
- last if $cleanpid == waitpid (-1, WNOHANG);
- freeze_if_want_freeze ($cleanpid);
- select (undef, undef, undef, 0.1);
+ my $cleanpid = fork();
+ if ($cleanpid == 0)
+ {
+ srun (["srun", "--nodelist=$nodelist", "-D", $ENV{'TMPDIR'}],
+ ['bash', '-c', 'if mount | grep -q $JOB_WORK/; then sudo /bin/umount $JOB_WORK/* 2>/dev/null; fi; sleep 1; rm -rf $JOB_WORK $CRUNCH_TMP/opt $CRUNCH_TMP/src*']);
+ exit (1);
+ }
+ while (1)
+ {
+ last if $cleanpid == waitpid (-1, WNOHANG);
+ freeze_if_want_freeze ($cleanpid);
+ select (undef, undef, undef, 0.1);
+ }
+ Log (undef, "Clean-work-dir exited $?");
}
- Log (undef, "Clean-work-dir exited $?");
# Install requested code version
Log (undef, "Install exited $?");
}
+if (!$have_slurm)
+{
+ # Grab our lock again (we might have deleted and re-created CRUNCH_TMP above)
+ must_lock_now("$ENV{CRUNCH_TMP}/.lock", "a job is already running here.");
+}
+
foreach (qw (script script_version script_parameters runtime_constraints))
}
$ENV{"TASK_SLOT_NODE"} = $slot[$childslot]->{node}->{name};
$ENV{"TASK_SLOT_NUMBER"} = $slot[$childslot]->{cpu};
- $ENV{"TASK_WORK"} = $ENV{"JOB_WORK"}."/".$slot[$childslot]->{cpu};
+ $ENV{"TASK_WORK"} = $ENV{"JOB_WORK"}."/$id.$$";
$ENV{"TASK_KEEPMOUNT"} = $ENV{"TASK_WORK"}."/keep";
$ENV{"TASK_TMPDIR"} = $ENV{"TASK_WORK"}; # deprecated
$ENV{"CRUNCH_NODE_SLOTS"} = $slot[$childslot]->{node}->{ncpus};
Log (undef, "backing off node " . $slot[$slotid]->{node}->{name} . " for 60 seconds");
}
+sub must_lock_now
+{
+ my ($lockfile, $error_message) = @_;
+ open L, ">", $lockfile or croak("$lockfile: $!");
+ if (!flock L, LOCK_EX|LOCK_NB) {
+ croak("Can't lock $lockfile: $error_message\n");
+ }
+}
+
__DATA__
#!/usr/bin/perl
def load_filters_param
if params[:filters].is_a? Array
@filters = params[:filters]
- elsif params[:filters].is_a? String
+ elsif params[:filters].is_a? String and !params[:filters].empty?
begin
@filters = Oj.load params[:filters]
raise unless @filters.is_a? Array
where(*conditions)
end
end
+
if params[:limit]
begin
- @objects = @objects.limit(params[:limit].to_i)
+ @limit = params[:limit].to_i
rescue
raise ArgumentError.new("Invalid value for limit parameter")
end
else
- @objects = @objects.limit(100)
+ @limit = 100
end
+ @objects = @objects.limit(@limit)
+
+ if params[:offset]
+ begin
+ @objects = @objects.offset(params[:offset].to_i)
+ @offset = params[:offset].to_i
+ rescue
+      raise ArgumentError.new("Invalid value for offset parameter")
+ end
+ else
+ @offset = 0
+ end
+
orders = []
if params[:order]
params[:order].split(',').each do |order|
:kind => "arvados##{(@response_resource_name || resource_name).camelize(:lower)}List",
:etag => "",
:self_link => "",
- :next_page_token => "",
- :next_link => "",
+ :offset => @offset,
+ :limit => @limit,
:items => @objects.as_api_response(nil)
}
if @objects.respond_to? :except
- @object_list[:items_available] = @objects.except(:limit).count
+ @object_list[:items_available] = @objects.except(:limit).except(:offset).count
end
render json: @object_list
end
skip_before_filter :find_object_by_uuid, :only => :queue
skip_before_filter :render_404_if_no_object, :only => :queue
- def index
- return super unless @where.is_a? Hash
- want_ancestor = @where[:script_version_descends_from]
- if want_ancestor
- # Check for missing commit_ancestor rows, and create them if
- # possible.
- @objects.
- dup.
- includes(:commit_ancestors). # I wish Rails would let me
- # specify here which
- # commit_ancestors I am
- # interested in.
- each do |o|
- if o.commit_ancestors.
- select { |ca| ca.ancestor == want_ancestor }.
- empty? and !o.script_version.nil?
- begin
- o.commit_ancestors << CommitAncestor.find_or_create_by_descendant_and_ancestor(o.script_version, want_ancestor)
- rescue
+ def create
+ [:repository, :script, :script_version, :script_parameters].each do |r|
+ if !resource_attrs[r]
+ return render json: {
+ :error => "#{r} attribute must be specified"
+ }, status: :unprocessable_entity
+ end
+ end
+
+ r = Commit.find_commit_range(current_user,
+ resource_attrs[:repository],
+ resource_attrs[:minimum_script_version],
+ resource_attrs[:script_version],
+ resource_attrs[:exclude_script_versions])
+ if !resource_attrs[:nondeterministic] and !resource_attrs[:no_reuse]
+ # Search for jobs where the script_version is in the list of commits
+ # returned by find_commit_range
+ @object = nil
+ Job.readable_by(current_user).where(script: resource_attrs[:script],
+ script_version: r).
+ each do |j|
+ if j.nondeterministic != true and
+ j.success != false and
+ j.script_parameters == resource_attrs[:script_parameters]
+ # Record the first job in the list
+ if !@object
+ @object = j
+ end
+ # Ensure that all candidate jobs actually did produce the same output
+ if @object.output != j.output
+ @object = nil
+ break
end
end
- o.commit_ancestors.
- select { |ca| ca.ancestor == want_ancestor }.
- select(&:is).
- first
+ if @object
+ return show
+ end
end
- # Now it is safe to do an .includes().where() because we are no
- # longer interested in jobs that have other ancestors but not
- # want_ancestor.
- @objects = @objects.
- includes(:commit_ancestors).
- where('commit_ancestors.ancestor = ? and commit_ancestors.is = ?',
- want_ancestor, true)
end
+ if r
+ resource_attrs[:script_version] = r[0]
+ end
+
+ # Don't pass these on to activerecord
+ resource_attrs.delete(:minimum_script_version)
+ resource_attrs.delete(:exclude_script_versions)
+ resource_attrs.delete(:no_reuse)
super
end
@where.merge!({
started_at: nil,
is_locked_by_uuid: nil,
- cancelled_at: nil
+ cancelled_at: nil,
+ success: nil
})
params[:order] ||= 'priority desc, created_at'
find_objects_for_index
class Arvados::V1::SchemaController < ApplicationController
+ skip_before_filter :find_objects_for_index
skip_before_filter :find_object_by_uuid
skip_before_filter :render_404_if_no_object
skip_before_filter :require_auth_scope_all
- def show
- classes = Rails.cache.fetch 'arvados_v1_schema' do
- Rails.application.eager_load!
- classes = {}
- ActiveRecord::Base.descendants.reject(&:abstract_class?).each do |k|
- classes[k] = k.columns.collect do |col|
- if k.serialized_attributes.has_key? col.name
- { name: col.name,
- type: k.serialized_attributes[col.name].object_class.to_s }
- else
- { name: col.name,
- type: col.type }
- end
- end
- end
- classes
- end
- render json: classes
- end
-
- def discovery_rest_description
+ def index
expires_in 24.hours, public: true
discovery = Rails.cache.fetch 'arvados_v1_rest_discovery' do
Rails.application.eager_load!
generatedAt: Time.now.iso8601,
title: "Arvados API",
description: "The API to interact with Arvados.",
- documentationLink: "https://redmine.clinicalfuture.com/projects/arvados/",
+ documentationLink: "http://doc.arvados.org/api/index.html",
protocol: "rest",
baseUrl: root_url + "/arvados/v1/",
basePath: "/arvados/v1/",
minimum: 0,
location: "query",
},
+ offset: {
+ type: "integer",
+ description: "Number of #{k.to_s.underscore.pluralize} to skip before first returned record.",
+ default: 0,
+ format: "int32",
+ minimum: 0,
+ location: "query",
+ },
filters: {
type: "array",
description: "Conditions for filtering #{k.to_s.underscore.pluralize}.",
if httpMethod and
route.defaults[:controller] == 'arvados/v1/' + k.to_s.underscore.pluralize and
!d_methods[action.to_sym] and
- ctl_class.action_methods.include? action
+ ctl_class.action_methods.include? action and
+ ![:show, :index, :destroy].include?(action.to_sym)
method = {
id: "arvados.#{k.to_s.underscore.pluralize}.#{action}",
path: route.path.spec.to_s.sub('/arvados/v1/','').sub('(.:format)','').sub(/:(uu)?id/,'{uuid}'),
class Arvados::V1::UsersController < ApplicationController
skip_before_filter :find_object_by_uuid, only:
- [:activate, :event_stream, :current, :system]
+ [:activate, :event_stream, :current, :system, :setup]
skip_before_filter :render_404_if_no_object, only:
- [:activate, :event_stream, :current, :system]
-
+ [:activate, :event_stream, :current, :system, :setup]
+ before_filter :admin_required, only: [:setup, :unsetup]
+
def current
@object = current_user
show
end
show
end
+
+ # create user object and all the needed links
+ def setup
+ @object = nil
+ if params[:uuid]
+ @object = User.find_by_uuid params[:uuid]
+ if !@object
+ return render_404_if_no_object
+ end
+ object_found = true
+ else
+ if !params[:user]
+ raise ArgumentError.new "Required uuid or user"
+ else
+ if params[:user]['uuid']
+ @object = User.find_by_uuid params[:user]['uuid']
+ if @object
+ object_found = true
+ end
+ end
+
+ if !@object
+ if !params[:user]['email']
+ raise ArgumentError.new "Require user email"
+ end
+
+ if !params[:openid_prefix]
+ raise ArgumentError.new "Required openid_prefix parameter is missing."
+ end
+
+ @object = model_class.create! resource_attrs
+ end
+ end
+ end
+
+ if object_found
+ @response = @object.setup_repo_vm_links params[:repo_name], params[:vm_uuid]
+ else
+ @response = User.setup @object, params[:openid_prefix],
+ params[:repo_name], params[:vm_uuid]
+ end
+
+ render json: { kind: "arvados#HashList", items: @response }
+ end
+
+ # delete user agreements, vm, repository, login links; set state to inactive
+ def unsetup
+ reload_object_before_update
+ @object.unsetup
+ show
+ end
+
end
class Commit < ActiveRecord::Base
require 'shellwords'
- # Make sure the specified commit really exists, and return the full
- # sha1 commit hash.
- #
- # Accepts anything "git rev-list" accepts, optionally (and
- # preferably) preceded by "repo_name:".
- #
- # Examples: "1234567", "master", "apps:1234567", "apps:master",
- # "apps:HEAD"
-
- def self.find_by_commit_ish(commit_ish)
- want_repo = nil
- if commit_ish.index(':')
- want_repo, commit_ish = commit_ish.split(':',2)
+ def self.git_check_ref_format(e)
+ if !e or e.empty? or e[0] == '-' or e[0] == '$'
+ # definitely not valid
+ false
+ else
+ `git check-ref-format --allow-onelevel #{e.shellescape}`
+ $?.success?
end
- repositories.each do |repo_name, repo|
- next if want_repo and want_repo != repo_name
- ENV['GIT_DIR'] = repo[:git_dir]
- IO.foreach("|git rev-list --max-count=1 --format=oneline 'origin/'#{commit_ish.shellescape} 2>/dev/null || git rev-list --max-count=1 --format=oneline ''#{commit_ish.shellescape}") do |line|
- sha1, message = line.strip.split " ", 2
- next if sha1.length != 40
- begin
- Commit.find_or_create_by_repository_name_and_sha1_and_message(repo_name, sha1, message[0..254])
- rescue
- logger.warn "find_or_create failed: repo_name #{repo_name} sha1 #{sha1} message #{message[0..254]}"
- # Ignore cache failure. Commit is real. We should proceed.
+ end
+
+  def self.find_commit_range(current_user, repository, minimum, maximum, exclude)
+    # Normalize the bounds before validating them: an empty minimum means
+    # "no lower bound" and a missing maximum defaults to HEAD. Validating
+    # first would reject these legitimate values, because
+    # git_check_ref_format returns false for nil/empty strings.
+    if minimum and minimum.empty?
+      minimum = nil
+    end
+
+    if !maximum
+      maximum = "HEAD"
+    end
+
+    if (minimum and !git_check_ref_format(minimum)) or !git_check_ref_format(maximum)
+      logger.warn "find_commit_range called with invalid minimum or maximum: '#{minimum}', '#{maximum}'"
+      return nil
+    end
+
+ # Get list of actual repository directories under management
+ on_disk_repos = repositories
+
+ # Get list of repository objects readable by user
+ readable = Repository.readable_by(current_user)
+
+ # filter repository objects on requested repository name
+ if repository
+ readable = readable.where(name: repository)
+ end
+
+ commits = []
+ readable.each do |r|
+ if on_disk_repos[r.name]
+ ENV['GIT_DIR'] = on_disk_repos[r.name][:git_dir]
+
+ # We've filtered for invalid characters, so we can pass the contents of
+ # minimum and maximum safely on the command line
+
+ # Get the commit hash for the upper bound
+ max_hash = nil
+ IO.foreach("|git rev-list --max-count=1 #{maximum.shellescape}") do |line|
+ max_hash = line.strip
+ end
+
+ # If not found or string is invalid, nothing else to do
+ next if !max_hash or !git_check_ref_format(max_hash)
+
+ resolved_exclude = nil
+ if exclude
+ resolved_exclude = []
+ exclude.each do |e|
+ if git_check_ref_format(e)
+ IO.foreach("|git rev-list --max-count=1 #{e.shellescape}") do |line|
+ resolved_exclude.push(line.strip)
+ end
+ else
+            logger.warn "find_commit_range called with invalid characters in exclude: '#{exclude}'"
+ return nil
+ end
+ end
+ end
+
+ if minimum
+ # Get the commit hash for the lower bound
+ min_hash = nil
+ IO.foreach("|git rev-list --max-count=1 #{minimum.shellescape}") do |line|
+ min_hash = line.strip
+ end
+
+ # If not found or string is invalid, nothing else to do
+ next if !min_hash or !git_check_ref_format(min_hash)
+
+ # Now find all commits between them
+ IO.foreach("|git rev-list #{min_hash.shellescape}..#{max_hash.shellescape}") do |line|
+ hash = line.strip
+ commits.push(hash) if !resolved_exclude or !resolved_exclude.include? hash
+ end
+
+ commits.push(min_hash) if !resolved_exclude or !resolved_exclude.include? min_hash
+ else
+ commits.push(max_hash) if !resolved_exclude or !resolved_exclude.include? max_hash
end
- return sha1
end
end
- nil
+
+ if !commits or commits.empty?
+ nil
+ else
+ commits
+ end
end
# Import all commits from configured git directory into the commits
end
end
+ def self.refresh_repositories
+ @repositories = nil
+ end
+
protected
def self.repositories
next if repo.match /^\./
git_dir = File.join(@gitdirbase,
repo.match(/\.git$/) ? repo : File.join(repo, '.git'))
+ next if git_dir == Rails.configuration.git_internal_dir
repo_name = repo.sub(/\.git$/, '')
@repositories[repo_name] = {git_dir: git_dir}
end
t.add :dependencies
t.add :log_stream_href
t.add :log_buffer
+ t.add :nondeterministic
+ t.add :repository
end
def assert_finished
end
def self.queue
- self.where('started_at is ? and is_locked_by_uuid is ? and cancelled_at is ?',
- nil, nil, nil).
+ self.where('started_at is ? and is_locked_by_uuid is ? and cancelled_at is ? and success is ?',
+ nil, nil, nil, nil).
+ order('priority desc, created_at')
+ end
+
+ def self.running
+ self.where('running = ?', true).
order('priority desc, created_at')
end
return true
end
if new_record? or script_version_changed?
- sha1 = Commit.find_by_commit_ish(self.script_version) rescue nil
+ sha1 = Commit.find_commit_range(current_user, nil, nil, self.script_version, nil)[0] rescue nil
if sha1
self.script_version = sha1
else
def permission_to_update
if is_locked_by_uuid_was and !(current_user and
- current_user.uuid == is_locked_by_uuid_was)
+ (current_user.uuid == is_locked_by_uuid_was or
+ current_user.uuid == system_user.uuid))
if script_changed? or
script_parameters_changed? or
script_version_changed? or
end
end
+ def self.setup(user, openid_prefix, repo_name=nil, vm_uuid=nil)
+ login_perm_props = {identity_url_prefix: openid_prefix}
+
+ # Check oid_login_perm
+ oid_login_perms = Link.where(tail_uuid: user.email,
+ head_kind: 'arvados#user',
+ link_class: 'permission',
+ name: 'can_login')
+
+ if !oid_login_perms.any?
+ # create openid login permission
+ oid_login_perm = Link.create(link_class: 'permission',
+ name: 'can_login',
+ tail_kind: 'email',
+ tail_uuid: user.email,
+ head_kind: 'arvados#user',
+ head_uuid: user.uuid,
+ properties: login_perm_props
+ )
+ logger.info { "openid login permission: " + oid_login_perm[:uuid] }
+ else
+ oid_login_perm = oid_login_perms.first
+ end
+
+ return [oid_login_perm] + user.setup_repo_vm_links(repo_name, vm_uuid)
+ end
+
+ # create links
+ def setup_repo_vm_links(repo_name, vm_uuid)
+ repo_perm = create_user_repo_link repo_name
+ vm_login_perm = create_vm_login_permission_link vm_uuid, repo_name
+ group_perm = create_user_group_link
+
+ return [repo_perm, vm_login_perm, group_perm, self].compact
+ end
+
+ # delete user signatures, login, repo, and vm perms, and mark as inactive
+ def unsetup
+ # delete oid_login_perms for this user
+ oid_login_perms = Link.where(tail_uuid: self.email,
+ head_kind: 'arvados#user',
+ link_class: 'permission',
+ name: 'can_login')
+ oid_login_perms.each do |perm|
+ Link.delete perm
+ end
+
+ # delete repo_perms for this user
+ repo_perms = Link.where(tail_uuid: self.uuid,
+ head_kind: 'arvados#repository',
+ link_class: 'permission',
+ name: 'can_write')
+ repo_perms.each do |perm|
+ Link.delete perm
+ end
+
+ # delete vm_login_perms for this user
+ vm_login_perms = Link.where(tail_uuid: self.uuid,
+ head_kind: 'arvados#virtualMachine',
+ link_class: 'permission',
+ name: 'can_login')
+ vm_login_perms.each do |perm|
+ Link.delete perm
+ end
+
+ # delete any signatures by this user
+ signed_uuids = Link.where(link_class: 'signature',
+ tail_kind: 'arvados#user',
+ tail_uuid: self.uuid)
+ signed_uuids.each do |sign|
+ Link.delete sign
+ end
+
+ # mark the user as inactive
+ self.is_active = false
+ self.save!
+ end
+
protected
def permission_to_update
upstream_path.delete start
merged
end
+
+ def create_user_repo_link(repo_name)
+ # repo_name is optional
+ if not repo_name
+ logger.warn ("Repository name not given for #{self.uuid}.")
+ return
+ end
+
+ # Check for an existing repository with the same name we're about to use.
+ repo = Repository.where(name: repo_name).first
+
+ if repo
+ logger.warn "Repository exists for #{repo_name}: #{repo[:uuid]}."
+
+ # Look for existing repository access for this repo
+ repo_perms = Link.where(tail_uuid: self.uuid,
+ head_kind: 'arvados#repository',
+ head_uuid: repo[:uuid],
+ link_class: 'permission',
+ name: 'can_write')
+ if repo_perms.any?
+ logger.warn "User already has repository access " +
+ repo_perms.collect { |p| p[:uuid] }.inspect
+ return repo_perms.first
+ end
+ end
+
+ # create repo, if does not already exist
+ repo ||= Repository.create(name: repo_name)
+ logger.info { "repo uuid: " + repo[:uuid] }
+
+ repo_perm = Link.create(tail_kind: 'arvados#user',
+ tail_uuid: self.uuid,
+ head_kind: 'arvados#repository',
+ head_uuid: repo[:uuid],
+ link_class: 'permission',
+ name: 'can_write')
+ logger.info { "repo permission: " + repo_perm[:uuid] }
+ return repo_perm
+ end
+
+ # create login permission for the given vm_uuid, if it does not already exist
+ def create_vm_login_permission_link(vm_uuid, repo_name)
+ begin
+
+ # vm uuid is optional
+ if vm_uuid
+ vm = VirtualMachine.where(uuid: vm_uuid).first
+
+ if not vm
+ logger.warn "Could not find virtual machine for #{vm_uuid.inspect}"
+ raise "No vm found for #{vm_uuid}"
+ end
+ else
+ return
+ end
+
+ logger.info { "vm uuid: " + vm[:uuid] }
+
+ login_perms = Link.where(tail_uuid: self.uuid,
+ head_uuid: vm[:uuid],
+ head_kind: 'arvados#virtualMachine',
+ link_class: 'permission',
+ name: 'can_login')
+ if !login_perms.any?
+ login_perm = Link.create(tail_kind: 'arvados#user',
+ tail_uuid: self.uuid,
+ head_kind: 'arvados#virtualMachine',
+ head_uuid: vm[:uuid],
+ link_class: 'permission',
+ name: 'can_login',
+ properties: {username: repo_name})
+ logger.info { "login permission: " + login_perm[:uuid] }
+ else
+ login_perm = login_perms.first
+ end
+
+ return login_perm
+ end
+ end
+
+ # add the user to the 'All users' group
+ def create_user_group_link
+ # Look up the "All users" group (we expect uuid *-*-fffffffffffffff).
+ group = Group.where(name: 'All users').select do |g|
+ g[:uuid].match /-f+$/
+ end.first
+
+ if not group
+ logger.warn "No 'All users' group with uuid '*-*-fffffffffffffff'."
+ raise "No 'All users' group with uuid '*-*-fffffffffffffff' is found"
+ else
+ logger.info { "\"All users\" group uuid: " + group[:uuid] }
+
+ group_perms = Link.where(tail_uuid: self.uuid,
+ head_uuid: group[:uuid],
+ head_kind: 'arvados#group',
+ link_class: 'permission',
+ name: 'can_read')
+
+ if !group_perms.any?
+ group_perm = Link.create(tail_kind: 'arvados#user',
+ tail_uuid: self.uuid,
+ head_kind: 'arvados#group',
+ head_uuid: group[:uuid],
+ link_class: 'permission',
+ name: 'can_read')
+ logger.info { "group permission: " + group_perm[:uuid] }
+ else
+ group_perm = group_perms.first
+ end
+
+ return group_perm
+ end
+ end
+
end
secret_token: ~
uuid_prefix: <%= Digest::MD5.hexdigest(`hostname`).to_i(16).to_s(36)[0..4] %>
- git_repositories_dir: /var/cache/git
+ # Git repositories must be readable by api server, or you won't be
+ # able to submit crunch jobs. To pass the test suites, put a clone
+ # of the arvados tree in {git_repositories_dir}/arvados.git or
+ # {git_repositories_dir}/arvados/.git
+ git_repositories_dir: /var/lib/arvados/git
+
+ # This is a (bare) repository that stores commits used in jobs. When a job
+ # runs, the source commits are first fetched into this repository, then this
+ # repository is used to deploy to compute nodes. This should NOT be a
+ # subdirectory of {git_repositiories_dir}.
+ git_internal_dir: /var/lib/arvados/internal.git
# :none or :slurm_immediate
crunch_job_wrapper: :none
secret_token: <%= rand(2**512).to_s(36) %>
common:
-
- # Git repositories must be readable by api server, or you won't be
- # able to submit crunch jobs. To pass the test suites, put a clone
- # of the arvados tree in {git_repositories_dir}/arvados.git or
- # {git_repositories_dir}/arvados/.git
- #
#git_repositories_dir: /var/cache/git
+ #git_internal_dir: /var/cache/arvados/internal.git
# Initialize the rails application
Server::Application.initialize!
+begin
+ Rails.cache.clear
+rescue Errno::ENOENT => e
+ # Cache directory does not exist? Then cache is clear, proceed.
+ Rails.logger.warn "In Rails.cache.clear, ignoring #{e.inspect}"
+end
config.force_ssl = false
- config.git_repositories_dir = '/var/cache/git'
-
- config.crunch_job_wrapper = :none
- config.crunch_job_user = 'crunch' # if false, do not set uid when running jobs
-
- # The web service must be able to create/write this file, and
- # crunch-job must be able to stat() it.
- config.crunch_refresh_trigger = '/tmp/crunch_refresh_trigger'
-
- # config.dnsmasq_conf_dir = '/etc/dnsmasq.d'
-
- # config.compute_node_ami = 'ami-cbca41a2'
- # config.compute_node_ec2run_args = '-g arvados-compute'
- # config.compute_node_spot_bid = 0.11
-
- # config.compute_node_domain = `hostname --domain`.strip
-
- # config.compute_node_nameservers = ['1.2.3.4', '1.2.3.5']
- config.compute_node_nameservers = ['192.168.201.3']
-
- config.uuid_prefix('development@' + `hostname`.strip)
-
- # Authentication stub: hard code pre-approved API tokens.
- # config.accept_api_token = { rand(2**256).to_s(36) => true }
- config.accept_api_token = {}
-
- config.new_users_are_active = false
- config.admin_notifier_email_from = 'arvados@example.com'
- config.email_subject_prefix = '[ARVADOS] '
-
- # Visitors to the API server will be redirected to the workbench
- config.workbench_address = "http://localhost:3000/"
-
- # The e-mail address of the user you would like to become marked as an admin
- # user on their first login.
- # In the default configuration, authentication happens through the Arvados SSO
- # server, which uses openid against Google's servers, so in that case this
- # should be an address associated with a Google account.
- config.auto_admin_user = ''
end
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
- config.git_repositories_dir = '/var/cache/git'
-
- config.crunch_job_wrapper = :slurm_immediate
- config.crunch_job_user = 'crunch' # if false, do not set uid when running jobs
-
- # The web service must be able to create/write this file, and
- # crunch-job must be able to stat() it.
- config.crunch_refresh_trigger = '/tmp/crunch_refresh_trigger'
-
- # config.dnsmasq_conf_dir = '/etc/dnsmasq.d'
-
- # config.compute_node_ami = 'ami-cbca41a2'
- # config.compute_node_ec2run_args = '-g arvados-compute'
- # config.compute_node_spot_bid = 0.11
-
- # config.compute_node_domain = `hostname --domain`.strip
-
- # config.compute_node_nameservers = ['1.2.3.4', '1.2.3.5']
- require 'net/http'
- config.compute_node_nameservers = ['local', 'public'].collect do |iface|
- Net::HTTP.get(URI("http://169.254.169.254/latest/meta-data/#{iface}-ipv4")).match(/^[\d\.]+$/)[0]
- end << '172.16.0.23'
-
- config.uuid_prefix = Digest::MD5.hexdigest('cfi-aws-0').to_i(16).to_s(36)[0..4] # '9ujm1'
-
- # Authentication stub: hard code pre-approved API tokens.
- # config.accept_api_token = { rand(2**256).to_s(36) => true }
- config.accept_api_token = {}
-
- config.new_users_are_active = false
- config.admin_notifier_email_from = 'arvados@example.com'
- config.email_subject_prefix = '[ARVADOS] '
-
- # Visitors to the API server will be redirected to the workbench
- config.workbench_address = "http://workbench." + `hostname`
-
- # The e-mail address of the user you would like to become marked as an admin
- # user on their first login.
- # In the default configuration, authentication happens through the Arvados SSO
- # server, which uses openid against Google's servers, so in that case this
- # should be an address associated with a Google account.
- config.auto_admin_user = ''
end
# Raise exception on mass assignment protection for Active Record models
config.active_record.mass_assignment_sanitizer = :strict
- config.git_repositories_dir = '/var/cache/git'
-
- config.crunch_job_wrapper = :slurm_immediate
- config.crunch_job_user = 'crunch' # if false, do not set uid when running jobs
-
- # The web service must be able to create/write this file, and
- # crunch-job must be able to stat() it.
- config.crunch_refresh_trigger = '/tmp/crunch_refresh_trigger_test'
-
- # config.dnsmasq_conf_dir = '/etc/dnsmasq.d'
-
- # config.compute_node_ami = 'ami-cbca41a2'
- # config.compute_node_ec2run_args = '-g arvados-compute'
- # config.compute_node_spot_bid = 0.11
- config.compute_node_ec2_tag_enable = false
-
- # config.compute_node_domain = `hostname --domain`.strip
-
# No need for SSL while testing
config.force_ssl = false
- # config.compute_node_nameservers = ['1.2.3.4', '1.2.3.5']
- config.compute_node_nameservers = [ "172.16.0.23" ]
-
- config.uuid_prefix = 'zzzzz'
-
- # Authentication stub: hard code pre-approved API tokens.
- # config.accept_api_token = { rand(2**256).to_s(36) => true }
- config.accept_api_token = {}
-
- config.new_users_are_active = false
- config.admin_notifier_email_from = 'arvados@example.com'
- config.email_subject_prefix = '[ARVADOS] '
-
- # Visitors to the API server will be redirected to the workbench
- config.workbench_address = "http://localhost:3000/"
-
- # The e-mail address of the user you would like to become marked as an admin
- # user on their first login.
- # In the default configuration, authentication happens through the Arvados SSO
- # server, which uses openid against Google's servers, so in that case this
- # should be an address associated with a Google account.
- config.auto_admin_user = ''
end
namespace :arvados do
namespace :v1 do
- match '/schema' => 'schema#show'
match '/nodes/:uuid/ping' => 'nodes#ping', :as => :ping_node
match '/keep_disks/ping' => 'keep_disks#ping', :as => :ping_keep_disk
match '/links/from/:tail_uuid' => 'links#index', :as => :arvados_v1_links_from
post '/jobs/:uuid/cancel' => 'jobs#cancel'
match '/users/:uuid/event_stream' => 'users#event_stream'
post '/users/:uuid/activate' => 'users#activate'
+ post '/users/setup' => 'users#setup'
+ post '/users/:uuid/unsetup' => 'users#unsetup'
match '/virtual_machines/get_all_logins' => 'virtual_machines#get_all_logins'
match '/virtual_machines/:uuid/logins' => 'virtual_machines#logins'
post '/api_client_authorizations/create_system_auth' => 'api_client_authorizations#create_system_auth'
match '/login', :to => 'user_sessions#login'
match '/logout', :to => 'user_sessions#logout'
- match '/discovery/v1/apis/arvados/v1/rest', :to => 'arvados/v1/schema#discovery_rest_description'
+ match '/discovery/v1/apis/arvados/v1/rest', :to => 'arvados/v1/schema#index'
match '/static/login_failure', :to => 'static#login_failure', :as => :login_failure
--- /dev/null
+class AddNondeterministicColumnToJob < ActiveRecord::Migration
+ def up
+ add_column :jobs, :nondeterministic, :boolean
+ end
+
+ def down
+ remove_column :jobs, :nondeterministic
+ end
+end
--- /dev/null
+class SeparateRepositoryFromScriptVersion < ActiveRecord::Migration
+ include CurrentApiClient
+
+ def fixup pt
+ c = pt.components
+ c.each do |k, v|
+ commit_ish = v["script_version"]
+ if commit_ish.andand.index(':')
+ want_repo, commit_ish = commit_ish.split(':',2)
+ v[:repository] = want_repo
+ v[:script_version] = commit_ish
+ end
+ end
+ pt.save!
+ end
+
+ def up
+ act_as_system_user do
+ PipelineTemplate.all.each do |pt|
+ fixup pt
+ end
+ PipelineInstance.all.each do |pt|
+ fixup pt
+ end
+ end
+ end
+
+ def down
+ raise ActiveRecord::IrreversibleMigration
+ end
+end
--- /dev/null
+class AddRepositoryColumnToJob < ActiveRecord::Migration
+ def up
+ add_column :jobs, :repository, :string
+ end
+
+ def down
+ remove_column :jobs, :repository
+ end
+end
#
# It's strongly recommended to check this file into your version control system.
-ActiveRecord::Schema.define(:version => 20140129184311) do
+ActiveRecord::Schema.define(:version => 20140321191343) do
create_table "api_client_authorizations", :force => true do |t|
t.string "api_token", :null => false
create_table "collections", :force => true do |t|
t.string "locator"
t.string "owner_uuid"
- t.datetime "created_at", :null => false
+ t.datetime "created_at"
t.string "modified_by_client_uuid"
t.string "modified_by_user_uuid"
t.datetime "modified_at"
t.string "redundancy_confirmed_by_client_uuid"
t.datetime "redundancy_confirmed_at"
t.integer "redundancy_confirmed_as"
- t.datetime "updated_at", :null => false
+ t.datetime "updated_at"
t.string "uuid"
t.text "manifest_text"
end
t.string "repository_name"
t.string "sha1"
t.string "message"
- t.datetime "created_at", :null => false
- t.datetime "updated_at", :null => false
+ t.datetime "created_at"
+ t.datetime "updated_at"
end
add_index "commits", ["repository_name", "sha1"], :name => "index_commits_on_repository_name_and_sha1", :unique => true
t.string "modified_by_user_uuid"
t.datetime "modified_at"
t.text "properties"
- t.datetime "created_at", :null => false
- t.datetime "updated_at", :null => false
+ t.datetime "created_at"
+ t.datetime "updated_at"
end
add_index "humans", ["uuid"], :name => "index_humans_on_uuid", :unique => true
t.boolean "running"
t.boolean "success"
t.string "output"
- t.datetime "created_at", :null => false
- t.datetime "updated_at", :null => false
+ t.datetime "created_at"
+ t.datetime "updated_at"
t.string "priority"
t.string "is_locked_by_uuid"
t.string "log"
t.text "tasks_summary"
t.text "runtime_constraints"
+ t.boolean "nondeterministic"
+ t.string "repository"
end
add_index "jobs", ["created_at"], :name => "index_jobs_on_created_at"
create_table "links", :force => true do |t|
t.string "uuid"
t.string "owner_uuid"
- t.datetime "created_at", :null => false
+ t.datetime "created_at"
t.string "modified_by_client_uuid"
t.string "modified_by_user_uuid"
t.datetime "modified_at"
t.string "name"
t.string "head_uuid"
t.text "properties"
- t.datetime "updated_at", :null => false
+ t.datetime "updated_at"
t.string "head_kind"
end
create_table "pipeline_instances", :force => true do |t|
t.string "uuid"
t.string "owner_uuid"
- t.datetime "created_at", :null => false
+ t.datetime "created_at"
t.string "modified_by_client_uuid"
t.string "modified_by_user_uuid"
t.datetime "modified_at"
t.text "components"
t.boolean "success"
t.boolean "active", :default => false
- t.datetime "updated_at", :null => false
+ t.datetime "updated_at"
t.text "properties"
end
desc 'Ensure site configuration has all required settings'
task check: :environment do
$application_config.sort.each do |k, v|
- $stderr.puts "%-32s %s" % [k, eval("Rails.configuration.#{k}")]
+ if ENV.has_key?('QUIET') then
+ # Make sure we still check for the variable to exist
+ eval("Rails.configuration.#{k}")
+ else
+ if /(password|secret)/.match(k) then
+ # Make sure we still check for the variable to exist, but don't print the value
+ eval("Rails.configuration.#{k}")
+ $stderr.puts "%-32s %s" % [k, '*********']
+ else
+ $stderr.puts "%-32s %s" % [k, eval("Rails.configuration.#{k}")]
+ end
+ end
end
end
end
raise "No CRUNCH_JOB_BIN env var, and crunch-job not in path."
end
+ require 'shellwords'
+
+ arvados_internal = Rails.configuration.git_internal_dir
+ if not File.exists? arvados_internal
+ $stderr.puts `mkdir -p #{arvados_internal.shellescape} && cd #{arvados_internal.shellescape} && git init --bare`
+ end
+
+ src_repo = File.join(Rails.configuration.git_repositories_dir, job.repository + '.git')
+ src_repo = File.join(Rails.configuration.git_repositories_dir, job.repository, '.git') unless File.exists? src_repo
+
+  unless File.exists? src_repo
+ $stderr.puts "dispatch: #{File.join Rails.configuration.git_repositories_dir, job.repository} doesn't exist"
+ sleep 1
+ untake(job)
+ next
+ end
+
+ $stderr.puts `cd #{arvados_internal.shellescape} && git fetch --no-tags #{src_repo.shellescape} && git tag #{job.uuid.shellescape} #{job.script_version.shellescape}`
+
cmd_args << crunch_job_bin
cmd_args << '--job-api-token'
cmd_args << job_auth.api_token
cmd_args << '--job'
cmd_args << job.uuid
-
- commit = Commit.where(sha1: job.script_version).first
- if commit
- cmd_args << '--git-dir'
- if File.exists?(File.
- join(Rails.configuration.git_repositories_dir,
- commit.repository_name + '.git'))
- cmd_args << File.
- join(Rails.configuration.git_repositories_dir,
- commit.repository_name + '.git')
- else
- cmd_args << File.
- join(Rails.configuration.git_repositories_dir,
- commit.repository_name, '.git')
- end
- end
+ cmd_args << '--git-dir'
+ cmd_args << arvados_internal
$stderr.puts "dispatch: #{cmd_args.join ' '}"
job_done = j_done[:job]
$stderr.puts "dispatch: child #{pid_done} exit"
$stderr.puts "dispatch: job #{job_done.uuid} end"
- $redis.publish job_done.uuid, "end"
# Ensure every last drop of stdout and stderr is consumed
read_pipes
# Wait the thread
j_done[:wait_thr].value
+ jobrecord = Job.find_by_uuid(job_done.uuid)
+ jobrecord.running = false
+ jobrecord.finished_at ||= Time.now
+ # Don't set 'jobrecord.success = false' because if the job failed to run due to an
+ # issue with crunch-job or slurm, we want the job to stay in the queue.
+ jobrecord.save!
+
# Invalidate the per-job auth token
j_done[:job_auth].update_attributes expires_at: Time.now
+ $redis.publish job_done.uuid, "end"
+
@running.delete job_done.uuid
end
def update_pipelines
+ expire_tokens = @pipe_auth_tokens.dup
@todo_pipelines.each do |p|
- pipe_auth = ApiClientAuthorization.
- new(user: User.where('uuid=?', p.modified_by_user_uuid).first,
- api_client_id: 0)
- pipe_auth.save
-
+ pipe_auth = (@pipe_auth_tokens[p.uuid] ||= ApiClientAuthorization.
+ create(user: User.where('uuid=?', p.modified_by_user_uuid).first,
+ api_client_id: 0))
puts `export ARVADOS_API_TOKEN=#{pipe_auth.api_token} && arv-run-pipeline-instance --run-here --no-wait --instance #{p.uuid}`
+ expire_tokens.delete p.uuid
+ end
+
+ expire_tokens.each do |k, v|
+ v.update_attributes expires_at: Time.now
+ @pipe_auth_tokens.delete k
end
end
def run
act_as_system_user
@running ||= {}
+ @pipe_auth_tokens ||= { }
$stderr.puts "dispatch: ready"
while !$signal[:term] or @running.size > 0
read_pipes
unless @todo.empty? or did_recently(:start_jobs, 1.0) or $signal[:term]
start_jobs
end
- unless @todo_pipelines.empty? or did_recently(:update_pipelines, 5.0)
+ unless (@todo_pipelines.empty? and @pipe_auth_tokens.empty?) or did_recently(:update_pipelines, 5.0)
update_pipelines
end
end
end
end
-
-
protected
def did_recently(thing, min_interval)
:config => File.expand_path("config.ru"),
:SSLEnable => true,
:SSLVerifyClient => OpenSSL::SSL::VERIFY_NONE,
- :SSLPrivateKey => OpenSSL::PKey::RSA.new(
- File.open("config/api.clinicalfuture.com.key.pem").read),
- :SSLCertificate => OpenSSL::X509::Certificate.new(
- File.open("config/api.clinicalfuture.com.crt.pem").read),
- :SSLCertName => [["CN", WEBrick::Utils::getservername]]
+ :SSLCertName => [["CN", "#{WEBrick::Utils::getservername} #{Time.now().to_s}"]]
})
end
end
-end
+end
######### /SSL
--- /dev/null
+#!/usr/bin/env ruby
+
+abort 'Error: Ruby >= 1.9.3 required.' if RUBY_VERSION < '1.9.3'
+
+require 'logger'
+require 'trollop'
+
+log = Logger.new STDERR
+log.progname = $0.split('/').last
+
+opts = Trollop::options do
+ banner ''
+ banner "Usage: #{log.progname} " +
+ "{user_uuid_or_email} {user_and_repo_name} {vm_uuid}"
+ banner ''
+ opt :debug, <<-eos
+Show debug messages.
+ eos
+ opt :openid_prefix, <<-eos, default: 'https://www.google.com/accounts/o8/id'
+If creating a new user record, require authentication from an OpenID \
+with this OpenID prefix *and* a matching email address in order to \
+claim the account.
+ eos
+end
+
+log.level = (ENV['DEBUG'] || opts.debug) ? Logger::DEBUG : Logger::WARN
+
+if ARGV.count != 3
+ Trollop::die "required arguments are missing"
+end
+
+user_arg, user_repo_name, vm_uuid = ARGV
+
+require 'arvados'
+arv = Arvados.new(api_version: 'v1')
+
+# Look up the given user by uuid or, failing that, email address.
+begin
+ found_user = arv.user.get(uuid: user_arg)
+rescue Arvados::TransactionFailedError
+ found = arv.user.list(where: {email: user_arg})[:items]
+
+ if found.count == 0
+ if !user_arg.match(/\w\@\w+\.\w+/)
+ abort "About to create new user, but #{user_arg.inspect} " +
+ "does not look like an email address. Stop."
+ end
+ elsif found.count != 1
+ abort "Found #{found.count} users with email. Stop."
+ else
+ found_user = found.first
+ end
+end
+
+# Invoke user setup method
+if (found_user)
+ user = arv.user.setup uuid: found_user[:uuid], repo_name: user_repo_name,
+ vm_uuid: vm_uuid, openid_prefix: opts.openid_prefix
+else
+ user = arv.user.setup user: {email: user_arg}, repo_name: user_repo_name,
+ vm_uuid: vm_uuid, openid_prefix: opts.openid_prefix
+end
+
+log.info {"user uuid: " + user[:uuid]}
+
+puts user.inspect
user_agreement:
- uuid: b519d9cb706a29fc7ea24dbea2f05851
+ uuid: b519d9cb706a29fc7ea24dbea2f05851+249025
owner_uuid: qr1hi-tpzed-tpj2ff66551eyym
created_at: 2013-12-26T19:22:54Z
modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
running: 0
done: 1
runtime_constraints: {}
+
+previous_job_run:
+ uuid: zzzzz-8i9sb-cjs4pklxxjykqqq
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ script: hash
+ script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
+ script_parameters:
+ input: fa7aeb5140e2848d39b416daeef4ffc5+45
+ an_integer: "1"
+ success: true
+
+nondeterministic_job_run:
+ uuid: zzzzz-8i9sb-cjs4pklxxjykyyy
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ script: hash2
+ script_version: 4fe459abe02d9b365932b8f5dc419439ab4e2577
+ script_parameters:
+ input: fa7aeb5140e2848d39b416daeef4ffc5+45
+ an_integer: "1"
+ success: true
+ nondeterministic: true
\ No newline at end of file
link_class: signature
name: require
head_kind: arvados#collection
- head_uuid: b519d9cb706a29fc7ea24dbea2f05851
+ head_uuid: b519d9cb706a29fc7ea24dbea2f05851+249025
properties: {}
user_agreement_readable:
link_class: permission
name: can_read
head_kind: arvados#collection
- head_uuid: b519d9cb706a29fc7ea24dbea2f05851
+ head_uuid: b519d9cb706a29fc7ea24dbea2f05851+249025
properties: {}
active_user_member_of_all_users_group:
link_class: signature
name: click
head_kind: arvados#collection
- head_uuid: b519d9cb706a29fc7ea24dbea2f05851
+ head_uuid: b519d9cb706a29fc7ea24dbea2f05851+249025
properties: {}
user_agreement_signed_by_inactive:
link_class: signature
name: click
head_kind: arvados#collection
- head_uuid: b519d9cb706a29fc7ea24dbea2f05851
+ head_uuid: b519d9cb706a29fc7ea24dbea2f05851+249025
properties: {}
spectator_user_member_of_all_users_group:
head_uuid: zzzzz-8i9sb-cjs4pklxxjykyuq
properties: {}
+foo_repository_readable_by_spectator:
+ uuid: zzzzz-o0j2j-cpy7p41hpk5xxx
+ owner_uuid: zzzzz-tpzed-000000000000000
+ created_at: 2014-01-24 20:42:26 -0800
+ modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+ modified_by_user_uuid: zzzzz-tpzed-000000000000000
+ modified_at: 2014-01-24 20:42:26 -0800
+ updated_at: 2014-01-24 20:42:26 -0800
+ tail_kind: arvados#user
+ tail_uuid: zzzzz-tpzed-l1s2piq4t4mps8r
+ link_class: permission
+ name: can_read
+ head_kind: arvados#repository
+ head_uuid: zzzzz-2x53u-382brsig8rp3666
+ properties: {}
--- /dev/null
+foo:
+ uuid: zzzzz-2x53u-382brsig8rp3666
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ name: foo
assert_not_nil assigns(:objects)
end
+ [0,1,2].each do |limit|
+ test "get index with limit=#{limit}" do
+ authorize_with :active
+ get :index, limit: limit
+ assert_response :success
+ assert_equal limit, assigns(:objects).count
+ resp = JSON.parse(@response.body)
+ assert_equal limit, resp['limit']
+ end
+ end
+
+ test "get index with limit=2 offset=99999" do
+ # Assume there are not that many test fixtures.
+ authorize_with :active
+ get :index, limit: 2, offset: 99999
+ assert_response :success
+ assert_equal 0, assigns(:objects).count
+ resp = JSON.parse(@response.body)
+ assert_equal 2, resp['limit']
+ assert_equal 99999, resp['offset']
+ end
+
test "should create" do
authorize_with :active
test_collection = {
--- /dev/null
+require 'test_helper'
+load 'test/functional/arvados/v1/git_setup.rb'
+
+class Arvados::V1::CommitsControllerTest < ActionController::TestCase
+ fixtures :repositories, :users
+
+ # See git_setup.rb for the commit log for test.git.tar
+ include GitSetup
+
+ test "test_find_commit_range" do
+ authorize_with :active
+
+ # single
+ a = Commit.find_commit_range(users(:active), nil, nil, '31ce37fe365b3dc204300a3e4c396ad333ed0556', nil)
+ assert_equal ['31ce37fe365b3dc204300a3e4c396ad333ed0556'], a
+
+ #test "test_branch1" do
+ a = Commit.find_commit_range(users(:active), nil, nil, 'master', nil)
+ assert_equal ['077ba2ad3ea24a929091a9e6ce545c93199b8e57'], a
+
+ #test "test_branch2" do
+ a = Commit.find_commit_range(users(:active), 'foo', nil, 'b1', nil)
+ assert_equal ['1de84a854e2b440dc53bf42f8548afa4c17da332'], a
+
+ #test "test_branch3" do
+ a = Commit.find_commit_range(users(:active), 'foo', nil, 'HEAD', nil)
+ assert_equal ['1de84a854e2b440dc53bf42f8548afa4c17da332'], a
+
+ #test "test_single_revision_repo" do
+ a = Commit.find_commit_range(users(:active), "foo", nil, '31ce37fe365b3dc204300a3e4c396ad333ed0556', nil)
+ assert_equal ['31ce37fe365b3dc204300a3e4c396ad333ed0556'], a
+ a = Commit.find_commit_range(users(:active), "bar", nil, '31ce37fe365b3dc204300a3e4c396ad333ed0556', nil)
+ assert_equal nil, a
+
+ #test "test_multi_revision" do
+ a = Commit.find_commit_range(users(:active), nil, '31ce37fe365b3dc204300a3e4c396ad333ed0556', '077ba2ad3ea24a929091a9e6ce545c93199b8e57', nil)
+ assert_equal ['077ba2ad3ea24a929091a9e6ce545c93199b8e57', '4fe459abe02d9b365932b8f5dc419439ab4e2577', '31ce37fe365b3dc204300a3e4c396ad333ed0556'], a
+
+ #test "test_tag" do
+ a = Commit.find_commit_range(users(:active), nil, 'tag1', 'master', nil)
+ assert_equal ['077ba2ad3ea24a929091a9e6ce545c93199b8e57', '4fe459abe02d9b365932b8f5dc419439ab4e2577'], a
+
+ #test "test_multi_revision_exclude" do
+ a = Commit.find_commit_range(users(:active), nil, '31ce37fe365b3dc204300a3e4c396ad333ed0556', '077ba2ad3ea24a929091a9e6ce545c93199b8e57', ['4fe459abe02d9b365932b8f5dc419439ab4e2577'])
+ assert_equal ['077ba2ad3ea24a929091a9e6ce545c93199b8e57', '31ce37fe365b3dc204300a3e4c396ad333ed0556'], a
+
+ #test "test_multi_revision_tagged_exclude" do
+ a = Commit.find_commit_range(users(:active), nil, '31ce37fe365b3dc204300a3e4c396ad333ed0556', '077ba2ad3ea24a929091a9e6ce545c93199b8e57', ['tag1'])
+ assert_equal ['077ba2ad3ea24a929091a9e6ce545c93199b8e57', '31ce37fe365b3dc204300a3e4c396ad333ed0556'], a
+
+ Dir.mktmpdir do |touchdir|
+ # invalid input to maximum
+ a = Commit.find_commit_range(users(:active), nil, nil, "31ce37fe365b3dc204300a3e4c396ad333ed0556 ; touch #{touchdir}/uh_oh", nil)
+ assert !File.exists?("#{touchdir}/uh_oh"), "#{touchdir}/uh_oh should not exist, 'maximum' parameter of find_commit_range is exploitable"
+ assert_equal nil, a
+
+ # invalid input to maximum
+ a = Commit.find_commit_range(users(:active), nil, nil, "$(uname>#{touchdir}/uh_oh)", nil)
+ assert !File.exists?("#{touchdir}/uh_oh"), "#{touchdir}/uh_oh should not exist, 'maximum' parameter of find_commit_range is exploitable"
+ assert_equal nil, a
+
+ # invalid input to minimum
+ a = Commit.find_commit_range(users(:active), nil, "31ce37fe365b3dc204300a3e4c396ad333ed0556 ; touch #{touchdir}/uh_oh", "31ce37fe365b3dc204300a3e4c396ad333ed0556", nil)
+ assert !File.exists?("#{touchdir}/uh_oh"), "#{touchdir}/uh_oh should not exist, 'minimum' parameter of find_commit_range is exploitable"
+ assert_equal nil, a
+
+ # invalid input to minimum
+ a = Commit.find_commit_range(users(:active), nil, "$(uname>#{touchdir}/uh_oh)", "31ce37fe365b3dc204300a3e4c396ad333ed0556", nil)
+ assert !File.exists?("#{touchdir}/uh_oh"), "#{touchdir}/uh_oh should not exist, 'minimum' parameter of find_commit_range is exploitable"
+ assert_equal nil, a
+
+ # invalid input to 'excludes'
+ a = Commit.find_commit_range(users(:active), nil, "31ce37fe365b3dc204300a3e4c396ad333ed0556", "077ba2ad3ea24a929091a9e6ce545c93199b8e57", ["4fe459abe02d9b365932b8f5dc419439ab4e2577 ; touch #{touchdir}/uh_oh"])
+ assert !File.exists?("#{touchdir}/uh_oh"), "#{touchdir}/uh_oh should not exist, 'excludes' parameter of find_commit_range is exploitable"
+ assert_equal nil, a
+
+ # invalid input to 'excludes'
+ a = Commit.find_commit_range(users(:active), nil, "31ce37fe365b3dc204300a3e4c396ad333ed0556", "077ba2ad3ea24a929091a9e6ce545c93199b8e57", ["$(uname>#{touchdir}/uh_oh)"])
+ assert !File.exists?("#{touchdir}/uh_oh"), "#{touchdir}/uh_oh should not exist, 'excludes' parameter of find_commit_range is exploitable"
+ assert_equal nil, a
+
+ end
+
+ end
+
+end
--- /dev/null
+require 'fileutils'
+require 'tmpdir'
+
+# Commit log for test.git.tar
+# master is the main branch
+# b1 is a branch off of master
+# tag1 is a tag
+#
+# 1de84a8 * b1
+# 077ba2a * master
+# 4fe459a * tag1
+# 31ce37f * foo
+
+module GitSetup
+ def setup
+ @tmpdir = Dir.mktmpdir()
+ #puts "setup #{@tmpdir}"
+ `cp test/test.git.tar #{@tmpdir} && cd #{@tmpdir} && tar xf test.git.tar`
+ Rails.configuration.git_repositories_dir = "#{@tmpdir}/test"
+ Commit.refresh_repositories
+ end
+
+ def teardown
+ #puts "teardown #{@tmpdir}"
+ FileUtils.remove_entry @tmpdir, true
+ end
+end
--- /dev/null
+require 'test_helper'
+load 'test/functional/arvados/v1/git_setup.rb'
+
+class Arvados::V1::JobReuseControllerTest < ActionController::TestCase
+ fixtures :repositories, :users, :jobs, :links
+
+ # See git_setup.rb for the commit log for test.git.tar
+ include GitSetup
+
+ test "test_reuse_job" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
+ end
+
+ test "test_reuse_job_range" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash",
+ minimum_script_version: "tag1",
+ script_version: "master",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
+ end
+
+ test "test_cannot_reuse_job_different_input" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '2'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
+ end
+
+ test "test_cannot_reuse_job_different_version" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash",
+ script_version: "master",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '2'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '077ba2ad3ea24a929091a9e6ce545c93199b8e57', new_job['script_version']
+ end
+
+ test "test_cannot_reuse_job_submitted_nondeterministic" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ },
+ nondeterministic: true
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
+ end
+
+ test "test_cannot_reuse_job_past_nondeterministic" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash2",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykyyy', new_job['uuid']
+ assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
+ end
+
+ test "test_cannot_reuse_job_no_permission" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :spectator
+ post :create, job: {
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '4fe459abe02d9b365932b8f5dc419439ab4e2577', new_job['script_version']
+ end
+
+ test "test_cannot_reuse_job_excluded" do
+ @controller = Arvados::V1::JobsController.new
+ authorize_with :active
+ post :create, job: {
+ script: "hash",
+ minimum_script_version: "31ce37fe365b3dc204300a3e4c396ad333ed0556",
+ script_version: "master",
+ repository: "foo",
+ exclude_script_versions: ["tag1"],
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ }
+ assert_response :success
+ assert_not_nil assigns(:object)
+ new_job = JSON.parse(@response.body)
+ assert_not_equal 'zzzzz-8i9sb-cjs4pklxxjykqqq', new_job['uuid']
+ assert_equal '077ba2ad3ea24a929091a9e6ce545c93199b8e57', new_job['script_version']
+ end
+
+
+end
require 'test_helper'
+load 'test/functional/arvados/v1/git_setup.rb'
class Arvados::V1::JobsControllerTest < ActionController::TestCase
+ include GitSetup
+
test "submit a job" do
authorize_with :active
post :create, job: {
script: "hash",
script_version: "master",
+ repository: "foo",
script_parameters: {}
}
assert_response :success
script: "hash",
script_version: "master",
script_parameters: {},
+ repository: "foo",
started_at: Time.now,
finished_at: Time.now,
running: false,
}
assert_response 422
end
+
+
end
test "should get fresh discovery document" do
MAX_SCHEMA_AGE = 60
- get :discovery_rest_description
+ get :index
assert_response :success
discovery_doc = JSON.parse(@response.body)
assert_equal 'discovery#restDescription', discovery_doc['kind']
class Arvados::V1::UsersControllerTest < ActionController::TestCase
+ setup do
+ @all_links_at_start = Link.all
+ @vm_uuid = virtual_machines(:testvm).uuid
+ end
+
test "activate a user after signing UA" do
authorize_with :inactive_but_signed_user_agreement
get :current
assert_equal true, me['is_active']
end
+ test "create new user with user as input" do
+ authorize_with :admin
+ post :create, user: {
+ first_name: "test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ assert_response :success
+ created = JSON.parse(@response.body)
+ assert_equal 'test_first_name', created['first_name']
+ assert_not_nil created['uuid'], 'expected uuid for the newly created user'
+ assert_not_nil created['email'], 'expected non-nil email'
+ assert_nil created['identity_url'], 'expected no identity_url'
+ end
+
+ test "create user with user, vm and repo as input" do
+ authorize_with :admin
+ repo_name = 'test_repo'
+
+ post :setup, {
+ repo_name: repo_name,
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ user: {
+ uuid: "this_is_agreeable",
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ }
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+
+ created = find_obj_in_resp response_items, 'User', nil
+ assert_equal 'in_create_test_first_name', created['first_name']
+ assert_not_nil created['uuid'], 'expected non-null uuid for the new user'
+ assert_equal 'this_is_agreeable', created['uuid']
+ assert_not_nil created['email'], 'expected non-nil email'
+ assert_nil created['identity_url'], 'expected no identity_url'
+
+ # arvados#user, repo link and link add user to 'All users' group
+ verify_num_links @all_links_at_start, 3
+
+ verify_link response_items, 'arvados#user', true, 'permission', 'can_login',
+ created['uuid'], created['email'], 'arvados#user', false, 'User'
+
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ repo_name, created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#virtualMachine', false, 'permission', 'can_login',
+ nil, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+
+ # invoke setup again with the same data
+ post :setup, {
+ repo_name: repo_name,
+ vm_uuid: @vm_uuid,
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ user: {
+ uuid: "this_is_agreeable",
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ }
+
+ response_items = JSON.parse(@response.body)['items']
+
+ created = find_obj_in_resp response_items, 'User', nil
+ assert_equal 'in_create_test_first_name', created['first_name']
+ assert_not_nil created['uuid'], 'expected non-null uuid for the new user'
+ assert_equal 'this_is_agreeable', created['uuid']
+ assert_not_nil created['email'], 'expected non-nil email'
+ assert_nil created['identity_url'], 'expected no identity_url'
+
+ # arvados#user, repo link and link add user to 'All users' group
+ verify_num_links @all_links_at_start, 4
+
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ repo_name, created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#virtualMachine', true, 'permission', 'can_login',
+ @vm_uuid, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+ end
+
+ test "setup user with bogus uuid and expect error" do
+ authorize_with :admin
+
+ post :setup, {
+ uuid: 'bogus_uuid',
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid
+ }
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? 'Path not found'), 'Expected 404'
+ end
+
+ test "setup user with bogus uuid in user and expect error" do
+ authorize_with :admin
+
+ post :setup, {
+ user: {uuid: 'bogus_uuid'},
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid,
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? 'ArgumentError: Require user email'),
+      'Expected ArgumentError'
+ end
+
+ test "setup user with no uuid and user, expect error" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid,
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? 'Required uuid or user'),
+ 'Expected ArgumentError'
+ end
+
+ test "setup user with no uuid and email, expect error" do
+ authorize_with :admin
+
+ post :setup, {
+ user: {},
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid,
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? '<ArgumentError: Require user email'),
+ 'Expected ArgumentError'
+ end
+
+ test "invoke setup with existing uuid, vm and repo and verify links" do
+ authorize_with :inactive
+ get :current
+ assert_response :success
+ inactive_user = JSON.parse(@response.body)
+
+ authorize_with :admin
+
+ post :setup, {
+ uuid: inactive_user['uuid'],
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ resp_obj = find_obj_in_resp response_items, 'User', nil
+
+ assert_not_nil resp_obj['uuid'], 'expected uuid for the new user'
+ assert_equal inactive_user['uuid'], resp_obj['uuid']
+ assert_equal inactive_user['email'], resp_obj['email'],
+ 'expecting inactive user email'
+
+ # expect repo and vm links
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ 'test_repo', resp_obj['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#virtualMachine', true, 'permission', 'can_login',
+ @vm_uuid, resp_obj['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+ end
+
+ test "invoke setup with existing uuid in user, verify response" do
+ authorize_with :inactive
+ get :current
+ assert_response :success
+ inactive_user = JSON.parse(@response.body)
+
+ authorize_with :admin
+
+ post :setup, {
+ user: {uuid: inactive_user['uuid']},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ resp_obj = find_obj_in_resp response_items, 'User', nil
+
+ assert_not_nil resp_obj['uuid'], 'expected uuid for the new user'
+ assert_equal inactive_user['uuid'], resp_obj['uuid']
+ assert_equal inactive_user['email'], resp_obj['email'],
+ 'expecting inactive user email'
+ end
+
+ test "invoke setup with existing uuid but different email, expect original email" do
+ authorize_with :inactive
+ get :current
+ assert_response :success
+ inactive_user = JSON.parse(@response.body)
+
+ authorize_with :admin
+
+ post :setup, {
+ uuid: inactive_user['uuid'],
+ user: {email: 'junk_email'}
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ resp_obj = find_obj_in_resp response_items, 'User', nil
+
+ assert_not_nil resp_obj['uuid'], 'expected uuid for the new user'
+ assert_equal inactive_user['uuid'], resp_obj['uuid']
+ assert_equal inactive_user['email'], resp_obj['email'],
+ 'expecting inactive user email'
+ end
+
+ test "setup user with valid email and repo as input" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ user: {email: 'foo@example.com'},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ response_object = find_obj_in_resp response_items, 'User', nil
+ assert_not_nil response_object['uuid'], 'expected uuid for the new user'
+ assert_equal response_object['email'], 'foo@example.com', 'expected given email'
+
+ # three extra links; login link, group link and repo link
+ verify_num_links @all_links_at_start, 3
+ end
+
+ test "setup user with fake vm and expect error" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ vm_uuid: 'no_such_vm',
+ user: {email: 'foo@example.com'},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? "No vm found for no_such_vm"),
+ 'Expected RuntimeError: No vm found for no_such_vm'
+ end
+
+ test "setup user with valid email, repo and real vm as input" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ vm_uuid: @vm_uuid,
+ user: {email: 'foo@example.com'}
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ response_object = find_obj_in_resp response_items, 'User', nil
+ assert_not_nil response_object['uuid'], 'expected uuid for the new user'
+ assert_equal response_object['email'], 'foo@example.com', 'expected given email'
+
+    # four extra links; login link, group link, repo link and vm link
+ verify_num_links @all_links_at_start, 4
+ end
+
+ test "setup user with valid email, no vm and repo as input" do
+ authorize_with :admin
+
+ post :setup, {
+ user: {email: 'foo@example.com'},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ response_object = find_obj_in_resp response_items, 'User', nil
+ assert_not_nil response_object['uuid'], 'expected uuid for new user'
+ assert_equal response_object['email'], 'foo@example.com', 'expected given email'
+
+ # two extra links; login link and group link
+ verify_num_links @all_links_at_start, 2
+ end
+
+ test "setup user with email, first name, repo name and vm uuid" do
+ authorize_with :admin
+
+ post :setup, {
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid,
+ user: {
+ first_name: 'test_first_name',
+ email: 'foo@example.com'
+ }
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ response_object = find_obj_in_resp response_items, 'User', nil
+ assert_not_nil response_object['uuid'], 'expected uuid for new user'
+ assert_equal response_object['email'], 'foo@example.com', 'expected given email'
+ assert_equal 'test_first_name', response_object['first_name'],
+ 'expecting first name'
+
+ # four extra links; login link, group link, repo link and vm link
+ verify_num_links @all_links_at_start, 4
+ end
+
+ test "setup user twice with email and check two different objects created" do
+ authorize_with :admin
+
+ post :setup, {
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ repo_name: 'test_repo',
+ user: {
+ email: 'foo@example.com'
+ }
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ response_object = find_obj_in_resp response_items, 'User', nil
+ assert_not_nil response_object['uuid'], 'expected uuid for new user'
+ assert_equal response_object['email'], 'foo@example.com', 'expected given email'
+ verify_num_links @all_links_at_start, 3 # openid, group, and repo. no vm
+
+ # create again
+ post :setup, {
+ user: {email: 'foo@example.com'},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ response_object2 = find_obj_in_resp response_items, 'User', nil
+ assert_not_equal response_object['uuid'], response_object2['uuid'],
+        'expected different uuid from the first create operation'
+ assert_equal response_object['email'], 'foo@example.com', 'expected given email'
+
+ # extra login link only
+ verify_num_links @all_links_at_start, 4
+ end
+
+ test "setup user with openid prefix" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ openid_prefix: 'http://www.example.com/account',
+ user: {
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ created = find_obj_in_resp response_items, 'User', nil
+
+ assert_equal 'in_create_test_first_name', created['first_name']
+ assert_not_nil created['uuid'], 'expected uuid for new user'
+ assert_not_nil created['email'], 'expected non-nil email'
+ assert_nil created['identity_url'], 'expected no identity_url'
+
+ # verify links
+ # 3 new links: arvados#user, repo, and 'All users' group.
+ verify_num_links @all_links_at_start, 3
+
+ verify_link response_items, 'arvados#user', true, 'permission', 'can_login',
+ created['uuid'], created['email'], 'arvados#user', false, 'User'
+
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ 'test_repo', created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#virtualMachine', false, 'permission', 'can_login',
+ nil, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+ end
+
+ test "invoke setup with no openid prefix, expect error" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ user: {
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ }
+
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? 'openid_prefix parameter is missing'),
+ 'Expected ArgumentError'
+ end
+
+ test "setup user with user, vm and repo and verify links" do
+ authorize_with :admin
+
+ post :setup, {
+ user: {
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ },
+ vm_uuid: @vm_uuid,
+ repo_name: 'test_repo',
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ created = find_obj_in_resp response_items, 'User', nil
+
+ assert_equal 'in_create_test_first_name', created['first_name']
+ assert_not_nil created['uuid'], 'expected uuid for new user'
+ assert_not_nil created['email'], 'expected non-nil email'
+ assert_nil created['identity_url'], 'expected no identity_url'
+
+ # expect 4 new links: arvados#user, repo, vm and 'All users' group link
+ verify_num_links @all_links_at_start, 4
+
+ verify_link response_items, 'arvados#user', true, 'permission', 'can_login',
+ created['uuid'], created['email'], 'arvados#user', false, 'User'
+
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ 'test_repo', created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#virtualMachine', true, 'permission', 'can_login',
+ @vm_uuid, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+ end
+
+ test "create user as non admin user and expect error" do
+ authorize_with :active
+
+ post :create, {
+ user: {email: 'foo@example.com'}
+ }
+
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? 'PermissionDenied'),
+ 'Expected PermissionDeniedError'
+ end
+
+ test "setup user as non admin user and expect error" do
+ authorize_with :active
+
+ post :setup, {
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ user: {email: 'foo@example.com'}
+ }
+
+ response_body = JSON.parse(@response.body)
+ response_errors = response_body['errors']
+ assert_not_nil response_errors, 'Expected error in response'
+ assert (response_errors.first.include? 'Forbidden'),
+ 'Expected Forbidden error'
+ end
+
+ test "setup user in multiple steps and verify response" do
+ authorize_with :admin
+
+ post :setup, {
+ openid_prefix: 'http://www.example.com/account',
+ user: {
+ email: "foo@example.com"
+ }
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ created = find_obj_in_resp response_items, 'User', nil
+
+ assert_not_nil created['uuid'], 'expected uuid for new user'
+ assert_not_nil created['email'], 'expected non-nil email'
+ assert_equal created['email'], 'foo@example.com', 'expected input email'
+
+ # verify links; 2 new links: arvados#user, and 'All users' group.
+ verify_num_links @all_links_at_start, 2
+
+ verify_link response_items, 'arvados#user', true, 'permission', 'can_login',
+ created['uuid'], created['email'], 'arvados#user', false, 'User'
+
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#repository', false, 'permission', 'can_write',
+ 'test_repo', created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#virtualMachine', false, 'permission', 'can_login',
+ nil, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+
+ # invoke setup with a repository
+ post :setup, {
+ openid_prefix: 'http://www.example.com/account',
+ repo_name: 'new_repo',
+ uuid: created['uuid']
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ created = find_obj_in_resp response_items, 'User', nil
+
+ assert_equal 'foo@example.com', created['email'], 'expected input email'
+
+ # verify links
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ 'new_repo', created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#virtualMachine', false, 'permission', 'can_login',
+ nil, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+
+ # invoke setup with a vm_uuid
+ post :setup, {
+ vm_uuid: @vm_uuid,
+ openid_prefix: 'http://www.example.com/account',
+ user: {
+ email: 'junk_email'
+ },
+ uuid: created['uuid']
+ }
+
+ assert_response :success
+
+ response_items = JSON.parse(@response.body)['items']
+ created = find_obj_in_resp response_items, 'User', nil
+
+ assert_equal created['email'], 'foo@example.com', 'expected original email'
+
+ # verify links
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ # since no repo name in input, we won't get any; even though user has one
+ verify_link response_items, 'arvados#repository', false, 'permission', 'can_write',
+ 'new_repo', created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#virtualMachine', true, 'permission', 'can_login',
+ @vm_uuid, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+ end
+
+ test "setup and unsetup user" do
+ authorize_with :admin
+
+ post :setup, {
+ repo_name: 'test_repo',
+ vm_uuid: @vm_uuid,
+ user: {email: 'foo@example.com'},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ }
+
+ assert_response :success
+ response_items = JSON.parse(@response.body)['items']
+ created = find_obj_in_resp response_items, 'User', nil
+ assert_not_nil created['uuid'], 'expected uuid for the new user'
+ assert_equal created['email'], 'foo@example.com', 'expected given email'
+
+ # 4 extra links: login, group, repo and vm
+ verify_num_links @all_links_at_start, 4
+
+ verify_link response_items, 'arvados#user', true, 'permission', 'can_login',
+ created['uuid'], created['email'], 'arvados#user', false, 'User'
+
+ verify_link response_items, 'arvados#group', true, 'permission', 'can_read',
+ 'All users', created['uuid'], 'arvados#group', true, 'Group'
+
+ verify_link response_items, 'arvados#repository', true, 'permission', 'can_write',
+ 'test_repo', created['uuid'], 'arvados#repository', true, 'Repository'
+
+ verify_link response_items, 'arvados#virtualMachine', true, 'permission', 'can_login',
+ @vm_uuid, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
+
+ verify_link_existence created['uuid'], created['email'], true, true, true, false
+
+ # now unsetup this user
+ post :unsetup, uuid: created['uuid']
+ assert_response :success
+
+ created2 = JSON.parse(@response.body)
+ assert_not_nil created2['uuid'], 'expected uuid for the newly created user'
+ assert_equal created['uuid'], created2['uuid'], 'expected uuid not found'
+
+ verify_link_existence created['uuid'], created['email'], false, false, false, false
+ end
+
+ test "unsetup active user" do
+ authorize_with :active
+ get :current
+ assert_response :success
+ active_user = JSON.parse(@response.body)
+ assert_not_nil active_user['uuid'], 'expected uuid for the active user'
+ assert active_user['is_active'], 'expected is_active for active user'
+
+ verify_link_existence active_user['uuid'], active_user['email'],
+ false, false, false, true
+
+ authorize_with :admin
+
+ # now unsetup this user
+ post :unsetup, uuid: active_user['uuid']
+ assert_response :success
+
+ response_user = JSON.parse(@response.body)
+    assert_not_nil response_user['uuid'], 'expected uuid for the unsetup user'
+ assert_equal active_user['uuid'], response_user['uuid'], 'expected uuid not found'
+ assert !response_user['is_active'], 'expected user to be inactive'
+
+ verify_link_existence response_user['uuid'], response_user['email'],
+ false, false, false, false
+ end
+
+ def verify_num_links (original_links, expected_additional_links)
+ links_now = Link.all
+ assert_equal original_links.size+expected_additional_links, Link.all.size,
+ "Expected #{expected_additional_links.inspect} more links"
+ end
+
+ def find_obj_in_resp (response_items, object_type, head_kind=nil)
+ return_obj = nil
+ response_items.each { |x|
+ if !x
+ next
+ end
+
+ if object_type == 'User'
+ if !x['head_kind']
+ return_obj = x
+ break
+ end
+ else # looking for a link
+ if x['head_kind'] == head_kind
+ return_obj = x
+ break
+ end
+ end
+ }
+ return return_obj
+ end
+
+ def verify_link(response_items, link_object_name, expect_link, link_class,
+ link_name, head_uuid, tail_uuid, head_kind, fetch_object, class_name)
+
+ link = find_obj_in_resp response_items, 'Link', link_object_name
+
+ if !expect_link
+ assert_nil link, "Expected no link for #{link_object_name}"
+ return
+ end
+
+ assert_not_nil link, "Expected link for #{link_object_name}"
+
+ if fetch_object
+ object = Object.const_get(class_name).where(name: head_uuid)
+ assert [] != object, "expected #{class_name} with name #{head_uuid}"
+ head_uuid = object.first[:uuid]
+ end
+ assert_equal link['link_class'], link_class,
+ "did not find expected link_class for #{link_object_name}"
+
+ assert_equal link['name'], link_name,
+ "did not find expected link_name for #{link_object_name}"
+
+ assert_equal link['tail_uuid'], tail_uuid,
+ "did not find expected tail_uuid for #{link_object_name}"
+
+ assert_equal link['head_kind'], head_kind,
+ "did not find expected head_kind for #{link_object_name}"
+
+ assert_equal link['head_uuid'], head_uuid,
+ "did not find expected head_uuid for #{link_object_name}"
+ end
+
+ def verify_link_existence uuid, email, expect_oid_login_perms,
+ expect_repo_perms, expect_vm_perms, expect_signatures
+ # verify that all links are deleted for the user
+ oid_login_perms = Link.where(tail_uuid: email,
+ head_kind: 'arvados#user',
+ link_class: 'permission',
+ name: 'can_login')
+ if expect_oid_login_perms
+ assert oid_login_perms.any?, "expected oid_login_perms"
+ else
+ assert !oid_login_perms.any?, "expected all oid_login_perms deleted"
+ end
+
+ repo_perms = Link.where(tail_uuid: uuid,
+ head_kind: 'arvados#repository',
+ link_class: 'permission',
+ name: 'can_write')
+ if expect_repo_perms
+ assert repo_perms.any?, "expected repo_perms"
+ else
+ assert !repo_perms.any?, "expected all repo_perms deleted"
+ end
+
+ vm_login_perms = Link.where(tail_uuid: uuid,
+ head_kind: 'arvados#virtualMachine',
+ link_class: 'permission',
+ name: 'can_login')
+ if expect_vm_perms
+ assert vm_login_perms.any?, "expected vm_login_perms"
+ else
+ assert !vm_login_perms.any?, "expected all vm_login_perms deleted"
+ end
+
+ signed_uuids = Link.where(link_class: 'signature',
+ tail_kind: 'arvados#user',
+ tail_uuid: uuid)
+
+ if expect_signatures
+      assert signed_uuids.any?, "expected signatures"
+ else
+      assert !signed_uuids.any?, "expected all signatures deleted"
+ end
+
+ end
end
assert_equal "arvados#collectionList", jresponse['kind']
end
+ test "get index with filters= (empty string)" do
+ get "/arvados/v1/collections", {:format => :json, :filters => ''}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active).api_token}"}
+ assert_response :success
+ assert_equal "arvados#collectionList", jresponse['kind']
+ end
+
+ test "get index with where= (empty string)" do
+ get "/arvados/v1/collections", {:format => :json, :where => ''}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active).api_token}"}
+ assert_response :success
+ assert_equal "arvados#collectionList", jresponse['kind']
+ end
+
test "controller 404 response is json" do
get "/arvados/v1/thingsthatdonotexist", {:format => :xml}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active).api_token}"}
assert_response 404
--- /dev/null
+require 'test_helper'
+
+class PermissionsTest < ActionDispatch::IntegrationTest
+ fixtures :users, :groups, :api_client_authorizations, :collections
+
+ test "adding and removing direct can_read links" do
+ auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
+ admin_auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ # try to add permission as spectator
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#user',
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#collection',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ }, auth
+ assert_response 422
+
+ # add permission as admin
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#user',
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#collection',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ u = jresponse['uuid']
+ assert_response :success
+
+ # read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response :success
+
+ # try to delete permission as spectator
+ delete "/arvados/v1/links/#{u}", {:format => :json}, auth
+ assert_response 403
+
+ # delete permission as admin
+ delete "/arvados/v1/links/#{u}", {:format => :json}, admin_auth
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+ end
+
+
+ test "adding can_read links from user to group, group to collection" do
+ auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
+ admin_auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ # add permission for spectator to read group
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#user',
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#group',
+ head_uuid: groups(:private).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ # add permission for group to read collection
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#group',
+ tail_uuid: groups(:private).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#collection',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ u = jresponse['uuid']
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response :success
+
+ # delete permission for group to read collection
+ delete "/arvados/v1/links/#{u}", {:format => :json}, admin_auth
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ end
+
+
+ test "adding can_read links from group to collection, user to group" do
+ auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
+ admin_auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ # add permission for group to read collection
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#group',
+ tail_uuid: groups(:private).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#collection',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ # add permission for spectator to read group
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#user',
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#group',
+ head_uuid: groups(:private).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ u = jresponse['uuid']
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response :success
+
+ # delete permission for spectator to read group
+ delete "/arvados/v1/links/#{u}", {:format => :json}, admin_auth
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ end
+
+ test "adding can_read links from user to group, group to group, group to collection" do
+ auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
+ admin_auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+
+ # add permission for user to read group
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#user',
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#group',
+ head_uuid: groups(:private).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ assert_response :success
+
+ # add permission for group to read group
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#group',
+ tail_uuid: groups(:private).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#group',
+ head_uuid: groups(:empty_lonely_group).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ assert_response :success
+
+ # add permission for group to read collection
+ post "/arvados/v1/links", {
+ :format => :json,
+ :link => {
+ tail_kind: 'arvados#group',
+ tail_uuid: groups(:empty_lonely_group).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_kind: 'arvados#collection',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ }, admin_auth
+ u = jresponse['uuid']
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response :success
+
+ # delete permission for group to read collection
+ delete "/arvados/v1/links/#{u}", {:format => :json}, admin_auth
+ assert_response :success
+
+ # try to read collection as spectator
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth
+ assert_response 404
+ end
+end
require 'test_helper'
class UserTest < ActiveSupport::TestCase
- # test "the truth" do
- # assert true
- # end
+
+ # The fixture services/api/test/fixtures/users.yml serves as the input for this test case
+ # Pick one admin, one active non-admin, and one uninvited user out of the
+ # fixtures. If several fixture users match a category, the LAST one
+ # enumerated wins; the tests below assume all three instance variables
+ # get assigned.
+ setup do
+ @all_users = User.find(:all)
+
+ @all_users.each do |user|
+ if user.is_admin && user.is_active
+ @admin_user = user
+ elsif user.is_active && !user.is_admin
+ @active_user = user
+ elsif !user.is_active && !user.is_invited
+ @uninvited_user = user
+ end
+ end
+ end
+
+ # An active non-admin user can read/write/manage their own object and read
+ # at least one group, but has no access to other users' objects.
+ test "check non-admin active user properties" do
+ assert !@active_user.is_admin, 'is_admin should not be set for a non-admin user'
+ assert @active_user.is_active, 'user should be active'
+ assert @active_user.is_invited, 'is_invited should be set'
+ assert_not_nil @active_user.prefs, "user's preferences should be non-null, but may be size zero"
+ assert (@active_user.can? :read=>"#{@active_user.uuid}"), "user should be able to read own object"
+ assert (@active_user.can? :write=>"#{@active_user.uuid}"), "user should be able to write own object"
+ assert (@active_user.can? :manage=>"#{@active_user.uuid}"), "user should be able to manage own object"
+
+ assert @active_user.groups_i_can(:read).size > 0, "active user should be able read at least one group"
+
+ # non-admin user cannot manage or write other user objects
+ assert !(@active_user.can? :read=>"#{@uninvited_user.uuid}")
+ assert !(@active_user.can? :write=>"#{@uninvited_user.uuid}")
+ assert !(@active_user.can? :manage=>"#{@uninvited_user.uuid}")
+ end
+
+ # An admin user has full access: own object, groups at every permission
+ # level, and other users' objects.
+ test "check admin user properties" do
+ assert @admin_user.is_admin, 'is_admin should be set for admin user'
+ assert @admin_user.is_active, 'admin user cannot be inactive'
+ assert @admin_user.is_invited, 'is_invited should be set'
+ assert_not_nil @admin_user.uuid.size, "user's uuid should be non-null"
+ assert_not_nil @admin_user.prefs, "user's preferences should be non-null, but may be size zero"
+ assert @admin_user.identity_url.size > 0, "user's identity url is expected"
+ assert @admin_user.can? :read=>"#{@admin_user.uuid}"
+ assert @admin_user.can? :write=>"#{@admin_user.uuid}"
+ assert @admin_user.can? :manage=>"#{@admin_user.uuid}"
+
+ assert @admin_user.groups_i_can(:read).size > 0, "admin active user should be able read at least one group"
+ assert @admin_user.groups_i_can(:write).size > 0, "admin active user should be able write to at least one group"
+ assert @admin_user.groups_i_can(:manage).size > 0, "admin active user should be able manage at least one group"
+
+ # admin user can also write or manage other users
+ assert @admin_user.can? :read=>"#{@uninvited_user.uuid}"
+ assert @admin_user.can? :write=>"#{@uninvited_user.uuid}"
+ assert @admin_user.can? :manage=>"#{@uninvited_user.uuid}"
+ end
+
+ # An inactive, uninvited user can still manage their own object but
+ # cannot see any groups at all.
+ test "check inactive and uninvited user properties" do
+ assert !@uninvited_user.is_admin, 'is_admin should not be set for a non-admin user'
+ assert !@uninvited_user.is_active, 'user should be inactive'
+ assert !@uninvited_user.is_invited, 'is_invited should not be set'
+ assert @uninvited_user.can? :read=>"#{@uninvited_user.uuid}"
+ assert @uninvited_user.can? :write=>"#{@uninvited_user.uuid}"
+ assert @uninvited_user.can? :manage=>"#{@uninvited_user.uuid}"
+
+ assert @uninvited_user.groups_i_can(:read).size == 0, "inactive and uninvited user should not be able read any groups"
+ assert @uninvited_user.groups_i_can(:write).size == 0, "inactive and uninvited user should not be able write to any groups"
+ assert @uninvited_user.groups_i_can(:manage).size == 0, "inactive and uninvited user should not be able manage any groups"
+ end
+
+ # Every user has a uuid, and looking a user up by row id returns the same
+ # record (full_name is first_name + ' ' + last_name).
+ test "find user method checks" do
+ User.find(:all).each do |user|
+ assert_not_nil user.uuid, "non-null uuid expected for " + user.full_name
+ end
+
+ user = users(:active) # get the active user
+
+ found_user = User.find(user.id) # find a user by the row id
+
+ assert_equal found_user.full_name, user.first_name + ' ' + user.last_name
+ assert_equal found_user.identity_url, user.identity_url
+ end
+
+ # As admin: creating a user assigns a uuid, leaves unset fields nil, and
+ # the record can be re-fetched and updated afterwards.
+ test "create new user" do
+ Thread.current[:user] = @admin_user # set admin user as the current user
+
+ user = User.new
+ user.first_name = "first_name_for_newly_created_user"
+ user.save
+
+ # verify there is one extra user in the db now
+ assert (User.find(:all).size == @all_users.size+1)
+
+ user = User.find(user.id) # get the user back
+ assert_equal(user.first_name, 'first_name_for_newly_created_user')
+ assert_not_nil user.uuid, 'uuid should be set for newly created user'
+ assert_nil user.email, 'email should be null for newly created user, because it was not passed in'
+ assert_nil user.identity_url, 'identity_url should be null for newly created user, because it was not passed in'
+
+ user.first_name = 'first_name_for_newly_created_user_updated'
+ user.save
+ user = User.find(user.id) # get the user back
+ assert_equal(user.first_name, 'first_name_for_newly_created_user_updated')
+ end
+
+ # A user can update their own record, and an admin can update any user's
+ # record.
+ test "update existing user" do
+ Thread.current[:user] = @active_user # set active user as current user
+ @active_user.first_name = "first_name_changed"
+ @active_user.save
+
+ @active_user = User.find(@active_user.id) # get the user back
+ assert_equal(@active_user.first_name, 'first_name_changed')
+
+ # admin user also should be able to update the "active" user info
+ Thread.current[:user] = @admin_user # set admin user as current user
+ @active_user.first_name = "first_name_changed_by_admin_for_active_user"
+ @active_user.save
+
+ @active_user = User.find(@active_user.id) # get the user back
+ assert_equal(@active_user.first_name, 'first_name_changed_by_admin_for_active_user')
+ end
+
+ test "delete a user and verify" do
+ active_user_uuid = @active_user.uuid
+
+ Thread.current[:user] = @admin_user
+ @active_user.delete
+
+ found_deleted_user = false
+ User.find(:all).each do |user|
+ if user.uuid == active_user_uuid
+ found_deleted_user = true
+ break
+ end
+ end
+ assert !found_deleted_user, "found deleted user: "+active_user_uuid
+
+ end
+
+ test "create new user as non-admin user" do
+ Thread.current[:user] = @active_user
+
+ begin
+ user = User.new
+ user.save
+ rescue ArvadosModel::PermissionDeniedError => e
+ end
+ assert (e.message.include? 'PermissionDeniedError'),
+ 'Expected PermissionDeniedError'
+ end
+
+ # User.setup with a repo name and VM uuid in one call should return the
+ # user plus can_login (oid), can_read (group), can_write (repo) and
+ # can_login (vm) permission links.
+ test "setup new user" do
+ Thread.current[:user] = @admin_user
+
+ email = 'foo@example.com'
+ openid_prefix = 'http://openid/prefix'
+
+ user = User.new
+ user.email = email
+ user.uuid = 'abcdefghijklmnop'
+
+ vm = VirtualMachine.create
+
+ response = User.setup user, openid_prefix, 'test_repo', vm.uuid
+
+ resp_user = find_obj_in_resp response, 'User'
+ verify_user resp_user, email
+
+ oid_login_perm = find_obj_in_resp response, 'Link', 'arvados#user'
+ verify_link oid_login_perm, 'permission', 'can_login', resp_user[:email],
+ resp_user[:uuid]
+ assert_equal openid_prefix, oid_login_perm[:properties][:identity_url_prefix],
+ 'expected identity_url_prefix not found for oid_login_perm'
+
+ group_perm = find_obj_in_resp response, 'Link', 'arvados#group'
+ verify_link group_perm, 'permission', 'can_read', resp_user[:uuid], nil
+
+ repo_perm = find_obj_in_resp response, 'Link', 'arvados#repository'
+ verify_link repo_perm, 'permission', 'can_write', resp_user[:uuid], nil
+
+ vm_perm = find_obj_in_resp response, 'Link', 'arvados#virtualMachine'
+ verify_link vm_perm, 'permission', 'can_login', resp_user[:uuid], vm.uuid
+ end
+
+ # User.setup should be idempotent and incremental: calling it again with a
+ # repo name, then again with a VM uuid, keeps the same user uuid and adds
+ # the corresponding permission links at each step.
+ test "setup new user in multiple steps" do
+ Thread.current[:user] = @admin_user
+
+ email = 'foo@example.com'
+ openid_prefix = 'http://openid/prefix'
+
+ user = User.new
+ user.email = email
+ user.uuid = 'abcdefghijklmnop'
+
+ response = User.setup user, openid_prefix
+
+ resp_user = find_obj_in_resp response, 'User'
+ verify_user resp_user, email
+
+ oid_login_perm = find_obj_in_resp response, 'Link', 'arvados#user'
+ verify_link oid_login_perm, 'permission', 'can_login', resp_user[:email],
+ resp_user[:uuid]
+ assert_equal openid_prefix, oid_login_perm[:properties][:identity_url_prefix],
+ 'expected identity_url_prefix not found for oid_login_perm'
+
+ group_perm = find_obj_in_resp response, 'Link', 'arvados#group'
+ verify_link group_perm, 'permission', 'can_read', resp_user[:uuid], nil
+
+ # invoke setup again with repo_name
+ response = User.setup user, openid_prefix, 'test_repo'
+ resp_user = find_obj_in_resp response, 'User', nil
+ verify_user resp_user, email
+ assert_equal user.uuid, resp_user[:uuid], 'expected uuid not found'
+
+ group_perm = find_obj_in_resp response, 'Link', 'arvados#group'
+ verify_link group_perm, 'permission', 'can_read', resp_user[:uuid], nil
+
+ repo_perm = find_obj_in_resp response, 'Link', 'arvados#repository'
+ verify_link repo_perm, 'permission', 'can_write', resp_user[:uuid], nil
+
+ # invoke setup again with a vm_uuid
+ vm = VirtualMachine.create
+
+ response = User.setup user, openid_prefix, 'test_repo', vm.uuid
+
+ resp_user = find_obj_in_resp response, 'User', nil
+ verify_user resp_user, email
+ assert_equal user.uuid, resp_user[:uuid], 'expected uuid not found'
+
+ group_perm = find_obj_in_resp response, 'Link', 'arvados#group'
+ verify_link group_perm, 'permission', 'can_read', resp_user[:uuid], nil
+
+ repo_perm = find_obj_in_resp response, 'Link', 'arvados#repository'
+ verify_link repo_perm, 'permission', 'can_write', resp_user[:uuid], nil
+
+ vm_perm = find_obj_in_resp response, 'Link', 'arvados#virtualMachine'
+ verify_link vm_perm, 'permission', 'can_login', resp_user[:uuid], vm.uuid
+ end
+
+ def find_obj_in_resp (response, object_type, head_kind=nil)
+ return_obj = nil
+ response.each { |x|
+ if x.class.name == object_type
+ if head_kind
+ if x.head_kind == head_kind
+ return_obj = x
+ break
+ end
+ else
+ return_obj = x
+ break
+ end
+ end
+ }
+ return return_obj
+ end
+
+ # Assert that +resp_user+ is a user record carrying a uuid and the
+ # expected email address.
+ def verify_user (resp_user, email)
+ assert_not_nil resp_user, 'expected user object'
+ # Fix: this message used to duplicate 'expected user object', which made
+ # a nil-uuid failure indistinguishable from a nil-user failure.
+ assert_not_nil resp_user['uuid'], 'expected non-nil uuid in user object'
+ assert_equal email, resp_user['email'], 'expected email not found'
+ end
+
+ def verify_link (link_object, link_class, link_name, tail_uuid, head_uuid)
+ assert_not_nil link_object, 'expected link for #{link_class} #{link_name}'
+ assert_not_nil link_object[:uuid],
+ 'expected non-nil uuid for link for #{link_class} #{link_name}'
+ assert_equal link_class, link_object[:link_class],
+ 'expected link_class not found for #{link_class} #{link_name}'
+ assert_equal link_name, link_object[:name],
+ 'expected link_name not found for #{link_class} #{link_name}'
+ assert_equal tail_uuid, link_object[:tail_uuid],
+ 'expected tail_uuid not found for #{link_class} #{link_name}'
+ if head_uuid
+ assert_equal head_uuid, link_object[:head_uuid],
+ 'expected head_uuid not found for #{link_class} #{link_name}'
+ end
+ end
+
end
--- /dev/null
+package main
+
+import (
+ "bufio"
+ "crypto/md5"
+ "errors"
+ "fmt"
+ "github.com/gorilla/mux"
+ "log"
+ "net/http"
+ "os"
+ "strings"
+)
+
+const DEFAULT_PORT = 25107
+const BLOCKSIZE = 64 * 1024 * 1024
+
+var KeepVolumes []string
+
+ // main discovers local Keep volumes and serves block GET requests over HTTP
+ // on DEFAULT_PORT. Exits fatally if no volumes are found or the listener
+ // cannot be started.
+ func main() {
+ // Look for local keep volumes.
+ KeepVolumes = FindKeepVolumes()
+ if len(KeepVolumes) == 0 {
+ log.Fatal("could not find any keep volumes")
+ }
+ for _, v := range KeepVolumes {
+ log.Println("keep volume:", v)
+ }
+
+ // Set up REST handlers.
+ //
+ // Start with a router that will route each URL path to an
+ // appropriate handler.
+ //
+ rest := mux.NewRouter()
+ rest.HandleFunc("/{hash:[0-9a-f]{32}}", GetBlockHandler).Methods("GET")
+
+ // Tell the built-in HTTP server to direct all requests to the REST
+ // router.
+ http.Handle("/", rest)
+
+ // Start listening for requests.
+ port := fmt.Sprintf(":%d", DEFAULT_PORT)
+ // Bug fix: ListenAndServe's error was discarded, so the process exited
+ // with status 0 when e.g. the port was already in use.
+ log.Fatal(http.ListenAndServe(port, nil))
+ }
+
+// FindKeepVolumes
+// Returns a list of Keep volumes mounted on this system.
+//
+// A Keep volume is a normal or tmpfs volume with a /keep
+// directory at the top level of the mount point.
+//
+ // FindKeepVolumes returns a list of Keep volumes mounted on this system.
+ //
+ // A Keep volume is a tmpfs or /dev/* mount (other than /) with a /keep
+ // directory at the top level of the mount point, per /proc/mounts.
+ // Exits fatally if /proc/mounts cannot be read or scanned.
+ func FindKeepVolumes() []string {
+ vols := make([]string, 0)
+
+ if f, err := os.Open("/proc/mounts"); err != nil {
+ log.Fatal("could not read /proc/mounts: ", err)
+ } else {
+ // Bug fix: the file handle was never closed.
+ defer f.Close()
+ scanner := bufio.NewScanner(f)
+ for scanner.Scan() {
+ args := strings.Fields(scanner.Text())
+ dev, mount := args[0], args[1]
+ if (dev == "tmpfs" || strings.HasPrefix(dev, "/dev/")) && mount != "/" {
+ keep := mount + "/keep"
+ if st, err := os.Stat(keep); err == nil && st.IsDir() {
+ vols = append(vols, keep)
+ }
+ }
+ }
+ if err := scanner.Err(); err != nil {
+ log.Fatal(err)
+ }
+ }
+ return vols
+ }
+
+ // GetBlockHandler serves GET /{hash}: it looks the block up on the local
+ // Keep volumes and writes it to the response, or replies 404 when no
+ // volume holds a valid copy.
+ func GetBlockHandler(w http.ResponseWriter, req *http.Request) {
+ hash := mux.Vars(req)["hash"]
+
+ block, err := GetBlock(hash)
+ if err != nil {
+ http.Error(w, err.Error(), 404)
+ return
+ }
+
+ if _, werr := w.Write(block); werr != nil {
+ log.Printf("GetBlockHandler: writing response: %s", werr)
+ }
+ }
+
+ // GetBlock returns the contents of the block with the given hash from the
+ // first Keep volume holding a copy whose MD5 matches the hash. Volumes
+ // with a missing, unreadable or corrupt copy are skipped with a log
+ // message; if every volume fails, a "not found" error is returned.
+ func GetBlock(hash string) ([]byte, error) {
+ var buf = make([]byte, BLOCKSIZE)
+
+ // Attempt to read the requested hash from each keep volume in turn.
+ for _, vol := range KeepVolumes {
+ path := fmt.Sprintf("%s/%s/%s", vol, hash[0:3], hash)
+
+ f, err := os.Open(path)
+ if err != nil {
+ log.Printf("%s: opening %s: %s\n", vol, path, err)
+ continue
+ }
+
+ nread, err := f.Read(buf)
+ // Bug fix: the file was never closed, leaking one descriptor per
+ // probe (including on the success path).
+ f.Close()
+ if err != nil {
+ log.Printf("%s: reading %s: %s\n", vol, path, err)
+ continue
+ }
+ // NOTE(review): a single Read is assumed to return the whole block;
+ // io.ReadFull/ioutil.ReadFile would make that explicit — confirm
+ // before relying on this for blocks near BLOCKSIZE.
+
+ // Double check the file checksum.
+ //
+ filehash := fmt.Sprintf("%x", md5.Sum(buf[:nread]))
+ if filehash != hash {
+ // TODO(twp): this condition probably represents a bad disk and
+ // should raise major alarm bells for an administrator: e.g.
+ // they should be sent directly to an event manager at high
+ // priority or logged as urgent problems.
+ //
+ log.Printf("%s: checksum mismatch: %s (actual hash %s)\n",
+ vol, path, filehash)
+ continue
+ }
+
+ // Success!
+ return buf[:nread], nil
+ }
+
+ log.Printf("%s: not found on any volumes, giving up\n", hash)
+ return buf, errors.New("not found: " + hash)
+ }
--- /dev/null
+package main
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path"
+ "testing"
+)
+
+var TEST_BLOCK = []byte("The quick brown fox jumps over the lazy dog.")
+var TEST_HASH = "e4d909c290d0fb1ca068ffaddf22cbd0"
+var BAD_BLOCK = []byte("The magic words are squeamish ossifrage.")
+
+// Test simple block reads.
+ // Test simple block reads: with the block stored on every volume,
+ // GetBlock must succeed and return the exact block contents.
+ func TestGetBlockOK(t *testing.T) {
+ defer teardown()
+
+ // Create two test Keep volumes and store a block in each of them.
+ setup(t, 2)
+ for _, vol := range KeepVolumes {
+ store(t, vol, TEST_HASH, TEST_BLOCK)
+ }
+
+ // Check that GetBlock returns success.
+ result, err := GetBlock(TEST_HASH)
+ if err != nil {
+ t.Errorf("GetBlock error: %s", err)
+ }
+ if fmt.Sprint(result) != fmt.Sprint(TEST_BLOCK) {
+ t.Errorf("expected %s, got %s", TEST_BLOCK, result)
+ }
+ }
+
+// Test block reads when one Keep volume is missing.
+ // Test block reads when one Keep volume is missing the block: GetBlock
+ // must skip the empty volume and still return the block from the other.
+ func TestGetBlockOneKeepOK(t *testing.T) {
+ defer teardown()
+
+ // Two test Keep volumes, only the second has a block.
+ setup(t, 2)
+ store(t, KeepVolumes[1], TEST_HASH, TEST_BLOCK)
+
+ // Check that GetBlock returns success.
+ result, err := GetBlock(TEST_HASH)
+ if err != nil {
+ t.Errorf("GetBlock error: %s", err)
+ }
+ if fmt.Sprint(result) != fmt.Sprint(TEST_BLOCK) {
+ t.Errorf("expected %s, got %s", TEST_BLOCK, result)
+ }
+ }
+
+// Test block read failure.
+ // Test block read failure: with no block stored anywhere, GetBlock must
+ // return an error.
+ func TestGetBlockFail(t *testing.T) {
+ defer teardown()
+
+ // Create two empty test Keep volumes.
+ setup(t, 2)
+
+ // Check that GetBlock returns failure.
+ result, err := GetBlock(TEST_HASH)
+ if err == nil {
+ // Bug fix: the format string had no verb, so `result` was never
+ // printed and `go vet` flags the call.
+ t.Errorf("GetBlock incorrectly returned success: %v", result)
+ }
+ }
+
+// Test reading a corrupt block.
+ // Test reading a corrupt block: every volume holds data whose MD5 does
+ // not match the filename, so GetBlock must reject all copies and fail.
+ func TestGetBlockCorrupt(t *testing.T) {
+ defer teardown()
+
+ // Create two test Keep volumes and store a block in each of them,
+ // but the hash of the block does not match the filename.
+ setup(t, 2)
+ for _, vol := range KeepVolumes {
+ store(t, vol, TEST_HASH, BAD_BLOCK)
+ }
+
+ // Check that GetBlock returns failure.
+ result, err := GetBlock(TEST_HASH)
+ if err == nil {
+ t.Errorf("GetBlock incorrectly returned success: %s", result)
+ }
+ }
+
+// setup
+// Create KeepVolumes for testing.
+//
+ // setup points the package-global KeepVolumes at num_volumes fresh
+ // "<tempdir>/keep" paths for testing. Note the /keep directories
+ // themselves are not created here; store() creates them on demand.
+ func setup(t *testing.T, num_volumes int) {
+ KeepVolumes = make([]string, num_volumes)
+ for i := range KeepVolumes {
+ if dir, err := ioutil.TempDir(os.TempDir(), "keeptest"); err == nil {
+ KeepVolumes[i] = dir + "/keep"
+ } else {
+ t.Fatal(err)
+ }
+ }
+ }
+
+// teardown
+// Cleanup to perform after each test.
+//
+ // teardown removes the temporary directories created by setup (the parent
+ // of each "/keep" path). Run via defer after each test.
+ func teardown() {
+ for _, vol := range KeepVolumes {
+ os.RemoveAll(path.Dir(vol))
+ }
+ }
+
+// store
+//
+ // store writes block to keepdir/<first 3 chars of filename>/<filename>,
+ // creating the directory as needed, mirroring the on-disk layout GetBlock
+ // expects. Fails the test on any filesystem error; the nil return is kept
+ // for interface compatibility with existing callers.
+ func store(t *testing.T, keepdir string, filename string, block []byte) error {
+ blockdir := fmt.Sprintf("%s/%s", keepdir, filename[:3])
+ if err := os.MkdirAll(blockdir, 0755); err != nil {
+ t.Fatal(err)
+ }
+
+ blockpath := fmt.Sprintf("%s/%s", blockdir, filename)
+ if f, err := os.Create(blockpath); err == nil {
+ // Bug fix: Write and Close errors were silently ignored, which
+ // could let a partially-written fixture produce confusing test
+ // failures later.
+ if _, werr := f.Write(block); werr != nil {
+ t.Fatal(werr)
+ }
+ if cerr := f.Close(); cerr != nil {
+ t.Fatal(cerr)
+ }
+ } else {
+ t.Fatal(err)
+ }
+
+ return nil
+ }