Arvados-DCO-1.1-Signed-off-by: Lucas Di Pentima <ldipentima@veritasgenetics.com>
/tmp
.byebug_history
+package-lock.json
+
/config/.secret_token
/config/initializers/secret_token.rb
# Generated when building distribution packages
/package-build.version
+
+# Debugger history
+.byebug_history
\ No newline at end of file
source 'https://rubygems.org'
-gem 'rails', '~> 4.2'
+gem 'rails', '~> 5.0.0'
gem 'responders', '~> 2.0'
-gem 'protected_attributes'
group :test, :development do
gem 'factory_bot_rails'
gem 'database_cleaner'
- gem 'ruby-prof'
+
+  # As of now (2019-03-27) there's an open issue about incompatibilities with
+ # newer versions of this gem: https://github.com/rails/rails-perftest/issues/38
+ gem 'ruby-prof', '~> 0.15.0'
+
# Note: "require: false" here tells bundler not to automatically
# 'require' the packages during application startup. Installation is
# still mandatory.
gem 'simplecov', '~> 0.7.1', require: false
gem 'simplecov-rcov', require: false
gem 'mocha', require: false
+ gem 'byebug'
end
# We need this dependency because of crunchv1
gem 'arvados-cli'
-# We'll need to update related code prior to Rails 5.
-# See: https://github.com/rails/activerecord-deprecated_finders
-gem 'activerecord-deprecated_finders', require: 'active_record/deprecated_finders'
-
-# pg is the only supported database driver.
-# Note: Rails 4.2 is not compatible with pg 1.0
-# (See: https://github.com/rails/rails/pull/31671)
-gem 'pg', '~> 0.18'
+gem 'pg', '~> 1.0'
gem 'multi_json'
gem 'oj'
-# for building assets
-gem 'sass-rails', '~> 4.0'
-gem 'coffee-rails', '~> 4.0'
gem 'therubyracer'
gem 'uglifier', '~> 2.0'
gem 'passenger'
+# Locking to 5.10.3 to workaround issue in 5.11.1 (https://github.com/seattlerb/minitest/issues/730)
+gem 'minitest', '5.10.3'
+
# Restricted because omniauth >= 1.5.0 requires Ruby >= 2.1.9:
gem 'omniauth', '~> 1.4.0'
gem 'omniauth-oauth2', '~> 1.1'
gem 'andand'
-gem 'test_after_commit', :group => :test
-
-gem 'trollop'
+gem 'optimist'
gem 'faye-websocket'
gem 'themes_for_rails', git: 'https://github.com/curoverse/themes_for_rails'
gem 'rails-observers'
+gem 'rails-perftest'
+gem 'rails-controller-testing'
+
# Install any plugin gems
Dir.glob(File.join(File.dirname(__FILE__), 'lib', '**', "Gemfile")) do |f|
eval(IO.read(f), binding)
GEM
remote: https://rubygems.org/
specs:
- actionmailer (4.2.11)
- actionpack (= 4.2.11)
- actionview (= 4.2.11)
- activejob (= 4.2.11)
+ actioncable (5.0.7.2)
+ actionpack (= 5.0.7.2)
+ nio4r (>= 1.2, < 3.0)
+ websocket-driver (~> 0.6.1)
+ actionmailer (5.0.7.2)
+ actionpack (= 5.0.7.2)
+ actionview (= 5.0.7.2)
+ activejob (= 5.0.7.2)
mail (~> 2.5, >= 2.5.4)
- rails-dom-testing (~> 1.0, >= 1.0.5)
- actionpack (4.2.11)
- actionview (= 4.2.11)
- activesupport (= 4.2.11)
- rack (~> 1.6)
- rack-test (~> 0.6.2)
- rails-dom-testing (~> 1.0, >= 1.0.5)
+ rails-dom-testing (~> 2.0)
+ actionpack (5.0.7.2)
+ actionview (= 5.0.7.2)
+ activesupport (= 5.0.7.2)
+ rack (~> 2.0)
+ rack-test (~> 0.6.3)
+ rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.0.2)
- actionview (4.2.11)
- activesupport (= 4.2.11)
+ actionview (5.0.7.2)
+ activesupport (= 5.0.7.2)
builder (~> 3.1)
erubis (~> 2.7.0)
- rails-dom-testing (~> 1.0, >= 1.0.5)
+ rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.0.3)
- activejob (4.2.11)
- activesupport (= 4.2.11)
- globalid (>= 0.3.0)
- activemodel (4.2.11)
- activesupport (= 4.2.11)
- builder (~> 3.1)
- activerecord (4.2.11)
- activemodel (= 4.2.11)
- activesupport (= 4.2.11)
- arel (~> 6.0)
- activerecord-deprecated_finders (1.0.4)
- activesupport (4.2.11)
- i18n (~> 0.7)
+ activejob (5.0.7.2)
+ activesupport (= 5.0.7.2)
+ globalid (>= 0.3.6)
+ activemodel (5.0.7.2)
+ activesupport (= 5.0.7.2)
+ activerecord (5.0.7.2)
+ activemodel (= 5.0.7.2)
+ activesupport (= 5.0.7.2)
+ arel (~> 7.0)
+ activesupport (5.0.7.2)
+ concurrent-ruby (~> 1.0, >= 1.0.2)
+ i18n (>= 0.7, < 2)
minitest (~> 5.1)
- thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
acts_as_api (1.0.1)
activemodel (>= 3.0.0)
addressable (2.6.0)
public_suffix (>= 2.0.2, < 4.0)
andand (1.3.3)
- arel (6.0.4)
- arvados (1.3.1.20190301212059)
+ arel (7.1.4)
+ arvados (1.3.1.20190320201707)
activesupport (>= 3)
andand (~> 1.3, >= 1.3.3)
- cure-google-api-client (>= 0.7, < 0.8.9)
+ arvados-google-api-client (>= 0.7, < 0.8.9)
i18n (~> 0)
json (>= 1.7.7, < 3)
jwt (>= 0.1.5, < 2)
- arvados-cli (1.3.1.20190211211047)
- activesupport (>= 3.2.13, < 5)
+ arvados-cli (1.3.1.20190320201707)
+ activesupport (>= 3.2.13, < 5.1)
andand (~> 1.3, >= 1.3.3)
arvados (~> 1.3.0, >= 1.3.0)
+ arvados-google-api-client (~> 0.6, >= 0.6.3, < 0.8.9)
curb (~> 0.8)
- cure-google-api-client (~> 0.6, >= 0.6.3, < 0.8.9)
json (>= 1.7.7, < 3)
oj (~> 3.0)
optimist (~> 3.0)
+ arvados-google-api-client (0.8.7.2)
+ activesupport (>= 3.2, < 5.1)
+ addressable (~> 2.3)
+ autoparse (~> 0.3)
+ extlib (~> 0.9)
+ faraday (~> 0.9)
+ googleauth (~> 0.3)
+ launchy (~> 2.4)
+ multi_json (~> 1.10)
+ retriable (~> 1.4)
+ signet (~> 0.6)
autoparse (0.3.3)
addressable (>= 2.3.1)
extlib (>= 0.9.15)
multi_json (>= 1.0.0)
builder (3.2.3)
+ byebug (11.0.1)
capistrano (2.15.9)
highline
net-scp (>= 1.0.0)
net-sftp (>= 2.0.0)
net-ssh (>= 2.0.14)
net-ssh-gateway (>= 1.1.0)
- coffee-rails (4.2.2)
- coffee-script (>= 2.2.0)
- railties (>= 4.0.0)
- coffee-script (2.4.1)
- coffee-script-source
- execjs
- coffee-script-source (1.12.2)
- concurrent-ruby (1.1.4)
+ concurrent-ruby (1.1.5)
crass (1.0.4)
- curb (0.9.8)
- cure-google-api-client (0.8.7.1)
- activesupport (>= 3.2, < 5.0)
- addressable (~> 2.3)
- autoparse (~> 0.3)
- extlib (~> 0.9)
- faraday (~> 0.9)
- googleauth (~> 0.3)
- launchy (~> 2.4)
- multi_json (~> 1.10)
- retriable (~> 1.4)
- signet (~> 0.6)
+ curb (0.9.9)
database_cleaner (1.7.0)
erubis (2.7.0)
- eventmachine (1.2.6)
+ eventmachine (1.2.7)
execjs (2.7.0)
extlib (0.9.16)
- factory_bot (4.11.1)
- activesupport (>= 3.0.0)
- factory_bot_rails (4.11.1)
- factory_bot (~> 4.11.1)
- railties (>= 3.0.0)
- faraday (0.12.2)
+ factory_bot (5.0.2)
+ activesupport (>= 4.2.0)
+ factory_bot_rails (5.0.1)
+ factory_bot (~> 5.0.0)
+ railties (>= 4.2.0)
+ faraday (0.15.4)
multipart-post (>= 1.2, < 3)
faye-websocket (0.10.7)
eventmachine (>= 0.12.0)
websocket-driver (>= 0.5.1)
- globalid (0.4.1)
+ globalid (0.4.2)
activesupport (>= 4.2.0)
googleauth (0.8.0)
faraday (~> 0.12)
multi_json (~> 1.11)
os (>= 0.9, < 2.0)
signet (~> 0.7)
- hashie (3.5.7)
- highline (1.7.10)
- hike (1.2.3)
+ hashie (3.6.0)
+ highline (2.0.1)
httpclient (2.8.3)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
mini_mime (>= 0.1.1)
memoist (0.16.0)
metaclass (0.0.4)
+ method_source (0.9.2)
mini_mime (1.0.1)
mini_portile2 (2.4.0)
- minitest (5.11.3)
- mocha (1.5.0)
+ minitest (5.10.3)
+ mocha (1.8.0)
metaclass (~> 0.0.1)
multi_json (1.13.1)
multi_xml (0.6.0)
multipart-post (2.0.0)
- net-scp (1.2.1)
- net-ssh (>= 2.6.5)
+ net-scp (2.0.0)
+ net-ssh (>= 2.6.5, < 6.0.0)
net-sftp (2.1.2)
net-ssh (>= 2.6.5)
- net-ssh (4.2.0)
+ net-ssh (5.2.0)
net-ssh-gateway (2.0.0)
net-ssh (>= 4.0.0)
- nokogiri (1.9.1)
+ nio4r (2.3.1)
+ nokogiri (1.10.2)
mini_portile2 (~> 2.4.0)
- oauth2 (1.4.0)
- faraday (>= 0.8, < 0.13)
- jwt (~> 1.0)
+ oauth2 (1.4.1)
+ faraday (>= 0.8, < 0.16.0)
+ jwt (>= 1.0, < 3.0)
multi_json (~> 1.3)
multi_xml (~> 0.5)
rack (>= 1.2, < 3)
- oj (3.7.9)
+ oj (3.7.11)
omniauth (1.4.3)
hashie (>= 1.2, < 4)
rack (>= 1.6.2, < 3)
omniauth (~> 1.2)
optimist (3.0.0)
os (1.0.0)
- passenger (5.3.0)
+ passenger (6.0.2)
rack
rake (>= 0.8.1)
- pg (0.21.0)
- power_assert (1.1.1)
- protected_attributes (1.1.4)
- activemodel (>= 4.0.1, < 5.0)
+ pg (1.1.4)
+ power_assert (1.1.4)
public_suffix (3.0.3)
- rack (1.6.11)
+ rack (2.0.6)
rack-test (0.6.3)
rack (>= 1.0)
- rails (4.2.11)
- actionmailer (= 4.2.11)
- actionpack (= 4.2.11)
- actionview (= 4.2.11)
- activejob (= 4.2.11)
- activemodel (= 4.2.11)
- activerecord (= 4.2.11)
- activesupport (= 4.2.11)
- bundler (>= 1.3.0, < 2.0)
- railties (= 4.2.11)
- sprockets-rails
- rails-deprecated_sanitizer (1.0.3)
- activesupport (>= 4.2.0.alpha)
- rails-dom-testing (1.0.9)
- activesupport (>= 4.2.0, < 5.0)
- nokogiri (~> 1.6)
- rails-deprecated_sanitizer (>= 1.0.1)
+ rails (5.0.7.2)
+ actioncable (= 5.0.7.2)
+ actionmailer (= 5.0.7.2)
+ actionpack (= 5.0.7.2)
+ actionview (= 5.0.7.2)
+ activejob (= 5.0.7.2)
+ activemodel (= 5.0.7.2)
+ activerecord (= 5.0.7.2)
+ activesupport (= 5.0.7.2)
+ bundler (>= 1.3.0)
+ railties (= 5.0.7.2)
+ sprockets-rails (>= 2.0.0)
+ rails-controller-testing (1.0.4)
+ actionpack (>= 5.0.1.x)
+ actionview (>= 5.0.1.x)
+ activesupport (>= 5.0.1.x)
+ rails-dom-testing (2.0.3)
+ activesupport (>= 4.2.0)
+ nokogiri (>= 1.6)
rails-html-sanitizer (1.0.4)
loofah (~> 2.2, >= 2.2.2)
rails-observers (0.1.5)
activemodel (>= 4.0)
- railties (4.2.11)
- actionpack (= 4.2.11)
- activesupport (= 4.2.11)
+ rails-perftest (0.0.7)
+ railties (5.0.7.2)
+ actionpack (= 5.0.7.2)
+ activesupport (= 5.0.7.2)
+ method_source
rake (>= 0.8.7)
thor (>= 0.18.1, < 2.0)
rake (12.3.2)
ref (2.0.0)
request_store (1.4.1)
rack (>= 1.4)
- responders (2.4.0)
- actionpack (>= 4.2.0, < 5.3)
- railties (>= 4.2.0, < 5.3)
+ responders (2.4.1)
+ actionpack (>= 4.2.0, < 6.0)
+ railties (>= 4.2.0, < 6.0)
retriable (1.4.1)
- ruby-prof (0.17.0)
+ ruby-prof (0.15.9)
rvm-capistrano (1.5.6)
capistrano (~> 2.15.4)
- safe_yaml (1.0.4)
- sass (3.2.19)
- sass-rails (4.0.5)
- railties (>= 4.0.0, < 5.0)
- sass (~> 3.2.2)
- sprockets (~> 2.8, < 3.0)
- sprockets-rails (~> 2.0)
+ safe_yaml (1.0.5)
signet (0.11.0)
addressable (~> 2.3)
faraday (~> 0.9)
simplecov-html (0.7.1)
simplecov-rcov (0.2.3)
simplecov (>= 0.4.1)
- sprockets (2.12.5)
- hike (~> 1.2)
- multi_json (~> 1.0)
- rack (~> 1.0)
- tilt (~> 1.1, != 1.3.0)
- sprockets-rails (2.3.3)
- actionpack (>= 3.0)
- activesupport (>= 3.0)
- sprockets (>= 2.8, < 4.0)
- sshkey (1.9.0)
- test-unit (3.2.7)
+ sprockets (3.7.2)
+ concurrent-ruby (~> 1.0)
+ rack (> 1, < 3)
+ sprockets-rails (3.2.1)
+ actionpack (>= 4.0)
+ activesupport (>= 4.0)
+ sprockets (>= 3.0.0)
+ sshkey (2.0.0)
+ test-unit (3.3.1)
power_assert
- test_after_commit (1.1.0)
- activerecord (>= 3.2)
therubyracer (0.12.3)
libv8 (~> 3.16.14.15)
ref
thor (0.20.3)
thread_safe (0.3.6)
- tilt (1.4.1)
- trollop (2.9.9)
tzinfo (1.2.5)
thread_safe (~> 0.1)
uglifier (2.7.2)
execjs (>= 0.3.0)
json (>= 1.8.0)
- websocket-driver (0.7.0)
+ websocket-driver (0.6.5)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.3)
ruby
DEPENDENCIES
- activerecord-deprecated_finders
acts_as_api
andand
arvados (>= 1.3.1.20190301212059)
arvados-cli
- coffee-rails (~> 4.0)
+ byebug
database_cleaner
factory_bot_rails
faye-websocket
jquery-rails
lograge
logstash-event
+ minitest (= 5.10.3)
mocha
multi_json
oj
omniauth (~> 1.4.0)
omniauth-oauth2 (~> 1.1)
+ optimist
passenger
- pg (~> 0.18)
- protected_attributes
- rails (~> 4.2)
+ pg (~> 1.0)
+ rails (~> 5.0.0)
+ rails-controller-testing
rails-observers
+ rails-perftest
responders (~> 2.0)
- ruby-prof
+ ruby-prof (~> 0.15.0)
rvm-capistrano
safe_yaml
- sass-rails (~> 4.0)
simplecov (~> 0.7.1)
simplecov-rcov
sshkey
test-unit (~> 3.0)
- test_after_commit
themes_for_rails!
therubyracer
- trollop
uglifier (~> 2.0)
BUNDLED WITH
- 1.17.2
+ 1.16.6
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
require File.expand_path('../config/application', __FILE__)
+require 'rake/testtask'
Server::Application.load_tasks
ERROR_ACTIONS = [:render_error, :render_not_found]
- around_filter :set_current_request_id
- before_filter :disable_api_methods
- before_filter :set_cors_headers
- before_filter :respond_with_json_by_default
- before_filter :remote_ip
- before_filter :load_read_auths
- before_filter :require_auth_scope, except: ERROR_ACTIONS
-
- before_filter :catch_redirect_hint
- before_filter(:find_object_by_uuid,
+ around_action :set_current_request_id
+ before_action :disable_api_methods
+ before_action :set_cors_headers
+ before_action :respond_with_json_by_default
+ before_action :remote_ip
+ before_action :load_read_auths
+ before_action :require_auth_scope, except: ERROR_ACTIONS
+
+ before_action :catch_redirect_hint
+ before_action(:find_object_by_uuid,
except: [:index, :create] + ERROR_ACTIONS)
- before_filter :load_required_parameters
- before_filter :load_limit_offset_order_params, only: [:index, :contents]
- before_filter :load_where_param, only: [:index, :contents]
- before_filter :load_filters_param, only: [:index, :contents]
- before_filter :find_objects_for_index, :only => :index
- before_filter :reload_object_before_update, :only => :update
- before_filter(:render_404_if_no_object,
+ before_action :load_required_parameters
+ before_action :load_limit_offset_order_params, only: [:index, :contents]
+ before_action :load_where_param, only: [:index, :contents]
+ before_action :load_filters_param, only: [:index, :contents]
+ before_action :find_objects_for_index, :only => :index
+ before_action :reload_object_before_update, :only => :update
+ before_action(:render_404_if_no_object,
except: [:index, :create] + ERROR_ACTIONS)
theme Rails.configuration.arvados_theme
# The obvious render(json: ...) forces a slow JSON encoder. See
# #3021 and commit logs. Might be fixed in Rails 4.1.
render({
- text: SafeJSON.dump(response).html_safe,
+ plain: SafeJSON.dump(response).html_safe,
content_type: 'application/json'
}.merge opts)
end
@objects = @objects.order(@orders.join ", ") if @orders.any?
@objects = @objects.limit(@limit)
@objects = @objects.offset(@offset)
- @objects = @objects.uniq(@distinct) if not @distinct.nil?
+ @objects = @objects.distinct(@distinct) if not @distinct.nil?
end
# limit_database_read ensures @objects (which must be an
def resource_attrs
return @attrs if @attrs
@attrs = params[resource_name]
- if @attrs.is_a? String
+ if @attrs.nil?
+ @attrs = {}
+ elsif @attrs.is_a? String
@attrs = Oj.strict_load @attrs, symbol_keys: true
end
- unless @attrs.is_a? Hash
+ unless [Hash, ActionController::Parameters].include? @attrs.class
message = "No #{resource_name}"
if resource_name.index('_')
message << " (or #{resource_name.camelcase(:lower)})"
end
def respond_with_json_by_default
- html_index = request.accepts.index(Mime::HTML)
- if html_index.nil? or request.accepts[0...html_index].include?(Mime::JSON)
+ html_index = request.accepts.index(Mime[:html])
+ if html_index.nil? or request.accepts[0...html_index].include?(Mime[:json])
request.format = :json
end
end
end
def self.accept_attribute_as_json(attr, must_be_class=nil)
- before_filter lambda { accept_attribute_as_json attr, must_be_class }
+ before_action lambda { accept_attribute_as_json attr, must_be_class }
end
accept_attribute_as_json :properties, Hash
accept_attribute_as_json :info, Hash
end
def self.accept_param_as_json(key, must_be_class=nil)
- prepend_before_filter lambda { load_json_value(params, key, must_be_class) }
+ prepend_before_action lambda { load_json_value(params, key, must_be_class) }
end
accept_param_as_json :reader_tokens, Array
if @objects.respond_to? :except
list[:items_available] = @objects.
except(:limit).except(:offset).
- count(:id, distinct: true)
+ distinct.count(:id)
end
when 'none'
else
class Arvados::V1::ApiClientAuthorizationsController < ApplicationController
accept_attribute_as_json :scopes, Array
- before_filter :current_api_client_is_trusted, :except => [:current]
- before_filter :admin_required, :only => :create_system_auth
- skip_before_filter :render_404_if_no_object, :only => [:create_system_auth, :current]
- skip_before_filter :find_object_by_uuid, :only => [:create_system_auth, :current]
+ before_action :current_api_client_is_trusted, :except => [:current]
+ before_action :admin_required, :only => :create_system_auth
+ skip_before_action :render_404_if_no_object, :only => [:create_system_auth, :current]
+ skip_before_action :find_object_by_uuid, :only => [:create_system_auth, :current]
def self._create_system_auth_requires_parameters
{
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::ApiClientsController < ApplicationController
- before_filter :admin_required
+ before_action :admin_required
end
accept_attribute_as_json :command, Array
accept_attribute_as_json :scheduling_parameters, Hash
- skip_before_filter :find_object_by_uuid, only: [:current]
- skip_before_filter :render_404_if_no_object, only: [:current]
+ skip_before_action :find_object_by_uuid, only: [:current]
+ skip_before_action :render_404_if_no_object, only: [:current]
def auth
if @object.locked_by_uuid != Thread.current[:api_client_authorization].uuid
class Arvados::V1::GroupsController < ApplicationController
include TrashableController
- skip_before_filter :find_object_by_uuid, only: :shared
- skip_before_filter :render_404_if_no_object, only: :shared
+ skip_before_action :find_object_by_uuid, only: :shared
+ skip_before_action :render_404_if_no_object, only: :shared
def self._index_requires_parameters
(super rescue {}).
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::HealthcheckController < ApplicationController
- skip_before_filter :catch_redirect_hint
- skip_before_filter :find_objects_for_index
- skip_before_filter :find_object_by_uuid
- skip_before_filter :load_filters_param
- skip_before_filter :load_limit_offset_order_params
- skip_before_filter :load_read_auths
- skip_before_filter :load_where_param
- skip_before_filter :render_404_if_no_object
- skip_before_filter :require_auth_scope
+ skip_before_action :catch_redirect_hint
+ skip_before_action :find_objects_for_index
+ skip_before_action :find_object_by_uuid
+ skip_before_action :load_filters_param
+ skip_before_action :load_limit_offset_order_params
+ skip_before_action :load_read_auths
+ skip_before_action :load_where_param
+ skip_before_action :render_404_if_no_object
+ skip_before_action :require_auth_scope
- before_filter :check_auth_header
+ before_action :check_auth_header
def check_auth_header
mgmt_token = Rails.configuration.ManagementToken
accept_attribute_as_json :script_parameters, Hash
accept_attribute_as_json :runtime_constraints, Hash
accept_attribute_as_json :tasks_summary, Hash
- skip_before_filter :find_object_by_uuid, :only => [:queue, :queue_size]
- skip_before_filter :render_404_if_no_object, :only => [:queue, :queue_size]
+ skip_before_action :find_object_by_uuid, :only => [:queue, :queue_size]
+ skip_before_action :render_404_if_no_object, :only => [:queue, :queue_size]
include DbCurrentTime
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::KeepDisksController < ApplicationController
- skip_before_filter :require_auth_scope, only: :ping
- skip_before_filter :render_404_if_no_object, only: :ping
+ skip_before_action :require_auth_scope, only: :ping
+ skip_before_action :render_404_if_no_object, only: :ping
def self._ping_requires_parameters
{
class Arvados::V1::KeepServicesController < ApplicationController
- skip_before_filter :find_object_by_uuid, only: :accessible
- skip_before_filter :render_404_if_no_object, only: :accessible
- skip_before_filter :require_auth_scope, only: :accessible
+ skip_before_action :find_object_by_uuid, only: :accessible
+ skip_before_action :render_404_if_no_object, only: :accessible
+ skip_before_action :require_auth_scope, only: :accessible
def find_objects_for_index
# all users can list all keep services
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::NodesController < ApplicationController
- skip_before_filter :require_auth_scope, :only => :ping
- skip_before_filter :find_object_by_uuid, :only => :ping
- skip_before_filter :render_404_if_no_object, :only => :ping
+ skip_before_action :require_auth_scope, :only => :ping
+ skip_before_action :find_object_by_uuid, :only => :ping
+ skip_before_action :render_404_if_no_object, :only => :ping
include DbCurrentTime
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::RepositoriesController < ApplicationController
- skip_before_filter :find_object_by_uuid, :only => :get_all_permissions
- skip_before_filter :render_404_if_no_object, :only => :get_all_permissions
- before_filter :admin_required, :only => :get_all_permissions
+ skip_before_action :find_object_by_uuid, :only => :get_all_permissions
+ skip_before_action :render_404_if_no_object, :only => :get_all_permissions
+ before_action :admin_required, :only => :get_all_permissions
def get_all_permissions
# user_aks is a map of {user_uuid => array of public keys}
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::SchemaController < ApplicationController
- skip_before_filter :catch_redirect_hint
- skip_before_filter :find_objects_for_index
- skip_before_filter :find_object_by_uuid
- skip_before_filter :load_filters_param
- skip_before_filter :load_limit_offset_order_params
- skip_before_filter :load_read_auths
- skip_before_filter :load_where_param
- skip_before_filter :render_404_if_no_object
- skip_before_filter :require_auth_scope
+ skip_before_action :catch_redirect_hint
+ skip_before_action :find_objects_for_index
+ skip_before_action :find_object_by_uuid
+ skip_before_action :load_filters_param
+ skip_before_action :load_limit_offset_order_params
+ skip_before_action :load_read_auths
+ skip_before_action :load_where_param
+ skip_before_action :render_404_if_no_object
+ skip_before_action :require_auth_scope
include DbCurrentTime
object_properties[col.name] = {
type: k.serialized_attributes[col.name].object_class.to_s
}
+ elsif k.attribute_types[col.name].is_a? JsonbType::Hash
+ object_properties[col.name] = {
+ type: Hash.to_s
+ }
+ elsif k.attribute_types[col.name].is_a? JsonbType::Array
+ object_properties[col.name] = {
+ type: Array.to_s
+ }
else
object_properties[col.name] = {
type: col.type
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::UserAgreementsController < ApplicationController
- before_filter :admin_required, except: [:index, :sign, :signatures]
- skip_before_filter :find_object_by_uuid, only: [:sign, :signatures]
- skip_before_filter :render_404_if_no_object, only: [:sign, :signatures]
+ before_action :admin_required, except: [:index, :sign, :signatures]
+ skip_before_action :find_object_by_uuid, only: [:sign, :signatures]
+ skip_before_action :render_404_if_no_object, only: [:sign, :signatures]
def model_class
Link
class Arvados::V1::UsersController < ApplicationController
accept_attribute_as_json :prefs, Hash
- skip_before_filter :find_object_by_uuid, only:
+ skip_before_action :find_object_by_uuid, only:
[:activate, :current, :system, :setup, :merge]
- skip_before_filter :render_404_if_no_object, only:
+ skip_before_action :render_404_if_no_object, only:
[:activate, :current, :system, :setup, :merge]
- before_filter :admin_required, only: [:setup, :unsetup, :update_uuid]
+ before_action :admin_required, only: [:setup, :unsetup, :update_uuid]
def current
if current_user
if !@object
return render_404_if_no_object
end
- elsif !params[:user]
+ elsif !params[:user] || params[:user].empty?
raise ArgumentError.new "Required uuid or user"
elsif !params[:user]['email']
raise ArgumentError.new "Require user email"
# SPDX-License-Identifier: AGPL-3.0
class Arvados::V1::VirtualMachinesController < ApplicationController
- skip_before_filter :find_object_by_uuid, :only => :get_all_logins
- skip_before_filter :render_404_if_no_object, :only => :get_all_logins
- before_filter(:admin_required,
+ skip_before_action :find_object_by_uuid, :only => :get_all_logins
+ skip_before_action :render_404_if_no_object, :only => :get_all_logins
+ before_action(:admin_required,
:only => [:logins, :get_all_logins])
# Get all login permissons (user uuid, login account, SSH key) for a
# SPDX-License-Identifier: AGPL-3.0
class DatabaseController < ApplicationController
- skip_before_filter :find_object_by_uuid
- skip_before_filter :render_404_if_no_object
- before_filter :admin_required
+ skip_before_action :find_object_by_uuid
+ skip_before_action :render_404_if_no_object
+ before_action :admin_required
def reset
raise ArvadosModel::PermissionDeniedError unless Rails.env == 'test'
class StaticController < ApplicationController
respond_to :json, :html
- skip_before_filter :find_object_by_uuid
- skip_before_filter :render_404_if_no_object
- skip_before_filter :require_auth_scope, only: [:home, :empty, :login_failure]
+ skip_before_action :find_object_by_uuid
+ skip_before_action :render_404_if_no_object
+ skip_before_action :require_auth_scope, only: [:home, :empty, :login_failure]
def home
respond_to do |f|
end
def empty
- render text: ""
+ render plain: ""
end
end
# SPDX-License-Identifier: AGPL-3.0
class UserSessionsController < ApplicationController
- before_filter :require_auth_scope, :only => [ :destroy ]
+ before_action :require_auth_scope, :only => [ :destroy ]
- skip_before_filter :set_cors_headers
- skip_before_filter :find_object_by_uuid
- skip_before_filter :render_404_if_no_object
+ skip_before_action :set_cors_headers
+ skip_before_action :find_object_by_uuid
+ skip_before_action :render_404_if_no_object
respond_to :html
# omniauth callback method
def create
- omniauth = env['omniauth.auth']
+ omniauth = request.env['omniauth.auth']
identity_url_ok = (omniauth['info']['identity_url'].length > 0) rescue false
unless identity_url_ok
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class ApplicationRecord < ActiveRecord::Base
+ self.abstract_class = true
+end
\ No newline at end of file
require 'serializers'
require 'request_error'
-class ArvadosModel < ActiveRecord::Base
+class ArvadosModel < ApplicationRecord
self.abstract_class = true
include ArvadosModelUpdates
# penalty.
attr_accessor :async_permissions_update
+ # Ignore listed attributes on mass assignments
+ def self.protected_attributes
+ []
+ end
+
class PermissionDeniedError < RequestError
def http_status
403
# The following permit! is necessary even with
# "ActionController::Parameters.permit_all_parameters = true",
# because permit_all does not permit nested attributes.
+ raw_params ||= {}
+
if raw_params
+ raw_params = raw_params.to_hash
+ raw_params.delete_if { |k, _| self.protected_attributes.include? k }
serialized_attributes.each do |colname, coder|
param = raw_params[colname.to_sym]
if param.nil?
raise ArgumentError.new("#{colname} parameter cannot have non-string hash keys")
end
end
+ # Check JSONB columns that aren't listed on serialized_attributes
+ columns.select{|c| c.type == :jsonb}.collect{|j| j.name}.each do |colname|
+ if serialized_attributes.include? colname || raw_params[colname.to_sym].nil?
+ next
+ end
+ if has_nonstring_keys?(raw_params[colname.to_sym])
+ raise ArgumentError.new("#{colname} parameter cannot have non-string hash keys")
+ end
+ end
end
ActionController::Parameters.new(raw_params).permit!
end
# discover a unique name. It is necessary to handle name choosing at
# this level (as opposed to the client) to ensure that record creation
# never fails due to a race condition.
- err = rn.original_exception
+ err = rn.cause
raise unless err.is_a?(PG::UniqueViolation)
# Unfortunately ActiveRecord doesn't abstract out any of the
def self.full_text_tsvector
parts = full_text_searchable_columns.collect do |column|
- cast = serialized_attributes[column] ? '::text' : ''
+ is_jsonb = self.columns.select{|x|x.name == column}[0].type == :jsonb
+ cast = (is_jsonb || serialized_attributes[column]) ? '::text' : ''
"coalesce(#{column}#{cast},'')"
end
"to_tsvector('english', substr(#{parts.join(" || ' ' || ")}, 0, 8000))"
end
rescue ActiveRecord::RecordNotFound => e
errors.add :owner_uuid, "is not owned by any user: #{e}"
- return false
+ throw(:abort)
end
if uuid_in_path[x]
if x == owner_uuid
else
errors.add :owner_uuid, "has an ownership cycle"
end
- return false
+ throw(:abort)
end
uuid_in_path[x] = true
end
# we'll convert symbols to strings when loading from the
# database. (Otherwise, loading and saving an object with existing
# symbols in a serialized field will crash.)
- self.class.serialized_attributes.each do |colname, attr|
+ jsonb_cols = self.class.columns.select{|c| c.type == :jsonb}.collect{|j| j.name}
+ (jsonb_cols + self.class.serialized_attributes.keys).uniq.each do |colname|
if self.class.has_symbols? attributes[colname]
attributes[colname] = self.class.recursive_stringify attributes[colname]
send(colname + '=',
include CommonApiTemplate
include Trashable
- serialize :properties, Hash
- serialize :storage_classes_desired, Array
- serialize :storage_classes_confirmed, Array
+  # PostgreSQL JSONB columns should NOT be declared as serialized; Rails 5
+  # already knows how to properly treat them.
+ attribute :properties, :jsonbHash, default: {}
+ attribute :storage_classes_desired, :jsonbArray, default: ["default"]
+ attribute :storage_classes_confirmed, :jsonbArray, default: []
before_validation :default_empty_manifest
before_validation :default_storage_classes, on: :create
FILE_TOKEN = /^[[:digit:]]+:[[:digit:]]+:/
def check_signatures
- return false if self.manifest_text.nil?
+ throw(:abort) if self.manifest_text.nil?
return true if current_user.andand.is_admin
end
def check_encoding
- if manifest_text.encoding.name == 'UTF-8' and manifest_text.valid_encoding?
- true
- else
+ if !(manifest_text.encoding.name == 'UTF-8' and manifest_text.valid_encoding?)
begin
# If Ruby thinks the encoding is something else, like 7-bit
# ASCII, but its stored bytes are equal to the (valid) UTF-8
rescue
end
errors.add :manifest_text, "must use UTF-8 encoding"
- false
+ throw(:abort)
end
end
true
rescue ArgumentError => e
errors.add :manifest_text, e.message
- false
+ throw(:abort)
end
end
extend DbCurrentTime
extend LogReuseInfo
+  # PostgreSQL JSONB columns should NOT be declared as serialized; Rails 5
+  # already knows how to properly treat them.
+ attribute :secret_mounts, :jsonbHash, default: {}
+ attribute :runtime_status, :jsonbHash, default: {}
+ attribute :runtime_auth_scopes, :jsonbHash, default: {}
+
serialize :environment, Hash
serialize :mounts, Hash
serialize :runtime_constraints, Hash
serialize :command, Array
serialize :scheduling_parameters, Hash
- serialize :secret_mounts, Hash
- serialize :runtime_status, Hash
before_validation :fill_field_defaults, :if => :new_record?
before_validation :set_timestamps
primary_key: :uuid,
}
- serialize :properties, Hash
+  # PostgreSQL JSONB columns should NOT be declared as serialized; Rails 5
+  # already knows how to properly treat them.
+ attribute :properties, :jsonbHash, default: {}
+ attribute :secret_mounts, :jsonbHash, default: {}
+
serialize :environment, Hash
serialize :mounts, Hash
serialize :runtime_constraints, Hash
serialize :command, Array
serialize :scheduling_parameters, Hash
- serialize :secret_mounts, Hash
before_validation :fill_field_defaults, :if => :new_record?
before_validation :validate_runtime_constraints
self.environment ||= {}
self.runtime_constraints ||= {}
self.mounts ||= {}
+ self.secret_mounts ||= {}
self.cwd ||= "."
self.container_count_max ||= Rails.configuration.container_count_max
self.scheduling_parameters ||= {}
include CanBeAnOwner
include Trashable
- serialize :properties, Hash
+  # PostgreSQL JSONB columns should NOT be declared as serialized; Rails 5
+  # already knows how to properly treat them.
+ attribute :properties, :jsonbHash, default: {}
after_create :invalidate_permissions_cache
after_update :maybe_invalidate_permissions_cache
end
true
end
-
end
extend CurrentApiClient
extend LogReuseInfo
serialize :components, Hash
- attr_protected :arvados_sdk_version, :docker_image_locator
serialize :script_parameters, Hash
serialize :runtime_constraints, Hash
serialize :tasks_summary, Hash
["components"]
end
+ def self.protected_attributes
+ [:arvados_sdk_version, :docker_image_locator]
+ end
+
def assert_finished
update_attributes(finished_at: finished_at || db_current_time,
success: success.nil? ? false : success,
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class JsonbType
+ # Emulate pre-Rails 5.0 behavior by interpreting NULL/nil as
+ # some other default value.
+ class WithDefault < ActiveModel::Type::Value
+ include ActiveModel::Type::Helpers::Mutable
+
+ def default_value
+ nil
+ end
+
+ def deserialize(value)
+ if value.nil?
+ self.default_value
+ elsif value.is_a?(::String)
+ SafeJSON.load(value) rescue self.default_value
+ else
+ value
+ end
+ end
+
+ def serialize(value)
+ if value.nil?
+ self.default_value
+ else
+ SafeJSON.dump(value)
+ end
+ end
+ end
+
+ class Hash < JsonbType::WithDefault
+ def default_value
+ {}
+ end
+ end
+
+ class Array < JsonbType::WithDefault
+ def default_value
+ []
+ end
+ end
+end
\ No newline at end of file
include HasUuid
include KindAndEtag
include CommonApiTemplate
- serialize :properties, Hash
+
+ # PostgreSQL JSONB columns should NOT be declared as serialized; Rails 5
+ # already knows how to properly treat them.
+ attribute :properties, :jsonbHash, default: {}
+
before_create :permission_to_attach_to_objects
before_update :permission_to_attach_to_objects
after_update :maybe_invalidate_permissions_cache
super
end
end
-
end
include HasUuid
include KindAndEtag
include CommonApiTemplate
- serialize :info, Hash
- serialize :properties, Hash
+
+ # PostgreSQL JSONB columns should NOT be declared as serialized; Rails 5
+ # already knows how to properly treat them.
+ attribute :properties, :jsonbHash, default: {}
+ attribute :info, :jsonbHash, default: {}
+
before_validation :ensure_ping_secret
after_update :dns_server_update
end
end
- if self.state.in?(States)
- true
- else
+ if !self.state.in?(States)
errors.add :state, "'#{state.inspect} must be one of: [#{States.join ', '}]"
- false
+ throw(:abort)
end
end
# delete user signatures, login, repo, and vm perms, and mark as inactive
def unsetup
# delete oid_login_perms for this user
- Link.destroy_all(tail_uuid: self.email,
+ Link.where(tail_uuid: self.email,
link_class: 'permission',
- name: 'can_login')
+ name: 'can_login').destroy_all
# delete repo_perms for this user
- Link.destroy_all(tail_uuid: self.uuid,
+ Link.where(tail_uuid: self.uuid,
link_class: 'permission',
- name: 'can_manage')
+ name: 'can_manage').destroy_all
# delete vm_login_perms for this user
- Link.destroy_all(tail_uuid: self.uuid,
+ Link.where(tail_uuid: self.uuid,
link_class: 'permission',
- name: 'can_login')
+ name: 'can_login').destroy_all
# delete "All users" group read permissions for this user
group = Group.where(name: 'All users').select do |g|
g[:uuid].match(/-f+$/)
end.first
- Link.destroy_all(tail_uuid: self.uuid,
+ Link.where(tail_uuid: self.uuid,
head_uuid: group[:uuid],
link_class: 'permission',
- name: 'can_read')
+ name: 'can_read').destroy_all
# delete any signatures by this user
- Link.destroy_all(link_class: 'signature',
- tail_uuid: self.uuid)
+ Link.where(link_class: 'signature',
+ tail_uuid: self.uuid).destroy_all
# delete user preferences (including profile)
self.prefs = {}
def verify_repositories_empty
unless repositories.first.nil?
errors.add(:username, "can't be unset when the user owns repositories")
- false
+ throw(:abort)
end
end
--- /dev/null
+#!/usr/bin/env ruby
+
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
+load Gem.bin_path('bundler', 'bundle')
--- /dev/null
+#!/usr/bin/env ruby
+
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+APP_PATH = File.expand_path('../config/application', __dir__)
+require_relative '../config/boot'
+require 'rails/commands'
--- /dev/null
+#!/usr/bin/env ruby
+
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require_relative '../config/boot'
+require 'rake'
+Rake.application.run
--- /dev/null
+#!/usr/bin/env ruby
+
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require 'pathname'
+require 'fileutils'
+include FileUtils
+
+# path to your application root.
+APP_ROOT = Pathname.new File.expand_path('../../', __FILE__)
+
+def system!(*args)
+ system(*args) || abort("\n== Command #{args} failed ==")
+end
+
+chdir APP_ROOT do
+ # This script is a starting point to setup your application.
+ # Add necessary setup steps to this file.
+
+ puts '== Installing dependencies =='
+ system! 'gem install bundler --conservative'
+ system('bundle check') || system!('bundle install')
+
+ # puts "\n== Copying sample files =="
+ # unless File.exist?('config/database.yml')
+ # cp 'config/database.yml.sample', 'config/database.yml'
+ # end
+
+ puts "\n== Preparing database =="
+ system! 'bin/rails db:setup'
+
+ puts "\n== Removing old logs and tempfiles =="
+ system! 'bin/rails log:clear tmp:clear'
+
+ puts "\n== Restarting application server =="
+ system! 'bin/rails restart'
+end
--- /dev/null
+#!/usr/bin/env ruby
+
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require 'pathname'
+require 'fileutils'
+include FileUtils
+
+# path to your application root.
+APP_ROOT = Pathname.new File.expand_path('../../', __FILE__)
+
+def system!(*args)
+ system(*args) || abort("\n== Command #{args} failed ==")
+end
+
+chdir APP_ROOT do
+ # This script is a way to update your development environment automatically.
+ # Add necessary update steps to this file.
+
+ puts '== Installing dependencies =='
+ system! 'gem install bundler --conservative'
+ system('bundle check') || system!('bundle install')
+
+ puts "\n== Updating database =="
+ system! 'bin/rails db:migrate'
+
+ puts "\n== Removing old logs and tempfiles =="
+ system! 'bin/rails log:clear tmp:clear'
+
+ puts "\n== Restarting application server =="
+ system! 'bin/rails restart'
+end
test:
force_ssl: false
cache_classes: true
- serve_static_files: true
- static_cache_control: public, max-age=3600
+ public_file_server:
+ enabled: true
+ headers:
+ 'Cache-Control': public, max-age=3600
whiny_nils: true
consider_all_requests_local: true
action_controller.perform_caching: false
require File.expand_path('../boot', __FILE__)
-require 'rails/all'
+require "rails"
+# Pick only the frameworks we need:
+require "active_model/railtie"
+require "active_job/railtie"
+require "active_record/railtie"
+require "action_controller/railtie"
+require "action_mailer/railtie"
+require "action_view/railtie"
+# Skip ActionCable (new in Rails 5.0) as it adds '/cable' routes that we're not using
+# require "action_cable/engine"
+require "sprockets/railtie"
+require "rails/test_unit/railtie"
+
require 'digest'
module Kernel
# Load entire application at startup.
config.eager_load = true
- config.active_record.raise_in_transactional_callbacks = true
-
config.active_support.test_order = :sorted
config.action_dispatch.perform_deep_munge = false
#
# SPDX-License-Identifier: AGPL-3.0
-require 'rubygems'
-
# Set up gems listed in the Gemfile.
-ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__)
+ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__)
-require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
+require 'bundler/setup'
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+development:
+ adapter: async
+
+test:
+ adapter: async
+
+production:
+ adapter: redis
+ url: redis://localhost:6379/1
# SPDX-License-Identifier: AGPL-3.0
# Load the rails application
-require File.expand_path('../application', __FILE__)
+require_relative 'application'
require 'josh_id'
# Initialize the rails application
-Server::Application.initialize!
+Rails.application.initialize!
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Be sure to restart your server when you modify this file.
+
+# ActiveSupport::Reloader.to_prepare do
+# ApplicationController.renderer.defaults.merge!(
+# http_host: 'example.org',
+# https: false
+# )
+# end
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Be sure to restart your server when you modify this file.
+
+# Version of your assets, change this if you want to expire all your assets.
+Rails.application.config.assets.version = '1.0'
+
+# Add additional assets to the asset load path
+# Rails.application.config.assets.paths << Emoji.images_path
+
+# Precompile additional assets.
+# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
+# Rails.application.config.assets.precompile += %w( search.js )
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Be sure to restart your server when you modify this file.
+
+# Specify a serializer for the signed and encrypted cookie jars.
+# Valid options are :json, :marshal, and :hybrid.
+Rails.application.config.action_dispatch.cookies_serializer = :marshal
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# JSONB backed Hash & Array types that default to their empty versions when
+# reading NULL from the database, or get nil passed by parameter.
+ActiveRecord::Type.register(:jsonbHash, JsonbType::Hash)
+ActiveRecord::Type.register(:jsonbArray, JsonbType::Array)
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Be sure to restart your server when you modify this file.
+
+# Configure sensitive parameters which will be filtered from the log file.
+Rails.application.config.filter_parameters += [:password]
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Be sure to restart your server when you modify this file.
+#
+# This file contains migration options to ease your Rails 5.0 upgrade.
+#
+# Once upgraded flip defaults one by one to migrate to the new default.
+#
+# Read the Guide for Upgrading Ruby on Rails for more info on each option.
+
+Rails.application.config.action_controller.raise_on_unfiltered_parameters = true
+
+# Per-form CSRF tokens (Rails 5 default: true); kept disabled to preserve pre-5.0 behavior.
+Rails.application.config.action_controller.per_form_csrf_tokens = false
+
+# Origin-checking CSRF mitigation (Rails 5 default: true); kept disabled to preserve pre-5.0 behavior.
+Rails.application.config.action_controller.forgery_protection_origin_check = false
+
+# Ruby 2.4 `to_time` timezone preservation (Rails 5 default: true);
+# kept disabled to preserve pre-5.0 behavior.
+ActiveSupport.to_time_preserves_timezone = false
+
+# Requiring `belongs_to` associations by default (Rails 5 default: true); kept disabled to preserve pre-5.0 behavior.
+Rails.application.config.active_record.belongs_to_required_by_default = false
# Be sure to restart your server when you modify this file.
-Server::Application.config.session_store :cookie_store, :key => '_server_session'
+Rails.application.config.session_store :cookie_store, key: '_server_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Puma can serve each request in a thread from an internal thread pool.
+# The `threads` method setting takes two numbers a minimum and maximum.
+# Any libraries that use thread pools should be configured to match
+# the maximum value specified for Puma. Default is set to 5 threads for minimum
+# and maximum, this matches the default thread size of Active Record.
+#
+threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }.to_i
+threads threads_count, threads_count
+
+# Specifies the `port` that Puma will listen on to receive requests, default is 3000.
+#
+port ENV.fetch("PORT") { 3000 }
+
+# Specifies the `environment` that Puma will run in.
+#
+environment ENV.fetch("RAILS_ENV") { "development" }
+
+# Specifies the number of `workers` to boot in clustered mode.
+# Workers are forked webserver processes. If using threads and workers together
+# the concurrency of the application would be max `threads` * `workers`.
+# Workers do not work on JRuby or Windows (both of which do not support
+# processes).
+#
+# workers ENV.fetch("WEB_CONCURRENCY") { 2 }
+
+# Use the `preload_app!` method when specifying a `workers` number.
+# This directive tells Puma to first boot the application and load code
+# before forking the application. This takes advantage of Copy On Write
+# process behavior so workers use less memory. If you use this option
+# you need to make sure to reconnect any threads in the `on_worker_boot`
+# block.
+#
+# preload_app!
+
+# The code in the `on_worker_boot` will be called if you are using
+# clustered mode by specifying a number of `workers`. After each worker
+# process is booted this block will be run, if you are using `preload_app!`
+# option you will want to use this block to reconnect to any threads
+# or connections that may have been created at application boot, Ruby
+# cannot share connections between processes.
+#
+# on_worker_boot do
+# ActiveRecord::Base.establish_connection if defined?(ActiveRecord)
+# end
+
+# Allow puma to be restarted by `rails restart` command.
+plugin :tmp_restart
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+# Be sure to restart your server when you modify this file.
+
+# Your secret key is used for verifying the integrity of signed cookies.
+# If you change this key, all old signed cookies will become invalid!
+
+# Make sure the secret is at least 30 characters and all random,
+# no regular words or you'll be exposed to dictionary attacks.
+# You can use `rails secret` to generate a secure secret key.
+
+# Make sure the secrets in this file are kept private
+# if you're sharing your code publicly.
+
+development:
+ secret_key_base: ef8dfe92893202f906d198094f428aaefa75749338e306ed2874938598cad7153ef0dd3cb8036c618cc7c27bb0c6c559728e8cc224da7cdfa2ad1d02874643b0
+
+test:
+ secret_key_base: 0b5454fe8163063950a7124348e2bc780fabbb022fa15f8a074c2fbcfce8eca480ed46b549b87738904f2bae6617ad949c3c3579e272d486c25aaa0ead563355
+
+# Do not keep production secrets in the repository,
+# instead read values from the environment.
+production:
+ secret_key_base: <%= ENV["SECRET_KEY_BASE"] %>
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+%w(
+ .ruby-version
+ .rbenv-vars
+ tmp/restart.txt
+ tmp/caching-dev.txt
+).each { |path| Spring.watch(path) }
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateCollections < ActiveRecord::Migration
+class CreateCollections < ActiveRecord::Migration[4.2]
def change
create_table :collections do |t|
t.string :locator
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateMetadata < ActiveRecord::Migration
+class CreateMetadata < ActiveRecord::Migration[4.2]
def change
create_table :metadata do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameMetadataClass < ActiveRecord::Migration
+class RenameMetadataClass < ActiveRecord::Migration[4.2]
def up
rename_column :metadata, :metadatum_class, :metadata_class
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameCollectionCreatedByClient < ActiveRecord::Migration
+class RenameCollectionCreatedByClient < ActiveRecord::Migration[4.2]
def up
rename_column :collections, :create_by_client, :created_by_client
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddUuidToCollections < ActiveRecord::Migration
+class AddUuidToCollections < ActiveRecord::Migration[4.2]
def change
add_column :collections, :uuid, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateNodes < ActiveRecord::Migration
+class CreateNodes < ActiveRecord::Migration[4.2]
def up
create_table :nodes do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class CreatePipelines < ActiveRecord::Migration
+class CreatePipelines < ActiveRecord::Migration[4.2]
def up
create_table :pipelines do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class CreatePipelineInvocations < ActiveRecord::Migration
+class CreatePipelineInvocations < ActiveRecord::Migration[4.2]
def up
create_table :pipeline_invocations do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class AddIndexToCollectionsAndMetadata < ActiveRecord::Migration
+class AddIndexToCollectionsAndMetadata < ActiveRecord::Migration[4.2]
def up
add_index :collections, :uuid, :unique => true
add_index :metadata, :uuid, :unique => true
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateSpecimens < ActiveRecord::Migration
+class CreateSpecimens < ActiveRecord::Migration[4.2]
def up
create_table :specimens do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateProjects < ActiveRecord::Migration
+class CreateProjects < ActiveRecord::Migration[4.2]
def up
create_table :projects do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameMetadataAttributes < ActiveRecord::Migration
+class RenameMetadataAttributes < ActiveRecord::Migration[4.2]
def up
rename_column :metadata, :target_kind, :tail_kind
rename_column :metadata, :target_uuid, :tail
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateUsers < ActiveRecord::Migration
+class CreateUsers < ActiveRecord::Migration[4.2]
def change
create_table :users do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateLogs < ActiveRecord::Migration
+class CreateLogs < ActiveRecord::Migration[4.2]
def up
create_table :logs do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class AddModifiedAtToLogs < ActiveRecord::Migration
+class AddModifiedAtToLogs < ActiveRecord::Migration[4.2]
def change
add_column :logs, :modified_at, :datetime
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddUuidIndexToUsers < ActiveRecord::Migration
+class AddUuidIndexToUsers < ActiveRecord::Migration[4.2]
def change
add_index :users, :uuid, :unique => true
end
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateApiClients < ActiveRecord::Migration
+class CreateApiClients < ActiveRecord::Migration[4.2]
def change
create_table :api_clients do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateApiClientAuthorizations < ActiveRecord::Migration
+class CreateApiClientAuthorizations < ActiveRecord::Migration[4.2]
def change
create_table :api_client_authorizations do |t|
t.string :api_token, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameCreatedByToOwner < ActiveRecord::Migration
+class RenameCreatedByToOwner < ActiveRecord::Migration[4.2]
def tables
%w{api_clients collections logs metadata nodes pipelines pipeline_invocations projects specimens users}
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameMetadataToLinks < ActiveRecord::Migration
+class RenameMetadataToLinks < ActiveRecord::Migration[4.2]
def up
rename_table :metadata, :links
rename_column :links, :tail, :tail_uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPropertiesToSpecimen < ActiveRecord::Migration
+class AddPropertiesToSpecimen < ActiveRecord::Migration[4.2]
def change
add_column :specimens, :properties, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddManifestTextToCollection < ActiveRecord::Migration
+class AddManifestTextToCollection < ActiveRecord::Migration[4.2]
def change
add_column :collections, :manifest_text, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateJobs < ActiveRecord::Migration
+class CreateJobs < ActiveRecord::Migration[4.2]
def change
create_table :jobs do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateJobSteps < ActiveRecord::Migration
+class CreateJobSteps < ActiveRecord::Migration[4.2]
def change
create_table :job_steps do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPriorityToJobs < ActiveRecord::Migration
+class AddPriorityToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :priority, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddIndexOnTimestamps < ActiveRecord::Migration
+class AddIndexOnTimestamps < ActiveRecord::Migration[4.2]
def tables
%w{api_clients collections jobs job_steps links logs nodes pipeline_invocations pipelines projects specimens users}
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPropertiesToPipelineInvocations < ActiveRecord::Migration
+class AddPropertiesToPipelineInvocations < ActiveRecord::Migration[4.2]
def change
add_column :pipeline_invocations, :properties, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RemoveNativeTargetFromLinks < ActiveRecord::Migration
+class RemoveNativeTargetFromLinks < ActiveRecord::Migration[4.2]
def up
remove_column :links, :native_target_id
remove_column :links, :native_target_type
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameProjectsToGroups < ActiveRecord::Migration
+class RenameProjectsToGroups < ActiveRecord::Migration[4.2]
def up
rename_table :projects, :groups
rename_index :groups, :index_projects_on_created_at, :index_groups_on_created_at
#
# SPDX-License-Identifier: AGPL-3.0
-class AddIsLockedByToJobs < ActiveRecord::Migration
+class AddIsLockedByToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :is_locked_by, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddLogToJobs < ActiveRecord::Migration
+class AddLogToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :log, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddTasksSummaryToJobs < ActiveRecord::Migration
+class AddTasksSummaryToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :tasks_summary, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddResourceLimitsToJobs < ActiveRecord::Migration
+class AddResourceLimitsToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :resource_limits, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameJobCommandToScript < ActiveRecord::Migration
+class RenameJobCommandToScript < ActiveRecord::Migration[4.2]
def up
rename_column :jobs, :command, :script
rename_column :jobs, :command_parameters, :script_parameters
#
# SPDX-License-Identifier: AGPL-3.0
-class RenamePipelineInvocationToPipelineInstance < ActiveRecord::Migration
+class RenamePipelineInvocationToPipelineInstance < ActiveRecord::Migration[4.2]
def up
rename_table :pipeline_invocations, :pipeline_instances
rename_index :pipeline_instances, :index_pipeline_invocations_on_created_at, :index_pipeline_instances_on_created_at
#
# SPDX-License-Identifier: AGPL-3.0
-class RenamePipelinesToPipelineTemplates < ActiveRecord::Migration
+class RenamePipelinesToPipelineTemplates < ActiveRecord::Migration[4.2]
def up
rename_column :pipeline_instances, :pipeline_uuid, :pipeline_template_uuid
rename_table :pipelines, :pipeline_templates
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameJobStepsToJobTasks < ActiveRecord::Migration
+class RenameJobStepsToJobTasks < ActiveRecord::Migration[4.2]
def up
rename_table :job_steps, :job_tasks
rename_index :job_tasks, :index_job_steps_on_created_at, :index_job_tasks_on_created_at
#
# SPDX-License-Identifier: AGPL-3.0
-class AddDefaultOwnerToUsers < ActiveRecord::Migration
+class AddDefaultOwnerToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :default_owner, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddDefaultOwnerToApiClientAuthorizations < ActiveRecord::Migration
+class AddDefaultOwnerToApiClientAuthorizations < ActiveRecord::Migration[4.2]
def change
add_column :api_client_authorizations, :default_owner, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateCommits < ActiveRecord::Migration
+class CreateCommits < ActiveRecord::Migration[4.2]
def change
create_table :commits do |t|
t.string :repository_name
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateCommitAncestors < ActiveRecord::Migration
+class CreateCommitAncestors < ActiveRecord::Migration[4.2]
def change
create_table :commit_ancestors do |t|
t.string :repository_name
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameOrvosToArvados < ActiveRecord::Migration
+class RenameOrvosToArvados < ActiveRecord::Migration[4.2]
def up
Link.update_all("head_kind=replace(head_kind,'orvos','arvados')")
Link.update_all("tail_kind=replace(tail_kind,'orvos','arvados')")
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateKeepDisks < ActiveRecord::Migration
+class CreateKeepDisks < ActiveRecord::Migration[4.2]
def change
create_table :keep_disks do |t|
t.string :uuid, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class AddServiceHostAndServicePortAndServiceSslFlagToKeepDisks < ActiveRecord::Migration
+class AddServiceHostAndServicePortAndServiceSslFlagToKeepDisks < ActiveRecord::Migration[4.2]
def change
add_column :keep_disks, :service_host, :string
add_column :keep_disks, :service_port, :integer
#
# SPDX-License-Identifier: AGPL-3.0
-class AddCreatedByJobTaskToJobTasks < ActiveRecord::Migration
+class AddCreatedByJobTaskToJobTasks < ActiveRecord::Migration[4.2]
def change
add_column :job_tasks, :created_by_job_task, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddQsequenceToJobTasks < ActiveRecord::Migration
+class AddQsequenceToJobTasks < ActiveRecord::Migration[4.2]
def change
add_column :job_tasks, :qsequence, :integer
end
#
# SPDX-License-Identifier: AGPL-3.0
-class FixJobTaskQsequenceType < ActiveRecord::Migration
+class FixJobTaskQsequenceType < ActiveRecord::Migration[4.2]
def up
change_column :job_tasks, :qsequence, :integer, :limit => 8
end
#
# SPDX-License-Identifier: AGPL-3.0
-class UpdateNodesIndex < ActiveRecord::Migration
+class UpdateNodesIndex < ActiveRecord::Migration[4.2]
def up
remove_index :nodes, :hostname
add_index :nodes, :hostname
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateAuthorizedKeys < ActiveRecord::Migration
+class CreateAuthorizedKeys < ActiveRecord::Migration[4.2]
def change
create_table :authorized_keys do |t|
t.string :uuid, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateVirtualMachines < ActiveRecord::Migration
+class CreateVirtualMachines < ActiveRecord::Migration[4.2]
def change
create_table :virtual_machines do |t|
t.string :uuid, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateRepositories < ActiveRecord::Migration
+class CreateRepositories < ActiveRecord::Migration[4.2]
def change
create_table :repositories do |t|
t.string :uuid, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameAuthorizedKeyAuthorizedUserToAuthorizedUserUuid < ActiveRecord::Migration
+class RenameAuthorizedKeyAuthorizedUserToAuthorizedUserUuid < ActiveRecord::Migration[4.2]
def up
remove_index :authorized_keys, [:authorized_user, :expires_at]
rename_column :authorized_keys, :authorized_user, :authorized_user_uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class AddNameUniqueIndexToRepositories < ActiveRecord::Migration
+class AddNameUniqueIndexToRepositories < ActiveRecord::Migration[4.2]
def up
remove_index :repositories, :name
add_index :repositories, :name, :unique => true
#
# SPDX-License-Identifier: AGPL-3.0
-class AddIsTrustedToApiClients < ActiveRecord::Migration
+class AddIsTrustedToApiClients < ActiveRecord::Migration[4.2]
def change
add_column :api_clients, :is_trusted, :boolean, :default => false
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddIsActiveToUsers < ActiveRecord::Migration
+class AddIsActiveToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :is_active, :boolean, :default => false
end
#
# SPDX-License-Identifier: AGPL-3.0
-class ActivateAllAdmins < ActiveRecord::Migration
+class ActivateAllAdmins < ActiveRecord::Migration[4.2]
def up
User.update_all({is_active: true}, ['is_admin=?', true])
end
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateTraits < ActiveRecord::Migration
+class CreateTraits < ActiveRecord::Migration[4.2]
def change
create_table :traits do |t|
t.string :uuid, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateHumans < ActiveRecord::Migration
+class CreateHumans < ActiveRecord::Migration[4.2]
def change
create_table :humans do |t|
t.string :uuid, :null => false
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameForeignUuidAttributes < ActiveRecord::Migration
+class RenameForeignUuidAttributes < ActiveRecord::Migration[4.2]
def change
rename_column :api_client_authorizations, :default_owner, :default_owner_uuid
[:api_clients, :authorized_keys, :collections,
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameJobForeignUuidAttributes < ActiveRecord::Migration
+class RenameJobForeignUuidAttributes < ActiveRecord::Migration[4.2]
def change
rename_column :jobs, :cancelled_by_client, :cancelled_by_client_uuid
rename_column :jobs, :cancelled_by_user, :cancelled_by_user_uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameUserDefaultOwner < ActiveRecord::Migration
+class RenameUserDefaultOwner < ActiveRecord::Migration[4.2]
def change
rename_column :users, :default_owner, :default_owner_uuid
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddScopesToApiClientAuthorizations < ActiveRecord::Migration
+class AddScopesToApiClientAuthorizations < ActiveRecord::Migration[4.2]
def change
add_column :api_client_authorizations, :scopes, :text, :null => false, :default => ['all'].to_yaml
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameResourceLimitsToRuntimeConstraints < ActiveRecord::Migration
+class RenameResourceLimitsToRuntimeConstraints < ActiveRecord::Migration[4.2]
def change
rename_column :jobs, :resource_limits, :runtime_constraints
end
#
# SPDX-License-Identifier: AGPL-3.0
-class NormalizeCollectionUuid < ActiveRecord::Migration
+class NormalizeCollectionUuid < ActiveRecord::Migration[4.2]
def count_orphans
%w(head tail).each do |ht|
results = ActiveRecord::Base.connection.execute(<<-EOS)
#
# SPDX-License-Identifier: AGPL-3.0
-class FixLinkKindUnderscores < ActiveRecord::Migration
+class FixLinkKindUnderscores < ActiveRecord::Migration[4.2]
def up
update_sql <<-EOS
UPDATE links
#
# SPDX-License-Identifier: AGPL-3.0
-class NormalizeCollectionUuidsInScriptParameters < ActiveRecord::Migration
+class NormalizeCollectionUuidsInScriptParameters < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
act_as_system_user do
#
# SPDX-License-Identifier: AGPL-3.0
-class AddNondeterministicColumnToJob < ActiveRecord::Migration
+class AddNondeterministicColumnToJob < ActiveRecord::Migration[4.2]
def up
add_column :jobs, :nondeterministic, :boolean
end
#
# SPDX-License-Identifier: AGPL-3.0
-class SeparateRepositoryFromScriptVersion < ActiveRecord::Migration
+class SeparateRepositoryFromScriptVersion < ActiveRecord::Migration[4.2]
include CurrentApiClient
def fixup pt
#
# SPDX-License-Identifier: AGPL-3.0
-class AddRepositoryColumnToJob < ActiveRecord::Migration
+class AddRepositoryColumnToJob < ActiveRecord::Migration[4.2]
def up
add_column :jobs, :repository, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddOutputIsPersistentToJob < ActiveRecord::Migration
+class AddOutputIsPersistentToJob < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :output_is_persistent, :boolean, null: false, default: false
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RemoveKindColumns < ActiveRecord::Migration
+class RemoveKindColumns < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class AddSystemGroup < ActiveRecord::Migration
+class AddSystemGroup < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameLogInfoToProperties < ActiveRecord::Migration
+class RenameLogInfoToProperties < ActiveRecord::Migration[4.2]
def change
rename_column :logs, :info, :properties
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddGroupClassToGroups < ActiveRecord::Migration
+class AddGroupClassToGroups < ActiveRecord::Migration[4.2]
def change
add_column :groups, :group_class, :string
add_index :groups, :group_class
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameAuthKeysUserIndex < ActiveRecord::Migration
+class RenameAuthKeysUserIndex < ActiveRecord::Migration[4.2]
# Rails' default name for this index is so long, Rails can't modify
# the index later, because the autogenerated temporary name exceeds
# PostgreSQL's 64-character limit. This migration gives the index
#
# SPDX-License-Identifier: AGPL-3.0
-class TimestampsNotNull < ActiveRecord::Migration
+class TimestampsNotNull < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.tables.each do |t|
next if t == 'schema_migrations'
#
# SPDX-License-Identifier: AGPL-3.0
-class PipelineInstanceState < ActiveRecord::Migration
+class PipelineInstanceState < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class AddObjectOwnerToLogs < ActiveRecord::Migration
+class AddObjectOwnerToLogs < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
# login list. This migration converts those VM-specific scopes to the new
# general format, and back.
-class NewScopeFormat < ActiveRecord::Migration
+class NewScopeFormat < ActiveRecord::Migration[4.2]
include CurrentApiClient
VM_PATH_REGEX =
#
# SPDX-License-Identifier: AGPL-3.0
-class AddUniqueNameIndexToLinks < ActiveRecord::Migration
+class AddUniqueNameIndexToLinks < ActiveRecord::Migration[4.2]
def change
# Make sure PgPower is here. Otherwise the "where" will be ignored
# and we'll end up with a far too restrictive unique
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateKeepServices < ActiveRecord::Migration
+class CreateKeepServices < ActiveRecord::Migration[4.2]
include CurrentApiClient
def change
#
# SPDX-License-Identifier: AGPL-3.0
-class AddDescriptionToPipelineTemplates < ActiveRecord::Migration
+class AddDescriptionToPipelineTemplates < ActiveRecord::Migration[4.2]
def change
add_column :pipeline_templates, :description, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddSuppliedScriptVersion < ActiveRecord::Migration
+class AddSuppliedScriptVersion < ActiveRecord::Migration[4.2]
def up
add_column :jobs, :supplied_script_version, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RemoveNameFromCollections < ActiveRecord::Migration
+class RemoveNameFromCollections < ActiveRecord::Migration[4.2]
def up
remove_column :collections, :name
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RemoveActiveAndSuccessFromPipelineInstances < ActiveRecord::Migration
+class RemoveActiveAndSuccessFromPipelineInstances < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameFolderToProject < ActiveRecord::Migration
+class RenameFolderToProject < ActiveRecord::Migration[4.2]
def up
Group.update_all("group_class = 'project'", "group_class = 'folder'")
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddDockerLocatorToJobs < ActiveRecord::Migration
+class AddDockerLocatorToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :docker_image_locator, :string
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AnonymousGroup < ActiveRecord::Migration
+class AnonymousGroup < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class JobTaskSerialQsequence < ActiveRecord::Migration
+class JobTaskSerialQsequence < ActiveRecord::Migration[4.2]
SEQ_NAME = "job_tasks_qsequence_seq"
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class EmptyCollection < ActiveRecord::Migration
+class EmptyCollection < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class CollectionUseRegularUuids < ActiveRecord::Migration
+class CollectionUseRegularUuids < ActiveRecord::Migration[4.2]
def up
add_column :collections, :name, :string
add_column :collections, :description, :string
#
# SPDX-License-Identifier: AGPL-3.0
-class AddUniqueNameConstraints < ActiveRecord::Migration
+class AddUniqueNameConstraints < ActiveRecord::Migration[4.2]
def change
# Ensure uniqueness before adding constraints.
["collections", "pipeline_templates", "groups"].each do |table|
#
# SPDX-License-Identifier: AGPL-3.0
-class AddNotNullConstraintToGroupName < ActiveRecord::Migration
+class AddNotNullConstraintToGroupName < ActiveRecord::Migration[4.2]
def change
ActiveRecord::Base.connection.execute("update groups set name=uuid where name is null or name=''")
change_column_null :groups, :name, false
#
# SPDX-License-Identifier: AGPL-3.0
-class RemoveOutputIsPersistentColumn < ActiveRecord::Migration
+class RemoveOutputIsPersistentColumn < ActiveRecord::Migration[4.2]
def up
remove_column :jobs, :output_is_persistent
end
#
# SPDX-License-Identifier: AGPL-3.0
-class JobPriorityFixup < ActiveRecord::Migration
+class JobPriorityFixup < ActiveRecord::Migration[4.2]
def up
remove_column :jobs, :priority
add_column :jobs, :priority, :integer, null: false, default: 0
#
# SPDX-License-Identifier: AGPL-3.0
-class AddStartFinishTimeToTasksAndPipelines < ActiveRecord::Migration
+class AddStartFinishTimeToTasksAndPipelines < ActiveRecord::Migration[4.2]
def up
add_column :job_tasks, :started_at, :datetime
add_column :job_tasks, :finished_at, :datetime
#
# SPDX-License-Identifier: AGPL-3.0
-class AddDescriptionToPipelineInstancesAndJobs < ActiveRecord::Migration
+class AddDescriptionToPipelineInstancesAndJobs < ActiveRecord::Migration[4.2]
def up
add_column :pipeline_instances, :description, :text, null: true
add_column :jobs, :description, :text, null: true
#
# SPDX-License-Identifier: AGPL-3.0
-class ChangeUserOwnerUuidNotNull < ActiveRecord::Migration
+class ChangeUserOwnerUuidNotNull < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPropertiesToNode < ActiveRecord::Migration
+class AddPropertiesToNode < ActiveRecord::Migration[4.2]
def up
add_column :nodes, :properties, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddStateToJob < ActiveRecord::Migration
+class AddStateToJob < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class AddJobUuidToNodes < ActiveRecord::Migration
+class AddJobUuidToNodes < ActiveRecord::Migration[4.2]
def up
change_table :nodes do |t|
t.column :job_uuid, :string
#
# SPDX-License-Identifier: AGPL-3.0
-class AddArvadosSdkVersionToJobs < ActiveRecord::Migration
+class AddArvadosSdkVersionToJobs < ActiveRecord::Migration[4.2]
def up
change_table :jobs do |t|
t.column :arvados_sdk_version, :string
#
# SPDX-License-Identifier: AGPL-3.0
-class OwnerUuidIndex < ActiveRecord::Migration
+class OwnerUuidIndex < ActiveRecord::Migration[4.2]
def tables_with_owner_uuid
%w{api_clients authorized_keys collections groups humans
job_tasks jobs keep_disks keep_services links logs
#
# SPDX-License-Identifier: AGPL-3.0
-class DescriptionsAreStrings < ActiveRecord::Migration
+class DescriptionsAreStrings < ActiveRecord::Migration[4.2]
def tables_with_description_column
%w{collections groups jobs pipeline_instances pipeline_templates}
end
#
# SPDX-License-Identifier: AGPL-3.0
-class CollectionFileNames < ActiveRecord::Migration
+class CollectionFileNames < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class SearchIndex < ActiveRecord::Migration
+class SearchIndex < ActiveRecord::Migration[4.2]
def tables_with_searchable_columns
{
"api_client_authorizations" => ["api_token", "created_by_ip_address", "last_used_by_ip_address", "default_owner_uuid"],
require "./db/migrate/20141208185217_search_index.rb"
-class NoDescriptionInSearchIndex < ActiveRecord::Migration
+class NoDescriptionInSearchIndex < ActiveRecord::Migration[4.2]
def up
all_tables = %w{collections groups jobs pipeline_instances pipeline_templates}
all_tables.each do |table|
#
# SPDX-License-Identifier: AGPL-3.0
-class FullTextSearch < ActiveRecord::Migration
+class FullTextSearch < ActiveRecord::Migration[4.2]
def up
execute "CREATE INDEX collections_full_text_search_idx ON collections USING gin(#{Collection.full_text_tsvector});"
#
# SPDX-License-Identifier: AGPL-3.0
-class SetGroupClassOnAnonymousGroup < ActiveRecord::Migration
+class SetGroupClassOnAnonymousGroup < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
act_as_system_user do
#
# SPDX-License-Identifier: AGPL-3.0
-class AllUsersCanReadAnonymousGroup < ActiveRecord::Migration
+class AllUsersCanReadAnonymousGroup < ActiveRecord::Migration[4.2]
include CurrentApiClient
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameReplicationAttributes < ActiveRecord::Migration
+class RenameReplicationAttributes < ActiveRecord::Migration[4.2]
RENAME = [[:redundancy, :replication_desired],
[:redundancy_confirmed_as, :replication_confirmed],
[:redundancy_confirmed_at, :replication_confirmed_at]]
#
# SPDX-License-Identifier: AGPL-3.0
-class CollectionNameOwnerUniqueOnlyNonExpired < ActiveRecord::Migration
+class CollectionNameOwnerUniqueOnlyNonExpired < ActiveRecord::Migration[4.2]
def find_index
indexes = ActiveRecord::Base.connection.indexes('collections')
name_owner_index = indexes.select do |index|
require 'has_uuid'
require 'kind_and_etag'
-class FixCollectionPortableDataHashWithHintedManifest < ActiveRecord::Migration
+class FixCollectionPortableDataHashWithHintedManifest < ActiveRecord::Migration[4.2]
include CurrentApiClient
class ArvadosModel < ActiveRecord::Base
#
# SPDX-License-Identifier: AGPL-3.0
-class ChangeCollectionExpiresAtToDatetime < ActiveRecord::Migration
+class ChangeCollectionExpiresAtToDatetime < ActiveRecord::Migration[4.2]
def up
change_column :collections, :expires_at, :datetime
end
require 'has_uuid'
require 'kind_and_etag'
-class AddUsernameToUsers < ActiveRecord::Migration
+class AddUsernameToUsers < ActiveRecord::Migration[4.2]
include CurrentApiClient
SEARCH_INDEX_COLUMNS =
require 'has_uuid'
require 'kind_and_etag'
-class BackwardCompatibilityForUserRepositories < ActiveRecord::Migration
+class BackwardCompatibilityForUserRepositories < ActiveRecord::Migration[4.2]
include CurrentApiClient
class ArvadosModel < ActiveRecord::Base
#
# SPDX-License-Identifier: AGPL-3.0
-class NoFilenamesInCollectionSearchIndex < ActiveRecord::Migration
+class NoFilenamesInCollectionSearchIndex < ActiveRecord::Migration[4.2]
def up
remove_index :collections, :name => 'collections_search_index'
add_index :collections, ["owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "portable_data_hash", "uuid", "name"], name: 'collections_search_index'
#
# SPDX-License-Identifier: AGPL-3.0
-class ReadOnlyOnKeepServices < ActiveRecord::Migration
+class ReadOnlyOnKeepServices < ActiveRecord::Migration[4.2]
def change
add_column :keep_services, :read_only, :boolean, null: false, default: false
end
require "./db/migrate/20150123142953_full_text_search.rb"
-class LeadingSpaceOnFullTextIndex < ActiveRecord::Migration
+class LeadingSpaceOnFullTextIndex < ActiveRecord::Migration[4.2]
def up
# Inspect one of the full-text indexes (chosen arbitrarily) to
# determine whether this migration is needed.
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateContainersAndRequests < ActiveRecord::Migration
+class CreateContainersAndRequests < ActiveRecord::Migration[4.2]
def change
create_table :containers do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class FixContainersIndex < ActiveRecord::Migration
+class FixContainersIndex < ActiveRecord::Migration[4.2]
def up
execute "CREATE INDEX container_requests_full_text_search_idx ON container_requests USING gin(#{ContainerRequest.full_text_tsvector});"
add_index :container_requests, ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "name", "state", "requesting_container_uuid", "container_uuid", "container_image", "cwd", "output_path"], name: 'container_requests_search_index'
#
# SPDX-License-Identifier: AGPL-3.0
-class AddExitCodeToContainers < ActiveRecord::Migration
+class AddExitCodeToContainers < ActiveRecord::Migration[4.2]
def change
add_column :containers, :exit_code, :integer
end
require 'has_uuid'
-class AddUuidToApiClientAuthorization < ActiveRecord::Migration
+class AddUuidToApiClientAuthorization < ActiveRecord::Migration[4.2]
extend HasUuid::ClassMethods
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class AddUuidToApiTokenSearchIndex < ActiveRecord::Migration
+class AddUuidToApiTokenSearchIndex < ActiveRecord::Migration[4.2]
def up
begin
remove_index :api_client_authorizations, :name => 'api_client_authorizations_search_index'
#
# SPDX-License-Identifier: AGPL-3.0
-class AddComponentsToJob < ActiveRecord::Migration
+class AddComponentsToJob < ActiveRecord::Migration[4.2]
def up
add_column :jobs, :components, :text
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddAuthsToContainer < ActiveRecord::Migration
+class AddAuthsToContainer < ActiveRecord::Migration[4.2]
def change
add_column :containers, :auth_uuid, :string
add_column :containers, :locked_by_uuid, :string
#
# SPDX-License-Identifier: AGPL-3.0
-class AddAuthAndLockToContainerIndex < ActiveRecord::Migration
+class AddAuthAndLockToContainerIndex < ActiveRecord::Migration[4.2]
Columns_were = ["uuid", "owner_uuid", "modified_by_client_uuid", "modified_by_user_uuid", "state", "log", "cwd", "output_path", "output", "container_image"]
Columns = Columns_were + ["auth_uuid", "locked_by_uuid"]
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class CreateWorkflows < ActiveRecord::Migration
+class CreateWorkflows < ActiveRecord::Migration[4.2]
def up
create_table :workflows do |t|
t.string :uuid
#
# SPDX-License-Identifier: AGPL-3.0
-class AddScriptParametersDigestToJobs < ActiveRecord::Migration
+class AddScriptParametersDigestToJobs < ActiveRecord::Migration[4.2]
def change
add_column :jobs, :script_parameters_digest, :string
add_index :jobs, :script_parameters_digest
#
# SPDX-License-Identifier: AGPL-3.0
-class PopulateScriptParametersDigest < ActiveRecord::Migration
+class PopulateScriptParametersDigest < ActiveRecord::Migration[4.2]
def up
done = false
while !done
#
# SPDX-License-Identifier: AGPL-3.0
-class RepairScriptParametersDigest < ActiveRecord::Migration
+class RepairScriptParametersDigest < ActiveRecord::Migration[4.2]
def up
Job.find_each do |j|
have = j.script_parameters_digest
#
# SPDX-License-Identifier: AGPL-3.0
-class RenameWorkflowToDefinition < ActiveRecord::Migration
+class RenameWorkflowToDefinition < ActiveRecord::Migration[4.2]
def up
rename_column :workflows, :workflow, :definition
- end
-
+ end
+
def down
rename_column :workflows, :definition, :workflow
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddContainerCount < ActiveRecord::Migration
+class AddContainerCount < ActiveRecord::Migration[4.2]
def up
add_column :container_requests, :container_count, :int, :default => 0
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddUseExistingToContainerRequests < ActiveRecord::Migration
+class AddUseExistingToContainerRequests < ActiveRecord::Migration[4.2]
def up
add_column :container_requests, :use_existing, :boolean, :default => true
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddSchedulingParametersToContainer < ActiveRecord::Migration
+class AddSchedulingParametersToContainer < ActiveRecord::Migration[4.2]
def change
add_column :containers, :scheduling_parameters, :text
add_column :container_requests, :scheduling_parameters, :text
require 'has_uuid'
-class AddOutputAndLogUuidToContainerRequest < ActiveRecord::Migration
+class AddOutputAndLogUuidToContainerRequest < ActiveRecord::Migration[4.2]
extend HasUuid::ClassMethods
def up
#
# SPDX-License-Identifier: AGPL-3.0
-class AddOutputAndLogUuidsToContainerRequestSearchIndex < ActiveRecord::Migration
+class AddOutputAndLogUuidsToContainerRequestSearchIndex < ActiveRecord::Migration[4.2]
def up
begin
remove_index :container_requests, :name => 'container_requests_search_index'
#
# SPDX-License-Identifier: AGPL-3.0
-class FullTextSearchIndexes < ActiveRecord::Migration
+class FullTextSearchIndexes < ActiveRecord::Migration[4.2]
def fts_indexes
{
"collections" => "collections_full_text_search_idx",
#
# SPDX-License-Identifier: AGPL-3.0
-class SplitExpiryToTrashAndDelete < ActiveRecord::Migration
+class SplitExpiryToTrashAndDelete < ActiveRecord::Migration[4.2]
def up
Collection.transaction do
add_column(:collections, :trash_at, :datetime)
#
# SPDX-License-Identifier: AGPL-3.0
-class AddOutputNameToContainerRequests < ActiveRecord::Migration
+class AddOutputNameToContainerRequests < ActiveRecord::Migration[4.2]
def up
add_column :container_requests, :output_name, :string, :default => nil
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddOutputNameToContainerRequestSearchIndex < ActiveRecord::Migration
+class AddOutputNameToContainerRequestSearchIndex < ActiveRecord::Migration[4.2]
def up
begin
remove_index :container_requests, :name => 'container_requests_search_index'
#
# SPDX-License-Identifier: AGPL-3.0
-class AddOutputNameToCrFtsIndex < ActiveRecord::Migration
+class AddOutputNameToCrFtsIndex < ActiveRecord::Migration[4.2]
def up
t = "container_requests"
i = "container_requests_full_text_search_idx"
#
# SPDX-License-Identifier: AGPL-3.0
-class SetFinishedAtOnFinishedPipelineInstances < ActiveRecord::Migration
+class SetFinishedAtOnFinishedPipelineInstances < ActiveRecord::Migration[4.2]
def change
ActiveRecord::Base.connection.execute("update pipeline_instances set finished_at=updated_at where finished_at is null and (state='Failed' or state='Complete')")
end
#
# SPDX-License-Identifier: AGPL-3.0
-class NoCrMountsAndWorkflowDefInFullTextSearchIndex < ActiveRecord::Migration
+class NoCrMountsAndWorkflowDefInFullTextSearchIndex < ActiveRecord::Migration[4.2]
def fts_indexes
{
"container_requests" => "container_requests_full_text_search_idx",
#
# SPDX-License-Identifier: AGPL-3.0
-class NoDowngradeAfterJson < ActiveRecord::Migration
+class NoDowngradeAfterJson < ActiveRecord::Migration[4.2]
def up
end
#
# SPDX-License-Identifier: AGPL-3.0
-class SerializedColumnsAcceptNull < ActiveRecord::Migration
+class SerializedColumnsAcceptNull < ActiveRecord::Migration[4.2]
def change
change_column :api_client_authorizations, :scopes, :text, null: true, default: '["all"]'
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPortableDataHashIndexToCollections < ActiveRecord::Migration
+class AddPortableDataHashIndexToCollections < ActiveRecord::Migration[4.2]
def change
add_index :collections, :portable_data_hash
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddOutputTtlToContainerRequests < ActiveRecord::Migration
+class AddOutputTtlToContainerRequests < ActiveRecord::Migration[4.2]
def change
add_column :container_requests, :output_ttl, :integer, default: 0, null: false
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddCreatedByJobTaskIndexToJobTasks < ActiveRecord::Migration
+class AddCreatedByJobTaskIndexToJobTasks < ActiveRecord::Migration[4.2]
def change
add_index :job_tasks, :created_by_job_task_uuid
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddObjectOwnerIndexToLogs < ActiveRecord::Migration
+class AddObjectOwnerIndexToLogs < ActiveRecord::Migration[4.2]
def change
add_index :logs, :object_owner_uuid
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddRequestingContainerIndexToContainerRequests < ActiveRecord::Migration
+class AddRequestingContainerIndexToContainerRequests < ActiveRecord::Migration[4.2]
def change
add_index :container_requests, :requesting_container_uuid
end
require 'migrate_yaml_to_json'
-class JobsYamlToJson < ActiveRecord::Migration
+class JobsYamlToJson < ActiveRecord::Migration[4.2]
def up
[
'components',
require 'migrate_yaml_to_json'
-class YamlToJson < ActiveRecord::Migration
+class YamlToJson < ActiveRecord::Migration[4.2]
def up
[
['collections', 'properties'],
require './db/migrate/20161213172944_full_text_search_indexes'
-class JsonCollectionProperties < ActiveRecord::Migration
+class JsonCollectionProperties < ActiveRecord::Migration[4.2]
def up
# Drop the FT index before changing column type to avoid
# "PG::DatatypeMismatch: ERROR: COALESCE types jsonb and text
#
# SPDX-License-Identifier: AGPL-3.0
-class TrashableGroups < ActiveRecord::Migration
+class TrashableGroups < ActiveRecord::Migration[4.2]
def up
add_column :groups, :trash_at, :datetime
add_index(:groups, :trash_at)
#
# SPDX-License-Identifier: AGPL-3.0
-class MaterializedPermissionView < ActiveRecord::Migration
+class MaterializedPermissionView < ActiveRecord::Migration[4.2]
@@idxtables = [:collections, :container_requests, :groups, :jobs, :links, :pipeline_instances, :pipeline_templates, :repositories, :users, :virtual_machines, :workflows, :logs]
#
# SPDX-License-Identifier: AGPL-3.0
-class AddIndexToContainers < ActiveRecord::Migration
+class AddIndexToContainers < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.execute("CREATE INDEX index_containers_on_modified_at_uuid ON containers USING btree (modified_at desc, uuid asc)")
ActiveRecord::Base.connection.execute("CREATE INDEX index_container_requests_on_container_uuid on container_requests (container_uuid)")
#
# SPDX-License-Identifier: AGPL-3.0
-class FixTrashFlagFollow < ActiveRecord::Migration
+class FixTrashFlagFollow < ActiveRecord::Migration[4.2]
def change
ActiveRecord::Base.connection.execute("DROP MATERIALIZED VIEW materialized_permission_view")
ActiveRecord::Base.connection.execute(
#
# SPDX-License-Identifier: AGPL-3.0
-class AddGinIndexToCollectionProperties < ActiveRecord::Migration
+class AddGinIndexToCollectionProperties < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.execute("CREATE INDEX collection_index_on_properties ON collections USING gin (properties);")
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddStorageClassesToCollections < ActiveRecord::Migration
+class AddStorageClassesToCollections < ActiveRecord::Migration[4.2]
def up
add_column :collections, :storage_classes_desired, :jsonb, :default => ["default"]
add_column :collections, :storage_classes_confirmed, :jsonb, :default => []
#
# SPDX-License-Identifier: AGPL-3.0
-class AddSecretMountsToContainers < ActiveRecord::Migration
+class AddSecretMountsToContainers < ActiveRecord::Migration[4.2]
def change
add_column :container_requests, :secret_mounts, :jsonb, default: {}
add_column :containers, :secret_mounts, :jsonb, default: {}
#
# SPDX-License-Identifier: AGPL-3.0
-class ChangeContainerPriorityBigint < ActiveRecord::Migration
+class ChangeContainerPriorityBigint < ActiveRecord::Migration[4.2]
def change
change_column :containers, :priority, :integer, limit: 8
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddRedirectToUserUuidToUsers < ActiveRecord::Migration
+class AddRedirectToUserUuidToUsers < ActiveRecord::Migration[4.2]
def up
add_column :users, :redirect_to_user_uuid, :string
User.reset_column_information
#
# SPDX-License-Identifier: AGPL-3.0
-class AddContainerAuthUuidIndex < ActiveRecord::Migration
+class AddContainerAuthUuidIndex < ActiveRecord::Migration[4.2]
def change
add_index :containers, :auth_uuid
end
require './db/migrate/20161213172944_full_text_search_indexes'
-class PropertiesToJsonb < ActiveRecord::Migration
+class PropertiesToJsonb < ActiveRecord::Migration[4.2]
@@tables_columns = [["nodes", "properties"],
["nodes", "info"],
require './db/migrate/20161213172944_full_text_search_indexes'
-class AddPropertiesToGroups < ActiveRecord::Migration
+class AddPropertiesToGroups < ActiveRecord::Migration[4.2]
def up
add_column :groups, :properties, :jsonb, default: {}
ActiveRecord::Base.connection.execute("CREATE INDEX group_index_on_properties ON groups USING gin (properties);")
#
# SPDX-License-Identifier: AGPL-3.0
-class IndexAllFilenames < ActiveRecord::Migration
+class IndexAllFilenames < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.execute 'ALTER TABLE collections ALTER COLUMN file_names TYPE text'
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPdhAndTrashIndexToCollections < ActiveRecord::Migration
+class AddPdhAndTrashIndexToCollections < ActiveRecord::Migration[4.2]
def change
add_index :collections, [:portable_data_hash, :trash_at]
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddLockIndexToContainers < ActiveRecord::Migration
+class AddLockIndexToContainers < ActiveRecord::Migration[4.2]
def change
# For the current code in sdk/go/dispatch:
add_index :containers, [:locked_by_uuid, :priority]
#
# SPDX-License-Identifier: AGPL-3.0
-class DropPdhIndexFromCollections < ActiveRecord::Migration
+class DropPdhIndexFromCollections < ActiveRecord::Migration[4.2]
def change
remove_index :collections, column: :portable_data_hash
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddMd5IndexToContainers < ActiveRecord::Migration
+class AddMd5IndexToContainers < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.execute 'CREATE INDEX index_containers_on_reuse_columns on containers (md5(command), cwd, md5(environment), output_path, container_image, md5(mounts), secret_mounts_md5, md5(runtime_constraints))'
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddQueueIndexToContainers < ActiveRecord::Migration
+class AddQueueIndexToContainers < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.execute 'CREATE INDEX index_containers_on_queued_state on containers (state, (priority > 0))'
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddRuntimeStatusToContainers < ActiveRecord::Migration
+class AddRuntimeStatusToContainers < ActiveRecord::Migration[4.2]
def change
add_column :containers, :runtime_status, :jsonb, default: {}
add_index :containers, :runtime_status, using: :gin
#
# SPDX-License-Identifier: AGPL-3.0
-class AddVersionInfoToCollections < ActiveRecord::Migration
+class AddVersionInfoToCollections < ActiveRecord::Migration[4.2]
def change
# Do changes in bulk to save time on huge tables
change_table :collections, :bulk => true do |t|
#
# SPDX-License-Identifier: AGPL-3.0
-class SetCurrentVersionUuidOnCollections < ActiveRecord::Migration
+class SetCurrentVersionUuidOnCollections < ActiveRecord::Migration[4.2]
def up
# Set the current version uuid as itself
Collection.where(current_version_uuid: nil).update_all("current_version_uuid=uuid")
require './db/migrate/20161213172944_full_text_search_indexes'
-class ReplaceFullTextIndexes < ActiveRecord::Migration
+class ReplaceFullTextIndexes < ActiveRecord::Migration[4.2]
def up
FullTextSearchIndexes.new.up
end
#
# SPDX-License-Identifier: AGPL-3.0
-class RecomputeFileNamesIndex < ActiveRecord::Migration
+class RecomputeFileNamesIndex < ActiveRecord::Migration[4.2]
def do_batch(pdhs:)
ActiveRecord::Base.connection.exec_query('BEGIN')
Collection.select(:portable_data_hash, :manifest_text).where(portable_data_hash: pdhs).distinct(:portable_data_hash).each do |c|
#
# SPDX-License-Identifier: AGPL-3.0
-class RecreateCollectionUniqueNameIndex < ActiveRecord::Migration
+class RecreateCollectionUniqueNameIndex < ActiveRecord::Migration[4.2]
def up
Collection.transaction do
remove_index(:collections,
#
# SPDX-License-Identifier: AGPL-3.0
-class AddPreserveVersionToCollections < ActiveRecord::Migration
+class AddPreserveVersionToCollections < ActiveRecord::Migration[4.2]
def change
add_column :collections, :preserve_version, :boolean, default: false
end
#
# SPDX-License-Identifier: AGPL-3.0
-class AddCurrentVersionUuidToCollectionSearchIndex < ActiveRecord::Migration
+class AddCurrentVersionUuidToCollectionSearchIndex < ActiveRecord::Migration[4.2]
disable_ddl_transaction!
def up
-class AddContainerRuntimeToken < ActiveRecord::Migration
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class AddContainerRuntimeToken < ActiveRecord::Migration[4.2]
def change
add_column :container_requests, :runtime_token, :text, :null => true
add_column :containers, :runtime_user_uuid, :text, :null => true
-class AddRuntimeTokenToContainer < ActiveRecord::Migration
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class AddRuntimeTokenToContainer < ActiveRecord::Migration[4.2]
def change
add_column :containers, :runtime_token, :text, :null => true
end
-class AddExpressionIndexToLinks < ActiveRecord::Migration
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class AddExpressionIndexToLinks < ActiveRecord::Migration[4.2]
def up
ActiveRecord::Base.connection.execute 'CREATE INDEX index_links_on_substring_head_uuid on links (substring(head_uuid, 7, 5))'
ActiveRecord::Base.connection.execute 'CREATE INDEX index_links_on_substring_tail_uuid on links (substring(tail_uuid, 7, 5))'
-class AddContainerLockCount < ActiveRecord::Migration
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+class AddContainerLockCount < ActiveRecord::Migration[4.2]
def change
add_column :containers, :lock_count, :int, :null => false, :default => 0
end
ALTER SEQUENCE public.api_clients_id_seq OWNED BY public.api_clients.id;
+--
+-- Name: ar_internal_metadata; Type: TABLE; Schema: public; Owner: -
+--
+
+CREATE TABLE public.ar_internal_metadata (
+ key character varying NOT NULL,
+ value character varying,
+ created_at timestamp without time zone NOT NULL,
+ updated_at timestamp without time zone NOT NULL
+);
+
+
--
-- Name: authorized_keys; Type: TABLE; Schema: public; Owner: -
--
ADD CONSTRAINT api_clients_pkey PRIMARY KEY (id);
+--
+-- Name: ar_internal_metadata ar_internal_metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.ar_internal_metadata
+ ADD CONSTRAINT ar_internal_metadata_pkey PRIMARY KEY (key);
+
+
--
-- Name: authorized_keys authorized_keys_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
SET search_path TO "$user", public;
-INSERT INTO schema_migrations (version) VALUES ('20121016005009');
-
-INSERT INTO schema_migrations (version) VALUES ('20130105203021');
-
-INSERT INTO schema_migrations (version) VALUES ('20130105224358');
-
-INSERT INTO schema_migrations (version) VALUES ('20130105224618');
-
-INSERT INTO schema_migrations (version) VALUES ('20130107181109');
-
-INSERT INTO schema_migrations (version) VALUES ('20130107212832');
-
-INSERT INTO schema_migrations (version) VALUES ('20130109175700');
-
-INSERT INTO schema_migrations (version) VALUES ('20130109220548');
-
-INSERT INTO schema_migrations (version) VALUES ('20130113214204');
-
-INSERT INTO schema_migrations (version) VALUES ('20130116024233');
-
-INSERT INTO schema_migrations (version) VALUES ('20130116215213');
-
-INSERT INTO schema_migrations (version) VALUES ('20130118002239');
-
-INSERT INTO schema_migrations (version) VALUES ('20130122020042');
-
-INSERT INTO schema_migrations (version) VALUES ('20130122201442');
-
-INSERT INTO schema_migrations (version) VALUES ('20130122221616');
-
-INSERT INTO schema_migrations (version) VALUES ('20130123174514');
-
-INSERT INTO schema_migrations (version) VALUES ('20130123180224');
-
-INSERT INTO schema_migrations (version) VALUES ('20130123180228');
-
-INSERT INTO schema_migrations (version) VALUES ('20130125220425');
-
-INSERT INTO schema_migrations (version) VALUES ('20130128202518');
-
-INSERT INTO schema_migrations (version) VALUES ('20130128231343');
-
-INSERT INTO schema_migrations (version) VALUES ('20130130205749');
-
-INSERT INTO schema_migrations (version) VALUES ('20130203104818');
-
-INSERT INTO schema_migrations (version) VALUES ('20130203104824');
-
-INSERT INTO schema_migrations (version) VALUES ('20130203115329');
-
-INSERT INTO schema_migrations (version) VALUES ('20130207195855');
-
-INSERT INTO schema_migrations (version) VALUES ('20130218181504');
-
-INSERT INTO schema_migrations (version) VALUES ('20130226170000');
-
-INSERT INTO schema_migrations (version) VALUES ('20130313175417');
-
-INSERT INTO schema_migrations (version) VALUES ('20130315155820');
-
-INSERT INTO schema_migrations (version) VALUES ('20130315183626');
-
-INSERT INTO schema_migrations (version) VALUES ('20130315213205');
-
-INSERT INTO schema_migrations (version) VALUES ('20130318002138');
-
-INSERT INTO schema_migrations (version) VALUES ('20130319165853');
-
-INSERT INTO schema_migrations (version) VALUES ('20130319180730');
-
-INSERT INTO schema_migrations (version) VALUES ('20130319194637');
-
-INSERT INTO schema_migrations (version) VALUES ('20130319201431');
-
-INSERT INTO schema_migrations (version) VALUES ('20130319235957');
-
-INSERT INTO schema_migrations (version) VALUES ('20130320000107');
-
-INSERT INTO schema_migrations (version) VALUES ('20130326173804');
-
-INSERT INTO schema_migrations (version) VALUES ('20130326182917');
-
-INSERT INTO schema_migrations (version) VALUES ('20130415020241');
-
-INSERT INTO schema_migrations (version) VALUES ('20130425024459');
-
-INSERT INTO schema_migrations (version) VALUES ('20130425214427');
-
-INSERT INTO schema_migrations (version) VALUES ('20130523060112');
-
-INSERT INTO schema_migrations (version) VALUES ('20130523060213');
-
-INSERT INTO schema_migrations (version) VALUES ('20130524042319');
-
-INSERT INTO schema_migrations (version) VALUES ('20130528134100');
-
-INSERT INTO schema_migrations (version) VALUES ('20130606183519');
-
-INSERT INTO schema_migrations (version) VALUES ('20130608053730');
-
-INSERT INTO schema_migrations (version) VALUES ('20130610202538');
-
-INSERT INTO schema_migrations (version) VALUES ('20130611163736');
-
-INSERT INTO schema_migrations (version) VALUES ('20130612042554');
-
-INSERT INTO schema_migrations (version) VALUES ('20130617150007');
-
-INSERT INTO schema_migrations (version) VALUES ('20130626002829');
-
-INSERT INTO schema_migrations (version) VALUES ('20130626022810');
-
-INSERT INTO schema_migrations (version) VALUES ('20130627154537');
-
-INSERT INTO schema_migrations (version) VALUES ('20130627184333');
-
-INSERT INTO schema_migrations (version) VALUES ('20130708163414');
-
-INSERT INTO schema_migrations (version) VALUES ('20130708182912');
-
-INSERT INTO schema_migrations (version) VALUES ('20130708185153');
-
-INSERT INTO schema_migrations (version) VALUES ('20130724153034');
-
-INSERT INTO schema_migrations (version) VALUES ('20131007180607');
-
-INSERT INTO schema_migrations (version) VALUES ('20140117231056');
-
-INSERT INTO schema_migrations (version) VALUES ('20140124222114');
-
-INSERT INTO schema_migrations (version) VALUES ('20140129184311');
-
-INSERT INTO schema_migrations (version) VALUES ('20140317135600');
-
-INSERT INTO schema_migrations (version) VALUES ('20140319160547');
-
-INSERT INTO schema_migrations (version) VALUES ('20140321191343');
-
-INSERT INTO schema_migrations (version) VALUES ('20140324024606');
-
-INSERT INTO schema_migrations (version) VALUES ('20140325175653');
-
-INSERT INTO schema_migrations (version) VALUES ('20140402001908');
-
-INSERT INTO schema_migrations (version) VALUES ('20140407184311');
-
-INSERT INTO schema_migrations (version) VALUES ('20140421140924');
-
-INSERT INTO schema_migrations (version) VALUES ('20140421151939');
-
-INSERT INTO schema_migrations (version) VALUES ('20140421151940');
-
-INSERT INTO schema_migrations (version) VALUES ('20140422011506');
-
-INSERT INTO schema_migrations (version) VALUES ('20140423132913');
-
-INSERT INTO schema_migrations (version) VALUES ('20140423133559');
-
-INSERT INTO schema_migrations (version) VALUES ('20140501165548');
-
-INSERT INTO schema_migrations (version) VALUES ('20140519205916');
-
-INSERT INTO schema_migrations (version) VALUES ('20140527152921');
-
-INSERT INTO schema_migrations (version) VALUES ('20140530200539');
-
-INSERT INTO schema_migrations (version) VALUES ('20140601022548');
-
-INSERT INTO schema_migrations (version) VALUES ('20140602143352');
-
-INSERT INTO schema_migrations (version) VALUES ('20140607150616');
-
-INSERT INTO schema_migrations (version) VALUES ('20140611173003');
-
-INSERT INTO schema_migrations (version) VALUES ('20140627210837');
-
-INSERT INTO schema_migrations (version) VALUES ('20140709172343');
-
-INSERT INTO schema_migrations (version) VALUES ('20140714184006');
-
-INSERT INTO schema_migrations (version) VALUES ('20140811184643');
-
-INSERT INTO schema_migrations (version) VALUES ('20140817035914');
-
-INSERT INTO schema_migrations (version) VALUES ('20140818125735');
-
-INSERT INTO schema_migrations (version) VALUES ('20140826180337');
-
-INSERT INTO schema_migrations (version) VALUES ('20140828141043');
-
-INSERT INTO schema_migrations (version) VALUES ('20140909183946');
-
-INSERT INTO schema_migrations (version) VALUES ('20140911221252');
-
-INSERT INTO schema_migrations (version) VALUES ('20140918141529');
-
-INSERT INTO schema_migrations (version) VALUES ('20140918153541');
-
-INSERT INTO schema_migrations (version) VALUES ('20140918153705');
-
-INSERT INTO schema_migrations (version) VALUES ('20140924091559');
-
-INSERT INTO schema_migrations (version) VALUES ('20141111133038');
-
-INSERT INTO schema_migrations (version) VALUES ('20141208164553');
-
-INSERT INTO schema_migrations (version) VALUES ('20141208174553');
-
-INSERT INTO schema_migrations (version) VALUES ('20141208174653');
-
-INSERT INTO schema_migrations (version) VALUES ('20141208185217');
-
-INSERT INTO schema_migrations (version) VALUES ('20150122175935');
-
-INSERT INTO schema_migrations (version) VALUES ('20150123142953');
-
-INSERT INTO schema_migrations (version) VALUES ('20150203180223');
-
-INSERT INTO schema_migrations (version) VALUES ('20150206210804');
-
-INSERT INTO schema_migrations (version) VALUES ('20150206230342');
-
-INSERT INTO schema_migrations (version) VALUES ('20150216193428');
-
-INSERT INTO schema_migrations (version) VALUES ('20150303210106');
-
-INSERT INTO schema_migrations (version) VALUES ('20150312151136');
-
-INSERT INTO schema_migrations (version) VALUES ('20150317132720');
-
-INSERT INTO schema_migrations (version) VALUES ('20150324152204');
-
-INSERT INTO schema_migrations (version) VALUES ('20150423145759');
-
-INSERT INTO schema_migrations (version) VALUES ('20150512193020');
-
-INSERT INTO schema_migrations (version) VALUES ('20150526180251');
-
-INSERT INTO schema_migrations (version) VALUES ('20151202151426');
-
-INSERT INTO schema_migrations (version) VALUES ('20151215134304');
-
-INSERT INTO schema_migrations (version) VALUES ('20151229214707');
-
-INSERT INTO schema_migrations (version) VALUES ('20160208210629');
-
-INSERT INTO schema_migrations (version) VALUES ('20160209155729');
-
-INSERT INTO schema_migrations (version) VALUES ('20160324144017');
-
-INSERT INTO schema_migrations (version) VALUES ('20160506175108');
-
-INSERT INTO schema_migrations (version) VALUES ('20160509143250');
-
-INSERT INTO schema_migrations (version) VALUES ('20160808151559');
-
-INSERT INTO schema_migrations (version) VALUES ('20160819195557');
-
-INSERT INTO schema_migrations (version) VALUES ('20160819195725');
-
-INSERT INTO schema_migrations (version) VALUES ('20160901210110');
-
-INSERT INTO schema_migrations (version) VALUES ('20160909181442');
-
-INSERT INTO schema_migrations (version) VALUES ('20160926194129');
-
-INSERT INTO schema_migrations (version) VALUES ('20161019171346');
-
-INSERT INTO schema_migrations (version) VALUES ('20161111143147');
-
-INSERT INTO schema_migrations (version) VALUES ('20161115171221');
-
-INSERT INTO schema_migrations (version) VALUES ('20161115174218');
-
-INSERT INTO schema_migrations (version) VALUES ('20161213172944');
-
-INSERT INTO schema_migrations (version) VALUES ('20161222153434');
-
-INSERT INTO schema_migrations (version) VALUES ('20161223090712');
-
-INSERT INTO schema_migrations (version) VALUES ('20170102153111');
-
-INSERT INTO schema_migrations (version) VALUES ('20170105160301');
-
-INSERT INTO schema_migrations (version) VALUES ('20170105160302');
-
-INSERT INTO schema_migrations (version) VALUES ('20170216170823');
-
-INSERT INTO schema_migrations (version) VALUES ('20170301225558');
-
-INSERT INTO schema_migrations (version) VALUES ('20170319063406');
-
-INSERT INTO schema_migrations (version) VALUES ('20170328215436');
-
-INSERT INTO schema_migrations (version) VALUES ('20170330012505');
-
-INSERT INTO schema_migrations (version) VALUES ('20170419173031');
-
-INSERT INTO schema_migrations (version) VALUES ('20170419173712');
-
-INSERT INTO schema_migrations (version) VALUES ('20170419175801');
-
-INSERT INTO schema_migrations (version) VALUES ('20170628185847');
-
-INSERT INTO schema_migrations (version) VALUES ('20170704160233');
-
-INSERT INTO schema_migrations (version) VALUES ('20170706141334');
-
-INSERT INTO schema_migrations (version) VALUES ('20170824202826');
-
-INSERT INTO schema_migrations (version) VALUES ('20170906224040');
-
-INSERT INTO schema_migrations (version) VALUES ('20171027183824');
-
-INSERT INTO schema_migrations (version) VALUES ('20171208203841');
-
-INSERT INTO schema_migrations (version) VALUES ('20171212153352');
-
-INSERT INTO schema_migrations (version) VALUES ('20180216203422');
-
-INSERT INTO schema_migrations (version) VALUES ('20180228220311');
-
-INSERT INTO schema_migrations (version) VALUES ('20180313180114');
-
-INSERT INTO schema_migrations (version) VALUES ('20180501182859');
-
-INSERT INTO schema_migrations (version) VALUES ('20180514135529');
-
-INSERT INTO schema_migrations (version) VALUES ('20180607175050');
-
-INSERT INTO schema_migrations (version) VALUES ('20180608123145');
-
-INSERT INTO schema_migrations (version) VALUES ('20180806133039');
-
-INSERT INTO schema_migrations (version) VALUES ('20180820130357');
-
-INSERT INTO schema_migrations (version) VALUES ('20180820132617');
-
-INSERT INTO schema_migrations (version) VALUES ('20180820135808');
-
-INSERT INTO schema_migrations (version) VALUES ('20180824152014');
-
-INSERT INTO schema_migrations (version) VALUES ('20180824155207');
-
-INSERT INTO schema_migrations (version) VALUES ('20180904110712');
-
-INSERT INTO schema_migrations (version) VALUES ('20180913175443');
-
-INSERT INTO schema_migrations (version) VALUES ('20180915155335');
-
-INSERT INTO schema_migrations (version) VALUES ('20180917200000');
-
-INSERT INTO schema_migrations (version) VALUES ('20180917205609');
-
-INSERT INTO schema_migrations (version) VALUES ('20180919001158');
-
-INSERT INTO schema_migrations (version) VALUES ('20181001175023');
-
-INSERT INTO schema_migrations (version) VALUES ('20181004131141');
-
-INSERT INTO schema_migrations (version) VALUES ('20181005192222');
-
-INSERT INTO schema_migrations (version) VALUES ('20181011184200');
-
-INSERT INTO schema_migrations (version) VALUES ('20181213183234');
+INSERT INTO "schema_migrations" (version) VALUES
+('20121016005009'),
+('20130105203021'),
+('20130105224358'),
+('20130105224618'),
+('20130107181109'),
+('20130107212832'),
+('20130109175700'),
+('20130109220548'),
+('20130113214204'),
+('20130116024233'),
+('20130116215213'),
+('20130118002239'),
+('20130122020042'),
+('20130122201442'),
+('20130122221616'),
+('20130123174514'),
+('20130123180224'),
+('20130123180228'),
+('20130125220425'),
+('20130128202518'),
+('20130128231343'),
+('20130130205749'),
+('20130203104818'),
+('20130203104824'),
+('20130203115329'),
+('20130207195855'),
+('20130218181504'),
+('20130226170000'),
+('20130313175417'),
+('20130315155820'),
+('20130315183626'),
+('20130315213205'),
+('20130318002138'),
+('20130319165853'),
+('20130319180730'),
+('20130319194637'),
+('20130319201431'),
+('20130319235957'),
+('20130320000107'),
+('20130326173804'),
+('20130326182917'),
+('20130415020241'),
+('20130425024459'),
+('20130425214427'),
+('20130523060112'),
+('20130523060213'),
+('20130524042319'),
+('20130528134100'),
+('20130606183519'),
+('20130608053730'),
+('20130610202538'),
+('20130611163736'),
+('20130612042554'),
+('20130617150007'),
+('20130626002829'),
+('20130626022810'),
+('20130627154537'),
+('20130627184333'),
+('20130708163414'),
+('20130708182912'),
+('20130708185153'),
+('20130724153034'),
+('20131007180607'),
+('20140117231056'),
+('20140124222114'),
+('20140129184311'),
+('20140317135600'),
+('20140319160547'),
+('20140321191343'),
+('20140324024606'),
+('20140325175653'),
+('20140402001908'),
+('20140407184311'),
+('20140421140924'),
+('20140421151939'),
+('20140421151940'),
+('20140422011506'),
+('20140423132913'),
+('20140423133559'),
+('20140501165548'),
+('20140519205916'),
+('20140527152921'),
+('20140530200539'),
+('20140601022548'),
+('20140602143352'),
+('20140607150616'),
+('20140611173003'),
+('20140627210837'),
+('20140709172343'),
+('20140714184006'),
+('20140811184643'),
+('20140817035914'),
+('20140818125735'),
+('20140826180337'),
+('20140828141043'),
+('20140909183946'),
+('20140911221252'),
+('20140918141529'),
+('20140918153541'),
+('20140918153705'),
+('20140924091559'),
+('20141111133038'),
+('20141208164553'),
+('20141208174553'),
+('20141208174653'),
+('20141208185217'),
+('20150122175935'),
+('20150123142953'),
+('20150203180223'),
+('20150206210804'),
+('20150206230342'),
+('20150216193428'),
+('20150303210106'),
+('20150312151136'),
+('20150317132720'),
+('20150324152204'),
+('20150423145759'),
+('20150512193020'),
+('20150526180251'),
+('20151202151426'),
+('20151215134304'),
+('20151229214707'),
+('20160208210629'),
+('20160209155729'),
+('20160324144017'),
+('20160506175108'),
+('20160509143250'),
+('20160808151559'),
+('20160819195557'),
+('20160819195725'),
+('20160901210110'),
+('20160909181442'),
+('20160926194129'),
+('20161019171346'),
+('20161111143147'),
+('20161115171221'),
+('20161115174218'),
+('20161213172944'),
+('20161222153434'),
+('20161223090712'),
+('20170102153111'),
+('20170105160301'),
+('20170105160302'),
+('20170216170823'),
+('20170301225558'),
+('20170319063406'),
+('20170328215436'),
+('20170330012505'),
+('20170419173031'),
+('20170419173712'),
+('20170419175801'),
+('20170628185847'),
+('20170704160233'),
+('20170706141334'),
+('20170824202826'),
+('20170906224040'),
+('20171027183824'),
+('20171208203841'),
+('20171212153352'),
+('20180216203422'),
+('20180228220311'),
+('20180313180114'),
+('20180501182859'),
+('20180514135529'),
+('20180607175050'),
+('20180608123145'),
+('20180806133039'),
+('20180820130357'),
+('20180820132617'),
+('20180820135808'),
+('20180824152014'),
+('20180824155207'),
+('20180904110712'),
+('20180913175443'),
+('20180915155335'),
+('20180917200000'),
+('20180917205609'),
+('20180919001158'),
+('20181001175023'),
+('20181004131141'),
+('20181005192222'),
+('20181011184200'),
+('20181213183234'),
+('20190214214814');
-INSERT INTO schema_migrations (version) VALUES ('20190214214814');
INSERT INTO schema_migrations (version) VALUES ('20190322174136');
next if t == base.table_name
next if t == 'schema_migrations'
next if t == 'permission_refresh_lock'
+ next if t == 'ar_internal_metadata'
klass = t.classify.constantize
next unless klass and 'owner_uuid'.in?(klass.columns.collect(&:name))
base.has_many(t.to_sym,
def check_enable_legacy_jobs_api
if Rails.configuration.enable_legacy_jobs_api == false ||
(Rails.configuration.enable_legacy_jobs_api == "auto" &&
- ActiveRecord::Base.connection.exec_query("select count(*) from jobs").first["count"] == "0")
- Rails.configuration.disable_api_methods = Disable_jobs_api_method_list
+ Job.count == 0)
+ Rails.configuration.disable_api_methods += Disable_jobs_api_method_list
end
end
def destroy_permission_links
if uuid
- Link.destroy_all(['link_class=? and (head_uuid=? or tail_uuid=?)',
- 'permission', uuid, uuid])
+ Link.where(['link_class=? and (head_uuid=? or tail_uuid=?)',
+ 'permission', uuid, uuid]).destroy_all
end
end
end
def load_where_param
if params[:where].nil? or params[:where] == ""
@where = {}
- elsif params[:where].is_a? Hash
+ elsif [Hash, ActionController::Parameters].include? params[:where].class
@where = params[:where]
elsif params[:where].is_a? String
begin
when String
begin
@select = SafeJSON.load(params[:select])
- raise unless @select.is_a? Array or @select.nil?
+ raise unless @select.is_a? Array or @select.nil? or !@select
rescue
raise ArgumentError.new("Could not parse \"select\" param as an array")
end
#
# SPDX-License-Identifier: AGPL-3.0
-require 'trollop'
+require 'optimist'
-opts = Trollop::options do
+opts = Optimist::options do
banner 'Fail jobs that have state=="Running".'
banner 'Options:'
opt(:before,
# If get option is used, an existing anonymous user token is returned. If none exist, one is created.
# If the get option is omitted, a new token is created and returned.
-require 'trollop'
+require 'optimist'
-opts = Trollop::options do
+opts = Optimist::options do
banner ''
banner "Usage: get_anonymous_user_token "
banner ''
# Append to src_collection.name: " (reason; salvaged data at new_collection.uuid)"
# Set portable_data_hash to "d41d8cd98f00b204e9800998ecf8427e+0"
-require 'trollop'
+require 'optimist'
require './lib/salvage_collection'
include SalvageCollection
-opts = Trollop::options do
+opts = Optimist::options do
banner ''
banner "Usage: salvage_collection.rb " +
"{uuid} {reason}"
abort 'Error: Ruby >= 1.9.3 required.' if RUBY_VERSION < '1.9.3'
require 'logger'
-require 'trollop'
+require 'optimist'
log = Logger.new STDERR
log.progname = $0.split('/').last
-opts = Trollop::options do
+opts = Optimist::options do
banner ''
banner "Usage: #{log.progname} " +
"{user_uuid_or_email} {user_and_repo_name} {vm_uuid}"
log.level = (ENV['DEBUG'] || opts.debug) ? Logger::DEBUG : Logger::WARN
if ARGV.count != 3
- Trollop::die "required arguments are missing"
+ Optimist::die "required arguments are missing"
end
user_arg, user_repo_name, vm_uuid = ARGV
last_ping_at: <%= 1.minute.ago.to_s(:db) %>
first_ping_at: <%= 23.hour.ago.to_s(:db) %>
job_uuid: zzzzz-8i9sb-2gx6rz0pjl033w3 # nearly_finished_job
+ properties: {}
info:
ping_secret: "48dpm3b8ijyj3jkr2yczxw0844dqd2752bhll7klodvgz9bg80"
slurm_state: "alloc"
last_ping_at: <%= 1.hour.ago.to_s(:db) %>
first_ping_at: <%= 23.hour.ago.to_s(:db) %>
job_uuid: ~
+ properties: {}
info:
ping_secret: "2k3i71depad36ugwmlgzilbi4e8n0illb2r8l4efg9mzkb3a1k"
last_ping_at: ~
first_ping_at: ~
job_uuid: ~
+ properties: {}
info:
ping_secret: "abcdyi0x4lb5q4gzqqtrnq30oyj08r8dtdimmanbqw49z1anz2"
last_ping_at: ~
first_ping_at: ~
job_uuid: ~
+ properties: {}
info:
ping_secret: "abcdyi0x4lb5q4gzqqtrnq30oyj08r8dtdimmanbqw49z1anz2"
last_ping_at: ~
first_ping_at: ~
job_uuid: ~
+ properties: {}
info:
ping_secret: "abcdyefg4lb5q4gzqqtrnq30oyj08r8dtdimmanbqw49z1anz2"
test "requesting nonexistent object returns 404 error" do
authorize_with :admin
- get(:show, id: BAD_UUID)
+ get(:show, params: {id: BAD_UUID})
check_404
end
test "requesting object without read permission returns 404 error" do
authorize_with :spectator
- get(:show, id: specimens(:owned_by_active_user).uuid)
+ get(:show, params: {id: specimens(:owned_by_active_user).uuid})
check_404
end
test "submitting bad object returns error" do
authorize_with :spectator
- post(:create, specimen: {badattr: "badvalue"})
+ post(:create, params: {specimen: {badattr: "badvalue"}})
assert_response 422
check_error_token
end
test "bogus boolean parameter #{bogus.inspect} returns error" do
@controller = Arvados::V1::GroupsController.new
authorize_with :active
- post :create, {
+ post :create, params: {
group: {},
ensure_unique_name: bogus
}
test "boolean parameter #{boolparam.inspect} acceptable" do
@controller = Arvados::V1::GroupsController.new
authorize_with :active
- post :create, {
+ post :create, params: {
group: {},
ensure_unique_name: boolparam
}
test "boolean parameter #{boolparam.inspect} accepted as #{bool.inspect}" do
@controller = Arvados::V1::GroupsController.new
authorize_with :active
- post :create, {
+ post :create, params: {
group: {
name: groups(:aproject).name,
owner_uuid: groups(:aproject).owner_uuid
test "should not get index with expired auth" do
authorize_with :expired
- get :index, format: :json
+ get :index, params: {format: :json}
assert_response 401
end
test "create system auth" do
authorize_with :admin_trustedclient
- post :create_system_auth, scopes: '["test"]'
+ post :create_system_auth, params: {scopes: '["test"]'}
assert_response :success
assert_not_nil JSON.parse(@response.body)['uuid']
end
test "prohibit create system auth with token from non-trusted client" do
authorize_with :admin
- post :create_system_auth, scopes: '["test"]'
+ post :create_system_auth, params: {scopes: '["test"]'}
assert_response 403
end
test "prohibit create system auth by non-admin" do
authorize_with :active
- post :create_system_auth, scopes: '["test"]'
+ post :create_system_auth, params: {scopes: '["test"]'}
assert_response 403
end
expected_tokens = expected.map do |name|
api_client_authorizations(name).api_token
end
- get :index, search_params
+ get :index, params: search_params
assert_response :success
got_tokens = JSON.parse(@response.body)['items']
.map { |a| a['api_token'] }
].each do |user, token, expect_get_response, expect_list_response, expect_list_items|
test "using '#{user}', get '#{token}' by uuid" do
authorize_with user
- get :show, {
+ get :show, params: {
id: api_client_authorizations(token).uuid,
}
assert_response expect_get_response
test "using '#{user}', update '#{token}' by uuid" do
authorize_with user
- put :update, {
+ put :update, params: {
id: api_client_authorizations(token).uuid,
api_client_authorization: {},
}
test "using '#{user}', delete '#{token}' by uuid" do
authorize_with user
- post :destroy, {
+ post :destroy, params: {
id: api_client_authorizations(token).uuid,
}
assert_response expect_get_response
test "using '#{user}', list '#{token}' by uuid" do
authorize_with user
- get :index, {
+ get :index, params: {
filters: [['uuid','=',api_client_authorizations(token).uuid]],
}
assert_response expect_list_response
if expect_list_items
test "using '#{user}', list '#{token}' by uuid with offset" do
authorize_with user
- get :index, {
+ get :index, params: {
filters: [['uuid','=',api_client_authorizations(token).uuid]],
offset: expect_list_items,
}
test "using '#{user}', list '#{token}' by token" do
authorize_with user
- get :index, {
+ get :index, params: {
filters: [['api_token','=',api_client_authorizations(token).api_token]],
}
assert_response expect_list_response
test "scoped token cannot change its own scopes" do
authorize_with :admin_vm
- put :update, {
+ put :update, params: {
id: api_client_authorizations(:admin_vm).uuid,
api_client_authorization: {scopes: ['all']},
}
test "token cannot change its own uuid" do
authorize_with :admin
- put :update, {
+ put :update, params: {
id: api_client_authorizations(:admin).uuid,
api_client_authorization: {uuid: 'zzzzz-gj3su-zzzzzzzzzzzzzzz'},
}
test "get index with include_old_versions" do
authorize_with :active
- get :index, {
+ get :index, params: {
include_old_versions: true
}
assert_response :success
test "collections.get returns signed locators, and no unsigned_manifest_text" do
permit_unsigned_manifests
authorize_with :active
- get :show, {id: collections(:foo_file).uuid}
+ get :show, params: {id: collections(:foo_file).uuid}
assert_response :success
assert_signed_manifest json_response['manifest_text'], 'foo_file'
refute_includes json_response, 'unsigned_manifest_text'
test "correct signatures are given for #{token_method}" do
token = api_client_authorizations(:active).send(token_method)
authorize_with_token token
- get :show, {id: collections(:foo_file).uuid}
+ get :show, params: {id: collections(:foo_file).uuid}
assert_response :success
assert_signed_manifest json_response['manifest_text'], 'foo_file', token: token
end
key: Rails.configuration.blob_signing_key,
api_token: token)
authorize_with_token token
- put :update, {
+ put :update, params: {
id: collections(:collection_owned_by_active).uuid,
collection: {
manifest_text: ". #{signed} 0:3:foo.txt\n",
test "index with manifest_text selected returns signed locators" do
columns = %w(uuid owner_uuid manifest_text)
authorize_with :active
- get :index, select: columns
+ get :index, params: {select: columns}
assert_response :success
assert(assigns(:objects).andand.any?,
"no Collections returned for index with columns selected")
test "index with unsigned_manifest_text selected returns only unsigned locators" do
authorize_with :active
- get :index, select: ['unsigned_manifest_text']
+ get :index, params: {select: ['unsigned_manifest_text']}
assert_response :success
assert_operator json_response["items"].count, :>, 0
locs = 0
['', nil, false, 'null'].each do |select|
test "index with select=#{select.inspect} returns everything except manifest" do
authorize_with :active
- get :index, select: select
+ get :index, params: {select: select}
assert_response :success
assert json_response['items'].any?
json_response['items'].each do |coll|
'["uuid", "manifest_text"]'].each do |select|
test "index with select=#{select.inspect} returns no name" do
authorize_with :active
- get :index, select: select
+ get :index, params: {select: select}
assert_response :success
assert json_response['items'].any?
json_response['items'].each do |coll|
[0,1,2].each do |limit|
test "get index with limit=#{limit}" do
authorize_with :active
- get :index, limit: limit
+ get :index, params: {limit: limit}
assert_response :success
assert_equal limit, assigns(:objects).count
resp = JSON.parse(@response.body)
test "items.count == items_available" do
authorize_with :active
- get :index, limit: 100000
+ get :index, params: {limit: 100000}
assert_response :success
resp = JSON.parse(@response.body)
assert_equal resp['items_available'], assigns(:objects).length
test "items.count == items_available with filters" do
authorize_with :active
- get :index, {
+ get :index, params: {
limit: 100,
filters: [['uuid','=',collections(:foo_file).uuid]]
}
test "get index with limit=2 offset=99999" do
# Assume there are not that many test fixtures.
authorize_with :active
- get :index, limit: 2, offset: 99999
+ get :index, params: {limit: 2, offset: 99999}
assert_response :success
assert_equal 0, assigns(:objects).count
resp = JSON.parse(@response.body)
coll1 = collections(:collection_1_of_201)
Rails.configuration.max_index_database_read =
yield(coll1.manifest_text.size)
- get :index, {
+ get :index, params: {
select: %w(uuid manifest_text),
filters: [["owner_uuid", "=", coll1.owner_uuid]],
limit: 300,
# post :create will modify test_collection in place, so we save a copy first.
# Hash.deep_dup is not sufficient as it preserves references of strings (??!?)
post_collection = Marshal.load(Marshal.dump(test_collection))
- post :create, {
+ post :create, params: {
collection: post_collection
}
foo_collection = collections(:foo_file)
# Get foo_file using its portable data hash
- get :show, {
+ get :show, params: {
id: foo_collection[:portable_data_hash]
}
assert_response :success
permit_unsigned_manifests
authorize_with :active
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: 'zzzzz-j7d0g-rew6elm53kancon',
manifest_text: manifest_text,
permit_unsigned_manifests
authorize_with :admin
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: 'zzzzz-tpzed-000000000000000',
manifest_text: manifest_text,
if !unsigned
manifest_text = Collection.sign_manifest manifest_text, api_token(:active)
end
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: users(:active).uuid,
manifest_text: manifest_text,
permit_unsigned_manifests
authorize_with :active
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: groups(:active_user_has_can_manage).uuid,
manifest_text: manifest_text,
permit_unsigned_manifests
authorize_with :active
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: groups(:all_users).uuid,
manifest_text: manifest_text,
permit_unsigned_manifests
authorize_with :active
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: groups(:public).uuid,
manifest_text: manifest_text,
permit_unsigned_manifests
authorize_with :admin
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
owner_uuid: 'zzzzz-j7d0g-it30l961gq3t0oi',
manifest_text: manifest_text,
test "should create with collection passed as json" do
permit_unsigned_manifests
authorize_with :active
- post :create, {
+ post :create, params: {
collection: <<-EOS
{
"manifest_text":". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n",\
test "should fail to create with checksum mismatch" do
permit_unsigned_manifests
authorize_with :active
- post :create, {
+ post :create, params: {
collection: <<-EOS
{
"manifest_text":". d41d8cd98f00b204e9800998ecf8427e 0:0:bar.txt\n",\
test "collection UUID is normalized when created" do
permit_unsigned_manifests
authorize_with :active
- post :create, {
+ post :create, params: {
collection: {
manifest_text: ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n",
portable_data_hash: "d30fe8ae534397864cb96c544f4cf102+47+Khint+Xhint+Zhint"
test "get full provenance for baz file" do
authorize_with :active
- get :provenance, id: 'ea10d51bcf88862dbcc36eb292017dfd+45'
+ get :provenance, params: {id: 'ea10d51bcf88862dbcc36eb292017dfd+45'}
assert_response :success
resp = JSON.parse(@response.body)
assert_not_nil resp['ea10d51bcf88862dbcc36eb292017dfd+45'] # baz
test "get no provenance for foo file" do
# spectator user cannot even see baz collection
authorize_with :spectator
- get :provenance, id: '1f4b0bc7583c2a7f9102c395f4ffc5e3+45'
+ get :provenance, params: {id: '1f4b0bc7583c2a7f9102c395f4ffc5e3+45'}
assert_response 404
end
test "get partial provenance for baz file" do
# spectator user can see bar->baz job, but not foo->bar job
authorize_with :spectator
- get :provenance, id: 'ea10d51bcf88862dbcc36eb292017dfd+45'
+ get :provenance, params: {id: 'ea10d51bcf88862dbcc36eb292017dfd+45'}
assert_response :success
resp = JSON.parse(@response.body)
assert_not_nil resp['ea10d51bcf88862dbcc36eb292017dfd+45'] # baz
test "search collections with 'any' operator" do
expect_pdh = collections(:docker_image).portable_data_hash
authorize_with :active
- get :index, {
+ get :index, params: {
where: { any: ['contains', expect_pdh[5..25]] }
}
assert_response :success
". " + signed_locators[1] + " 0:0:foo.txt\n" +
". " + signed_locators[2] + " 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
manifest_text: signed_manifest,
portable_data_hash: manifest_uuid,
". " + Blob.sign_locator(locators[1], signing_opts) + " 0:0:foo.txt\n" +
". " + Blob.sign_locator(locators[2], signing_opts) + " 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
manifest_text: signed_manifest,
portable_data_hash: manifest_uuid,
unsigned_manifest.length.to_s
bad_manifest = ". #{bad_locator} 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
manifest_text: bad_manifest,
portable_data_hash: manifest_uuid
'+' +
signed_manifest.length.to_s
- post :create, {
+ post :create, params: {
collection: {
manifest_text: signed_manifest,
portable_data_hash: manifest_uuid
test "reject manifest with unsigned block as stream name" do
authorize_with :active
- post :create, {
+ post :create, params: {
collection: {
manifest_text: "00000000000000000000000000000000+1234 d41d8cd98f00b204e9800998ecf8427e+0 0:0:foo.txt\n"
}
portable_data_hash: manifest_uuid,
}
post_collection = Marshal.load(Marshal.dump(test_collection))
- post :create, {
+ post :create, params: {
collection: post_collection
}
assert_response :success
signed_locators = locators.map { |loc| Blob.sign_locator loc, signing_opts }
signed_manifest = [".", *signed_locators, "0:0:foo.txt\n"].join(" ")
- post :create, {
+ post :create, params: {
collection: {
manifest_text: signed_manifest,
portable_data_hash: manifest_uuid,
authorize_with :active
unsigned_manifest = ". 0cc175b9c0f1b6a831c399e269772661+1 0:1:a.txt\n"
manifest_uuid = Digest::MD5.hexdigest(unsigned_manifest)
- post :create, {
+ post :create, params: {
collection: {
manifest_text: unsigned_manifest,
portable_data_hash: manifest_uuid,
test 'List expired collection returns empty list' do
authorize_with :active
- get :index, {
+ get :index, params: {
where: {name: 'expired_collection'},
}
assert_response :success
test 'Show expired collection returns 404' do
authorize_with :active
- get :show, {
+ get :show, params: {
id: 'zzzzz-4zz18-mto52zx1s7sn3ih',
}
assert_response 404
test 'Update expired collection returns 404' do
authorize_with :active
- post :update, {
+ post :update, params: {
id: 'zzzzz-4zz18-mto52zx1s7sn3ih',
collection: {
name: "still expired"
test 'List collection with future expiration time succeeds' do
authorize_with :active
- get :index, {
+ get :index, params: {
where: {name: 'collection_expires_in_future'},
}
found = assigns(:objects)
test 'Show collection with future expiration time succeeds' do
authorize_with :active
- get :show, {
+ get :show, params: {
id: 'zzzzz-4zz18-padkqo7yb8d9i3j',
}
assert_response :success
test 'Update collection with future expiration time succeeds' do
authorize_with :active
- post :update, {
+ post :update, params: {
id: 'zzzzz-4zz18-padkqo7yb8d9i3j',
collection: {
name: "still not expired"
test "get collection and verify that file_names is not included" do
authorize_with :active
- get :show, {id: collections(:foo_file).uuid}
+ get :show, params: {id: collections(:foo_file).uuid}
assert_response :success
assert_equal collections(:foo_file).uuid, json_response['uuid']
assert_nil json_response['file_names']
description = description + description
end
- post :create, collection: {
- manifest_text: ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:foo.txt\n",
- description: description,
+ post :create, params: {
+ collection: {
+ manifest_text: ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:foo.txt\n",
+ description: description,
+ }
}
assert_response expected_response
test "Set replication_desired=#{ask.inspect}" do
Rails.configuration.default_collection_replication = 2
authorize_with :active
- put :update, {
+ put :update, params: {
id: collections(:replication_undesired_unconfirmed).uuid,
collection: {
replication_desired: ask,
test "get collection with properties" do
authorize_with :active
- get :show, {id: collections(:collection_with_one_property).uuid}
+ get :show, params: {id: collections(:collection_with_one_property).uuid}
assert_response :success
assert_not_nil json_response['uuid']
assert_equal 'value1', json_response['properties']['property1']
test "create collection with properties" do
authorize_with :active
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
manifest_text: manifest_text,
portable_data_hash: "d30fe8ae534397864cb96c544f4cf102+47",
].each do |manifest_text|
test "create collection with invalid manifest #{manifest_text} and expect error" do
authorize_with :active
- post :create, {
+ post :create, params: {
collection: {
manifest_text: manifest_text,
portable_data_hash: "d41d8cd98f00b204e9800998ecf8427e+0"
].each do |manifest_text, pdh|
test "create collection with valid manifest #{manifest_text.inspect} and expect success" do
authorize_with :active
- post :create, {
+ post :create, params: {
collection: {
manifest_text: manifest_text,
portable_data_hash: pdh
].each do |manifest_text|
test "update collection with invalid manifest #{manifest_text} and expect error" do
authorize_with :active
- post :update, {
+ post :update, params: {
id: 'zzzzz-4zz18-bv31uwvy3neko21',
collection: {
manifest_text: manifest_text,
].each do |manifest_text|
test "update collection with valid manifest #{manifest_text.inspect} and expect success" do
authorize_with :active
- post :update, {
+ post :update, params: {
id: 'zzzzz-4zz18-bv31uwvy3neko21',
collection: {
manifest_text: manifest_text,
test 'get trashed collection with include_trash' do
uuid = 'zzzzz-4zz18-mto52zx1s7sn3ih' # expired_collection
authorize_with :active
- get :show, {
+ get :show, params: {
id: uuid,
include_trash: true,
}
test "get trashed collection via filters and #{user} user" do
uuid = 'zzzzz-4zz18-mto52zx1s7sn3ih' # expired_collection
authorize_with user
- get :index, {
+ get :index, params: {
filters: [["current_version_uuid", "=", uuid]],
include_trash: true,
}
test "get trashed collection via filters and #{user} user, including its past versions" do
uuid = 'zzzzz-4zz18-mto52zx1s7sn3ih' # expired_collection
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [["current_version_uuid", "=", uuid]],
include_trash: true,
include_old_versions: true,
versions.each do |col|
refute col.is_trashed
end
- post :trash, {
+ post :trash, params: {
id: uuid,
}
assert_response 200
test 'get trashed collection without include_trash' do
uuid = 'zzzzz-4zz18-mto52zx1s7sn3ih' # expired_collection
authorize_with :active
- get :show, {
+ get :show, params: {
id: uuid,
}
assert_response 404
test 'trash collection using http DELETE verb' do
uuid = collections(:collection_owned_by_active).uuid
authorize_with :active
- delete :destroy, {
+ delete :destroy, params: {
id: uuid,
}
assert_response 200
test 'delete long-trashed collection immediately using http DELETE verb' do
uuid = 'zzzzz-4zz18-mto52zx1s7sn3ih' # expired_collection
authorize_with :active
- delete :destroy, {
+ delete :destroy, params: {
id: uuid,
}
assert_response 200
end
authorize_with :active
time_before_trashing = db_current_time
- post :trash, {
+ post :trash, params: {
id: uuid,
}
assert_response 200
test 'untrash a trashed collection' do
authorize_with :active
- post :untrash, {
+ post :untrash, params: {
id: collections(:expired_collection).uuid,
}
assert_response 200
test 'untrash error on not trashed collection' do
authorize_with :active
- post :untrash, {
+ post :untrash, params: {
id: collections(:collection_owned_by_active).uuid,
}
assert_response 422
[:active, :admin].each do |user|
test "get trashed collections as #{user}" do
authorize_with user
- get :index, {
+ get :index, params: {
filters: [["is_trashed", "=", true]],
include_trash: true,
}
test 'untrash collection with same name as another with no ensure unique name' do
authorize_with :active
- post :untrash, {
+ post :untrash, params: {
id: collections(:trashed_collection_to_test_name_conflict_on_untrash).uuid,
}
assert_response 422
test 'untrash collection with same name as another with ensure unique name' do
authorize_with :active
- post :untrash, {
+ post :untrash, params: {
id: collections(:trashed_collection_to_test_name_conflict_on_untrash).uuid,
ensure_unique_name: true
}
test 'cannot show collection in trashed subproject' do
authorize_with :active
- get :show, {
+ get :show, params: {
id: collections(:collection_in_trashed_subproject).uuid,
format: :json
}
test 'can show collection in untrashed subproject' do
authorize_with :active
Group.find_by_uuid(groups(:trashed_project).uuid).update! is_trashed: false
- get :show, {
+ get :show, params: {
id: collections(:collection_in_trashed_subproject).uuid,
format: :json,
}
test 'cannot index collection in trashed subproject' do
authorize_with :active
- get :index, { limit: 1000 }
+ get :index, params: { limit: 1000 }
assert_response :success
item_uuids = json_response['items'].map do |item|
item['uuid']
test 'can index collection in untrashed subproject' do
authorize_with :active
Group.find_by_uuid(groups(:trashed_project).uuid).update! is_trashed: false
- get :index, { limit: 1000 }
+ get :index, params: { limit: 1000 }
assert_response :success
item_uuids = json_response['items'].map do |item|
item['uuid']
test 'can index trashed subproject collection with include_trash' do
authorize_with :active
- get :index, {
+ get :index, params: {
include_trash: true,
limit: 1000
}
test 'can get collection with past versions' do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['current_version_uuid','=',collections(:collection_owned_by_active).uuid]],
include_old_versions: true
}
test 'can get old version collection by uuid' do
authorize_with :active
- get :show, {
+ get :show, params: {
id: collections(:collection_owned_by_active_past_version_1).uuid,
}
assert_response :success
permit_unsigned_manifests
authorize_with :active
manifest_text = ". d41d8cd98f00b204e9800998ecf8427e 0:0:foo.txt\n"
- post :create, {
+ post :create, params: {
collection: {
name: 'Test collection',
version: 42,
key: Rails.configuration.blob_signing_key,
api_token: token)
authorize_with_token token
- put :update, {
+ put :update, params: {
id: col.uuid,
collection: {
manifest_text: ". #{signed} 0:3:foo.txt\n",
authorize_with :active
sp = {'partitions' => ['test1', 'test2']}
- post :create, {
+ post :create, params: {
container_request: minimal_cr.merge(scheduling_parameters: sp.dup)
}
assert_response :success
test "secret_mounts not in #create responses" do
authorize_with :active
- post :create, {
+ post :create, params: {
container_request: minimal_cr.merge(
secret_mounts: {'/foo' => {'kind' => 'json', 'content' => 'bar'}}),
}
authorize_with :active
req = container_requests(:uncommitted)
- patch :update, {
+ patch :update, params: {
id: req.uuid,
container_request: {
secret_mounts: {'/foo' => {'kind' => 'json', 'content' => 'bar'}},
req = container_requests(:uncommitted)
req.update_attributes!(secret_mounts: {'/foo' => {'kind' => 'json', 'content' => 'bar'}})
- patch :update, {
+ patch :update, params: {
id: req.uuid,
container_request: {
command: ['echo', 'test'],
test "runtime_token not in #create responses" do
authorize_with :active
- post :create, {
+ post :create, params: {
container_request: minimal_cr.merge(
runtime_token: api_client_authorizations(:spectator).token)
}
class Arvados::V1::ContainersControllerTest < ActionController::TestCase
test 'create' do
authorize_with :system_user
- post :create, {
+ post :create, params: {
container: {
command: ['echo', 'hello'],
container_image: 'test',
[Container::Queued, Container::Complete].each do |state|
test "cannot get auth in #{state} state" do
authorize_with :dispatch1
- get :auth, id: containers(:queued).uuid
+ get :auth, params: {id: containers(:queued).uuid}
assert_response 403
end
end
assert c.lock, show_errors(c)
authorize_with :system_user
- get :auth, id: c.uuid
+ get :auth, params: {id: c.uuid}
assert_response 403
end
authorize_with :dispatch1
c = containers(:queued)
assert c.lock, show_errors(c)
- get :auth, id: c.uuid
+ get :auth, params: {id: c.uuid}
assert_response :success
assert_operator 32, :<, json_response['api_token'].length
assert_equal 'arvados#apiClientAuthorization', json_response['kind']
authorize_with :dispatch1
c = containers(:queued)
assert c.lock, show_errors(c)
- get :show, id: c.uuid
+ get :show, params: {id: c.uuid}
assert_response :success
assert_nil json_response['auth']
assert_nil json_response['secret_mounts']
test "lock container" do
authorize_with :dispatch1
uuid = containers(:queued).uuid
- post :lock, {id: uuid}
+ post :lock, params: {id: uuid}
assert_response :success
assert_nil json_response['mounts']
assert_nil json_response['command']
test "unlock container" do
authorize_with :dispatch1
uuid = containers(:locked).uuid
- post :unlock, {id: uuid}
+ post :unlock, params: {id: uuid}
assert_response :success
assert_nil json_response['mounts']
assert_nil json_response['command']
test "unlock container locked by different dispatcher" do
authorize_with :dispatch2
uuid = containers(:locked).uuid
- post :unlock, {id: uuid}
+ post :unlock, params: {id: uuid}
assert_response 422
end
test "state transitions from #{fixture} to #{action}" do
authorize_with :dispatch1
uuid = containers(fixture).uuid
- post action, {id: uuid}
+ post action, params: {id: uuid}
assert_response response
assert_equal state, Container.where(uuid: uuid).first.state
end
].each do |expect_success, auth|
test "get secret_mounts with #{auth} token" do
authorize_with auth
- get :secret_mounts, {id: containers(:running).uuid}
+ get :secret_mounts, params: {id: containers(:running).uuid}
if expect_success
assert_response :success
assert_equal "42\n", json_response["secret_mounts"]["/secret/6x9"]["content"]
test 'get runtime_token auth' do
authorize_with :dispatch2
c = containers(:runtime_token)
- get :auth, id: c.uuid
+ get :auth, params: {id: c.uuid}
assert_response :success
assert_equal "v2/#{json_response['uuid']}/#{json_response['api_token']}", api_client_authorizations(:container_runtime_token).token
assert_equal 'arvados#apiClientAuthorization', json_response['kind']
end
-
end
test '"not in" filter passes null values' do
@controller = Arvados::V1::GroupsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['group_class', 'not in', ['project']] ],
controller: 'groups',
}
test 'error message for non-array element in filters array' do
@controller = Arvados::V1::CollectionsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [{bogus: 'filter'}],
}
assert_response 422
test 'error message for full text search on a specific column' do
@controller = Arvados::V1::CollectionsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', '@@', 'abcdef']],
}
assert_response 422
test 'difficult characters in full text search' do
@controller = Arvados::V1::CollectionsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['any', '@@', 'a|b"c']],
}
assert_response :success
test 'array operand in full text search' do
@controller = Arvados::V1::CollectionsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['any', '@@', ['abc', 'def']]],
}
assert_response 422
timestamp = mine.modified_at.strftime('%Y-%m-%dT%H:%M:%S.%NZ')
@controller = Arvados::V1::CollectionsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['modified_at', operator, timestamp],
['uuid', '=', mine.uuid]],
}
@controller = Arvados::V1::GroupsController.new
authorize_with :admin
- get :contents, {
+ get :contents, params: {
format: :json,
count: 'none',
limit: 1000,
@controller = Arvados::V1::GroupsController.new
- get :contents, {
+ get :contents, params: {
format: :json,
count: 'none',
limit: 1000,
test "jsonb filter properties.#{prop} #{op} #{opr})" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: SafeJSON.dump([ ["properties.#{prop}", op, opr] ]),
limit: 1000
}
test "jsonb hash 'exists' and '!=' filter" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['properties.prop1', 'exists', true], ['properties.prop1', '!=', 'value1'] ]
}
assert_response :success
test "jsonb array 'exists'" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['storage_classes_confirmed.default', 'exists', true] ]
}
assert_response :success
test "jsonb hash alternate form 'exists' and '!=' filter" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['properties', 'exists', 'prop1'], ['properties.prop1', '!=', 'value1'] ]
}
assert_response :success
test "jsonb array alternate form 'exists' filter" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['storage_classes_confirmed', 'exists', 'default'] ]
}
assert_response :success
test "jsonb 'exists' must be boolean" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['properties.prop1', 'exists', nil] ]
}
assert_response 422
test "jsonb checks column exists" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['puppies.prop1', '=', 'value1'] ]
}
assert_response 422
test "jsonb checks column is valid" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['name.prop1', '=', 'value1'] ]
}
assert_response 422
test "jsonb invalid operator" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['properties.prop1', '###', 'value1'] ]
}
assert_response 422
test "replication_desired = 2" do
@controller = Arvados::V1::CollectionsController.new
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: SafeJSON.dump([ ['replication_desired', '=', 2] ])
}
assert_response :success
assert_includes(found, collections(:replication_desired_2_unconfirmed).uuid)
assert_includes(found, collections(:replication_desired_2_confirmed_2).uuid)
end
-
end
test "attempt to delete group without read or write access" do
authorize_with :active
- post :destroy, id: groups(:empty_lonely_group).uuid
+ post :destroy, params: {id: groups(:empty_lonely_group).uuid}
assert_response 404
end
test "attempt to delete group without write access" do
authorize_with :active
- post :destroy, id: groups(:all_users).uuid
+ post :destroy, params: {id: groups(:all_users).uuid}
assert_response 403
end
test "get list of projects" do
authorize_with :active
- get :index, filters: [['group_class', '=', 'project']], format: :json
+ get :index, params: {filters: [['group_class', '=', 'project']], format: :json}
assert_response :success
group_uuids = []
json_response['items'].each do |group|
test "get list of groups that are not projects" do
authorize_with :active
- get :index, filters: [['group_class', '!=', 'project']], format: :json
+ get :index, params: {filters: [['group_class', '!=', 'project']], format: :json}
assert_response :success
group_uuids = []
json_response['items'].each do |group|
test "get list of groups with bogus group_class" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['group_class', '=', 'nogrouphasthislittleclass']],
format: :json,
}
test 'get group-owned objects' do
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
format: :json,
}
test "user with project read permission can see project objects" do
authorize_with :project_viewer
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
format: :json,
}
test "list objects across projects" do
authorize_with :project_viewer
- get :contents, {
+ get :contents, params: {
format: :json,
filters: [['uuid', 'is_a', 'arvados#specimen']]
}
test "list trashed collections and projects" do
authorize_with :active
- get(:contents, {
+ get(:contents, params: {
format: :json,
include_trash: true,
filters: [
test "list objects in home project" do
authorize_with :active
- get :contents, {
+ get :contents, params: {
format: :json,
limit: 200,
id: users(:active).uuid
test "user with project read permission can see project collections" do
authorize_with :project_viewer
- get :contents, {
+ get :contents, params: {
id: groups(:asubproject).uuid,
format: :json,
}
].each do |column, order, operator, field|
test "user with project read permission can sort projects on #{column} #{order}" do
authorize_with :project_viewer
- get :contents, {
+ get :contents, params: {
id: groups(:asubproject).uuid,
format: :json,
filters: [['uuid', 'is_a', "arvados#collection"]],
# project tests.
def check_new_project_link_fails(link_attrs)
@controller = Arvados::V1::LinksController.new
- post :create, link: {
- link_class: "permission",
- name: "can_read",
- head_uuid: groups(:aproject).uuid,
- }.merge(link_attrs)
+ post :create, params: {
+ link: {
+ link_class: "permission",
+ name: "can_read",
+ head_uuid: groups(:aproject).uuid,
+ }.merge(link_attrs)
+ }
assert_includes(403..422, response.status)
end
test "user with project read permission can't rename items in it" do
authorize_with :project_viewer
@controller = Arvados::V1::LinksController.new
- post :update, {
+ post :update, params: {
id: jobs(:running).uuid,
name: "Denied test name",
}
test "user with project read permission can't remove items from it" do
@controller = Arvados::V1::PipelineTemplatesController.new
authorize_with :project_viewer
- post :update, {
+ post :update, params: {
id: pipeline_templates(:two_part).uuid,
pipeline_template: {
owner_uuid: users(:project_viewer).uuid,
test "user with project read permission can't delete it" do
authorize_with :project_viewer
- post :destroy, {id: groups(:aproject).uuid}
+ post :destroy, params: {id: groups(:aproject).uuid}
assert_response 403
end
test 'get group-owned objects with limit' do
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
limit: 1,
format: :json,
test 'get group-owned objects with limit and offset' do
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
limit: 1,
offset: 12345,
test 'get group-owned objects with additional filter matching nothing' do
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
filters: [['uuid', 'in', ['foo_not_a_uuid','bar_not_a_uuid']]],
format: :json,
['foo', '', '1234five', '0x10', '-8'].each do |val|
test "Raise error on bogus #{arg} parameter #{val.inspect}" do
authorize_with :active
- get :contents, {
+ get :contents, params: {
:id => groups(:aproject).uuid,
:format => :json,
arg => val,
test "Collection contents don't include manifest_text" do
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
filters: [["uuid", "is_a", "arvados#collection"]],
format: :json,
test 'get writable_by list for owned group' do
authorize_with :active
- get :show, {
+ get :show, params: {
id: groups(:aproject).uuid,
format: :json
}
test 'no writable_by list for group with read-only access' do
authorize_with :rominiadmin
- get :show, {
+ get :show, params: {
id: groups(:testusergroup_admins).uuid,
format: :json
}
test 'get writable_by list by admin user' do
authorize_with :admin
- get :show, {
+ get :show, params: {
id: groups(:testusergroup_admins).uuid,
format: :json
}
test 'creating subproject with duplicate name fails' do
authorize_with :active
- post :create, {
+ post :create, params: {
group: {
name: 'A Project',
owner_uuid: users(:active).uuid,
test 'creating duplicate named subproject succeeds with ensure_unique_name' do
authorize_with :active
- post :create, {
+ post :create, params: {
group: {
name: 'A Project',
owner_uuid: users(:active).uuid,
new_project['name'])
end
- test "unsharing a project results in hiding it from previously shared user" do
- # remove sharing link for project
- @controller = Arvados::V1::LinksController.new
- authorize_with :admin
- post :destroy, id: links(:share_starred_project_with_project_viewer).uuid
- assert_response :success
-
- # verify that the user can no longer see the project
- @test_counter = 0 # Reset executed action counter
- @controller = Arvados::V1::GroupsController.new
- authorize_with :project_viewer
- get :index, filters: [['group_class', '=', 'project']], format: :json
- assert_response :success
- found_projects = {}
- json_response['items'].each do |g|
- found_projects[g['uuid']] = g
- end
- assert_equal false, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
-
- # share the project
- @test_counter = 0
- @controller = Arvados::V1::LinksController.new
- authorize_with :system_user
- post :create, link: {
- link_class: "permission",
- name: "can_read",
- head_uuid: groups(:starred_and_shared_active_user_project).uuid,
- tail_uuid: users(:project_viewer).uuid,
- }
-
- # verify that project_viewer user can now see shared project again
- @test_counter = 0
- @controller = Arvados::V1::GroupsController.new
- authorize_with :project_viewer
- get :index, filters: [['group_class', '=', 'project']], format: :json
- assert_response :success
- found_projects = {}
- json_response['items'].each do |g|
- found_projects[g['uuid']] = g
- end
- assert_equal true, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
- end
-
[
[['owner_uuid', '!=', 'zzzzz-tpzed-xurymjxw79nv3jz'], 200,
'zzzzz-d1hrv-subprojpipeline', 'zzzzz-d1hrv-1xfj6xkicf2muk2'],
].each do |filter, expect_code, expect_uuid, not_expect_uuid|
test "get contents with '#{filter}' filter" do
authorize_with :active
- get :contents, filters: [filter], format: :json
+ get :contents, params: {filters: [filter], format: :json}
assert_response expect_code
if expect_code == 200
assert_not_empty json_response['items']
Rails.configuration.disable_api_methods = ['jobs.index', 'pipeline_instances.index']
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
format: :json,
}
# restricted column
Rails.configuration.max_index_database_read = 12
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: groups(:aproject).uuid,
format: :json,
}
recursive: true,
format: :json,
}
- get :contents, params
+ get :contents, params: params
owners = json_response['items'].map do |item|
item['owner_uuid']
end
format: :json,
}
params[:recursive] = false if recursive == false
- get :contents, params
+ get :contents, params: params
owners = json_response['items'].map do |item|
item['owner_uuid']
end
test 'get home project contents, recursive=true' do
authorize_with :active
- get :contents, {
+ get :contents, params: {
id: users(:active).uuid,
recursive: true,
format: :json,
untrash.each do |pr|
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
- get :contents, {
+ get :contents, params: {
id: groups(project).owner_uuid,
format: :json
}
untrash.each do |pr|
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
- get :contents, {
+ get :contents, params: {
id: groups(project).uuid,
format: :json
}
untrash.each do |pr|
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
- get :index, {
+ get :index, params: {
format: :json,
}
assert_response :success
untrash.each do |pr|
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
- get :show, {
+ get :show, params: {
id: groups(project).uuid,
format: :json
}
untrash.each do |pr|
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
- get :show, {
+ get :show, params: {
id: groups(project).uuid,
format: :json,
include_trash: true
untrash.each do |pr|
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
- get :index, {
+ get :index, params: {
format: :json,
include_trash: true
}
Group.find_by_uuid(groups(pr).uuid).update! is_trashed: false
end
assert !Group.find_by_uuid(groups(:trashed_project).uuid).is_trashed
- post :destroy, {
+ post :destroy, params: {
id: groups(:trashed_project).uuid,
format: :json,
}
test "untrash project #{auth}" do
authorize_with auth
assert Group.find_by_uuid(groups(:trashed_project).uuid).is_trashed
- post :untrash, {
+ post :untrash, params: {
id: groups(:trashed_project).uuid,
format: :json,
}
gc = Group.create!({owner_uuid: "zzzzz-j7d0g-trashedproject1",
name: "trashed subproject 3",
group_class: "project"})
- post :untrash, {
+ post :untrash, params: {
id: groups(:trashed_subproject3).uuid,
format: :json,
ensure_unique_name: true
test "move trashed subproject to new owner #{auth}" do
authorize_with auth
assert_nil Group.readable_by(users(auth)).where(uuid: groups(:trashed_subproject).uuid).first
- put :update, {
+ put :update, params: {
id: groups(:trashed_subproject).uuid,
group: {
owner_uuid: users(:active).uuid
head_uuid: groups(:project_owned_by_foo).uuid)
end
- get :shared, {:filters => [["group_class", "=", "project"]], :include => "owner_uuid"}
+ get :shared, params: {:filters => [["group_class", "=", "project"]], :include => "owner_uuid"}
assert_equal 1, json_response['items'].length
assert_equal json_response['items'][0]["uuid"], groups(:project_owned_by_foo).uuid
head_uuid: groups(:project_owned_by_foo).uuid)
end
- get :shared, {:filters => [["group_class", "=", "project"]], :include => "owner_uuid"}
+ get :shared, params: {:filters => [["group_class", "=", "project"]], :include => "owner_uuid"}
assert_equal 1, json_response['items'].length
assert_equal json_response['items'][0]["uuid"], groups(:project_owned_by_foo).uuid
Group.find_by_uuid(groups(:project_owned_by_foo).uuid).update!(owner_uuid: groups(:group_for_sharing_tests).uuid)
end
- get :shared, {:filters => [["group_class", "=", "project"]], :include => "owner_uuid"}
+ get :shared, params: {:filters => [["group_class", "=", "project"]], :include => "owner_uuid"}
assert_equal 1, json_response['items'].length
assert_equal json_response['items'][0]["uuid"], groups(:project_owned_by_foo).uuid
head_uuid: collections(:collection_owned_by_foo).uuid)
end
- get :contents, {:include => "owner_uuid", :exclude_home_project => true}
+ get :contents, params: {:include => "owner_uuid", :exclude_home_project => true}
assert_equal 2, json_response['items'].length
assert_equal json_response['items'][0]["uuid"], groups(:project_owned_by_foo).uuid
head_uuid: groups(:project_owned_by_foo).uuid)
end
- get :contents, {:include => "owner_uuid", :exclude_home_project => true}
+ get :contents, params: {:include => "owner_uuid", :exclude_home_project => true}
assert_equal 1, json_response['items'].length
assert_equal json_response['items'][0]["uuid"], groups(:project_owned_by_foo).uuid
Group.find_by_uuid(groups(:project_owned_by_foo).uuid).update!(owner_uuid: groups(:group_for_sharing_tests).uuid)
end
- get :contents, {:include => "owner_uuid", :exclude_home_project => true}
+ get :contents, params: {:include => "owner_uuid", :exclude_home_project => true}
assert_equal 1, json_response['items'].length
assert_equal json_response['items'][0]["uuid"], groups(:project_owned_by_foo).uuid
assert_equal json_response['included'][0]["uuid"], groups(:group_for_sharing_tests).uuid
end
-
test 'contents, exclude home, with parent specified' do
authorize_with :active
- get :contents, {id: groups(:aproject).uuid, :include => "owner_uuid", :exclude_home_project => true}
+ get :contents, params: {id: groups(:aproject).uuid, :include => "owner_uuid", :exclude_home_project => true}
assert_response 422
end
-
end
end
test "reuse job with no_reuse=false" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
- repository: "active/foo",
- script_parameters: {
- an_integer: '1',
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "active/foo",
+ script_parameters: {
+ an_integer: '1',
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45'
+ }
}
}
assert_response :success
end
test "reuse job with find_or_create=true" do
- post :create, {
+ post :create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
end
test "no reuse job with null log" do
- post :create, {
+ post :create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
end
test "reuse job with symbolic script_version" do
- post :create, {
+ post :create, params: {
job: {
script: "hash",
script_version: "tag1",
end
test "do not reuse job because no_reuse=true" do
- post :create, {
+ post :create, params: {
job: {
no_reuse: true,
script: "hash",
[false, "false"].each do |whichfalse|
test "do not reuse job because find_or_create=#{whichfalse.inspect}" do
- post :create, {
+ post :create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
test "do not reuse job because output is not readable by user" do
authorize_with :job_reader
- post :create, {
+ post :create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
end
test "test_cannot_reuse_job_no_output" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '2'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '2'
+ }
}
}
assert_response :success
end
test "test_reuse_job_range" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- minimum_script_version: "tag1",
- script_version: "master",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '1'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ minimum_script_version: "tag1",
+ script_version: "master",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
}
}
assert_response :success
end
test "cannot_reuse_job_no_minimum_given_so_must_use_specified_commit" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "master",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '1'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "master",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
}
}
assert_response :success
end
test "test_cannot_reuse_job_different_input" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '2'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '2'
+ }
}
}
assert_response :success
end
test "test_cannot_reuse_job_different_version" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "master",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '2'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "master",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '2'
+ }
}
}
assert_response :success
end
test "test_can_reuse_job_submitted_nondeterministic" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '1'
- },
- nondeterministic: true
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ },
+ nondeterministic: true
+ }
}
assert_response :success
assert_not_nil assigns(:object)
end
test "test_cannot_reuse_job_past_nondeterministic" do
- post :create, job: {
- no_reuse: false,
- script: "hash2",
- script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '1'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash2",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
}
}
assert_response :success
test "test_cannot_reuse_job_no_permission" do
authorize_with :spectator
- post :create, job: {
- no_reuse: false,
- script: "hash",
- script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
- repository: "active/foo",
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '1'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
+ repository: "active/foo",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
}
}
assert_response :success
end
test "test_cannot_reuse_job_excluded" do
- post :create, job: {
- no_reuse: false,
- script: "hash",
- minimum_script_version: "31ce37fe365b3dc204300a3e4c396ad333ed0556",
- script_version: "master",
- repository: "active/foo",
- exclude_script_versions: ["tag1"],
- script_parameters: {
- input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
- an_integer: '1'
+ post :create, params: {
+ job: {
+ no_reuse: false,
+ script: "hash",
+ minimum_script_version: "31ce37fe365b3dc204300a3e4c396ad333ed0556",
+ script_version: "master",
+ repository: "active/foo",
+ exclude_script_versions: ["tag1"],
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
}
}
assert_response :success
end
test "cannot reuse job with find_or_create but excluded version" do
- post :create, {
+ post :create, params: {
job: {
script: "hash",
script_version: "master",
test "can reuse a Job based on filters" do
filters_hash = BASE_FILTERS.
merge('script_version' => ['in git', 'tag1'])
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "master",
filters += [["script_version", "in git",
"31ce37fe365b3dc204300a3e4c396ad333ed0556"],
["script_version", "not in git", ["tag1"]]]
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "master",
test "can not reuse a Job based on arbitrary filters" do
filters_hash = BASE_FILTERS.
merge("created_at" => ["<", "2010-01-01T00:00:00Z"])
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
end
test "can reuse a Job with a Docker image" do
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
["=", "4fe459abe02d9b365932b8f5dc419439ab4e2577"],
"docker_image_locator" =>
["in docker", links(:docker_image_collection_hash).name])
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
["=", "4fe459abe02d9b365932b8f5dc419439ab4e2577"],
"docker_image_locator" =>
["in docker", links(:docker_image_collection_tag2).name])
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
test "new job with unknown Docker image filter" do
filters_hash = BASE_FILTERS.
merge("docker_image_locator" => ["in docker", "_nonesuchname_"])
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "4fe459abe02d9b365932b8f5dc419439ab4e2577",
["repository", "script"].each do |skip_key|
test "missing #{skip_key} filter raises an error" do
filters = filters_from_hash(BASE_FILTERS.reject { |k| k == skip_key })
- post(:create, {
+ post(:create, params: {
job: {
script: "hash",
script_version: "master",
end
test "find Job with script version range" do
- get :index, filters: [["repository", "=", "active/foo"],
- ["script", "=", "hash"],
- ["script_version", "in git", "tag1"]]
+ get :index, params: {
+ filters: [["repository", "=", "active/foo"],
+ ["script", "=", "hash"],
+ ["script_version", "in git", "tag1"]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_includes(assigns(:objects).map { |job| job.uuid },
end
test "find Job with script version range exclusions" do
- get :index, filters: [["repository", "=", "active/foo"],
- ["script", "=", "hash"],
- ["script_version", "not in git", "tag1"]]
+ get :index, params: {
+ filters: [["repository", "=", "active/foo"],
+ ["script", "=", "hash"],
+ ["script_version", "not in git", "tag1"]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
refute_includes(assigns(:objects).map { |job| job.uuid },
end
test "find Job with Docker image range" do
- get :index, filters: [["docker_image_locator", "in docker",
- "arvados/apitestfixture"]]
+ get :index, params: {
+ filters: [["docker_image_locator", "in docker",
+ "arvados/apitestfixture"]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_includes(assigns(:objects).map { |job| job.uuid },
test "find Job with Docker image using reader tokens" do
authorize_with :inactive
- get(:index, {
+ get(:index, params: {
filters: [["docker_image_locator", "in docker",
"arvados/apitestfixture"]],
reader_tokens: [api_token(:active)],
end
test "'in docker' filter accepts arrays" do
- get :index, filters: [["docker_image_locator", "in docker",
- ["_nonesuchname_", "arvados/apitestfixture"]]]
+ get :index, params: {
+ filters: [["docker_image_locator", "in docker",
+ ["_nonesuchname_", "arvados/apitestfixture"]]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_includes(assigns(:objects).map { |job| job.uuid },
end
test "'not in docker' filter accepts arrays" do
- get :index, filters: [["docker_image_locator", "not in docker",
- ["_nonesuchname_", "arvados/apitestfixture"]]]
+ get :index, params: {
+ filters: [["docker_image_locator", "not in docker",
+ ["_nonesuchname_", "arvados/apitestfixture"]]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_includes(assigns(:objects).map { |job| job.uuid },
end
def create_job_from(params, start_from)
- post(:create, create_job_params(params, start_from))
+ post(:create, params: create_job_params(params, start_from))
assert_response :success
new_job = assigns(:object)
assert_not_nil new_job
end
def check_errors_from(params, start_from=DEFAULT_START_JOB)
- post(:create, create_job_params(params, start_from))
+ post(:create, params: create_job_params(params, start_from))
assert_includes(405..499, @response.code.to_i)
errors = json_response.fetch("errors", [])
assert(errors.any?, "no errors assigned from #{params}")
"docker_image_locator" => ["=", prev_job.docker_image_locator])
filters_hash.delete("script_version")
params = create_job_params(filters: filters_from_hash(filters_hash))
- post(:create, params)
+ post(:create, params: params)
assert_response :success
assert_equal(prev_job.uuid, assigns(:object).uuid)
end
test "submit a job" do
authorize_with :active
- post :create, job: {
- script: "hash",
- script_version: "master",
- repository: "active/foo",
- script_parameters: {}
+ post :create, params: {
+ job: {
+ script: "hash",
+ script_version: "master",
+ repository: "active/foo",
+ script_parameters: {}
+ }
}
assert_response :success
assert_not_nil assigns(:object)
test "normalize output and log uuids when creating job" do
authorize_with :active
- post :create, job: {
- script: "hash",
- script_version: "master",
- script_parameters: {},
- repository: "active/foo",
- started_at: Time.now,
- finished_at: Time.now,
- running: false,
- success: true,
- output: 'd41d8cd98f00b204e9800998ecf8427e+0+K@xyzzy',
- log: 'd41d8cd98f00b204e9800998ecf8427e+0+K@xyzzy'
+ post :create, params: {
+ job: {
+ script: "hash",
+ script_version: "master",
+ script_parameters: {},
+ repository: "active/foo",
+ started_at: Time.now,
+ finished_at: Time.now,
+ running: false,
+ success: true,
+ output: 'd41d8cd98f00b204e9800998ecf8427e+0+K@xyzzy',
+ log: 'd41d8cd98f00b204e9800998ecf8427e+0+K@xyzzy'
+ }
}
assert_response :success
assert_not_nil assigns(:object)
new_output = 'd41d8cd98f00b204e9800998ecf8427e+0+K@xyzzy'
new_log = 'd41d8cd98f00b204e9800998ecf8427e+0+K@xyzzy'
- put :update, {
+ put :update, params: {
id: foobar_job['uuid'],
job: {
output: new_output,
end
authorize_with :active
- put :update, {
+ put :update, params: {
id: jobs(:running).uuid,
job: {
cancelled_at: 4.day.ago
end
authorize_with :active
- self.send http_method, action, { id: jobs(:cancelled).uuid }.merge(params)
+ self.send http_method, action, params: { id: jobs(:cancelled).uuid }.merge(params)
assert_response expected_response
if expected_response == :success
job = json_response
end
authorize_with :active
- put :update, {
+ put :update, params: {
id: jobs(:running_cancelled).uuid,
job: {
cancelled_at: nil
['abc.py', 'hash.py'].each do |script|
test "update job script attribute to #{script} without failing script_version check" do
authorize_with :admin
- put :update, {
+ put :update, params: {
id: jobs(:uses_nonexistent_script_version).uuid,
job: {
script: script
test "search jobs by uuid with >= query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', '>=', 'zzzzz-8i9sb-pshmckwoma9plh7']]
}
assert_response :success
test "search jobs by uuid with <= query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', '<=', 'zzzzz-8i9sb-pshmckwoma9plh7']]
}
assert_response :success
test "search jobs by uuid with >= and <= query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', '>=', 'zzzzz-8i9sb-pshmckwoma9plh7'],
['uuid', '<=', 'zzzzz-8i9sb-pshmckwoma9plh7']]
}
test "search jobs by uuid with < query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', '<', 'zzzzz-8i9sb-pshmckwoma9plh7']]
}
assert_response :success
test "search jobs by uuid with like query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', 'like', '%hmckwoma9pl%']]
}
assert_response :success
test "search jobs by uuid with 'in' query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', 'in', ['zzzzz-8i9sb-4cf0nhn6xte809j',
'zzzzz-8i9sb-pshmckwoma9plh7']]]
}
exclude_uuids = [jobs(:running).uuid,
jobs(:running_cancelled).uuid]
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['uuid', 'not in', exclude_uuids]]
}
assert_response :success
['output', nil]].each do |attr, operand|
test "search jobs with #{attr} #{operator} #{operand.inspect} query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [[attr, operator, operand]]
}
assert_response :success
test "search jobs by started_at with < query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['started_at', '<', Time.now.to_s]]
}
assert_response :success
test "search jobs by started_at with > query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['started_at', '>', Time.now.to_s]]
}
assert_response :success
test "search jobs by started_at with >= query on metric date" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['started_at', '>=', '2014-01-01']]
}
assert_response :success
test "search jobs by started_at with >= query on metric date and time" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['started_at', '>=', '2014-01-01 01:23:45']]
}
assert_response :success
test "search jobs with 'any' operator" do
authorize_with :active
- get :index, {
+ get :index, params: {
where: { any: ['contains', 'pshmckw'] }
}
assert_response :success
test "search jobs by nonexistent column with < query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['is_borked', '<', 'fizzbuzz']]
}
assert_response 422
test "finish a job" do
authorize_with :active
- put :update, {
+ put :update, params: {
id: jobs(:nearly_finished_job).uuid,
job: {
output: '551392cc37a317abf865b95f66f4ef94+101',
test "get job queue as with a = filter" do
authorize_with :admin
- get :queue, { filters: [['script','=','foo']] }
+ get :queue, params: { filters: [['script','=','foo']] }
assert_response :success
assert_equal ['foo'], assigns(:objects).collect(&:script).uniq
assert_equal 0, assigns(:objects)[0].queue_position
test "get job queue as with a != filter" do
authorize_with :admin
- get :queue, { filters: [['script','!=','foo']] }
+ get :queue, params: { filters: [['script','!=','foo']] }
assert_response :success
assert_equal 0, assigns(:objects).count
end
test "job includes assigned nodes" do
authorize_with :active
- get :show, {id: jobs(:nearly_finished_job).uuid}
+ get :show, params: {id: jobs(:nearly_finished_job).uuid}
assert_response :success
assert_equal([nodes(:busy).uuid], json_response["node_uuids"])
end
test "job lock success" do
authorize_with :active
- post :lock, {id: jobs(:queued).uuid}
+ post :lock, params: {id: jobs(:queued).uuid}
assert_response :success
job = Job.where(uuid: jobs(:queued).uuid).first
assert_equal "Running", job.state
test "job lock conflict" do
authorize_with :active
- post :lock, {id: jobs(:running).uuid}
+ post :lock, params: {id: jobs(:running).uuid}
assert_response 422 # invalid state transition
end
authorize_with :active
url = "http://localhost:1/fake/fake.git"
fetch_remote_from_local_repo url, :foo
- post :create, job: {
- script: "hash",
- script_version: "abc123",
- repository: url,
- script_parameters: {}
+ post :create, params: {
+ job: {
+ script: "hash",
+ script_version: "abc123",
+ repository: url,
+ script_parameters: {}
+ }
}
assert_response 422
end
authorize_with :active
url = "http://localhost:1/fake/fake.git"
fetch_remote_from_local_repo url, :foo
- post :create, job: {
- script: "hash",
- script_version: "master",
- repository: url,
- script_parameters: {}
+ post :create, params: {
+ job: {
+ script: "hash",
+ script_version: "master",
+ repository: url,
+ script_parameters: {}
+ }
}
assert_response :success
assert_equal('077ba2ad3ea24a929091a9e6ce545c93199b8e57',
test 'tag local commit in internal repository' do
authorize_with :active
- post :create, job: {
- script: "hash",
- script_version: "master",
- repository: "active/foo",
- script_parameters: {}
+ post :create, params: {
+ job: {
+ script: "hash",
+ script_version: "master",
+ repository: "active/foo",
+ script_parameters: {}
+ }
}
assert_response :success
assert_equal('077ba2ad3ea24a929091a9e6ce545c93199b8e57',
test 'get job with components' do
authorize_with :active
- get :show, {id: jobs(:running_job_with_components).uuid}
+ get :show, params: {id: jobs(:running_job_with_components).uuid}
assert_response :success
assert_not_nil json_response["components"]
assert_equal ["component1", "component2"], json_response["components"].keys
].each do |user, expected|
test "add components to job locked by active user as #{user} user and expect #{expected}" do
authorize_with user
- put :update, {
+ put :update, params: {
id: jobs(:running).uuid,
job: {
components: {"component1" => "value1", "component2" => "value2"}
end
end
- test 'get_delete components_get again for job with components' do
- authorize_with :active
- get :show, {id: jobs(:running_job_with_components).uuid}
- assert_response :success
- assert_not_nil json_response["components"]
- assert_equal ["component1", "component2"], json_response["components"].keys
-
- # delete second component
- @test_counter = 0 # Reset executed action counter
- @controller = Arvados::V1::JobsController.new
- put :update, {
- id: jobs(:running_job_with_components).uuid,
- job: {
- components: {"component1" => "zzzzz-8i9sb-jobuuid00000001"}
- }
- }
- assert_response :success
-
- @test_counter = 0 # Reset executed action counter
- @controller = Arvados::V1::JobsController.new
- get :show, {id: jobs(:running_job_with_components).uuid}
- assert_response :success
- assert_not_nil json_response["components"]
- assert_equal ["component1"], json_response["components"].keys
-
- # delete all components
- @test_counter = 0 # Reset executed action counter
- @controller = Arvados::V1::JobsController.new
- put :update, {
- id: jobs(:running_job_with_components).uuid,
- job: {
- components: {}
- }
- }
- assert_response :success
-
- @test_counter = 0 # Reset executed action counter
- @controller = Arvados::V1::JobsController.new
- get :show, {id: jobs(:running_job_with_components).uuid}
- assert_response :success
- assert_not_nil json_response["components"]
- assert_equal [], json_response["components"].keys
- end
-
test 'jobs.create disabled in config' do
Rails.configuration.disable_api_methods = ["jobs.create",
"pipeline_instances.create"]
authorize_with :active
- post :create, job: {
- script: "hash",
- script_version: "master",
- repository: "active/foo",
- script_parameters: {}
+ post :create, params: {
+ job: {
+ script: "hash",
+ script_version: "master",
+ repository: "active/foo",
+ script_parameters: {}
+ }
}
assert_response 404
end
test "add keep disk with admin token" do
authorize_with :admin
- post :ping, default_ping_opts.
+ post :ping, params: default_ping_opts.
merge(filesystem_uuid: 'eb1e77a1-db84-4193-b6e6-ca2894f67d5f')
assert_response :success
assert_not_nil assigns(:object)
].each do |opts|
test "add keep disk with[out] filesystem_uuid #{opts}" do
authorize_with :admin
- post :ping, default_ping_opts.merge(opts)
+ post :ping, params: default_ping_opts.merge(opts)
assert_response :success
assert_not_nil JSON.parse(@response.body)['uuid']
end
end
test "refuse to add keep disk without admin token" do
- post :ping, default_ping_opts
+ post :ping, params: default_ping_opts
assert_response 404
end
test "ping keep disk" do
- post :ping, default_ping_opts.
+ post :ping, params: default_ping_opts.
merge(id: keep_disks(:nonfull).uuid,
ping_secret: keep_disks(:nonfull).ping_secret,
filesystem_uuid: keep_disks(:nonfull).filesystem_uuid)
test "search keep_services with 'any' operator" do
authorize_with :active
- get :index, {
+ get :index, params: {
where: { any: ['contains', 'o2t1q5w'] }
}
assert_response :success
found = assigns(:objects).collect(&:uuid)
assert_equal true, !!found.index('zzzzz-penuu-5w2o2t1q5wy7fhn')
end
-
-
end
test "search by service_port with < query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['service_port', '<', 25107]]
}
assert_response :success
test "search by service_port with >= query" do
authorize_with :active
- get :index, {
+ get :index, params: {
filters: [['service_port', '>=', 25107]]
}
assert_response :success
}
authorize_with :admin
if formatted_link == 'link_json'
- post :create, link: link.to_json
+ post :create, params: {link: link.to_json}
else
- post :create, link: link
+ post :create, params: {link: link}
end
assert_response :success
assert_not_nil assigns(:object)
{nil: nil, bogus: 2.days.ago}.each do |bogustype, bogusvalue|
test "cannot set #{bogustype} #{attr} in create" do
authorize_with :active
- post :create, {
+ post :create, params: {
link: {
properties: {},
link_class: 'test',
test "cannot set #{bogustype} #{attr} in update" do
really_created_at = links(:test_timestamps).created_at
authorize_with :active
- put :update, {
+ put :update, params: {
id: links(:test_timestamps).uuid,
link: {
:properties => {test: 'test'},
head_uuid: 'zzzzz-tpzed-xyzxyzxerrrorxx'
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response 422
end
tail_uuid: 'zzzzz-tpzed-xyzxyzxerrrorxx'
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response 422
end
tail_uuid: users(:spectator).uuid,
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response :success
l = JSON.parse(@response.body)
assert 'arvados#user', l['head_kind']
tail_kind: "arvados#user",
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response :success
l = JSON.parse(@response.body)
assert 'arvados#user', l['head_kind']
tail_uuid: authorized_keys(:admin).uuid,
}
authorize_with :active
- post :create, link: link
+ post :create, params: {link: link}
assert_response 422
end
test "filter links with 'is_a' operator" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['tail_uuid', 'is_a', 'arvados#user'] ]
}
assert_response :success
test "filter links with 'is_a' operator includes remote objects" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [
['tail_uuid', 'is_a', 'arvados#user'],
['link_class', '=', 'permission'],
test "filter links with 'is_a' operator with more than one" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['tail_uuid', 'is_a', ['arvados#user', 'arvados#group'] ] ],
}
assert_response :success
test "filter links with 'is_a' operator with bogus type" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['tail_uuid', 'is_a', ['arvados#bogus'] ] ],
}
assert_response :success
test "filter links with 'is_a' operator with collection" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['head_uuid', 'is_a', ['arvados#collection'] ] ],
}
assert_response :success
test "test can still use where tail_kind" do
authorize_with :admin
- get :index, {
+ get :index, params: {
where: { tail_kind: 'arvados#user' }
}
assert_response :success
test "test can still use where head_kind" do
authorize_with :admin
- get :index, {
+ get :index, params: {
where: { head_kind: 'arvados#user' }
}
assert_response :success
test "test can still use filter tail_kind" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['tail_kind', '=', 'arvados#user'] ]
}
assert_response :success
test "test can still use filter head_kind" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['head_kind', '=', 'arvados#user'] ]
}
assert_response :success
tail_kind: "arvados#user",
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response 422
end
tail_kind: "arvados#user",
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response 422
end
properties: {username: "repo_and_user_name"}
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response 422
end
properties: {username: "repo_and_user_name"}
}
authorize_with :admin
- post :create, link: link
+ post :create, params: {link: link}
assert_response :success
end
test "project owner can show a project permission" do
uuid = links(:project_viewer_can_read_project).uuid
authorize_with :active
- get :show, id: uuid
+ get :show, params: {id: uuid}
assert_response :success
assert_equal(uuid, assigns(:object).andand.uuid)
end
test "admin can show a project permission" do
uuid = links(:project_viewer_can_read_project).uuid
authorize_with :admin
- get :show, id: uuid
+ get :show, params: {id: uuid}
assert_response :success
assert_equal(uuid, assigns(:object).andand.uuid)
end
test "project viewer can't show others' project permissions" do
authorize_with :project_viewer
- get :show, id: links(:admin_can_write_aproject).uuid
+ get :show, params: {id: links(:admin_can_write_aproject).uuid}
assert_response 404
end
test "requesting a nonexistent link returns 404" do
authorize_with :active
- get :show, id: 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'
+ get :show, params: {id: 'zzzzz-zzzzz-zzzzzzzzzzzzzzz'}
assert_response 404
end
# It is possible to retrieve the full set of permissions for a
# single object via /arvados/v1/permissions.
authorize_with :active
- get :index, filters: [['link_class', '=', 'permission'],
- ['head_uuid', '=', groups(:aproject).uuid]]
+ get :index, params: {
+ filters: [['link_class', '=', 'permission'],
+ ['head_uuid', '=', groups(:aproject).uuid]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_includes(assigns(:objects).map(&:uuid),
test "admin can index project permissions" do
authorize_with :admin
- get :index, filters: [['link_class', '=', 'permission'],
- ['head_uuid', '=', groups(:aproject).uuid]]
+ get :index, params: {
+ filters: [['link_class', '=', 'permission'],
+ ['head_uuid', '=', groups(:aproject).uuid]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_includes(assigns(:objects).map(&:uuid),
test "project viewer can't index others' project permissions" do
authorize_with :project_viewer
- get :index, filters: [['link_class', '=', 'permission'],
- ['head_uuid', '=', groups(:aproject).uuid],
- ['tail_uuid', '!=', users(:project_viewer).uuid]]
+ get :index, params: {
+ filters: [['link_class', '=', 'permission'],
+ ['head_uuid', '=', groups(:aproject).uuid],
+ ['tail_uuid', '!=', users(:project_viewer).uuid]]
+ }
assert_response :success
assert_not_nil assigns(:objects)
assert_empty assigns(:objects)
refute users(:user_bar_in_sharing_group).can?(read: collections(:collection_owned_by_foo).uuid)
- post :create, {
+ post :create, params: {
link: {
tail_uuid: users(:user_bar_in_sharing_group).uuid,
link_class: 'permission',
test "non-admins can create their own logs" do
authorize_with :active
- post :create, log: {summary: 'test log'}
+ post :create, params: {log: {summary: 'test log'}}
assert_response :success
resp = assigns(:object)
assert_not_nil resp.uuid
test "non-admins can read their own logs" do
authorize_with :active
my_log = logs(:log_owned_by_active)
- get :show, {id: my_log[:uuid]}
+ get :show, params: {id: my_log[:uuid]}
assert_response(:success, "failed to get log")
resp = assigns(:object)
assert_equal(my_log[:summary], resp.summary, "got wrong log")
test "test can still use where object_kind" do
authorize_with :admin
- get :index, {
+ get :index, params: {
where: { object_kind: 'arvados#user' }
}
assert_response :success
test "test can still use filter object_kind" do
authorize_with :admin
- get :index, {
+ get :index, params: {
filters: [ ['object_kind', '=', 'arvados#user'] ]
}
assert_response :success
end
test "node should ping with ping_secret and no token" do
- post :ping, {
+ post :ping, params: {
id: 'zzzzz-7ekkf-2z3mc76g2q73aio',
instance_id: 'i-0000000',
local_ipv4: '172.17.2.174',
end
test "node should fail ping with invalid ping_secret" do
- post :ping, {
+ post :ping, params: {
id: 'zzzzz-7ekkf-2z3mc76g2q73aio',
instance_id: 'i-0000000',
local_ipv4: '172.17.2.174',
test "create node" do
authorize_with :admin
- post :create, {node: {}}
+ post :create, params: {node: {}}
assert_response :success
assert_not_nil json_response['uuid']
assert_not_nil json_response['info'].is_a? Hash
test "create node and assign slot" do
authorize_with :admin
- post :create, {node: {}, assign_slot: true}
+ post :create, params: {node: {}, assign_slot: true}
assert_response :success
assert_not_nil json_response['uuid']
assert_not_nil json_response['info'].is_a? Hash
test "update node and assign slot" do
authorize_with :admin
node = nodes(:new_with_no_hostname)
- post :update, {id: node.uuid, node: {}, assign_slot: true}
+ post :update, params: {id: node.uuid, node: {}, assign_slot: true}
assert_response :success
assert_operator 0, :<, json_response['slot_number']
n = json_response['slot_number']
test "update node and assign slot, don't clobber hostname" do
authorize_with :admin
node = nodes(:new_with_custom_hostname)
- post :update, {id: node.uuid, node: {}, assign_slot: true}
+ post :update, params: {id: node.uuid, node: {}, assign_slot: true}
assert_response :success
assert_operator 0, :<, json_response['slot_number']
n = json_response['slot_number']
test "ping adds node stats to info" do
authorize_with :admin
node = nodes(:idle)
- post :ping, {
+ post :ping, params: {
id: node.uuid,
ping_secret: node.info['ping_secret'],
total_cpu_cores: 32,
test "active user can see their assigned job" do
authorize_with :active
- get :show, {id: nodes(:busy).uuid}
+ get :show, params: {id: nodes(:busy).uuid}
assert_response :success
assert_equal(jobs(:nearly_finished_job).uuid, json_response["job_uuid"])
end
test "user without job read permission can't see job" do
authorize_with :spectator
- get :show, {id: nodes(:busy).uuid}
+ get :show, params: {id: nodes(:busy).uuid}
assert_response :success
assert_nil(json_response["job"], "spectator can see node's assigned job")
end
[:admin, :spectator].each do |user|
test "select param does not break node list for #{user}" do
authorize_with user
- get :index, {select: ['domain']}
+ get :index, params: {select: ['domain']}
assert_response :success
assert_operator 0, :<, json_response['items_available']
end
changed_node = nodes(:idle)
assigned_job = jobs(:queued)
authorize_with :admin
- post :update, {
+ post :update, params: {
id: changed_node.uuid,
node: {job_uuid: assigned_job.uuid},
}
test "non-admin can't associate a job with a node" do
authorize_with :active
- post :update, {
+ post :update, params: {
id: nodes(:idle).uuid,
node: {job_uuid: jobs(:queued).uuid},
}
test "admin can unassign a job from a node" do
changed_node = nodes(:busy)
authorize_with :admin
- post :update, {
+ post :update, params: {
id: changed_node.uuid,
node: {job_uuid: nil},
}
test "non-admin can't unassign a job from a node" do
authorize_with :project_viewer
- post :update, {
+ post :update, params: {
id: nodes(:busy).uuid,
node: {job_uuid: nil},
}
test "job readable after updating other attributes" do
authorize_with :admin
- post :update, {
+ post :update, params: {
id: nodes(:busy).uuid,
node: {last_ping_at: 1.second.ago},
}
test "node should fail ping with invalid hostname config format" do
Rails.configuration.assign_node_hostname = 'compute%<slot_number>04' # should end with "04d"
- post :ping, {
+ post :ping, params: {
id: nodes(:new_with_no_hostname).uuid,
ping_secret: nodes(:new_with_no_hostname).info['ping_secret'],
}
end
test "first ping should set ip addr using local_ipv4 when provided" do
- post :ping, {
+ post :ping, params: {
id: 'zzzzz-7ekkf-nodenoipaddryet',
instance_id: 'i-0000000',
local_ipv4: '172.17.2.172',
end
test "first ping should set ip addr using remote_ip when local_ipv4 is not provided" do
- post :ping, {
+ post :ping, params: {
id: 'zzzzz-7ekkf-nodenoipaddryet',
instance_id: 'i-0000000',
ping_secret: 'abcdyefg4lb5q4gzqqtrnq30oyj08r8dtdimmanbqw49z1anz2'
end
test "future pings should not change previous ip address" do
- post :ping, {
+ post :ping, params: {
id: 'zzzzz-7ekkf-2z3mc76g2q73aio',
instance_id: 'i-0000000',
local_ipv4: '172.17.2.175',
test 'create pipeline with components copied from template' do
authorize_with :active
- post :create, {
+ post :create, params: {
pipeline_instance: {
pipeline_template_uuid: pipeline_templates(:two_part).uuid
}
test 'create pipeline with no template' do
authorize_with :active
- post :create, {
+ post :create, params: {
pipeline_instance: {
components: {}
}
authorize_with :active
pi_uuid = pipeline_instances(:job_child_pipeline_with_components_at_level_2).uuid
- post :cancel, {id: pi_uuid, cascade: cascade}
+ post :cancel, params: {id: pi_uuid, cascade: cascade}
assert_response :success
pi = PipelineInstance.where(uuid: pi_uuid).first
test 'no fallback orders when order is unambiguous' do
@controller = Arvados::V1::LogsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
order: ['id asc'],
controller: 'logs',
}
test 'fallback orders when order is ambiguous' do
@controller = Arvados::V1::LogsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
order: ['event_type asc'],
controller: 'logs',
}
test 'skip fallback orders already given by client' do
@controller = Arvados::V1::LogsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
order: ['modified_at asc'],
controller: 'logs',
}
test 'eliminate superfluous orders' do
@controller = Arvados::V1::LogsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
order: ['logs.modified_at asc',
'modified_at desc',
'event_type desc',
test 'eliminate orders after the first unique column' do
@controller = Arvados::V1::LogsController.new
authorize_with :active
- get :index, {
+ get :index, params: {
order: ['event_type asc',
'id asc',
'uuid asc',
test 'do not count items_available if count=none' do
@controller = Arvados::V1::LinksController.new
authorize_with :active
- get :index, {
+ get :index, params: {
count: 'none',
}
assert_response(:success)
test "count items_available if params=#{params.inspect}" do
@controller = Arvados::V1::LinksController.new
authorize_with :active
- get :index, params
+ get :index, params: params
assert_response(:success)
assert_operator(json_response['items_available'], :>, 0)
end
test 'error if count=bogus' do
@controller = Arvados::V1::LinksController.new
authorize_with :active
- get :index, {
+ get :index, params: {
count: 'bogus',
}
assert_response(422)
test "select push_url in index" do
authorize_with :active
- get(:index, {select: ["uuid", "push_url"]})
+ get(:index, params: {select: ["uuid", "push_url"]})
assert_response :success
assert_includes(json_response["items"].map { |r| r["push_url"] },
"git@git.zzzzz.arvadosapi.com:active/foo.git")
test "select clone_urls in index" do
authorize_with :active
- get(:index, {select: ["uuid", "clone_urls"]})
+ get(:index, params: {select: ["uuid", "clone_urls"]})
assert_response :success
assert_includes(json_response["items"].map { |r| r["clone_urls"] }.flatten,
"git@git.zzzzz.arvadosapi.com:active/foo.git")
test "activate a user after signing UA" do
authorize_with :inactive_but_signed_user_agreement
- post :activate, id: users(:inactive_but_signed_user_agreement).uuid
+ post :activate, params: {id: users(:inactive_but_signed_user_agreement).uuid}
assert_response :success
assert_not_nil assigns(:object)
me = JSON.parse(@response.body)
authorize_with :inactive
assert_equal false, users(:inactive).is_active
- post :activate, id: users(:inactive).uuid
+ post :activate, params: {id: users(:inactive).uuid}
assert_response 403
resp = json_response
test "activate an already-active user" do
authorize_with :active
- post :activate, id: users(:active).uuid
+ post :activate, params: {id: users(:active).uuid}
assert_response :success
me = JSON.parse(@response.body)
assert_equal true, me['is_active']
test "create new user with user as input" do
authorize_with :admin
- post :create, user: {
- first_name: "test_first_name",
- last_name: "test_last_name",
- email: "foo@example.com"
+ post :create, params: {
+ user: {
+ first_name: "test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
}
assert_response :success
created = JSON.parse(@response.body)
authorize_with :admin
repo_name = 'usertestrepo'
- post :setup, {
+ post :setup, params: {
repo_name: repo_name,
openid_prefix: 'https://www.google.com/accounts/o8/id',
user: {
test "setup user with bogus uuid and expect error" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
uuid: 'bogus_uuid',
repo_name: 'usertestrepo',
vm_uuid: @vm_uuid
test "setup user with bogus uuid in user and expect error" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
user: {uuid: 'bogus_uuid'},
repo_name: 'usertestrepo',
vm_uuid: @vm_uuid,
test "setup user with no uuid and user, expect error" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
vm_uuid: @vm_uuid,
openid_prefix: 'https://www.google.com/accounts/o8/id'
test "setup user with no uuid and email, expect error" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
user: {},
repo_name: 'usertestrepo',
vm_uuid: @vm_uuid,
authorize_with :admin
inactive_user = users(:inactive)
- post :setup, {
+ post :setup, params: {
uuid: users(:inactive).uuid,
repo_name: 'usertestrepo',
vm_uuid: @vm_uuid
authorize_with :admin
inactive_user = users(:inactive)
- post :setup, {
+ post :setup, params: {
uuid: inactive_user['uuid'],
user: {email: 'junk_email'}
}
test "setup user with valid email and repo as input" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
user: {email: 'foo@example.com'},
openid_prefix: 'https://www.google.com/accounts/o8/id'
test "setup user with fake vm and expect error" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
vm_uuid: 'no_such_vm',
user: {email: 'foo@example.com'},
test "setup user with valid email, repo and real vm as input" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
openid_prefix: 'https://www.google.com/accounts/o8/id',
vm_uuid: @vm_uuid,
test "setup user with valid email, no vm and no repo as input" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
user: {email: 'foo@example.com'},
openid_prefix: 'https://www.google.com/accounts/o8/id'
}
test "setup user with email, first name, repo name and vm uuid" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
openid_prefix: 'https://www.google.com/accounts/o8/id',
repo_name: 'usertestrepo',
vm_uuid: @vm_uuid,
authorize_with :admin
inactive_user = users(:inactive)
- post :setup, {
+ post :setup, params: {
openid_prefix: 'https://www.google.com/accounts/o8/id',
repo_name: 'usertestrepo',
user: {
test "setup user with openid prefix" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
openid_prefix: 'http://www.example.com/account',
user: {
test "invoke setup with no openid prefix, expect error" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
user: {
first_name: "in_create_test_first_name",
test "setup user with user, vm and repo and verify links" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
user: {
first_name: "in_create_test_first_name",
last_name: "test_last_name",
test "create user as non admin user and expect error" do
authorize_with :active
- post :create, {
+ post :create, params: {
user: {email: 'foo@example.com'}
}
test "setup user as non admin user and expect error" do
authorize_with :active
- post :setup, {
+ post :setup, params: {
openid_prefix: 'https://www.google.com/accounts/o8/id',
user: {email: 'foo@example.com'}
}
active_user = users(:active)
# invoke setup with a repository
- post :setup, {
+ post :setup, params: {
repo_name: 'usertestrepo',
uuid: active_user['uuid']
}
repo_link_count = repo_link_query.count
# invoke setup with a repository
- post :setup, {
+ post :setup, params: {
vm_uuid: @vm_uuid,
uuid: active_user['uuid'],
email: 'junk_email'
authorize_with :admin
# now unsetup this user
- post :unsetup, id: active_user['uuid']
+ post :unsetup, params: {id: active_user['uuid']}
assert_response :success
response_user = JSON.parse(@response.body)
test "setup user with send notification param false and verify no email" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
openid_prefix: 'http://www.example.com/account',
send_notification_email: 'false',
user: {
test "setup user with send notification param true and verify email" do
authorize_with :admin
- post :setup, {
+ post :setup, params: {
openid_prefix: 'http://www.example.com/account',
send_notification_email: 'true',
user: {
active_user = users(:active)
# invoke setup with a repository
- put :update, {
+ put :update, params: {
id: active_user['uuid'],
user: {
is_active: true,
users = create_list :active_user, 2, join_groups: [g]
token = create :token, user: users[0]
authorize_with_token token
- get :show, id: users[1].uuid
+ get :show, params: {id: users[1].uuid}
check_non_admin_show
end
token = create :token, user: users[0]
authorize_with_token token
- get(:index, limit: limit)
+ get(:index, params: {limit: limit})
check_non_admin_index
assert_equal(limit, json_response["items"].size,
"non-admin index limit was ineffective")
test "admin can filter on user.is_active" do
authorize_with :admin
- get(:index, filters: [["is_active", "=", "true"]])
+ get(:index, params: {filters: [["is_active", "=", "true"]]})
assert_response :success
check_readable_users_index [:active, :spectator], [:inactive]
end
test "admin can search where user.is_active" do
authorize_with :admin
- get(:index, where: {is_active: true})
+ get(:index, params: {where: {is_active: true}})
assert_response :success
check_readable_users_index [:active, :spectator], [:inactive]
end
test "update active_no_prefs user profile and expect notification email" do
authorize_with :admin
- put :update, {
+ put :update, params: {
id: users(:active_no_prefs).uuid,
user: {
prefs: {:profile => {'organization' => 'example.com'}}
user = {}
user[:prefs] = users(:active_no_prefs_profile_no_getting_started_shown).prefs
user[:prefs][:profile] = {:profile => {'organization' => 'example.com'}}
- put :update, {
+ put :update, params: {
id: users(:active_no_prefs_profile_no_getting_started_shown).uuid,
user: user
}
test "update active user profile and expect no notification email" do
authorize_with :admin
- put :update, {
+ put :update, params: {
id: users(:active).uuid,
user: {
prefs: {:profile => {'organization' => 'anotherexample.com'}}
test "update_uuid as #{auth_user}" do
authorize_with auth_user
orig_uuid = users(:active).uuid
- post :update_uuid, {
+ post :update_uuid, params: {
id: orig_uuid,
new_uuid: 'zbbbb-tpzed-abcde12345abcde',
}
test "refuse to merge with redirect_to_user_uuid=false (not yet supported)" do
authorize_with :project_viewer_trustedclient
- post :merge, {
+ post :merge, params: {
new_user_token: api_client_authorizations(:active_trustedclient).api_token,
new_owner_uuid: users(:active).uuid,
redirect_to_new_user: false,
test "refuse to merge user into self" do
authorize_with(:active_trustedclient)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(:active_trustedclient).api_token,
new_owner_uuid: users(:active).uuid,
redirect_to_new_user: true,
[:active_trustedclient, :project_viewer]].each do |src, dst|
test "refuse to merge with untrusted token (#{src} -> #{dst})" do
authorize_with(src)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(dst).api_token,
new_owner_uuid: api_client_authorizations(dst).user.uuid,
redirect_to_new_user: true,
[:project_viewer_trustedclient, :expired_trustedclient]].each do |src, dst|
test "refuse to merge with expired token (#{src} -> #{dst})" do
authorize_with(src)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(dst).api_token,
new_owner_uuid: api_client_authorizations(dst).user.uuid,
redirect_to_new_user: true,
api_client_authorizations(auth).update_attributes(scopes: ["GET /", "POST /", "PUT /"])
end
authorize_with(:active_trustedclient)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(:project_viewer_trustedclient).api_token,
new_owner_uuid: users(:project_viewer).uuid,
redirect_to_new_user: true,
test "refuse to merge if new_owner_uuid is not writable" do
authorize_with(:project_viewer_trustedclient)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(:active_trustedclient).api_token,
new_owner_uuid: groups(:anonymously_accessible_project).uuid,
redirect_to_new_user: true,
test "refuse to merge if new_owner_uuid is empty" do
authorize_with(:project_viewer_trustedclient)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(:active_trustedclient).api_token,
new_owner_uuid: "",
redirect_to_new_user: true,
test "refuse to merge if new_owner_uuid is not provided" do
authorize_with(:project_viewer_trustedclient)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(:active_trustedclient).api_token,
redirect_to_new_user: true,
})
test "refuse to update redirect_to_user_uuid directly" do
authorize_with(:active_trustedclient)
- patch(:update, {
+ patch(:update, params: {
id: users(:active).uuid,
user: {
redirect_to_user_uuid: users(:active).uuid,
test "merge 'project_viewer' account into 'active' account" do
authorize_with(:project_viewer_trustedclient)
- post(:merge, {
+ post(:merge, params: {
new_user_token: api_client_authorizations(:active_trustedclient).api_token,
new_owner_uuid: users(:active).uuid,
redirect_to_new_user: true,
def check_inactive_user_findable(params={})
inactive_user = users(:inactive)
- get(:index, params.merge(filters: [["email", "=", inactive_user.email]]))
+ get(:index, params: params.merge(filters: [["email", "=", inactive_user.email]]))
assert_response :success
user_list = json_response["items"]
assert_equal(1, user_list.andand.count)
class Arvados::V1::VirtualMachinesControllerTest < ActionController::TestCase
def get_logins_for(vm_sym)
authorize_with :admin
- get(:logins, id: virtual_machines(vm_sym).uuid)
+ get(:logins, params: {id: virtual_machines(vm_sym).uuid})
end
def find_login(sshkey_sym)
properties: {'username' => 'bobblogin'})
end
authorize_with :admin
- get :logins, id: vm.uuid
+ get :logins, params: {id: vm.uuid}
assert_response :success
assert_equal 1, json_response['items'].length
assert_nil json_response['items'][0]['public_key']
test "new user from new api client" do
authorize_with :inactive
api_client_page = 'http://client.example.com/home'
- get :login, return_to: api_client_page
+ get :login, params: {return_to: api_client_page}
assert_response :redirect
assert_equal(0, @response.redirect_url.index(api_client_page + '?'),
'Redirect url ' + @response.redirect_url +
authorize_with :inactive
api_client_page = 'http://client.example.com/home'
remote_prefix = 'zbbbb'
- get :login, return_to: api_client_page, remote: remote_prefix
+ get :login, params: {return_to: api_client_page, remote: remote_prefix}
assert_response :redirect
api_client_auth = assigns(:api_client_auth)
assert_not_nil api_client_auth
authorize_with :inactive
api_client_page = 'http://client.example.com/home'
remote_prefix = 'invalid_cluster_id'
- get :login, return_to: api_client_page, remote: remote_prefix
+ get :login, params: {return_to: api_client_page, remote: remote_prefix}
assert_response 400
end
end
fixtures :all
test "create system auth" do
- post "/arvados/v1/api_client_authorizations/create_system_auth", {:format => :json, :scopes => ['test'].to_json}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin_trustedclient).api_token}"}
+ post "/arvados/v1/api_client_authorizations/create_system_auth",
+ params: {:format => :json, :scopes => ['test'].to_json},
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin_trustedclient).api_token}"}
assert_response :success
end
test "create token for different user" do
- post "/arvados/v1/api_client_authorizations", {
- :format => :json,
- :api_client_authorization => {
- :owner_uuid => users(:spectator).uuid
- }
- }, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin_trustedclient).api_token}"}
+ post "/arvados/v1/api_client_authorizations",
+ params: {
+ :format => :json,
+ :api_client_authorization => {
+ :owner_uuid => users(:spectator).uuid
+ }
+ },
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin_trustedclient).api_token}"}
assert_response :success
- get "/arvados/v1/users/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "OAuth2 #{json_response['api_token']}"}
+ get "/arvados/v1/users/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{json_response['api_token']}"}
@json_response = nil
assert_equal users(:spectator).uuid, json_response['uuid']
end
test "refuse to create token for different user if not trusted client" do
- post "/arvados/v1/api_client_authorizations", {
- :format => :json,
- :api_client_authorization => {
- :owner_uuid => users(:spectator).uuid
- }
- }, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
+ post "/arvados/v1/api_client_authorizations",
+ params: {
+ :format => :json,
+ :api_client_authorization => {
+ :owner_uuid => users(:spectator).uuid
+ }
+ },
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
assert_response 403
end
test "refuse to create token for different user if not admin" do
- post "/arvados/v1/api_client_authorizations", {
- :format => :json,
- :api_client_authorization => {
- :owner_uuid => users(:spectator).uuid
- }
- }, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active_trustedclient).api_token}"}
+ post "/arvados/v1/api_client_authorizations",
+ params: {
+ :format => :json,
+ :api_client_authorization => {
+ :owner_uuid => users(:spectator).uuid
+ }
+ },
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active_trustedclient).api_token}"}
assert_response 403
end
end
test "user list token can only list users" do
- get_args = [{}, auth(:active_userlist)]
+ get_args = [params: {}, headers: auth(:active_userlist)]
get(v1_url('users'), *get_args)
assert_response :success
get(v1_url('users', ''), *get_args) # Add trailing slash.
end
test "narrow + wide scoped tokens for different users" do
- get_args = [{
+ get_args = [params: {
reader_tokens: [api_client_authorizations(:anonymous).api_token]
- }, auth(:active_userlist)]
+ }, headers: auth(:active_userlist)]
get(v1_url('users'), *get_args)
assert_response :success
get(v1_url('users', ''), *get_args) # Add trailing slash.
end
test "specimens token can see exactly owned specimens" do
- get_args = [{}, auth(:active_specimens)]
+ get_args = [params: {}, headers: auth(:active_specimens)]
get(v1_url('specimens'), *get_args)
assert_response 403
get(v1_url('specimens', specimens(:owned_by_active_user).uuid), *get_args)
test "token with multiple scopes can use them all" do
def get_token_count
- get(v1_url('api_client_authorizations'), {}, auth(:active_apitokens))
+ get(v1_url('api_client_authorizations'),
+ params: {},
+ headers: auth(:active_apitokens))
assert_response :success
token_count = JSON.parse(@response.body)['items_available']
assert_not_nil(token_count, "could not find token count")
token_count = get_token_count
# Test the POST scope.
post(v1_url('api_client_authorizations'),
- {api_client_authorization: {user_id: users(:active).id}},
- auth(:active_apitokens))
+ params: {api_client_authorization: {user_id: users(:active).id}},
+ headers: auth(:active_apitokens))
assert_response :success
assert_equal(token_count + 1, get_token_count,
"token count suggests POST was not accepted")
# Test other requests are denied.
get(v1_url('api_client_authorizations',
api_client_authorizations(:active_apitokens).uuid),
- {}, auth(:active_apitokens))
+ params: {}, headers: auth(:active_apitokens))
assert_response 403
end
test "token without scope has no access" do
# Logs are good for this test, because logs have relatively
# few access controls enforced at the model level.
- req_args = [{}, auth(:admin_noscope)]
+ req_args = [params: {}, headers: auth(:admin_noscope)]
get(v1_url('logs'), *req_args)
assert_response 403
get(v1_url('logs', logs(:noop).uuid), *req_args)
def vm_logins_url(name)
v1_url('virtual_machines', virtual_machines(name).uuid, 'logins')
end
- get_args = [{}, auth(:admin_vm)]
+ get_args = [params: {}, headers: auth(:admin_vm)]
get(vm_logins_url(:testvm), *get_args)
assert_response :success
get(vm_logins_url(:testvm2), *get_args)
fixtures :all
test "should get index" do
- get "/arvados/v1/collections", {:format => :json}, auth(:active)
+ get "/arvados/v1/collections",
+ params: {:format => :json},
+ headers: auth(:active)
assert_response :success
assert_equal "arvados#collectionList", json_response['kind']
end
test "get index with filters= (empty string)" do
- get "/arvados/v1/collections", {:format => :json, :filters => ''}, auth(:active)
+ get "/arvados/v1/collections",
+ params: {:format => :json, :filters => ''},
+ headers: auth(:active)
assert_response :success
assert_equal "arvados#collectionList", json_response['kind']
end
test "get index with invalid filters (array of strings) responds 422" do
- get "/arvados/v1/collections", {
- :format => :json,
- :filters => ['uuid', '=', 'ad02e37b6a7f45bbe2ead3c29a109b8a+54'].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :filters => ['uuid', '=', 'ad02e37b6a7f45bbe2ead3c29a109b8a+54'].to_json
+ },
+ headers: auth(:active)
assert_response 422
assert_match(/nvalid element.*not an array/, json_response['errors'].join(' '))
end
test "get index with invalid filters (unsearchable column) responds 422" do
- get "/arvados/v1/collections", {
- :format => :json,
- :filters => [['this_column_does_not_exist', '=', 'bogus']].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :filters => [['this_column_does_not_exist', '=', 'bogus']].to_json
+ },
+ headers: auth(:active)
assert_response 422
assert_match(/nvalid attribute/, json_response['errors'].join(' '))
end
test "get index with invalid filters (invalid operator) responds 422" do
- get "/arvados/v1/collections", {
- :format => :json,
- :filters => [['uuid', ':-(', 'displeased']].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :filters => [['uuid', ':-(', 'displeased']].to_json
+ },
+ headers: auth(:active)
assert_response 422
assert_match(/nvalid operator/, json_response['errors'].join(' '))
end
test "get index with invalid filters (invalid operand type) responds 422" do
- get "/arvados/v1/collections", {
- :format => :json,
- :filters => [['uuid', '=', {foo: 'bar'}]].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :filters => [['uuid', '=', {foo: 'bar'}]].to_json
+ },
+ headers: auth(:active)
assert_response 422
assert_match(/nvalid operand type/, json_response['errors'].join(' '))
end
test "get index with where= (empty string)" do
- get "/arvados/v1/collections", {:format => :json, :where => ''}, auth(:active)
+ get "/arvados/v1/collections",
+ params: {:format => :json, :where => ''},
+ headers: auth(:active)
assert_response :success
assert_equal "arvados#collectionList", json_response['kind']
end
test "get index with select= (valid attribute)" do
- get "/arvados/v1/collections", {
- :format => :json,
- :select => ['portable_data_hash'].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :select => ['portable_data_hash'].to_json
+ },
+ headers: auth(:active)
assert_response :success
assert json_response['items'][0].keys.include?('portable_data_hash')
assert not(json_response['items'][0].keys.include?('uuid'))
end
test "get index with select= (invalid attribute) responds 422" do
- get "/arvados/v1/collections", {
- :format => :json,
- :select => ['bogus'].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :select => ['bogus'].to_json
+ },
+ headers: auth(:active)
assert_response 422
assert_match(/Invalid attribute.*bogus/, json_response['errors'].join(' '))
end
test "get index with select= (invalid attribute type) responds 422" do
- get "/arvados/v1/collections", {
- :format => :json,
- :select => [['bogus']].to_json
- }, auth(:active)
+ get "/arvados/v1/collections",
+ params: {
+ :format => :json,
+ :select => [['bogus']].to_json
+ },
+ headers: auth(:active)
assert_response 422
assert_match(/Invalid attribute.*bogus/, json_response['errors'].join(' '))
end
test "controller 404 response is json" do
- get "/arvados/v1/thingsthatdonotexist", {:format => :xml}, auth(:active)
+ get "/arvados/v1/thingsthatdonotexist",
+ params: {:format => :xml},
+ headers: auth(:active)
assert_response 404
assert_equal 1, json_response['errors'].length
assert_equal true, json_response['errors'][0].is_a?(String)
end
test "object 404 response is json" do
- get "/arvados/v1/groups/zzzzz-j7d0g-o5ba971173cup4f", {}, auth(:active)
+ get "/arvados/v1/groups/zzzzz-j7d0g-o5ba971173cup4f",
+ params: {},
+ headers: auth(:active)
assert_response 404
assert_equal 1, json_response['errors'].length
assert_equal true, json_response['errors'][0].is_a?(String)
}
signed_locator = Blob.sign_locator('bad42fa702ae3ea7d888fef11b46f450+44',
signing_opts)
- post "/arvados/v1/collections", {
- format: :json,
- collection: "{\"manifest_text\":\". #{signed_locator} 0:44:md5sum.txt\\n\",\"portable_data_hash\":\"ad02e37b6a7f45bbe2ead3c29a109b8a+54\"}"
- }, auth(:active)
+ post "/arvados/v1/collections",
+ params: {
+ format: :json,
+ collection: "{\"manifest_text\":\". #{signed_locator} 0:44:md5sum.txt\\n\",\"portable_data_hash\":\"ad02e37b6a7f45bbe2ead3c29a109b8a+54\"}"
+ },
+ headers: auth(:active)
assert_response 200
assert_equal 'ad02e37b6a7f45bbe2ead3c29a109b8a+54', json_response['portable_data_hash']
end
}
signed_locator = Blob.sign_locator('bad42fa702ae3ea7d888fef11b46f450+44',
signing_opts)
- post "/arvados/v1/collections", {
- format: :json,
- collection: "{\"manifest_text\":\". #{signed_locator} 0:44:md5sum.txt\\n\"}"
- }, auth(:active)
+ post "/arvados/v1/collections",
+ params: {
+ format: :json,
+ collection: "{\"manifest_text\":\". #{signed_locator} 0:44:md5sum.txt\\n\"}"
+ },
+ headers: auth(:active)
assert_response 200
assert_equal 'ad02e37b6a7f45bbe2ead3c29a109b8a+54', json_response['portable_data_hash']
end
}
signed_locator = Blob.sign_locator('bad42fa702ae3ea7d888fef11b46f450+44',
signing_opts)
- post "/arvados/v1/collections", {
- format: :json,
- collection: "{\"manifest_text\":\". #{signed_locator} 0:44:md5sum.txt\\n\",\"portable_data_hash\":\"ad02e37b6a7f45bbe2ead3c29a109b8a+54\"}"
- }, auth(:active)
+ post "/arvados/v1/collections",
+ params: {
+ format: :json,
+ collection: "{\"manifest_text\":\". #{signed_locator} 0:44:md5sum.txt\\n\",\"portable_data_hash\":\"ad02e37b6a7f45bbe2ead3c29a109b8a+54\"}"
+ },
+ headers: auth(:active)
assert_response 200
assert_equal 'ad02e37b6a7f45bbe2ead3c29a109b8a+54', json_response['portable_data_hash']
- put "/arvados/v1/collections/#{json_response['uuid']}", {
- format: :json,
- collection: { name: "a name" }
- }, auth(:active)
+ put "/arvados/v1/collections/#{json_response['uuid']}",
+ params: {
+ format: :json,
+ collection: { name: "a name" }
+ },
+ headers: auth(:active)
assert_response 200
assert_equal 'ad02e37b6a7f45bbe2ead3c29a109b8a+54', json_response['portable_data_hash']
assert_equal 'a name', json_response['name']
- get "/arvados/v1/collections/#{json_response['uuid']}", {
- format: :json,
- }, auth(:active)
+ get "/arvados/v1/collections/#{json_response['uuid']}",
+ params: {format: :json},
+ headers: auth(:active)
assert_response 200
assert_equal 'ad02e37b6a7f45bbe2ead3c29a109b8a+54', json_response['portable_data_hash']
collection = collections(:multilevel_collection_1)
# update collection's description
- put "/arvados/v1/collections/#{collection['uuid']}", {
- format: :json,
- collection: { description: "something specific" }
- }, auth(:active)
+ put "/arvados/v1/collections/#{collection['uuid']}",
+ params: {
+ format: :json,
+ collection: { description: "something specific" }
+ },
+ headers: auth(:active)
assert_response :success
assert_equal 'something specific', json_response['description']
# get the collection and verify newly added description
- get "/arvados/v1/collections/#{collection['uuid']}", {
- format: :json,
- }, auth(:active)
+ get "/arvados/v1/collections/#{collection['uuid']}",
+ params: {format: :json},
+ headers: auth(:active)
assert_response 200
assert_equal 'something specific', json_response['description']
test "create collection, update manifest, and search with filename" do
# create collection
signed_manifest = Collection.sign_manifest(". bad42fa702ae3ea7d888fef11b46f450+44 0:44:my_test_file.txt\n", api_token(:active))
- post "/arvados/v1/collections", {
- format: :json,
- collection: {manifest_text: signed_manifest}.to_json,
- }, auth(:active)
+ post "/arvados/v1/collections",
+ params: {
+ format: :json,
+ collection: {manifest_text: signed_manifest}.to_json,
+ },
+ headers: auth(:active)
assert_response :success
assert_equal true, json_response['manifest_text'].include?('my_test_file.txt')
assert_includes json_response['manifest_text'], 'my_test_file.txt'
# update the collection's manifest text
signed_manifest = Collection.sign_manifest(". bad42fa702ae3ea7d888fef11b46f450+44 0:44:my_updated_test_file.txt\n", api_token(:active))
- put "/arvados/v1/collections/#{created['uuid']}", {
- format: :json,
- collection: {manifest_text: signed_manifest}.to_json,
- }, auth(:active)
+ put "/arvados/v1/collections/#{created['uuid']}",
+ params: {
+ format: :json,
+ collection: {manifest_text: signed_manifest}.to_json,
+ },
+ headers: auth(:active)
assert_response :success
assert_equal created['uuid'], json_response['uuid']
assert_includes json_response['manifest_text'], 'my_updated_test_file.txt'
end
def search_using_filter search_filter, expected_items
- get '/arvados/v1/collections', {
- :filters => [['any', 'ilike', "%#{search_filter}%"]].to_json
- }, auth(:active)
+ get '/arvados/v1/collections',
+ params: {:filters => [['any', 'ilike', "%#{search_filter}%"]].to_json},
+ headers: auth(:active)
assert_response :success
response_items = json_response['items']
assert_not_nil response_items
test "search collection using full text search" do
# create collection to be searched for
signed_manifest = Collection.sign_manifest(". 85877ca2d7e05498dd3d109baf2df106+95+A3a4e26a366ee7e4ed3e476ccf05354761be2e4ae@545a9920 0:95:file_in_subdir1\n./subdir2/subdir3 2bbc341c702df4d8f42ec31f16c10120+64+A315d7e7bad2ce937e711fc454fae2d1194d14d64@545a9920 0:32:file1_in_subdir3.txt 32:32:file2_in_subdir3.txt\n./subdir2/subdir3/subdir4 2bbc341c702df4d8f42ec31f16c10120+64+A315d7e7bad2ce937e711fc454fae2d1194d14d64@545a9920 0:32:file3_in_subdir4.txt 32:32:file4_in_subdir4.txt\n", api_token(:active))
- post "/arvados/v1/collections", {
- format: :json,
- collection: {description: 'specific collection description', manifest_text: signed_manifest}.to_json,
- }, auth(:active)
+ post "/arvados/v1/collections",
+ params: {
+ format: :json,
+ collection: {description: 'specific collection description', manifest_text: signed_manifest}.to_json,
+ },
+ headers: auth(:active)
assert_response :success
assert_equal true, json_response['manifest_text'].include?('file4_in_subdir4.txt')
end
def search_using_full_text_search search_filter, expected_items
- get '/arvados/v1/collections', {
- :filters => [['any', '@@', search_filter]].to_json
- }, auth(:active)
+ get '/arvados/v1/collections',
+ params: {:filters => [['any', '@@', search_filter]].to_json},
+ headers: auth(:active)
assert_response :success
response_items = json_response['items']
assert_not_nil response_items
# search for the filename in the file_names column and expect error
test "full text search not supported for individual columns" do
- get '/arvados/v1/collections', {
- :filters => [['name', '@@', 'General']].to_json
- }, auth(:active)
+ get '/arvados/v1/collections',
+ params: {:filters => [['name', '@@', 'General']].to_json},
+ headers: auth(:active)
assert_response 422
end
test "full text search ignores special characters and finds with filter #{search_filter}" do
# description: The quick_brown_fox jumps over the lazy_dog
# full text search treats '_' as space apparently
- get '/arvados/v1/collections', {
- :filters => [['any', '@@', search_filter]].to_json
- }, auth(:active)
+ get '/arvados/v1/collections',
+ params: {:filters => [['any', '@@', search_filter]].to_json},
+ headers: auth(:active)
assert_response 200
response_items = json_response['items']
assert_not_nil response_items
test "create and get collection with properties" do
# create a collection (properties should start out empty)
signed_manifest = Collection.sign_manifest(". bad42fa702ae3ea7d888fef11b46f450+44 0:44:my_test_file.txt\n", api_token(:active))
- post "/arvados/v1/collections", {
- format: :json,
- collection: {manifest_text: signed_manifest}.to_json,
- }, auth(:active)
+ post "/arvados/v1/collections",
+ params: {
+ format: :json,
+ collection: {manifest_text: signed_manifest}.to_json,
+ },
+ headers: auth(:active)
assert_response 200
assert_not_nil json_response['uuid']
assert_not_nil json_response['properties']
assert_empty json_response['properties']
# update collection's properties
- put "/arvados/v1/collections/#{json_response['uuid']}", {
- format: :json,
- collection: { properties: {'property_1' => 'value_1'} }
- }, auth(:active)
+ put "/arvados/v1/collections/#{json_response['uuid']}",
+ params: {
+ format: :json,
+ collection: { properties: {'property_1' => 'value_1'} }
+ },
+ headers: auth(:active)
assert_response :success
assert_equal 'value_1', json_response['properties']['property_1']
end
SafeJSON.dump({"manifest_text" => bigmanifest})
end
time_block 'create' do
- post '/arvados/v1/collections', {collection: json}, auth(:active)
+ post '/arvados/v1/collections',
+ params: {collection: json},
+ headers: auth(:active)
assert_response :success
end
uuid = json_response['uuid']
time_block 'read' do
- get '/arvados/v1/collections/' + uuid, {}, auth(:active)
+ get '/arvados/v1/collections/' + uuid, params: {}, headers: auth(:active)
assert_response :success
end
time_block 'list' do
- get '/arvados/v1/collections', {select: ['manifest_text'], filters: [['uuid', '=', uuid]].to_json}, auth(:active)
+ get '/arvados/v1/collections',
+ params: {select: ['manifest_text'], filters: [['uuid', '=', uuid]].to_json},
+ headers: auth(:active)
assert_response :success
end
time_block 'update' do
- put '/arvados/v1/collections/' + uuid, {collection: json}, auth(:active)
+ put '/arvados/v1/collections/' + uuid,
+ params: {collection: json},
+ headers: auth(:active)
assert_response :success
end
time_block 'delete' do
- delete '/arvados/v1/collections/' + uuid, {}, auth(:active)
+ delete '/arvados/v1/collections/' + uuid, params: {}, headers: auth(:active)
end
end
SafeJSON.dump({manifest_text: hugemanifest})
end
vmpeak "post" do
- post '/arvados/v1/collections', {collection: json}, auth(:active)
+ post '/arvados/v1/collections',
+ params: {collection: json},
+ headers: auth(:active)
end
end
end
fixtures :all
test "container token validate, Running, regular auth" do
- get "/arvados/v1/containers/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:running_container_auth).token}/#{containers(:running).uuid}"}
+ get "/arvados/v1/containers/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:running_container_auth).token}/#{containers(:running).uuid}"}
# Container is Running, token can be used
assert_response :success
assert_equal containers(:running).uuid, json_response['uuid']
end
test "container token validate, Locked, runtime_token" do
- get "/arvados/v1/containers/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}/#{containers(:runtime_token).uuid}"}
+ get "/arvados/v1/containers/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}/#{containers(:runtime_token).uuid}"}
# Container is Locked, runtime_token can be used
assert_response :success
assert_equal containers(:runtime_token).uuid, json_response['uuid']
end
test "container token validate, Cancelled, runtime_token" do
- put "/arvados/v1/containers/#{containers(:runtime_token).uuid}", {
- :format => :json,
- :container => {:state => "Cancelled"}
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:dispatch1).token}"}
+ put "/arvados/v1/containers/#{containers(:runtime_token).uuid}",
+ params: {
+ :format => :json,
+ :container => {:state => "Cancelled"}
+ },
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:dispatch1).token}"}
assert_response :success
- get "/arvados/v1/containers/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}/#{containers(:runtime_token).uuid}"}
+ get "/arvados/v1/containers/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}/#{containers(:runtime_token).uuid}"}
# Container is Cancelled, token cannot be used
assert_response 401
end
test "container token validate, Running, without optional portion" do
- get "/arvados/v1/containers/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:running_container_auth).token}"}
+ get "/arvados/v1/containers/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:running_container_auth).token}"}
# Container is Running, token can be used
assert_response :success
assert_equal containers(:running).uuid, json_response['uuid']
end
test "container token validate, Locked, runtime_token, without optional portion" do
- get "/arvados/v1/containers/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}"}
+ get "/arvados/v1/containers/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}"}
# runtime_token without container uuid won't return 'current'
assert_response 404
end
test "container token validate, wrong container uuid" do
- get "/arvados/v1/containers/current", {
- :format => :json
- }, {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}/#{containers(:running).uuid}"}
+ get "/arvados/v1/containers/current",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "Bearer #{api_client_authorizations(:container_runtime_token).token}/#{containers(:running).uuid}"}
# Container uuid mismatch, token can't be used
assert_response 401
end
%w(/login /logout /auth/example/callback /auth/joshid).each do |path|
test "OPTIONS requests are refused at #{path}" do
- options path, {}, {}
+ options path, params: {}, headers: {}
assert_no_cors_headers
end
test "CORS headers do not exist at GET #{path}" do
- get path, {}, {}
+ get path, params: {}, headers: {}
assert_no_cors_headers
end
end
%w(/discovery/v1/apis/arvados/v1/rest).each do |path|
test "CORS headers are set at GET #{path}" do
- get path, {}, {}
+ get path, params: {}, headers: {}
assert_response :success
assert_cors_headers
end
'/arvados/v1/users',
'/arvados/v1/api_client_authorizations'].each do |path|
test "CORS headers are set and body is empty at OPTIONS #{path}" do
- options path, {}, {}
+ options path, params: {}, headers: {}
assert_response :success
assert_cors_headers
assert_equal '', response.body
end
test "CORS headers are set at authenticated GET #{path}" do
- get path, {}, auth(:active_trustedclient)
+ get path, params: {}, headers: auth(:active_trustedclient)
assert_response :success
assert_cors_headers
end
# does not grant access to any resources.
['GET', 'POST'].each do |method|
test "Session does not work at #{method} #{path}" do
- send method.downcase, path, {format: 'json'}, {user_id: 1}
+ send method.downcase, path, params: {format: 'json'}, headers: {user_id: 1}
assert_response 401
assert_cors_headers
end
end
test "job runs" do
- post "/arvados/v1/jobs", {
- format: "json",
- job: {
- script: "log",
- repository: "active/crunchdispatchtest",
- script_version: "f35f99b7d32bac257f5989df02b9f12ee1a9b0d6",
- script_parameters: "{}"
- }
- }, auth(:admin)
+ post "/arvados/v1/jobs",
+ params: {
+ format: "json",
+ job: {
+ script: "log",
+ repository: "active/crunchdispatchtest",
+ script_version: "f35f99b7d32bac257f5989df02b9f12ee1a9b0d6",
+ script_parameters: {
+ input: 'fa7aeb5140e2848d39b416daeef4ffc5+45',
+ an_integer: '1'
+ }
+ }
+ },
+ headers: auth(:admin)
assert_response :success
end
end
begin
Rails.env = 'production'
Rails.application.reload_routes!
- post '/database/reset', {}, auth(:admin)
+ post '/database/reset', params: {}, headers: auth(:admin)
assert_response 404
ensure
Rails.env = rails_env_was
end
test "reset fails with non-admin token" do
- post '/database/reset', {}, auth(:active)
+ post '/database/reset', params: {}, headers: auth(:active)
assert_response 403
end
admin_auth = auth(:admin)
authorize_with :admin
- post '/database/reset', {}, admin_auth
+ post '/database/reset', params: {}, headers: admin_auth
assert_response :success
- post '/arvados/v1/specimens', {specimen: '{}'}, active_auth
+ post '/arvados/v1/specimens', params: {specimen: '{}'}, headers: active_auth
assert_response :success
new_uuid = json_response['uuid']
- get '/arvados/v1/specimens/'+new_uuid, {}, active_auth
+ get '/arvados/v1/specimens/'+new_uuid, params: {}, headers: active_auth
assert_response :success
put('/arvados/v1/specimens/'+new_uuid,
- {specimen: '{"properties":{}}'}, active_auth)
+ params: {specimen: '{"properties":{}}'},
+ headers: active_auth)
assert_response :success
- delete '/arvados/v1/specimens/'+new_uuid, {}, active_auth
+ delete '/arvados/v1/specimens/'+new_uuid, params: {}, headers: active_auth
assert_response :success
- get '/arvados/v1/specimens/'+new_uuid, {}, active_auth
+ get '/arvados/v1/specimens/'+new_uuid, params: {}, headers: active_auth
assert_response 404
end
old_uuid = specimens(:owned_by_active_user).uuid
authorize_with :admin
- post '/database/reset', {}, admin_auth
+ post '/database/reset', params: {}, headers: admin_auth
assert_response :success
- delete '/arvados/v1/specimens/' + old_uuid, {}, active_auth
+ delete '/arvados/v1/specimens/' + old_uuid, params: {}, headers: active_auth
assert_response :success
- post '/arvados/v1/specimens', {specimen: '{}'}, active_auth
+ post '/arvados/v1/specimens', params: {specimen: '{}'}, headers: active_auth
assert_response :success
new_uuid = json_response['uuid']
# Reset to fixtures.
- post '/database/reset', {}, admin_auth
+ post '/database/reset', params: {}, headers: admin_auth
assert_response :success
# New specimen should disappear. Old specimen should reappear.
- get '/arvados/v1/specimens/'+new_uuid, {}, active_auth
+ get '/arvados/v1/specimens/'+new_uuid, params: {}, headers: active_auth
assert_response 404
- get '/arvados/v1/specimens/'+old_uuid, {}, active_auth
+ get '/arvados/v1/specimens/'+old_uuid, params: {}, headers: active_auth
assert_response :success
end
end
%w(/arvados/v1/shoes /arvados/shoes /shoes /nodes /users).each do |path|
test "non-existent route #{path}" do
- get path, {:format => :json}, auth(:active)
+ get path, params: {:format => :json}, headers: auth(:active)
assert_nil assigns(:objects)
assert_nil assigns(:object)
assert_not_nil json_response['errors']
test "results are consistent when provided orders #{orders} is incomplete" do
last = nil
(0..20).each do
- get '/arvados/v1/groups/contents', {
- id: groups(:aproject).uuid,
- filters: [["uuid", "is_a", "arvados#collection"]].to_json,
- orders: orders.to_json,
- format: :json,
- }, auth(:active)
+ get '/arvados/v1/groups/contents',
+ params: {
+ id: groups(:aproject).uuid,
+ filters: [["uuid", "is_a", "arvados#collection"]].to_json,
+ orders: orders.to_json,
+ format: :json,
+ },
+ headers: auth(:active)
assert_response :success
if last.nil?
last = json_response['items']
uuid_received = {}
owner_received = {}
while true
- get "/arvados/v1/groups/contents", {
- id: groups(:aproject).uuid,
- limit: limit,
- offset: offset,
- format: :json,
- }, auth(:active)
+ get "/arvados/v1/groups/contents",
+ params: {
+ id: groups(:aproject).uuid,
+ limit: limit,
+ offset: offset,
+ format: :json,
+ },
+ headers: auth(:active)
assert_response :success
assert_operator(0, :<, json_response['items'].count,
['no-such-thing', false], # script_parameter of pipeline instances
].each do |search_filter, expect_results|
test "full text search of group-owned objects for #{search_filter}" do
- get "/arvados/v1/groups/contents", {
- id: groups(:aproject).uuid,
- limit: 5,
- :filters => [['any', '@@', search_filter]].to_json
- }, auth(:active)
+ get "/arvados/v1/groups/contents",
+ params: {
+ id: groups(:aproject).uuid,
+ limit: 5,
+ :filters => [['any', '@@', search_filter]].to_json
+ },
+ headers: auth(:active)
assert_response :success
if expect_results
refute_empty json_response['items']
end
test "full text search is not supported for individual columns" do
- get "/arvados/v1/groups/contents", {
- :filters => [['name', '@@', 'Private']].to_json
- }, auth(:active)
+ get "/arvados/v1/groups/contents",
+ params: {
+ :filters => [['name', '@@', 'Private']].to_json
+ },
+ headers: auth(:active)
assert_response 422
end
test "group contents with include trash collections" do
- get "/arvados/v1/groups/contents", {
- include_trash: "true",
- filters: [["uuid", "is_a", "arvados#collection"]].to_json,
- limit: 1000
- }, auth(:active)
+ get "/arvados/v1/groups/contents",
+ params: {
+ include_trash: "true",
+ filters: [["uuid", "is_a", "arvados#collection"]].to_json,
+ limit: 1000
+ },
+ headers: auth(:active)
assert_response 200
coll_uuids = []
end
test "group contents without trash collections" do
- get "/arvados/v1/groups/contents", {
- filters: [["uuid", "is_a", "arvados#collection"]].to_json,
- limit: 1000
- }, auth(:active)
+ get "/arvados/v1/groups/contents",
+ params: {
+ filters: [["uuid", "is_a", "arvados#collection"]].to_json,
+ limit: 1000
+ },
+ headers: auth(:active)
assert_response 200
coll_uuids = []
assert_includes coll_uuids, collections(:foo_collection_in_aproject).uuid
assert_not_includes coll_uuids, collections(:expired_collection).uuid
end
+
+ test "unsharing a project results in hiding it from previously shared user" do
+ # remove sharing link for project
+ delete "/arvados/v1/links/#{links(:share_starred_project_with_project_viewer).uuid}", headers: auth(:admin)
+ assert_response 200
+
+ # verify that the user can no longer see the project
+ get "/arvados/v1/groups",
+ params: {
+ filters: [['group_class', '=', 'project']].to_json,
+ limit: 1000
+ }, headers: auth(:project_viewer)
+ assert_response 200
+ found_projects = {}
+ json_response['items'].each do |g|
+ found_projects[g['uuid']] = g
+ end
+ assert_equal false, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
+
+ # share the project
+ post "/arvados/v1/links", params: {
+ link: {
+ link_class: "permission",
+ name: "can_read",
+ head_uuid: groups(:starred_and_shared_active_user_project).uuid,
+ tail_uuid: users(:project_viewer).uuid,
+ }
+ }, headers: auth(:system_user)
+ assert_response 200
+ assert_equal 'permission', json_response['link_class']
+
+ # verify that project_viewer user can now see shared project again
+ get "/arvados/v1/groups", params: {
+ filters: [['group_class', '=', 'project']].to_json,
+ limit: 1000
+ }, headers: auth(:project_viewer)
+ assert_response 200
+ found_projects = {}
+ json_response['items'].each do |g|
+ found_projects[g['uuid']] = g
+ end
+ assert_equal true, found_projects.include?(groups(:starred_and_shared_active_user_project).uuid)
+ end
end
class NonTransactionalGroupsTest < ActionDispatch::IntegrationTest
# This is needed because nested transactions share the connection pool, so
# one thread is locked while trying to talk to the database, until the other
# one finishes.
- self.use_transactional_fixtures = false
+ self.use_transactional_tests = false
teardown do
# Explicitly reset the database after each test.
- post '/database/reset', {}, auth(:admin)
+ post '/database/reset', params: {}, headers: auth(:admin)
assert_response :success
end
assert_equal nil, Group.find_by_name(name)
# Trigger the asynchronous permission update by using async=true parameter.
- post "/arvados/v1/groups", {
- group: {
- name: name
+ post "/arvados/v1/groups",
+ params: {
+ group: {
+ name: name
+ },
+ async: true
},
- async: true
- }, auth(:active)
+ headers: auth(:active)
assert_response 202
# The group exists on the database, but it's not accessible yet.
assert_not_nil Group.find_by_name(name)
- get "/arvados/v1/groups", {
- filters: [["name", "=", name]].to_json,
- limit: 10
- }, auth(:active)
+ get "/arvados/v1/groups",
+ params: {
+ filters: [["name", "=", name]].to_json,
+ limit: 10
+ },
+ headers: auth(:active)
assert_response 200
assert_equal 0, json_response['items_available']
# Wait a bit and try again.
sleep(1)
- get "/arvados/v1/groups", {
- filters: [["name", "=", name]].to_json,
- limit: 10
- }, auth(:active)
+ get "/arvados/v1/groups",
+ params: {
+ filters: [["name", "=", name]].to_json,
+ limit: 10
+ },
+ headers: auth(:active)
assert_response 200
assert_equal 1, json_response['items_available']
end
fixtures :all
test "cancel job" do
- post "/arvados/v1/jobs/#{jobs(:running).uuid}/cancel", {:format => :json}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active).api_token}"}
+ post "/arvados/v1/jobs/#{jobs(:running).uuid}/cancel",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:active).api_token}"}
assert_response :success
assert_equal "arvados#job", json_response['kind']
assert_not_nil json_response['cancelled_at']
end
test "cancel someone else's visible job" do
- post "/arvados/v1/jobs/#{jobs(:runningbarbaz).uuid}/cancel", {:format => :json}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
+ post "/arvados/v1/jobs/#{jobs(:runningbarbaz).uuid}/cancel",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
assert_response 403
end
test "cancel someone else's invisible job" do
- post "/arvados/v1/jobs/#{jobs(:running).uuid}/cancel", {:format => :json}, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
+ post "/arvados/v1/jobs/#{jobs(:running).uuid}/cancel",
+ params: {:format => :json},
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:spectator).api_token}"}
assert_response 404
end
test "task qsequence values automatically increase monotonically" do
post_args = ["/arvados/v1/job_tasks",
- {job_task: {
+ params: {job_task: {
job_uuid: jobs(:running).uuid,
sequence: 1,
}},
- auth(:active)]
+ headers: auth(:active)]
last_qsequence = -1
(1..3).each do |task_num|
@response = nil
last_qsequence = qsequence
end
end
+
+ test 'get_delete components_get again for job with components' do
+ authorize_with :active
+ get "/arvados/v1/jobs/#{jobs(:running_job_with_components).uuid}",
+ headers: auth(:active)
+ assert_response 200
+ assert_not_nil json_response["components"]
+ assert_equal ["component1", "component2"], json_response["components"].keys
+
+ # delete second component
+ put "/arvados/v1/jobs/#{jobs(:running_job_with_components).uuid}", params: {
+ job: {
+ components: {"component1" => "zzzzz-8i9sb-jobuuid00000001"}
+ },
+ limit: 1000
+ }, headers: auth(:active)
+ assert_response 200
+
+ get "/arvados/v1/jobs/#{jobs(:running_job_with_components).uuid}",
+ headers: auth(:active)
+ assert_response 200
+ assert_not_nil json_response["components"]
+ assert_equal ["component1"], json_response["components"].keys
+
+ # delete all components
+ put "/arvados/v1/jobs/#{jobs(:running_job_with_components).uuid}", params: {
+ job: {
+ components: nil
+ },
+ limit: 1000
+ }, headers: auth(:active)
+ assert_response 200
+
+ get "/arvados/v1/jobs/#{jobs(:running_job_with_components).uuid}",
+ headers: auth(:active)
+ assert_response 200
+ assert_not_nil json_response["components"]
+ assert_equal [], json_response["components"].keys
+ end
end
class KeepProxyTest < ActionDispatch::IntegrationTest
test "request keep disks" do
- get "/arvados/v1/keep_services/accessible", {:format => :json}, auth(:active)
+ get "/arvados/v1/keep_services/accessible",
+ params: {:format => :json},
+ headers: auth(:active)
assert_response :success
services = json_response['items']
end
test "request keep proxy" do
- get "/arvados/v1/keep_services/accessible", {:format => :json}, auth(:active).merge({'HTTP_X_EXTERNAL_CLIENT' => '1'})
+ get "/arvados/v1/keep_services/accessible",
+ params: {:format => :json},
+ headers: auth(:active).merge({'HTTP_X_EXTERNAL_CLIENT' => '1'})
assert_response :success
services = json_response['items']
class LoginWorkflowTest < ActionDispatch::IntegrationTest
test "default prompt to login is JSON" do
- post('/arvados/v1/specimens', {specimen: {}},
- {'HTTP_ACCEPT' => ''})
+ post('/arvados/v1/specimens',
+ params: {specimen: {}},
+ headers: {'HTTP_ACCEPT' => ''})
assert_response 401
assert_includes(json_response['errors'], "Not logged in")
end
test "login prompt respects JSON Accept header" do
- post('/arvados/v1/specimens', {specimen: {}},
- {'HTTP_ACCEPT' => 'application/json'})
+ post('/arvados/v1/specimens',
+ params: {specimen: {}},
+ headers: {'HTTP_ACCEPT' => 'application/json'})
assert_response 401
assert_includes(json_response['errors'], "Not logged in")
end
test "login prompt respects HTML Accept header" do
- post('/arvados/v1/specimens', {specimen: {}},
- {'HTTP_ACCEPT' => 'text/html'})
+ post('/arvados/v1/specimens',
+ params: {specimen: {}},
+ headers: {'HTTP_ACCEPT' => 'text/html'})
assert_response 302
assert_match(%r{/auth/joshid$}, @response.headers['Location'],
"HTML login prompt did not include expected redirect")
def check(val)
post "/arvados/v1/container_requests",
- {
- :container_request => {
- :name => "workflow",
- :state => "Uncommitted",
- :command => ["echo"],
- :container_image => "arvados/jobs",
- :output_path => "/",
- :mounts => {
- :foo => {
- :kind => "json",
- :content => JSON.parse(SafeJSON.dump(val)),
- }
- }
- }
- }.to_json, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}",
- 'CONTENT_TYPE' => 'application/json'}
+ params: {
+ :container_request => {
+ :name => "workflow",
+ :state => "Uncommitted",
+ :command => ["echo"],
+ :container_image => "arvados/jobs",
+ :output_path => "/",
+ :mounts => {
+ :foo => {
+ :kind => "json",
+ :content => JSON.parse(SafeJSON.dump(val)),
+ }
+ }
+ }
+ }.to_json,
+ headers: {
+ 'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}",
+ 'CONTENT_TYPE' => 'application/json'
+ }
assert_response :success
assert_equal "arvados#containerRequest", json_response['kind']
assert_equal val, json_response['mounts']['foo']['content']
test "adding and removing direct can_read links" do
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
# try to add permission as spectator
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:spectator).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: collections(:foo_file).uuid,
- properties: {}
- }
- }, auth(:spectator)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:spectator)
assert_response 422
# add permission as admin
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:spectator).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: collections(:foo_file).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
u = json_response['uuid']
assert_response :success
# read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response :success
# try to delete permission as spectator
- delete "/arvados/v1/links/#{u}", {:format => :json}, auth(:spectator)
+ delete "/arvados/v1/links/#{u}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 403
# delete permission as admin
- delete "/arvados/v1/links/#{u}", {:format => :json}, auth(:admin)
+ delete "/arvados/v1/links/#{u}",
+ params: {:format => :json},
+ headers: auth(:admin)
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
end
test "adding can_read links from user to group, group to collection" do
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
# add permission for spectator to read group
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:spectator).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: groups(:private).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: groups(:private).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
# add permission for group to read collection
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: groups(:private).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: collections(:foo_file).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: groups(:private).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
u = json_response['uuid']
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response :success
# delete permission for group to read collection
- delete "/arvados/v1/links/#{u}", {:format => :json}, auth(:admin)
+ delete "/arvados/v1/links/#{u}",
+ params: {:format => :json},
+ headers: auth(:admin)
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
end
test "adding can_read links from group to collection, user to group" do
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
# add permission for group to read collection
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: groups(:private).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: collections(:foo_file).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: groups(:private).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
# add permission for spectator to read group
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:spectator).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: groups(:private).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: groups(:private).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
u = json_response['uuid']
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response :success
# delete permission for spectator to read group
- delete "/arvados/v1/links/#{u}", {:format => :json}, auth(:admin)
+ delete "/arvados/v1/links/#{u}",
+ params: {:format => :json},
+ headers: auth(:admin)
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
end
test "adding can_read links from user to group, group to group, group to collection" do
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
# add permission for user to read group
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:spectator).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: groups(:private).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: groups(:private).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
# add permission for group to read group
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: groups(:private).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: groups(:empty_lonely_group).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: groups(:private).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: groups(:empty_lonely_group).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
# add permission for group to read collection
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: groups(:empty_lonely_group).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: collections(:foo_file).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: groups(:empty_lonely_group).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: collections(:foo_file).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
u = json_response['uuid']
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response :success
# delete permission for group to read collection
- delete "/arvados/v1/links/#{u}", {:format => :json}, auth(:admin)
+ delete "/arvados/v1/links/#{u}",
+ params: {:format => :json},
+ headers: auth(:admin)
assert_response :success
# try to read collection as spectator
- get "/arvados/v1/collections/#{collections(:foo_file).uuid}", {:format => :json}, auth(:spectator)
+ get "/arvados/v1/collections/#{collections(:foo_file).uuid}",
+ params: {:format => :json},
+ headers: auth(:spectator)
assert_response 404
end
test "read-only group-admin cannot modify administered user" do
- put "/arvados/v1/users/#{users(:active).uuid}", {
- :user => {
- first_name: 'KilroyWasHere'
+ put "/arvados/v1/users/#{users(:active).uuid}",
+ params: {
+ :user => {
+ first_name: 'KilroyWasHere'
+ },
+ :format => :json
},
- :format => :json
- }, auth(:rominiadmin)
+ headers: auth(:rominiadmin)
assert_response 403
end
test "read-only group-admin cannot read or update non-administered user" do
- get "/arvados/v1/users/#{users(:spectator).uuid}", {
- :format => :json
- }, auth(:rominiadmin)
+ get "/arvados/v1/users/#{users(:spectator).uuid}",
+ params: {:format => :json},
+ headers: auth(:rominiadmin)
assert_response 404
- put "/arvados/v1/users/#{users(:spectator).uuid}", {
- :user => {
- first_name: 'KilroyWasHere'
+ put "/arvados/v1/users/#{users(:spectator).uuid}",
+ params: {
+ :user => {
+ first_name: 'KilroyWasHere'
+ },
+ :format => :json
},
- :format => :json
- }, auth(:rominiadmin)
+ headers: auth(:rominiadmin)
assert_response 404
end
test "RO group-admin finds user's specimens, RW group-admin can update" do
[[:rominiadmin, false],
[:miniadmin, true]].each do |which_user, update_should_succeed|
- get "/arvados/v1/specimens", {:format => :json}, auth(which_user)
+ get "/arvados/v1/specimens",
+ params: {:format => :json},
+ headers: auth(which_user)
assert_response :success
resp_uuids = json_response['items'].collect { |i| i['uuid'] }
[[true, specimens(:owned_by_active_user).uuid],
[which_user.to_s,
should_find ? '' : 'not ',
uuid])
- put "/arvados/v1/specimens/#{uuid}", {
- :specimen => {
- properties: {
- miniadmin_was_here: true
- }
+ put "/arvados/v1/specimens/#{uuid}",
+ params: {
+ :specimen => {
+ properties: {
+ miniadmin_was_here: true
+ }
+ },
+ :format => :json
},
- :format => :json
- }, auth(which_user)
+ headers: auth(which_user)
if !should_find
assert_response 404
elsif !update_should_succeed
test "get_permissions returns list" do
# First confirm that user :active cannot get permissions on group :public
- get "/arvados/v1/permissions/#{groups(:public).uuid}", nil, auth(:active)
+ get "/arvados/v1/permissions/#{groups(:public).uuid}",
+ params: nil,
+ headers: auth(:active)
assert_response 404
# add some permissions, including can_manage
# permission for user :active
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:spectator).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: groups(:public).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:spectator).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: groups(:public).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
can_read_uuid = json_response['uuid']
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:inactive).uuid,
- link_class: 'permission',
- name: 'can_write',
- head_uuid: groups(:public).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:inactive).uuid,
+ link_class: 'permission',
+ name: 'can_write',
+ head_uuid: groups(:public).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
can_write_uuid = json_response['uuid']
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- tail_uuid: users(:active).uuid,
- link_class: 'permission',
- name: 'can_manage',
- head_uuid: groups(:public).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: users(:active).uuid,
+ link_class: 'permission',
+ name: 'can_manage',
+ head_uuid: groups(:public).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
can_manage_uuid = json_response['uuid']
# Now user :active should be able to retrieve permissions
# on group :public.
get("/arvados/v1/permissions/#{groups(:public).uuid}",
- { :format => :json },
- auth(:active))
+ params: { :format => :json },
+ headers: auth(:active))
assert_response :success
perm_uuids = json_response['items'].map { |item| item['uuid'] }
test "get_permissions returns 404 for nonexistent uuid" do
nonexistent = Group.generate_uuid
# make sure it really doesn't exist
- get "/arvados/v1/groups/#{nonexistent}", nil, auth(:admin)
+ get "/arvados/v1/groups/#{nonexistent}", params: nil, headers: auth(:admin)
assert_response 404
- get "/arvados/v1/permissions/#{nonexistent}", nil, auth(:active)
+ get "/arvados/v1/permissions/#{nonexistent}", params: nil, headers: auth(:active)
assert_response 404
end
test "get_permissions returns 403 if user can read but not manage" do
- post "/arvados/v1/links", {
- :link => {
- tail_uuid: users(:active).uuid,
- link_class: 'permission',
- name: 'can_read',
- head_uuid: groups(:public).uuid,
- properties: {}
- }
- }, auth(:admin)
+ post "/arvados/v1/links",
+ params: {
+ :link => {
+ tail_uuid: users(:active).uuid,
+ link_class: 'permission',
+ name: 'can_read',
+ head_uuid: groups(:public).uuid,
+ properties: {}
+ }
+ },
+ headers: auth(:admin)
assert_response :success
- get "/arvados/v1/permissions/#{groups(:public).uuid}", nil, auth(:active)
+ get "/arvados/v1/permissions/#{groups(:public).uuid}",
+ params: nil,
+ headers: auth(:active)
assert_response 403
end
# The active user should be able to read the empty collection.
get("/arvados/v1/collections/#{empty_collection_uuid}",
- { :format => :json },
- auth(:active))
+ params: {:format => :json},
+ headers: auth(:active))
assert_response :success
assert_empty json_response['manifest_text'], "empty collection manifest_text is not empty"
end
}
post("/arvados/v1/pipeline_instances",
- {pipeline_instance: {components: {comp_name => component}}.to_json},
- auth(:active))
+ params: {
+ pipeline_instance: {
+ components: {comp_name => component}
+ }.to_json
+ },
+ headers: auth(:active))
check_component_match(comp_name, component)
pi_uuid = json_response["uuid"]
@response = nil
- get("/arvados/v1/pipeline_instances/#{pi_uuid}", {}, auth(:active))
+ get("/arvados/v1/pipeline_instances/#{pi_uuid}", params: {}, headers: auth(:active))
check_component_match(comp_name, component)
end
end
params[:reader_tokens] = [api_token(read_auth)].send(formatter) if read_auth
headers = {}
headers.merge!(auth(main_auth)) if main_auth
- get('/arvados/v1/specimens', params, headers)
+ get('/arvados/v1/specimens', params: params, headers: headers)
end
def get_specimen_uuids(main_auth, read_auth, formatter=:to_a)
expected = 401
end
post('/arvados/v1/specimens.json',
- {specimen: {}, reader_tokens: [api_token(read_auth)].send(formatter)},
- headers)
+ params: {specimen: {}, reader_tokens: [api_token(read_auth)].send(formatter)},
+ headers: headers)
assert_response expected
end
test "scopes are still limited with reader tokens" do
get('/arvados/v1/collections',
- {reader_tokens: [api_token(:spectator_specimens)]},
- auth(:active_noscope))
+ params: {reader_tokens: [api_token(:spectator_specimens)]},
+ headers: auth(:active_noscope))
assert_response 403
end
end
test 'authenticate with remote token' do
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
assert_equal 'zbbbb-tpzed-000000000000000', json_response['uuid']
assert_equal false, json_response['is_admin']
@stub_status = 401
# re-authorize before cache expires
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
# simulate cache expiry
update_all(expires_at: db_current_time - 1.minute)
# re-authorize after cache expires
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response 401
# simulate cached token indicating wrong user (e.g., local user
@stub_status = 200
@stub_content[:username] = 'blarney'
@stub_content[:email] = 'blarney@example.com'
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
assert_equal 'barney', json_response['username'], 'local username should not change once assigned'
assert_equal 'blarney@example.com', json_response['email']
test 'authenticate with remote token, remote username conflicts with local' do
@stub_content[:username] = 'active'
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
assert_equal 'active2', json_response['username']
end
test 'authenticate with remote token, remote username is nil' do
@stub_content.delete :username
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
assert_equal 'foo', json_response['username']
end
test 'authenticate with remote token from misbhehaving remote cluster' do
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbork')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbork')
assert_response 401
end
@stub_content = {
error: 'not authorized',
}
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response 401
end
'///',
].each do |token|
test "authenticate with malformed remote token #{token}" do
- get '/arvados/v1/users/current', {format: 'json'}, {"HTTP_AUTHORIZATION" => "Bearer #{token}"}
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: {"HTTP_AUTHORIZATION" => "Bearer #{token}"}
assert_response 401
end
end
test "ignore extra fields in remote token" do
token = salted_active_token(remote: 'zbbbb') + '/foo/bar/baz/*'
- get '/arvados/v1/users/current', {format: 'json'}, {"HTTP_AUTHORIZATION" => "Bearer #{token}"}
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: {"HTTP_AUTHORIZATION" => "Bearer #{token}"}
assert_response :success
end
test 'remote api server is not an api server' do
@stub_status = 200
@stub_content = '<html>bad</html>'
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response 401
end
['zbbbb', 'z0000'].each do |token_valid_for|
test "validate #{token_valid_for}-salted token for remote cluster zbbbb" do
salted_token = salt_token(fixture: :active, remote: token_valid_for)
- get '/arvados/v1/users/current', {format: 'json', remote: 'zbbbb'}, {
- "HTTP_AUTHORIZATION" => "Bearer #{salted_token}"
- }
+ get '/arvados/v1/users/current',
+ params: {format: 'json', remote: 'zbbbb'},
+ headers: {"HTTP_AUTHORIZATION" => "Bearer #{salted_token}"}
if token_valid_for == 'zbbbb'
assert_response 200
assert_equal(users(:active).uuid, json_response['uuid'])
test "list readable groups with salted token" do
salted_token = salt_token(fixture: :active, remote: 'zbbbb')
- get '/arvados/v1/groups', {
- format: 'json',
- remote: 'zbbbb',
- limit: 10000,
- }, {
- "HTTP_AUTHORIZATION" => "Bearer #{salted_token}"
- }
+ get '/arvados/v1/groups',
+ params: {
+ format: 'json',
+ remote: 'zbbbb',
+ limit: 10000,
+ },
+ headers: {"HTTP_AUTHORIZATION" => "Bearer #{salted_token}"}
assert_response 200
group_uuids = json_response['items'].collect { |i| i['uuid'] }
assert_includes(group_uuids, 'zzzzz-j7d0g-fffffffffffffff')
test 'auto-activate user from trusted cluster' do
Rails.configuration.auto_activate_users_from = ['zbbbb']
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
assert_equal 'zbbbb-tpzed-000000000000000', json_response['uuid']
assert_equal false, json_response['is_admin']
end
test 'pre-activate remote user' do
- post '/arvados/v1/users', {
- "user" => {
- "uuid" => "zbbbb-tpzed-000000000000000",
- "email" => 'foo@example.com',
- "username" => 'barney',
- "is_active" => true
- }
- }, {'HTTP_AUTHORIZATION' => "OAuth2 #{api_token(:admin)}"}
+ post '/arvados/v1/users',
+ params: {
+ "user" => {
+ "uuid" => "zbbbb-tpzed-000000000000000",
+ "email" => 'foo@example.com',
+ "username" => 'barney',
+ "is_active" => true
+ }
+ },
+ headers: {'HTTP_AUTHORIZATION' => "OAuth2 #{api_token(:admin)}"}
assert_response :success
- get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
+ get '/arvados/v1/users/current',
+ params: {format: 'json'},
+ headers: auth(remote: 'zbbbb')
assert_response :success
assert_equal 'zbbbb-tpzed-000000000000000', json_response['uuid']
assert_equal nil, json_response['is_admin']
test "validate unsalted v2 token for remote cluster zbbbb" do
auth = api_client_authorizations(:active)
token = "v2/#{auth.uuid}/#{auth.api_token}"
- get '/arvados/v1/users/current', {format: 'json', remote: 'zbbbb'}, {
- "HTTP_AUTHORIZATION" => "Bearer #{token}"
- }
+ get '/arvados/v1/users/current',
+ params: {format: 'json', remote: 'zbbbb'},
+ headers: {"HTTP_AUTHORIZATION" => "Bearer #{token}"}
assert_response :success
assert_equal(users(:active).uuid, json_response['uuid'])
end
["invalid local", "v2/#{api_client_authorizations(:active).uuid}/fakefakefake"],
["invalid remote", "v2/zbork-gj3su-000000000000000/abc"],
].each do |label, runtime_token|
- post '/arvados/v1/container_requests', {
- "container_request" => {
- "command" => ["echo"],
- "container_image" => "xyz",
- "output_path" => "/",
- "cwd" => "/",
- "runtime_token" => runtime_token
- }
- }, {"HTTP_AUTHORIZATION" => "Bearer #{api_client_authorizations(:active).api_token}"}
+ post '/arvados/v1/container_requests',
+ params: {
+ "container_request" => {
+ "command" => ["echo"],
+ "container_image" => "xyz",
+ "output_path" => "/",
+ "cwd" => "/",
+ "runtime_token" => runtime_token
+ }
+ },
+ headers: {"HTTP_AUTHORIZATION" => "Bearer #{api_client_authorizations(:active).api_token}"}
if label.include? "invalid"
assert_response 422
else
class SelectTest < ActionDispatch::IntegrationTest
test "should select just two columns" do
- get "/arvados/v1/links", {:format => :json, :select => ['uuid', 'link_class']}, auth(:active)
+ get "/arvados/v1/links",
+ params: {:format => :json, :select => ['uuid', 'link_class']},
+ headers: auth(:active)
assert_response :success
assert_equal json_response['items'].count, json_response['items'].select { |i|
i.count == 3 and i['uuid'] != nil and i['link_class'] != nil
end
test "fewer distinct than total count" do
- get "/arvados/v1/links", {:format => :json, :select => ['link_class'], :distinct => false}, auth(:active)
+ get "/arvados/v1/links",
+ params: {:format => :json, :select => ['link_class'], :distinct => false},
+ headers: auth(:active)
assert_response :success
links = json_response['items']
- get "/arvados/v1/links", {:format => :json, :select => ['link_class'], :distinct => true}, auth(:active)
+ get "/arvados/v1/links",
+ params: {:format => :json, :select => ['link_class'], :distinct => true},
+ headers: auth(:active)
assert_response :success
distinct = json_response['items']
end
test "select with order" do
- get "/arvados/v1/links", {:format => :json, :select => ['uuid'], :order => ["uuid asc"]}, auth(:active)
+ get "/arvados/v1/links",
+ params: {:format => :json, :select => ['uuid'], :order => ["uuid asc"]},
+ headers: auth(:active)
assert_response :success
assert json_response['items'].length > 0
end
test "select with default order" do
- get "/arvados/v1/links", {format: :json, select: ['uuid']}, auth(:admin)
+ get "/arvados/v1/links",
+ params: {format: :json, select: ['uuid']},
+ headers: auth(:admin)
assert_response :success
uuids = json_response['items'].collect { |i| i['uuid'] }
assert_equal uuids, uuids.sort
end
test "select two columns with order" do
- get "/arvados/v1/links", {:format => :json, :select => ['link_class', 'uuid'], :order => ['link_class asc', "uuid desc"]}, auth(:active)
+ get "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :select => ['link_class', 'uuid'], :order => ['link_class asc', "uuid desc"]
+ },
+ headers: auth(:active)
assert_response :success
assert json_response['items'].length > 0
end
test "select two columns with old-style order syntax" do
- get "/arvados/v1/links", {:format => :json, :select => ['link_class', 'uuid'], :order => 'link_class asc, uuid desc'}, auth(:active)
+ get "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :select => ['link_class', 'uuid'], :order => 'link_class asc, uuid desc'
+ },
+ headers: auth(:active)
assert_response :success
assert json_response['items'].length > 0
}.each_pair do |resource, postdata|
test "create json-encoded #{resource.to_s}" do
post("/arvados/v1/#{resource.to_s.pluralize}",
- {resource => postdata.to_json}, auth(:admin_trustedclient))
+ params: {resource => postdata.to_json},
+ headers: auth(:admin_trustedclient))
assert_response :success
end
end
mock['info']['username'] = username unless username.nil?
mock['info']['identity_url'] = identity_url unless identity_url.nil?
post('/auth/josh_id/callback',
- {return_to: client_url(remote: remote)},
- {'omniauth.auth' => mock})
+ params: {return_to: client_url(remote: remote)},
+ headers: {'omniauth.auth' => mock})
errors = {
:redirect => 'Did not redirect to client with token',
test "setup user multiple times" do
repo_name = 'usertestrepo'
- post "/arvados/v1/users/setup", {
- repo_name: repo_name,
- openid_prefix: 'https://www.google.com/accounts/o8/id',
- user: {
- uuid: 'zzzzz-tpzed-abcdefghijklmno',
- first_name: "in_create_test_first_name",
- last_name: "test_last_name",
- email: "foo@example.com"
- }
- }, auth(:admin)
+ post "/arvados/v1/users/setup",
+ params: {
+ repo_name: repo_name,
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ user: {
+ uuid: 'zzzzz-tpzed-abcdefghijklmno',
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ },
+ headers: auth(:admin)
assert_response :success
verify_system_group_permission_link_for created['uuid']
# invoke setup again with the same data
- post "/arvados/v1/users/setup", {
- repo_name: repo_name,
- vm_uuid: virtual_machines(:testvm).uuid,
- openid_prefix: 'https://www.google.com/accounts/o8/id',
- user: {
- uuid: 'zzzzz-tpzed-abcdefghijklmno',
- first_name: "in_create_test_first_name",
- last_name: "test_last_name",
- email: "foo@example.com"
- }
- }, auth(:admin)
+ post "/arvados/v1/users/setup",
+ params: {
+ repo_name: repo_name,
+ vm_uuid: virtual_machines(:testvm).uuid,
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ user: {
+ uuid: 'zzzzz-tpzed-abcdefghijklmno',
+ first_name: "in_create_test_first_name",
+ last_name: "test_last_name",
+ email: "foo@example.com"
+ }
+ },
+ headers: auth(:admin)
assert_response 422 # cannot create another user with same UUID
# invoke setup on the same user
- post "/arvados/v1/users/setup", {
- repo_name: repo_name,
- vm_uuid: virtual_machines(:testvm).uuid,
- openid_prefix: 'https://www.google.com/accounts/o8/id',
- uuid: 'zzzzz-tpzed-abcdefghijklmno',
- }, auth(:admin)
+ post "/arvados/v1/users/setup",
+ params: {
+ repo_name: repo_name,
+ vm_uuid: virtual_machines(:testvm).uuid,
+ openid_prefix: 'https://www.google.com/accounts/o8/id',
+ uuid: 'zzzzz-tpzed-abcdefghijklmno',
+ },
+ headers: auth(:admin)
response_items = json_response['items']
end
test "setup user in multiple steps and verify response" do
- post "/arvados/v1/users/setup", {
- openid_prefix: 'http://www.example.com/account',
- user: {
- email: "foo@example.com"
- }
- }, auth(:admin)
+ post "/arvados/v1/users/setup",
+ params: {
+ openid_prefix: 'http://www.example.com/account',
+ user: {
+ email: "foo@example.com"
+ }
+ },
+ headers: auth(:admin)
assert_response :success
response_items = json_response['items']
nil, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
# invoke setup with a repository
- post "/arvados/v1/users/setup", {
- openid_prefix: 'http://www.example.com/account',
- repo_name: 'newusertestrepo',
- uuid: created['uuid']
- }, auth(:admin)
+ post "/arvados/v1/users/setup",
+ params: {
+ openid_prefix: 'http://www.example.com/account',
+ repo_name: 'newusertestrepo',
+ uuid: created['uuid']
+ },
+ headers: auth(:admin)
assert_response :success
nil, created['uuid'], 'arvados#virtualMachine', false, 'VirtualMachine'
# invoke setup with a vm_uuid
- post "/arvados/v1/users/setup", {
- vm_uuid: virtual_machines(:testvm).uuid,
- openid_prefix: 'http://www.example.com/account',
- user: {
- email: 'junk_email'
+ post "/arvados/v1/users/setup",
+ params: {
+ vm_uuid: virtual_machines(:testvm).uuid,
+ openid_prefix: 'http://www.example.com/account',
+ user: {
+ email: 'junk_email'
+ },
+ uuid: created['uuid']
},
- uuid: created['uuid']
- }, auth(:admin)
+ headers: auth(:admin)
assert_response :success
end
test "setup and unsetup user" do
- post "/arvados/v1/users/setup", {
- repo_name: 'newusertestrepo',
- vm_uuid: virtual_machines(:testvm).uuid,
- user: {email: 'foo@example.com'},
- openid_prefix: 'https://www.google.com/accounts/o8/id'
- }, auth(:admin)
+ post "/arvados/v1/users/setup",
+ params: {
+ repo_name: 'newusertestrepo',
+ vm_uuid: virtual_machines(:testvm).uuid,
+ user: {email: 'foo@example.com'},
+ openid_prefix: 'https://www.google.com/accounts/o8/id'
+ },
+ headers: auth(:admin)
assert_response :success
response_items = json_response['items']
verify_link_existence created['uuid'], created['email'], true, true, true, true, false
- post "/arvados/v1/users/#{created['uuid']}/unsetup", {}, auth(:admin)
+ post "/arvados/v1/users/#{created['uuid']}/unsetup", params: {}, headers: auth(:admin)
assert_response :success
end
test 'merge active into project_viewer account' do
- post('/arvados/v1/groups', {
- group: {
- group_class: 'project',
- name: "active user's stuff",
- },
- }, auth(:project_viewer))
+ post('/arvados/v1/groups',
+ params: {
+ group: {
+ group_class: 'project',
+ name: "active user's stuff",
+ },
+ },
+ headers: auth(:project_viewer))
assert_response(:success)
project_uuid = json_response['uuid']
- post('/arvados/v1/users/merge', {
- new_user_token: api_client_authorizations(:project_viewer_trustedclient).api_token,
- new_owner_uuid: project_uuid,
- redirect_to_new_user: true,
- }, auth(:active_trustedclient))
+ post('/arvados/v1/users/merge',
+ params: {
+ new_user_token: api_client_authorizations(:project_viewer_trustedclient).api_token,
+ new_owner_uuid: project_uuid,
+ redirect_to_new_user: true,
+ },
+ headers: auth(:active_trustedclient))
assert_response(:success)
- get('/arvados/v1/users/current', {}, auth(:active))
+ get('/arvados/v1/users/current', params: {}, headers: auth(:active))
assert_response(:success)
assert_equal(users(:project_viewer).uuid, json_response['uuid'])
- get('/arvados/v1/authorized_keys/' + authorized_keys(:active).uuid, {}, auth(:active))
+ get('/arvados/v1/authorized_keys/' + authorized_keys(:active).uuid,
+ params: {},
+ headers: auth(:active))
assert_response(:success)
assert_equal(users(:project_viewer).uuid, json_response['owner_uuid'])
assert_equal(users(:project_viewer).uuid, json_response['authorized_user_uuid'])
- get('/arvados/v1/repositories/' + repositories(:foo).uuid, {}, auth(:active))
+ get('/arvados/v1/repositories/' + repositories(:foo).uuid,
+ params: {},
+ headers: auth(:active))
assert_response(:success)
assert_equal(users(:project_viewer).uuid, json_response['owner_uuid'])
- get('/arvados/v1/groups/' + groups(:aproject).uuid, {}, auth(:active))
+ get('/arvados/v1/groups/' + groups(:aproject).uuid,
+ params: {},
+ headers: auth(:active))
assert_response(:success)
assert_equal(project_uuid, json_response['owner_uuid'])
end
test "tail must exist on update" do
admin_auth = {'HTTP_AUTHORIZATION' => "OAuth2 #{api_client_authorizations(:admin).api_token}"}
- post "/arvados/v1/links", {
- :format => :json,
- :link => {
- link_class: 'test',
- name: 'stuff',
- head_uuid: users(:active).uuid,
- tail_uuid: virtual_machines(:testvm).uuid
- }
- }, admin_auth
+ post "/arvados/v1/links",
+ params: {
+ :format => :json,
+ :link => {
+ link_class: 'test',
+ name: 'stuff',
+ head_uuid: users(:active).uuid,
+ tail_uuid: virtual_machines(:testvm).uuid
+ }
+ },
+ headers: admin_auth
assert_response :success
u = json_response['uuid']
- put "/arvados/v1/links/#{u}", {
- :format => :json,
- :link => {
- tail_uuid: virtual_machines(:testvm2).uuid
- }
- }, admin_auth
+ put "/arvados/v1/links/#{u}",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: virtual_machines(:testvm2).uuid
+ }
+ },
+ headers: admin_auth
assert_response :success
assert_equal virtual_machines(:testvm2).uuid, (ActiveSupport::JSON.decode @response.body)['tail_uuid']
- put "/arvados/v1/links/#{u}", {
- :format => :json,
- :link => {
- tail_uuid: 'zzzzz-tpzed-xyzxyzxerrrorxx'
- }
- }, admin_auth
+ put "/arvados/v1/links/#{u}",
+ params: {
+ :format => :json,
+ :link => {
+ tail_uuid: 'zzzzz-tpzed-xyzxyzxerrrorxx'
+ }
+ },
+ headers: admin_auth
assert_response 422
end
# SPDX-License-Identifier: AGPL-3.0
require 'test_helper'
-require 'rails/performance_test_help'
+require 'benchmark'
-class IndexTest < ActionDispatch::PerformanceTest
+class IndexTest < ActionDispatch::IntegrationTest
def test_links_index
- get '/arvados/v1/links', {format: :json}, auth(:admin)
+ puts("Get links index: ", Benchmark.measure do
+ get '/arvados/v1/links', params: {
+ limit: 1000,
+ format: :json
+ }, headers: auth(:admin)
+ end)
end
def test_links_index_with_filters
- get '/arvados/v1/links', {format: :json, filters: [%w[head_uuid is_a arvados#collection]].to_json}, auth(:admin)
+ puts("Get links index with filters: ", Benchmark.measure do
+ get '/arvados/v1/links', params: {
+ format: :json,
+ filters: [%w[head_uuid is_a arvados#collection]].to_json
+ }, headers: auth(:admin)
+ end)
end
def test_collections_index
- get '/arvados/v1/collections', {format: :json}, auth(:admin)
+ puts("Get collections index: ", Benchmark.measure do
+ get '/arvados/v1/collections', params: {
+ format: :json
+ }, headers: auth(:admin)
+ end)
end
end
end
end
end
+ User.invalidate_permissions_cache
end
end)
end
puts "Time spent getting group index:"
(0..4).each do
puts(Benchmark.measure do
- get '/arvados/v1/groups', {format: :json, limit: 1000}, auth(:permission_perftest)
+ get '/arvados/v1/groups', params: {format: :json, limit: 1000}, headers: auth(:permission_perftest)
assert json_response['items_available'] >= n
end)
end
class ActionController::TestCase
setup do
@test_counter = 0
+ self.request.headers['Accept'] = 'application/json'
+ self.request.headers['Content-Type'] = 'application/json'
end
def check_counter action
[:get, :post, :put, :patch, :delete].each do |method|
define_method method do |action, *args|
check_counter action
+      # After the Rails 5.0 upgrade, some params don't get properly serialized.
+      # One such case is filters: [['attr', 'op', 'val']] becomes [['attr'], ['op'], ['val']]
+      # if not passed upstream as a JSON string.
+ if args[0].is_a?(Hash) && args[0][:params].is_a?(Hash)
+ args[0][:params].each do |key, _|
+ next if key == :exclude_script_versions # Job Reuse tests
+ # Keys could be: :filters, :where, etc
+ if [Array, Hash].include?(args[0][:params][key].class)
+ args[0][:params][key] = SafeJSON.dump(args[0][:params][key])
+ end
+ end
+ end
super action, *args
end
end
all_tables = ActiveRecord::Base.connection.tables
all_tables.delete 'schema_migrations'
all_tables.delete 'permission_refresh_lock'
+ all_tables.delete 'ar_internal_metadata'
all_tables.each do |table|
table_class = table.classify.constantize
indexes = ActiveRecord::Base.connection.indexes(table)
search_index_by_columns = indexes.select do |index|
- index.columns.sort == search_index_columns.sort
+      # After the Rails 5.0 upgrade, AR::Base.connection.indexes() started to include
+      # GIN indexes, with their 'columns' attribute being a String like
+      # 'to_tsvector(...)'
+ index.columns.is_a?(Array) ? index.columns.sort == search_index_columns.sort : false
end
search_index_by_name = indexes.select do |index|
index.name == "#{table}_search_index"
runtime_status: {'warning' => 'This is not an error'},
progress: 0.15})
c_faster_started_second.update_attributes!({state: Container::Locked})
+ assert_equal 0, Container.where("runtime_status->'error' is not null").count
c_faster_started_second.update_attributes!({state: Container::Running,
runtime_status: {'error' => 'Something bad happened'},
progress: 0.2})
+ assert_equal 1, Container.where("runtime_status->'error' is not null").count
reused = Container.find_reusable(common_attrs)
assert_not_nil reused
# Selected the non-failing container even if it's the one with less progress done
begin
pid = Process.fork do
begin
- # Abandon database connections inherited from parent
- # process. Credit to
- # https://github.com/kstephens/rails_is_forked
- ActiveRecord::Base.connection_handler.connection_pools.each_value do |pool|
- pool.instance_eval do
- @reserved_connections = {}
- @connections = []
- end
- end
- ActiveRecord::Base.establish_connection
-
dispatch = CrunchDispatch.new
dispatch.stubs(:did_recently).returns true
dispatch.run []
test 'enable legacy api configuration option = auto, has jobs' do
Rails.configuration.enable_legacy_jobs_api = "auto"
+ assert Job.count > 0
+ assert_equal [], Rails.configuration.disable_api_methods
check_enable_legacy_jobs_api
assert_equal [], Rails.configuration.disable_api_methods
end
act_as_system_user do
Job.destroy_all
end
- puts "ZZZ #{Job.count}"
+ assert_equal 0, Job.count
+ assert_equal [], Rails.configuration.disable_api_methods
check_enable_legacy_jobs_api
assert_equal Disable_jobs_api_method_list, Rails.configuration.disable_api_methods
end