Merge branch 'master' into 6588-documentation
author    radhika <radhika@curoverse.com>
Fri, 31 Jul 2015 15:36:28 +0000 (11:36 -0400)
committer radhika <radhika@curoverse.com>
Fri, 31 Jul 2015 15:36:28 +0000 (11:36 -0400)
32 files changed:
apps/workbench/Gemfile.lock
apps/workbench/app/helpers/pipeline_instances_helper.rb
apps/workbench/app/views/pipeline_instances/_running_component.html.erb
apps/workbench/app/views/pipeline_instances/_show_components_running.html.erb
apps/workbench/test/integration/pipeline_instances_test.rb
apps/workbench/test/integration/user_manage_account_test.rb
doc/_includes/_arv_copy_expectations.liquid
doc/_includes/_install_git_curl.liquid [new file with mode: 0644]
doc/_includes/_install_ruby_and_bundler.liquid
doc/install/install-docker.html.textile.liquid
doc/install/install-sso.html.textile.liquid
doc/sdk/cli/install.html.textile.liquid
sdk/cli/bin/crunch-job
sdk/cli/test/binstub_clean_fail/mount [new file with mode: 0755]
sdk/cli/test/binstub_docker_noop/docker.io [new file with mode: 0755]
sdk/cli/test/binstub_sanity_check/docker.io [new file with mode: 0755]
sdk/cli/test/binstub_sanity_check/true [new file with mode: 0755]
sdk/cli/test/test_arv-collection-create.rb
sdk/cli/test/test_arv-get.rb
sdk/cli/test/test_arv-put.rb
sdk/cli/test/test_arv-run-pipeline-instance.rb
sdk/cli/test/test_arv-tag.rb
sdk/cli/test/test_crunch-job.rb [new file with mode: 0644]
sdk/perl/Makefile.PL
sdk/python/arvados/commands/arv_copy.py
services/api/app/controllers/arvados/v1/repositories_controller.rb
services/api/app/models/arvados_model.rb
services/api/app/models/authorized_key.rb
services/api/app/models/commit.rb
services/api/test/fixtures/links.yml
services/api/test/functional/arvados/v1/repositories_controller_test.rb
services/api/test/unit/authorized_key_test.rb

index bc7ddaf7eda77c9a53df1a5c7e5839319b423042..20b8d6164ccca273e11756928a21c1a17851f07a 100644 (file)
@@ -294,6 +294,3 @@ DEPENDENCIES
   therubyracer
   uglifier (>= 1.0.3)
   wiselinks
-
-BUNDLED WITH
-   1.10.5
index ba05f9e88cd7cd2f0345c993d5e60a7603e2bb8c..8fafbc2022d5873032d1f9565c2385a26f4a794b 100644 (file)
@@ -289,7 +289,7 @@ module PipelineInstancesHelper
     else
       s = ""
       if days > 0
-        s += "#{days}<span class='time-label-divider'>d</span> "
+        s += "#{days}<span class='time-label-divider'>d</span>"
       end
 
       if (hours > 0)
@@ -298,7 +298,7 @@ module PipelineInstancesHelper
 
       s += "#{minutes}<span class='time-label-divider'>m</span>"
 
-      if not round_to_min
+      if not round_to_min or (days == 0 and hours == 0 and minutes == 0)
         s += "#{seconds}<span class='time-label-divider'>s</span>"
       end
     end
index 63075f7e66018ff675cfea93efecbd9aef5243ec..63a2371a1b3dd2dc398c9b672311bd606d64bdf7 100644 (file)
           <div class="col-md-3">
             <% if current_job[:started_at] %>
               <% walltime = ((if current_job[:finished_at] then current_job[:finished_at] else Time.now() end) - current_job[:started_at]) %>
-              <% cputime = tasks.map { |task|
-                   if task.started_at and task.job_uuid == current_job[:uuid]
-                      finished_at = task.finished_at || current_job[:finished_at] || Time.now()
-                      finished_at - task.started_at
-                   else
-                     0
-                   end
-                 }.reduce(:+) || 0 %>
-              <%= render_runtime(walltime, false, false) %>
-              <% if cputime > 0 %> / <%= render_runtime(cputime, false, false) %> (<%= (cputime/walltime).round(1) %>&Cross;)<% end %>
+              <% cputime = (current_job[:runtime_constraints].andand[:min_nodes] || 1) *
+                           ((current_job[:finished_at] || Time.now()) - current_job[:started_at]) %>
+              <%= render_runtime(walltime, false) %>
+              <% if cputime > 0 %> / <%= render_runtime(cputime, false) %> (<%= (cputime/walltime).round(1) %>&Cross;)<% end %>
             <% end %>
           </div>
           <% end %>
@@ -41,7 +35,7 @@
             <%# column offset 5 %>
             <div class="col-md-6">
               <% queuetime = Time.now - Time.parse(current_job[:created_at].to_s) %>
-              Queued for <%= render_runtime(queuetime, true) %>.
+              Queued for <%= render_runtime(queuetime, false) %>.
               <% begin %>
                 <% if current_job[:queue_position] == 0 %>
                   This job is next in the queue to run.
index b0b8601a0c326a0d03ac1cab12039299fa0ead3d..566e3d771e12796b8d8ebb7e273e34fe499def33 100644 (file)
@@ -12,7 +12,6 @@
 
 <% pipeline_jobs = render_pipeline_jobs %>
 <% job_uuids = pipeline_jobs.map { |j| j[:job].andand[:uuid] }.compact %>
-<% job_uuids_finished = {}; pipeline_jobs.map { |j| job_uuids_finished[j[:job].andand[:uuid]] = j[:job].andand[:finished_at] } %>
 
 <% if @object.state == 'Paused' %>
   <p>
@@ -21,7 +20,6 @@
   </p>
 <% end %>
 
-<% tasks = JobTask.filter([['job_uuid', 'in', job_uuids]]).results %>
 <% runningtime = determine_wallclock_runtime(pipeline_jobs.map {|j| j[:job]}.compact) %>
 
 <p>
@@ -43,9 +41,9 @@
                   end %>
 
     <%= if walltime > runningtime
-          render_runtime(walltime, true, false)
+          render_runtime(walltime, false)
         else
-          render_runtime(runningtime, true, false)
+          render_runtime(runningtime, false)
         end %><% if @object.finished_at %> at <%= render_localized_date(@object.finished_at) %><% end %>.
     <% else %>
       This pipeline is <%= if @object.state.start_with? 'Running' then 'active' else @object.state.downcase end %>.
       ran
     <% end %>
     for
-    <% cputime = tasks.map { |task|
-        if task.started_at
-          finished_at = task.finished_at || job_uuids_finished[task.job_uuid] || Time.now()
-          finished_at - task.started_at
+    <%
+        cputime = pipeline_jobs.map { |j|
+        if j[:job][:started_at]
+          (j[:job][:runtime_constraints].andand[:min_nodes] || 1) * ((j[:job][:finished_at] || Time.now()) - j[:job][:started_at])
         else
           0
         end
        }.reduce(:+) || 0 %>
-    <%= render_runtime(runningtime, true, false) %><% if (walltime - runningtime) > 0 %>
-      (<%= render_runtime(walltime - runningtime, true, false) %> queued)<% end %><% if cputime == 0 %>.<% else %>
+    <%= render_runtime(runningtime, false) %><% if (walltime - runningtime) > 0 %>
+      (<%= render_runtime(walltime - runningtime, false) %> queued)<% end %><% if cputime == 0 %>.<% else %>
       and used
-    <%= render_runtime(cputime, true, false) %>
-    of CPU time (<%= (cputime/runningtime).round(1) %>&Cross; scaling).
+    <%= render_runtime(cputime, false) %>
+    of node allocation time (<%= (cputime/runningtime).round(1) %>&Cross; scaling).
     <% end %>
 </p>
 
@@ -99,5 +97,5 @@
 %>
 
 <% pipeline_jobs.each_with_index do |pj, i| %>
-  <%= render partial: 'running_component', locals: {tasks: tasks, pj: pj, i: i, expanded: false} %>
+  <%= render partial: 'running_component', locals: {pj: pj, i: i, expanded: false} %>
 <% end %>
index da8f439dfe2df0a310d6bc7837968033bae49f29..b6bf700d0948a1eb85a8ed00fe0c30fc6ae6e037 100644 (file)
@@ -469,7 +469,7 @@ class PipelineInstancesTest < ActionDispatch::IntegrationTest
       page_text = page.text
 
       if run_time
-        match = /This pipeline started at (.*)\. It failed after (.*) seconds at (.*)\. Check the Log/.match page_text
+        match = /This pipeline started at (.*)\. It failed after (.*) at (.*)\. Check the Log/.match page_text
       else
         match = /This pipeline started at (.*). It has been active for(.*)/.match page_text
       end
index cc28b276c82186cbd991b2fb87569ead7bc067d2..1b80daf03ce49905e8ed4097496544c958a432bf 100644 (file)
@@ -42,13 +42,13 @@ class UserManageAccountTest < ActionDispatch::IntegrationTest
 
         page.find_field('public_key').set 'first test with an incorrect ssh key value'
         click_button 'Submit'
-        assert page.has_text?('Public key does not appear to be a valid ssh-rsa or dsa public key'), 'No text - Public key does not appear to be a valid'
+        assert_text 'Public key does not appear to be a valid ssh-rsa or dsa public key'
 
         public_key_str = api_fixture('authorized_keys')['active']['public_key']
         page.find_field('public_key').set public_key_str
         page.find_field('name').set 'added_in_test'
         click_button 'Submit'
-        assert page.has_text?('Public key already exists in the database, use a different key.'), 'No text - Public key already exists'
+        assert_text 'Public key already exists in the database, use a different key.'
 
         new_key = SSHKey.generate
         page.find_field('public_key').set new_key.ssh_public_key
@@ -57,7 +57,7 @@ class UserManageAccountTest < ActionDispatch::IntegrationTest
       end
 
       # key must be added. look for it in the refreshed page
-      assert page.has_text?('added_in_test'), 'No text - added_in_test'
+      assert_text 'added_in_test'
   end
 
   [
index 6db78e8bfa4be918e25e4ebd455b9671a6e387b6..a76c9e7c255b0d8be82c6d480a8f43337475062a 100644 (file)
@@ -1,3 +1,6 @@
 {% include 'notebox_begin' %}
-This part of the tutorial assumes that you have a working git repository in the destination cluster. If you do not have a repository created, you can follow the "Adding a new repository":{{site.baseurl}}/user/tutorials/add-new-repository.html page. We will use the *tutorial* repository created in that page as the example.
+As stated above, arv-copy is recursive by default and requires a working git repository in the destination cluster. If you do not have a repository created, you can follow the "Adding a new repository":{{site.baseurl}}/user/tutorials/add-new-repository.html page. We will use the *tutorial* repository created in that page as the example.
+
+<br/>In addition, arv-copy requires git when copying to a git repository. Please make sure that git is installed and available.
+
 {% include 'notebox_end' %}
diff --git a/doc/_includes/_install_git_curl.liquid b/doc/_includes/_install_git_curl.liquid
new file mode 100644 (file)
index 0000000..edf07a7
--- /dev/null
@@ -0,0 +1,13 @@
+On a Debian-based system, install the following packages:
+
+<notextile>
+<pre><code>~$ <span class="userinput">sudo apt-get install git curl</span>
+</code></pre>
+</notextile>
+
+On a Red Hat-based system, install the following packages:
+
+<notextile>
+<pre><code>~$ <span class="userinput">sudo yum install git curl</span>
+</code></pre>
+</notextile>
index 55627b4d5af50ef3ccce2d9657d131e8e163275c..369bf46e476b5b1cc1ce5429f08ecadd9e8c031d 100644 (file)
@@ -1,11 +1,10 @@
 Currently, only Ruby 2.1 is supported.
 
-h4(#rvm). *Option 1: Install with rvm*
+h4(#rvm). *Option 1: Install with RVM*
 
 <notextile>
 <pre><code><span class="userinput">sudo gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3
 \curl -sSL https://get.rvm.io | sudo bash -s stable --ruby=2.1
-sudo -i gem install bundler
 sudo adduser "$USER" rvm
 </span></code></pre></notextile>
 
@@ -15,6 +14,12 @@ Either log out and log back in to activate RVM, or explicitly load it in all ope
 <pre><code><span class="userinput">source /usr/local/rvm/scripts/rvm
 </span></code></pre></notextile>
 
+Once RVM is activated in your shell, install Bundler:
+
+<notextile>
+<pre><code>~$ <span class="userinput">gem install bundler</span>
+</code></pre></notextile>
+
 h4(#fromsource). *Option 2: Install from source*
 
 Install prerequisites for Debian 7 or 8:
index 374428919dc714045009e25b8dccb26deced6f8e..1566e80a9cc4a7f6519793f101cc5662177e6f40 100644 (file)
@@ -11,7 +11,11 @@ h2. Prerequisites
 # A GNU/Linux (virtual) machine
 # A working Docker installation (see "Installing Docker":https://docs.docker.com/installation/)
 # A working Go installation (see "Install the Go tools":https://golang.org/doc/install)
-# A working Ruby installation (see "Install Ruby and bundler":install-manual-prerequisites-ruby.html)
+# A working Ruby installation, with the Bundler gem installed
+
+h3. Install Ruby and Bundler
+
+{% include 'install_ruby_and_bundler' %}
 
 h2. Download the source tree
 
index 3efe124ca2cfc5ef80817da7c097ea23440882ef..4fe1fb157b903ea30447114477e02a2f1c1947ae 100644 (file)
@@ -6,6 +6,10 @@ title: Install Single Sign On (SSO) server
 
 h2(#dependencies). Install dependencies
 
+h3(#install_git_curl). Install git and curl
+
+{% include 'install_git_curl' %}
+
 h3(#install_ruby_and_bundler). Install Ruby and Bundler
 
 {% include 'install_ruby_and_bundler' %}
@@ -22,7 +26,7 @@ h3. Get SSO server code and run bundle
 <pre><code>~$ <span class="userinput">cd $HOME</span> # (or wherever you want to install)
 ~$ <span class="userinput">git clone https://github.com/curoverse/sso-devise-omniauth-provider.git</span>
 ~$ <span class="userinput">cd sso-devise-omniauth-provider</span>
-~/sso-devise-omniauth-provider$ <span class="userinput">bundle</span>
+~/sso-devise-omniauth-provider$ <span class="userinput">bundle install --without=development</span>
 </code></pre></notextile>
 
 h2. Configure the SSO server
@@ -33,13 +37,20 @@ First, copy the example configuration file:
 <pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">cp -i config/application.yml.example config/application.yml</span>
 </code></pre></notextile>
 
-The SSO server reads the @config/application.yml@ file, as well as the @config/application.defaults.yml@ file. Values in @config/application.yml@ take precedence over the defaults that are defined in @config/application.defaults.yml@. The @config/application.yml.example@ file is not read by the SSO server and is provided for installation convenience, only.
+The SSO server reads the @config/application.yml@ file, as well as the @config/application.defaults.yml@ file. Values in @config/application.yml@ take precedence over the defaults that are defined in @config/application.defaults.yml@. The @config/application.yml.example@ file is not read by the SSO server and is provided for installation convenience only.
 
-Consult @config/application.default.yml@ for a full list of configuration options. Always put your local configuration in @config/application.yml@, never edit @config/application.default.yml@.
+Consult @config/application.default.yml@ for a full list of configuration options.  Local configuration goes in @config/application.yml@; do not edit @config/application.default.yml@.
 
 h3(#uuid_prefix). uuid_prefix
 
-Define your @uuid_prefix@ in @config/application.yml@ by setting the @uuid_prefix@ field in the section for your environment.  This prefix is used for all database identifiers to identify the record as originating from this site.  It must be exactly 5 alphanumeric characters (lowercase ASCII letters and digits).
+Generate a uuid prefix for the single sign on service.  This prefix is used to identify user records as originating from this site.  It must be exactly 5 lowercase ASCII letters and/or digits.  You may use the following snippet to generate a uuid prefix:
+
+<notextile>
+<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">ruby -e 'puts "#{rand(2**64).to_s(36)[0,5]}"'</span>
+abcde
+</code></pre></notextile>
+
+Edit @config/application.yml@ and set @uuid_prefix@ in the "common" section.
 
 h3(#secret_token). secret_token
 
@@ -50,89 +61,9 @@ Generate a new secret token for signing cookies:
 zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz
 </code></pre></notextile>
 
-Then put that value in the @secret_token@ field.
-
-h3(#authentication_methods). Authentication methods
+Edit @config/application.yml@ and set @secret_token@ in the "common" section.
 
-Three authentication methods are supported: Google+, LDAP, and local accounts.
-
-h3(#google). Google+ authentication
-
-In order to use Google+ authentication, you must use the "Google Developers Console":https://console.developers.google.com to create a set of client credentials. In short:
-
-* Enable the Contacts and Google+ APIs.
-* Create an OAuth Client ID for a web application.
-** JavaScript origins: @https://sso.example.com/@
-** Redirect URIs: @https://sso.example.com/auth/google_oauth2/callback@
-
-Copy the "Client ID" and "Client secret" values from the Google Developers Console into the Google section of @config/application.yml@, like this:
-
-<notextile>
-<pre><code>
-  # Google API tokens required for OAuth2 login.
-  #
-  # See https://github.com/zquestz/omniauth-google-oauth2
-  #
-  # and https://developers.google.com/accounts/docs/OAuth2
-  google_oauth2_client_id: "<span class="userinput">---YOUR---CLIENT---ID---HERE---</span>"
-  google_oauth2_client_secret: "<span class="userinput">---YOUR---CLIENT---SECRET---HERE---</span>"
-
-  # Set this to your OpenId 2.0 realm to enable migration from Google OpenId
-  # 2.0 to Google OAuth2 OpenId Connect (Google will provide OpenId 2.0 user
-  # identifiers via the openid.realm parameter in the OAuth2 flow until 2017).
-  google_openid_realm: <span class="userinput">false</span></code></pre></notextile>
-
-h3(#ldap). LDAP authentication
-
-LDAP authentication can be configured with these options. Make sure to preserve the indentation of the fields beyond @use_ldap@.
-
-<pre>
-  # Enable LDAP support.
-  #
-  # If you want to use LDAP, you need to provide
-  # the following set of fields under the use_ldap key.
-  #
-  # use_ldap: false
-  #   title: Example LDAP
-  #   host: ldap.example.com
-  #   port: 636
-  #   method: ssl
-  #   base: "ou=Users, dc=example, dc=com"
-  #   uid: uid
-  #   email_domain: example.com
-  #   #bind_dn: "some_user"
-  #   #password: "some_password"
-  use_ldap: false
-</pre>
-
-h3(#local_accounts). Local account authentication
-
-If neither Google OAuth2 nor LDAP are enabled, the SSO server automatically
-falls back to local accounts. There are two configuration options for local
-accounts:
-
-<pre>
-  # If true, allow new creation of new accounts in the SSO server's internal
-  # user database.
-  allow_account_registration: false
-
-  # If true, send an email confirmation before activating new accounts in the
-  # SSO server's internal user database.
-  require_email_confirmation: false
-</pre>
-
-You can also create local accounts on the SSO server from the rails console:
-
-<notextile>
-<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rails console</span>
-:001 &gt; <span class="userinput">user = User.new(:email =&gt; "test@example.com")</span>
-:002 &gt; <span class="userinput">user.password = "passw0rd"</span>
-:003 &gt; <span class="userinput">user.save!</span>
-:004 &gt; <span class="userinput">quit</span>
-</code></pre>
-</notextile>
-
-h2. Set up the database
+h2(#database). Set up the database
 
 Generate a new database password. Nobody ever needs to memorize it or type it, so make a strong one:
 
@@ -149,10 +80,10 @@ Enter password for new role: <span class="userinput">paste-database-password-you
 Enter it again: <span class="userinput">paste-database-password-you-generated</span>
 </code></pre></notextile>
 
-Configure SSO server to connect to your database by creating and updating @config/database.yml@. Replace the @xxxxxxxx@ database password placeholders with the new password you generated above.
+Configure SSO server to connect to your database by creating and updating @config/database.yml@. Replace the @xxxxxxxx@ database password placeholders with the new password you generated above.  If you are planning a production system, update the @production@ section, otherwise use @development@.
 
 <notextile>
-<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">cp -i config/database.yml.sample config/database.yml</span>
+<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">cp -i config/database.yml.example config/database.yml</span>
 ~/sso-devise-omniauth-provider$ <span class="userinput">edit config/database.yml</span>
 </code></pre></notextile>
 
@@ -165,23 +96,14 @@ Create and initialize the database. If you are planning a production system, cho
 Alternatively, if the database user you intend to use for the SSO server is not allowed to create new databases, you can create the database first and then populate it with rake. Be sure to adjust the database name if you are using the @development@ environment. This sequence of commands is functionally equivalent to the rake db:setup command above:
 
 <notextile>
-<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">su postgres createdb arvados_sso_production -E UTF8 -O arvados_sso</span>
+<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">sudo -u postgres createdb arvados_sso_production -E UTF8 -O arvados_sso -T template0</span>
 ~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rake db:schema:load</span>
 ~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rake db:seed</span>
 </code></pre></notextile>
 
-h2(#assets). Generate assets
-
-If you are running in the production environment, you'll want to generate the assets:
-
-<notextile>
-<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rake assets:precompile</span>
-</code></pre>
-</notextile>
-
 h2(#client). Create arvados-server client
 
-Use @rails console@ to create a @Client@ record that will be used by the Arvados API server.  The values of @app_id@ and @app_secret@ correspond to the @APP_ID@ and @APP_SECRET@ that must be set in in "Setting up Omniauth in the API server.":install-api-server.html#omniauth
+Use @rails console@ to create a @Client@ record that will be used by the Arvados API server.  The values of @app_id@ and @app_secret@ correspond to the values for @sso_app_id@ and @sso_app_secret@ in the "API server's SSO settings.":install-api-server.html#omniauth
 
 <notextile>
 <pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">ruby -e 'puts rand(2**400).to_s(36)'</span>
@@ -196,17 +118,121 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 </code></pre>
 </notextile>
 
-h2. Start the SSO server
+h2(#assets). Precompile assets
+
+If you are running in the production environment, you must precompile the assets:
+
+<notextile>
+<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rake assets:precompile</span>
+</code></pre>
+</notextile>
+
+h2(#authentication_methods). Authentication methods
+
+Authentication methods are configured in @application.yml@.  Currently three authentication methods are supported: local accounts, LDAP, and Google+.  If neither Google+ nor LDAP is enabled, the SSO server defaults to local user accounts.  Only one authentication mechanism should be in use at a time.
+
+h3(#local_accounts). Local account authentication
+
+There are two configuration options for local accounts:
+
+<pre>
+  # If true, allow new creation of new accounts in the SSO server's internal
+  # user database.
+  allow_account_registration: false
+
+  # If true, send an email confirmation before activating new accounts in the
+  # SSO server's internal user database (otherwise users are activated immediately.)
+  require_email_confirmation: false
+</pre>
+
+For more information about configuring backend support for sending email (required to send email confirmations) see "Configuring Action Mailer":http://guides.rubyonrails.org/configuring.html#configuring-action-mailer
+
+If @allow_account_registration@ is false, you may manually create local accounts on the SSO server from the rails console:
+
+<notextile>
+<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rails console</span>
+:001 &gt; <span class="userinput">user = User.new(:email =&gt; "test@example.com")</span>
+:002 &gt; <span class="userinput">user.password = "passw0rd"</span>
+:003 &gt; <span class="userinput">user.save!</span>
+:004 &gt; <span class="userinput">quit</span>
+</code></pre>
+</notextile>
+
+h3(#ldap). LDAP authentication
+
+The following options are available to configure LDAP authentication.  Note that you must preserve the indentation of the fields listed under @use_ldap@.
+
+<pre>
+  use_ldap:
+    title: Example LDAP
+    host: ldap.example.com
+    port: 636
+    method: ssl
+    base: "ou=Users, dc=example, dc=com"
+    uid: uid
+    email_domain: example.com
+    #bind_dn: "some_user"
+    #password: "some_password"
+</pre>
+
+table(table).
+|_. Option|_. Description|
+|title |Title displayed to the user on the login page|
+|host  |LDAP server hostname|
+|port  |LDAP server port|
+|method|One of "plain", "ssl", "tls"|
+|base  |Directory lookup base|
+|uid   |User id field used for directory lookup|
+|email_domain|Strip off specified email domain from login and perform lookup on bare username|
+|bind_dn|If required by server, username to log in with before performing directory lookup|
+|password|If required by server, password to log in with before performing directory lookup|
+
+h3(#google). Google+ authentication
+
+In order to use Google+ authentication, you must use the <a href="https://console.developers.google.com" target="_blank">Google Developers Console</a> to create a set of client credentials.
+
+# Go to the <a href="https://console.developers.google.com" target="_blank">Google Developers Console</a> and select or create a project; this will take you to the project page.
+# On the sidebar, click on *APIs & auth* then select *APIs*.
+## Search for *Contacts API* and click on *Enable API*.
+## Search for *Google+ API* and click on *Enable API*.
+# On the sidebar, click on *Credentials*; under *OAuth* click on *Create new Client ID* to bring up the *Create Client ID* dialog box.
+# Under *Application type* select *Web application*.
+# If the authorization origins are not displayed, clicking on *Create Client ID* will take you to *Consent screen* settings.
+## On consent screen settings, enter the appropriate details and click on *Save*.
+## This will return you to the *Create Client ID* dialog box.
+# You must set the authorization origins.  Edit @sso.your-site.com@ to the appropriate hostname that you will use to access the SSO service:
+## JavaScript origin should be @https://sso.your-site.com/@
+## Redirect URI should be @https://sso.your-site.com/users/auth/google_oauth2/callback@
+# Copy the values of *Client ID* and *Client secret* from the Google Developers Console into the Google section of @config/application.yml@, like this:
 
-h3. Run a simple standalone server
+<notextile>
+<pre><code>  # Google API tokens required for OAuth2 login.
+  google_oauth2_client_id: <span class="userinput">"---YOUR---CLIENT---ID---HERE---"</span>
+  google_oauth2_client_secret: <span class="userinput">"---YOUR---CLIENT---SECRET---HERE---"</span></code></pre></notextile>
+
+h2(#start). Start the SSO server
 
-You can use the Webrick server that is bundled with Ruby to quickly verify that your installation is functioning:
+h3. Run a standalone passenger server
 
 <notextile>
-<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production bundle exec rails server</span>
+<pre><code>~/sso-devise-omniauth-provider$ <span class="userinput">RAILS_ENV=production passenger start</span>
+=============== Phusion Passenger Standalone web server started ===============
+...
 </code></pre>
 </notextile>
 
-h3. Production environment
+You can now test your installation by going to the page reported by passenger as "Accessible via: ..."
 
-As a Ruby on Rails application, the SSO server should be compatible with any Ruby application server that supports Rack applications.  We recommend "Passenger":https://www.phusionpassenger.com/ to run the SSO server in production.
+Note: if you get the following warning, "you may safely ignore it:":https://stackoverflow.com/questions/10374871/no-secret-option-provided-to-racksessioncookie-warning
+
+<pre>
+Connecting to database specified by database.yml
+App 4574 stderr:         SECURITY WARNING: No secret option provided to Rack::Session::Cookie.
+App 4574 stderr:         This poses a security threat. It is strongly recommended that you
+App 4574 stderr:         provide a secret to prevent exploits that may be possible from crafted
+App 4574 stderr:         cookies. This will not be supported in future versions of Rack, and
+App 4574 stderr:         future versions will even invalidate your existing user cookies.
+App 4574 stderr:
+App 4574 stderr:         Called from: /var/lib/gems/2.1.0/gems/actionpack-3.2.8/lib/action_dispatch/middleware/session/abstract_store.rb:28:in `initialize'.
+App 4592 stdout:
+</pre>
index df5507702443103be54c9db56122404c9de9b05b..9db56b9bbd1bdad70711e0505a0c6fdb85634fd7 100644 (file)
@@ -8,9 +8,9 @@ title: "Installation"
 
 To use the @arv@ command, you can either install the @arvados-cli@ gem via RubyGems or build and install the package from source.
 
-h4. Prerequisites: Ruby &gt;= 2.1.0 and curl libraries
+h3. Prerequisites: Ruby, Bundler, and curl libraries
 
-Make sure you have "Ruby and bundler":{{site.baseurl}}/install/install-manual-prerequisites-ruby.html installed.
+{% include 'install_ruby_and_bundler' %}
 
 Install curl libraries with your system's package manager. For example, on Debian or Ubuntu:
 
@@ -20,7 +20,7 @@ $ <code class="userinput">sudo apt-get install libcurl3 libcurl3-gnutls libcurl4
 </pre>
 </notextile>
 
-h4. Option 1: install with RubyGems
+h3. Option 1: Install with RubyGems
 
 <notextile>
 <pre>
@@ -28,7 +28,7 @@ $ <code class="userinput">sudo gem install arvados-cli</code>
 </pre>
 </notextile>
 
-h4. Option 2: build and install from source
+h3. Option 2: Build and install from source
 
 <notextile>
 <pre>
index c84d89bf4a5533c0549789fb88f19dd8de1b1ee2..13001e7f92d5962fa9917dc74bc96b4710f953ff 100755 (executable)
@@ -126,7 +126,7 @@ my $jobspec;
 my $job_api_token;
 my $no_clear_tmp;
 my $resume_stash;
-my $docker_bin = "/usr/bin/docker.io";
+my $docker_bin = "docker.io";
 GetOptions('force-unlock' => \$force_unlock,
            'git-dir=s' => \$git_dir,
            'job=s' => \$jobspec,
@@ -169,8 +169,7 @@ if ($jobspec =~ /^[-a-z\d]+$/)
 }
 else
 {
-  $Job = JSON::decode_json($jobspec);
-  $local_job = 1;
+  $local_job = JSON::decode_json($jobspec);
 }
 
 
@@ -178,7 +177,7 @@ else
 # at least able to run basic commands: they aren't down or severely
 # misconfigured.
 my $cmd = ['true'];
-if ($Job->{docker_image_locator}) {
+if (($Job || $local_job)->{docker_image_locator}) {
   $cmd = [$docker_bin, 'ps', '-q'];
 }
 Log(undef, "Sanity check is `@$cmd`");
@@ -208,15 +207,15 @@ else
 {
   if (!$resume_stash)
   {
-    map { croak ("No $_ specified") unless $Job->{$_} }
+    map { croak ("No $_ specified") unless $local_job->{$_} }
     qw(script script_version script_parameters);
   }
 
-  $Job->{'is_locked_by_uuid'} = $User->{'uuid'};
-  $Job->{'started_at'} = gmtime;
-  $Job->{'state'} = 'Running';
+  $local_job->{'is_locked_by_uuid'} = $User->{'uuid'};
+  $local_job->{'started_at'} = gmtime;
+  $local_job->{'state'} = 'Running';
 
-  $Job = api_call("jobs/create", job => $Job);
+  $Job = api_call("jobs/create", job => $local_job);
 }
 $job_id = $Job->{'uuid'};
 
@@ -396,7 +395,7 @@ if (!defined $no_clear_tmp) {
     # TODO: When #5036 is done and widely deployed, we can get rid of the
     # regular expression and just unmount everything with type fuse.keep.
     srun (["srun", "--nodelist=$nodelist", "-D", $ENV{'TMPDIR'}],
-          ['bash', '-ec', 'mount -t fuse,fuse.keep | awk \'($3 ~ /\ykeep\y/){print $3}\' | xargs -r -n 1 fusermount -u -z; sleep 1; rm -rf $JOB_WORK $CRUNCH_INSTALL $CRUNCH_TMP/task $CRUNCH_TMP/src* $CRUNCH_TMP/*.cid']);
+          ['bash', '-ec', '-o', 'pipefail', 'mount -t fuse,fuse.keep | awk \'($3 ~ /\ykeep\y/){print $3}\' | xargs -r -n 1 fusermount -u -z; sleep 1; rm -rf $JOB_WORK $CRUNCH_INSTALL $CRUNCH_TMP/task $CRUNCH_TMP/src* $CRUNCH_TMP/*.cid']);
     exit (1);
   }
   while (1)
@@ -405,7 +404,10 @@ if (!defined $no_clear_tmp) {
     freeze_if_want_freeze ($cleanpid);
     select (undef, undef, undef, 0.1);
   }
-  Log (undef, "Cleanup command exited ".exit_status_s($?));
+  if ($?) {
+    Log(undef, "Clean work dirs: exit ".exit_status_s($?));
+    exit(EX_RETRY_UNLOCKED);
+  }
 }
 
 # If this job requires a Docker image, install that.
@@ -596,7 +598,7 @@ else {
   unless ($? == 0 && $sha1 =~ /^([0-9a-f]{40})$/) {
     croak("`$gitcmd rev-list` exited "
           .exit_status_s($?)
-          .", '$treeish' not found. Giving up.");
+          .", '$treeish' not found, giving up");
   }
   $commit = $1;
   Log(undef, "Version $treeish is commit $commit");
diff --git a/sdk/cli/test/binstub_clean_fail/mount b/sdk/cli/test/binstub_clean_fail/mount
new file mode 100755 (executable)
index 0000000..961ac28
--- /dev/null
@@ -0,0 +1,3 @@
+#!/bin/sh
+echo >&2 Failing mount stub was called
+exit 1
diff --git a/sdk/cli/test/binstub_docker_noop/docker.io b/sdk/cli/test/binstub_docker_noop/docker.io
new file mode 100755 (executable)
index 0000000..af3a4e4
--- /dev/null
@@ -0,0 +1,2 @@
+#!/bin/sh
+true
diff --git a/sdk/cli/test/binstub_sanity_check/docker.io b/sdk/cli/test/binstub_sanity_check/docker.io
new file mode 100755 (executable)
index 0000000..8f1569d
--- /dev/null
@@ -0,0 +1,2 @@
+#!/bin/sh
+exit 8
diff --git a/sdk/cli/test/binstub_sanity_check/true b/sdk/cli/test/binstub_sanity_check/true
new file mode 100755 (executable)
index 0000000..4b88b91
--- /dev/null
@@ -0,0 +1,2 @@
+#!/bin/sh
+exit 7
index 18bef403b761f52701fdc86b2919dac44de59e13..3dc4bdd434a101507fee3ebd8f2e5004e66cd49c 100644 (file)
@@ -7,8 +7,6 @@ class TestCollectionCreate < Minitest::Test
   end
 
   def test_small_collection
-    skip "Waiting unitl #4534 is implemented"
-
     uuid = Digest::MD5.hexdigest(foo_manifest) + '+' + foo_manifest.size.to_s
     out, err = capture_subprocess_io do
       assert_arv('--format', 'uuid', 'collection', 'create', '--collection', {
index 67dd399a2456fe4a7c2a2a2cf4d86401d409e6d6..5e58014cbfa10d3b9b67a8b7cddca8b8676f646c 100644 (file)
@@ -30,14 +30,10 @@ class TestArvGet < Minitest::Test
   end
 
   def test_file_to_dev_stdout
-    skip "Waiting unitl #4534 is implemented"
-
     test_file_to_stdout('/dev/stdout')
   end
 
   def test_file_to_stdout(specify_stdout_as='-')
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert_arv_get @@foo_manifest_locator + '/foo', specify_stdout_as
     end
@@ -46,8 +42,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_file_to_file
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get @@foo_manifest_locator + '/foo', 'tmp/foo'
@@ -58,34 +52,30 @@ class TestArvGet < Minitest::Test
   end
 
   def test_file_to_file_no_overwrite_file
-    skip "Waiting unitl #4534 is implemented"
     File.open './tmp/foo', 'wb' do |f|
       f.write 'baz'
     end
     out, err = capture_subprocess_io do
       assert_arv_get false, @@foo_manifest_locator + '/foo', 'tmp/foo'
     end
-    assert_match /Error:/, err
+    assert_match /Local file tmp\/foo already exists/, err
     assert_equal '', out
     assert_equal 'baz', IO.read('tmp/foo')
   end
 
   def test_file_to_file_no_overwrite_file_in_dir
-    skip "Waiting unitl #4534 is implemented"
     File.open './tmp/foo', 'wb' do |f|
       f.write 'baz'
     end
     out, err = capture_subprocess_io do
       assert_arv_get false, @@foo_manifest_locator + '/', 'tmp/'
     end
-    assert_match /Error:/, err
+    assert_match /Local file tmp\/foo already exists/, err
     assert_equal '', out
     assert_equal 'baz', IO.read('tmp/foo')
   end
 
   def test_file_to_file_force_overwrite
-    skip "Waiting unitl #4534 is implemented"
-
     File.open './tmp/foo', 'wb' do |f|
       f.write 'baz'
     end
@@ -99,8 +89,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_file_to_file_skip_existing
-    skip "Waiting unitl #4534 is implemented"
-
     File.open './tmp/foo', 'wb' do |f|
       f.write 'baz'
     end
@@ -114,8 +102,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_file_to_dir
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get @@foo_manifest_locator + '/foo', 'tmp/'
@@ -142,28 +128,22 @@ class TestArvGet < Minitest::Test
   end
 
   def test_nonexistent_block
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
-      assert_arv_get false, 'f1554a91e925d6213ce7c3103c5110c6'
+      assert_arv_get false, 'e796ab2294f3e48ec709ffa8d6daf58c'
     end
     assert_equal '', out
     assert_match /Error:/, err
   end
 
   def test_nonexistent_manifest
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
-      assert_arv_get false, 'f1554a91e925d6213ce7c3103c5110c6/', 'tmp/'
+      assert_arv_get false, 'acbd18db4cc2f85cedef654fccc4a4d8/', 'tmp/'
     end
     assert_equal '', out
     assert_match /Error:/, err
   end
 
   def test_manifest_root_to_dir
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get '-r', @@foo_manifest_locator + '/', 'tmp/'
@@ -174,8 +154,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_manifest_root_to_dir_noslash
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get '-r', @@foo_manifest_locator + '/', 'tmp'
@@ -186,8 +164,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_display_md5sum
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get '-r', '--md5sum', @@foo_manifest_locator + '/', 'tmp/'
@@ -198,8 +174,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_md5sum_nowrite
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get '-n', '--md5sum', @@foo_manifest_locator + '/', 'tmp/'
@@ -210,8 +184,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_sha1_nowrite
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get '-n', '-r', '--hash', 'sha1', @@foo_manifest_locator+'/', 'tmp/'
@@ -222,8 +194,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_block_to_file
-    skip "Waiting unitl #4534 is implemented"
-
     remove_tmp_foo
     out, err = capture_subprocess_io do
       assert_arv_get @@foo_manifest_locator, 'tmp/foo'
@@ -236,8 +206,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_create_directory_tree
-    skip "Waiting unitl #4534 is implemented"
-
     `rm -rf ./tmp/arv-get-test/`
     Dir.mkdir './tmp/arv-get-test'
     out, err = capture_subprocess_io do
@@ -249,8 +217,6 @@ class TestArvGet < Minitest::Test
   end
 
   def test_create_partial_directory_tree
-    skip "Waiting unitl #4534 is implemented"
-
     `rm -rf ./tmp/arv-get-test/`
     Dir.mkdir './tmp/arv-get-test'
     out, err = capture_subprocess_io do
index 73513db56cb17ee5f6d88d151205f437e6d22107..2f20e18440a2ff61dde6b748d3b327587530b142 100644 (file)
@@ -22,8 +22,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_raw_stdin
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       r,w = IO.pipe
       wpid = fork do
@@ -41,8 +39,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_raw_file
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert arv_put('--raw', './tmp/foo')
     end
@@ -52,8 +48,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_raw_empty_file
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert arv_put('--raw', './tmp/empty_file')
     end
@@ -83,8 +77,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_filename_arg_with_empty_file
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert arv_put('--filename', 'foo', './tmp/empty_file')
     end
@@ -94,8 +86,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_as_stream
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert arv_put('--as-stream', './tmp/foo')
     end
@@ -105,8 +95,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_progress
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert arv_put('--manifest', '--progress', './tmp/foo')
     end
@@ -115,8 +103,6 @@ class TestArvPut < Minitest::Test
   end
 
   def test_batch_progress
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       assert arv_put('--manifest', '--batch-progress', './tmp/foo')
     end
@@ -136,20 +122,14 @@ class TestArvPut < Minitest::Test
   end
 
   def test_read_from_implicit_stdin
-    skip "Waiting unitl #4534 is implemented"
-
     test_read_from_stdin(specify_stdin_as='--manifest')
   end
 
   def test_read_from_dev_stdin
-    skip "Waiting unitl #4534 is implemented"
-
     test_read_from_stdin(specify_stdin_as='/dev/stdin')
   end
 
   def test_read_from_stdin(specify_stdin_as='-')
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       r,w = IO.pipe
       wpid = fork do
@@ -168,22 +148,16 @@ class TestArvPut < Minitest::Test
   end
 
   def test_read_from_implicit_stdin_implicit_manifest
-    skip "Waiting unitl #4534 is implemented"
-
     test_read_from_stdin_implicit_manifest(specify_stdin_as=nil,
                                            expect_filename='stdin')
   end
 
   def test_read_from_dev_stdin_implicit_manifest
-    skip "Waiting unitl #4534 is implemented"
-
     test_read_from_stdin_implicit_manifest(specify_stdin_as='/dev/stdin')
   end
 
   def test_read_from_stdin_implicit_manifest(specify_stdin_as='-',
                                              expect_filename=nil)
-    skip "Waiting unitl #4534 is implemented"
-
     expect_filename = expect_filename || specify_stdin_as.split('/').last
     out, err = capture_subprocess_io do
       r,w = IO.pipe
index 8c8d1d8331ae05fcbda64a65289732188c66bcd8..cac89b37bc0555c4929c6efadf873c32aed01297 100644 (file)
@@ -5,8 +5,6 @@ class TestRunPipelineInstance < Minitest::Test
   end
 
   def test_run_pipeline_instance_get_help
-    skip "Waiting unitl #4534 is implemented"
-
     out, err = capture_subprocess_io do
       system ('arv-run-pipeline-instance -h')
     end
index a5a1c94fff29227e0944afcae08383529cfe0b33..f4eba4651cbcd06494c41d1e05311dac663f65ed 100644 (file)
@@ -9,7 +9,7 @@ end
 class TestArvTag < Minitest::Test
 
   def test_no_args
-    skip "Waiting unitl #4534 is implemented"
+    skip "Waiting until #4534 is implemented"
 
     # arv-tag exits with failure if run with no args
     out, err = capture_subprocess_io do
diff --git a/sdk/cli/test/test_crunch-job.rb b/sdk/cli/test/test_crunch-job.rb
new file mode 100644 (file)
index 0000000..22d756a
--- /dev/null
@@ -0,0 +1,126 @@
+require 'minitest/autorun'
+
+class TestCrunchJob < Minitest::Test
+  SPECIAL_EXIT = {
+    EX_RETRY_UNLOCKED: 93,
+    EX_TEMPFAIL: 75,
+  }
+
+  JOBSPEC = {
+    grep_local: {
+      script: 'grep',
+      script_version: 'master',
+      repository: File.absolute_path('../../../..', __FILE__),
+      script_parameters: {foo: 'bar'},
+    },
+  }
+
+  def setup
+  end
+
+  def crunchjob
+    File.absolute_path '../../bin/crunch-job', __FILE__
+  end
+
+  # Return environment suitable for running crunch-job.
+  def crunchenv opts={}
+    env = ENV.to_h
+    env['CRUNCH_REFRESH_TRIGGER'] =
+      File.absolute_path('../../../../tmp/crunch-refresh-trigger', __FILE__)
+    env
+  end
+
+  def jobspec label
+    JOBSPEC[label].dup
+  end
+
+  # Encode job record to json and run it with crunch-job.
+  #
+  # opts[:binstubs] is an array of X where ./binstub_X is added to
+  # PATH in order to mock system programs.
+  def tryjobrecord jobrecord, opts={}
+    env = crunchenv
+    (opts[:binstubs] || []).each do |binstub|
+      env['PATH'] = File.absolute_path('../binstub_'+binstub, __FILE__) + ':' + env['PATH']
+    end
+    system env, crunchjob, '--job', jobrecord.to_json
+  end
+
+  def test_bogus_json
+    out, err = capture_subprocess_io do
+      system crunchenv, crunchjob, '--job', '"}{"'
+    end
+    assert_equal false, $?.success?
+    # Must not conflict with our special exit statuses
+    assert_jobfail $?
+    assert_match /JSON/, err
+  end
+
+  def test_fail_sanity_check
+    out, err = capture_subprocess_io do
+      j = {}
+      tryjobrecord j, binstubs: ['sanity_check']
+    end
+    assert_equal 75, $?.exitstatus
+    assert_match /Sanity check failed: 7/, err
+  end
+
+  def test_fail_docker_sanity_check
+    out, err = capture_subprocess_io do
+      j = {}
+      j[:docker_image_locator] = '4d449b9d34f2e2222747ef79c53fa3ff+1234'
+      tryjobrecord j, binstubs: ['sanity_check']
+    end
+    assert_equal 75, $?.exitstatus
+    assert_match /Sanity check failed: 8/, err
+  end
+
+  def test_no_script_specified
+    out, err = capture_subprocess_io do
+      j = jobspec :grep_local
+      j.delete :script
+      tryjobrecord j
+    end
+    assert_match /No script specified/, err
+    assert_jobfail $?
+  end
+
+  def test_fail_clean_tmp
+    out, err = capture_subprocess_io do
+      j = jobspec :grep_local
+      tryjobrecord j, binstubs: ['clean_fail']
+    end
+    assert_match /Failing mount stub was called/, err
+    assert_match /Clean work dirs: exit 1\n$/, err
+    assert_equal SPECIAL_EXIT[:EX_RETRY_UNLOCKED], $?.exitstatus
+  end
+
+  def test_docker_image_missing
+    skip 'API bug: it refuses to create this job in Running state'
+    out, err = capture_subprocess_io do
+      j = jobspec :grep_local
+      j[:docker_image_locator] = '4d449b9d34f2e2222747ef79c53fa3ff+1234'
+      tryjobrecord j, binstubs: ['docker_noop']
+    end
+    assert_match /No Docker image hash found from locator/, err
+    assert_jobfail $?
+  end
+
+  def test_script_version_not_found_in_repository
+    bogus_version = 'f8b72707c1f5f740dbf1ed56eb429a36e0dee770'
+    out, err = capture_subprocess_io do
+      j = jobspec :grep_local
+      j[:script_version] = bogus_version
+      tryjobrecord j
+    end
+    assert_match /'#{bogus_version}' not found, giving up/, err
+    assert_jobfail $?
+  end
+
+  # Ensure procstatus is not interpreted as a temporary infrastructure
+  # problem. Would be assert_http_4xx if this were http.
+  def assert_jobfail procstatus
+    refute_includes SPECIAL_EXIT.values, procstatus.exitstatus
+    assert_equal false, procstatus.success?
+  end
+end
index 21e31ad8055ce27e84f303c538789da050c021f6..d676d37a8a7dfb54fbb0429a548b4e5b90cac6e4 100644 (file)
@@ -6,5 +6,10 @@ use ExtUtils::MakeMaker;
 
 WriteMakefile(
     NAME            => 'Arvados',
-    VERSION_FROM    => 'lib/Arvados.pm'
+    VERSION_FROM    => 'lib/Arvados.pm',
+    PREREQ_PM       => {
+        'JSON'     => 0,
+        'LWP'      => 0,
+        'Net::SSL' => 0,
+    },
 );
index 1d10b042729f1712a896a4d1d17c1a01d1cef29c..c5749cce5e860c5ee251f8f3bfa5616befa2c7d4 100755 (executable)
@@ -188,6 +188,13 @@ def api_for_instance(instance_name):
         abort('need ARVADOS_API_HOST and ARVADOS_API_TOKEN for {}'.format(instance_name))
     return client
 
+# Check if git is available
+def check_git_availability():
+    try:
+        arvados.util.run_command(['git', '--help'])
+    except Exception:
+        abort('git command is not available. Please ensure git is installed.')
+
 # copy_pipeline_instance(pi_uuid, src, dst, args)
 #
 #    Copies a pipeline instance identified by pi_uuid from src to dst.
@@ -212,6 +219,8 @@ def copy_pipeline_instance(pi_uuid, src, dst, args):
     pi = src.pipeline_instances().get(uuid=pi_uuid).execute(num_retries=args.retries)
 
     if args.recursive:
+        check_git_availability()
+
         if not args.dst_git_repo:
             abort('--dst-git-repo is required when copying a pipeline recursively.')
         # Copy the pipeline template and save the copied template.
@@ -265,6 +274,8 @@ def copy_pipeline_template(pt_uuid, src, dst, args):
     pt = src.pipeline_templates().get(uuid=pt_uuid).execute(num_retries=args.retries)
 
     if args.recursive:
+        check_git_availability()
+
         if not args.dst_git_repo:
             abort('--dst-git-repo is required when copying a pipeline recursively.')
         # Copy input collections, docker images and git repos.
@@ -318,9 +329,9 @@ def copy_collections(obj, src, dst, args):
         obj = arvados.util.portable_data_hash_pattern.sub(copy_collection_fn, obj)
         obj = arvados.util.collection_uuid_pattern.sub(copy_collection_fn, obj)
         return obj
-    elif type(obj) == dict:
+    elif isinstance(obj, dict):
         return {v: copy_collections(obj[v], src, dst, args) for v in obj}
-    elif type(obj) == list:
+    elif isinstance(obj, list):
         return [copy_collections(v, src, dst, args) for v in obj]
     return obj
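
(Aside, not part of the patch: a minimal sketch of the reasoning behind the isinstance() change above -- unlike an exact type() comparison, isinstance() also matches dict/list subclasses, so nested structures built from, e.g., collections.OrderedDict are still traversed. The object contents below are made up for illustration.)

    # Illustration only, not Arvados code: exact type() check vs. isinstance().
    from collections import OrderedDict

    obj = OrderedDict(steps=[{"uuid": "xyzzy"}])  # a dict subclass

    print(type(obj) == dict)      # False -- exact type check skips the subclass
    print(isinstance(obj, dict))  # True  -- subclass-aware, so it is still recursed into
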
 
index fd6ab582071cc65f27ad0c943ad2b2d75ff03f6b..4bf9a6a0945462e2bf74596d620ec21575541844 100644 (file)
@@ -2,15 +2,27 @@ class Arvados::V1::RepositoriesController < ApplicationController
   skip_before_filter :find_object_by_uuid, :only => :get_all_permissions
   skip_before_filter :render_404_if_no_object, :only => :get_all_permissions
   before_filter :admin_required, :only => :get_all_permissions
+
   def get_all_permissions
-    @users = {}
-    User.includes(:authorized_keys).find_each do |u|
-      @users[u.uuid] = u
+    # users is a map of {user_uuid => User object}
+    users = {}
+    # user_aks is a map of {user_uuid => array of public keys}
+    user_aks = {}
+    # admins is an array of user_uuids
+    admins = []
+    User.eager_load(:authorized_keys).find_each do |u|
+      next unless u.is_active or u.uuid == anonymous_user_uuid
+      users[u.uuid] = u
+      user_aks[u.uuid] = u.authorized_keys.collect do |ak|
+        {
+          public_key: ak.public_key,
+          authorized_key_uuid: ak.uuid
+        }
+      end
+      admins << u.uuid if u.is_admin
     end
-    admins = @users.select { |k,v| v.is_admin }
-    @user_aks = {}
     @repo_info = {}
-    Repository.includes(:permissions).find_each do |repo|
+    Repository.eager_load(:permissions).find_each do |repo|
       @repo_info[repo.uuid] = {
         uuid: repo.uuid,
         name: repo.name,
@@ -18,60 +30,92 @@ class Arvados::V1::RepositoriesController < ApplicationController
         fetch_url: repo.fetch_url,
         user_permissions: {},
       }
-      gitolite_permissions = ''
-      perms = []
+      # evidence is an array of {name: 'can_xxx', user_uuid: 'x-y-z'},
+      # one entry for each piece of evidence we find in the permission
+      # database that establishes that a user can access this
+      # repository. Multiple entries can be added for a given user,
+      # possibly with different access levels; these will be compacted
+      # below.
+      evidence = []
       repo.permissions.each do |perm|
         if ArvadosModel::resource_class_for_uuid(perm.tail_uuid) == Group
-          @users.each do |user_uuid, user|
-            user.group_permissions.each do |group_uuid, perm_mask|
-              if perm_mask[:manage]
-                perms << {name: 'can_manage', user_uuid: user_uuid}
-              elsif perm_mask[:write]
-                perms << {name: 'can_write', user_uuid: user_uuid}
-              elsif perm_mask[:read]
-                perms << {name: 'can_read', user_uuid: user_uuid}
-              end
+          # A group has permission. Each user who has access to this
+          # group also has access to the repository. Access level is
+          # min(group-to-repo permission, user-to-group permission).
+          users.each do |user_uuid, user|
+            perm_mask = user.group_permissions[perm.tail_uuid]
+            if not perm_mask
+              next
+            elsif perm_mask[:manage] and perm.name == 'can_manage'
+              evidence << {name: 'can_manage', user_uuid: user_uuid}
+            elsif perm_mask[:write] and ['can_manage', 'can_write'].index perm.name
+              evidence << {name: 'can_write', user_uuid: user_uuid}
+            elsif perm_mask[:read]
+              evidence << {name: 'can_read', user_uuid: user_uuid}
             end
           end
-        else
-          perms << {name: perm.name, user_uuid: perm.tail_uuid}
+        elsif users[perm.tail_uuid]
+          # A user has permission; the user exists; and either the
+          # user is active, or it's the special case of the anonymous
+          # user which is never "active" but is allowed to read
+          # content from public repositories.
+          evidence << {name: perm.name, user_uuid: perm.tail_uuid}
         end
       end
-      # Owner of the repository, and all admins, can RW
-      ([repo.owner_uuid] + admins.keys).each do |user_uuid|
-        perms << {name: 'can_write', user_uuid: user_uuid}
+      # Owner of the repository, and all admins, can do everything.
+      ([repo.owner_uuid] | admins).each do |user_uuid|
+        # Except: no permissions for inactive users, even if they own
+        # repositories.
+        next unless users[user_uuid]
+        evidence << {name: 'can_manage', user_uuid: user_uuid}
       end
-      perms.each do |perm|
+      # Distill all the evidence about permissions on this repository
+      # into one hash per user, of the form {'can_xxx' => true, ...}.
+      # The hash is nil for a user who has no permissions at all on
+      # this particular repository.
+      evidence.each do |perm|
         user_uuid = perm[:user_uuid]
-        @user_aks[user_uuid] = @users[user_uuid].andand.authorized_keys.andand.
-          collect do |ak|
-          {
-            public_key: ak.public_key,
-            authorized_key_uuid: ak.uuid
-          }
-        end || []
-        if @user_aks[user_uuid].any?
-          ri = (@repo_info[repo.uuid][:user_permissions][user_uuid] ||= {})
-          ri[perm[:name]] = true
-        end
+        user_perms = (@repo_info[repo.uuid][:user_permissions][user_uuid] ||= {})
+        user_perms[perm[:name]] = true
       end
     end
-    @repo_info.values.each do |repo_users|
-      repo_users[:user_permissions].each do |user_uuid,perms|
-        if perms['can_manage']
-          perms[:gitolite_permissions] = 'RW'
-          perms['can_write'] = true
-          perms['can_read'] = true
-        elsif perms['can_write']
-          perms[:gitolite_permissions] = 'RW'
-          perms['can_read'] = true
-        elsif perms['can_read']
-          perms[:gitolite_permissions] = 'R'
+    # Revisit each {'can_xxx' => true, ...} hash for some final
+    # cleanup to make life easier for the requestor.
+    #
+    # Add a 'gitolite_permissions' key alongside the 'can_xxx' keys,
+    # for the convenience of the gitolite config file generator.
+    #
+    # Add all lesser permissions when a greater permission is
+    # present. If the requestor only wants to know who can write, it
+    # only has to test for 'can_write' in the response.
+    @repo_info.values.each do |repo|
+      repo[:user_permissions].each do |user_uuid, user_perms|
+        if user_perms['can_manage']
+          user_perms['gitolite_permissions'] = 'RW'
+          user_perms['can_write'] = true
+          user_perms['can_read'] = true
+        elsif user_perms['can_write']
+          user_perms['gitolite_permissions'] = 'RW'
+          user_perms['can_read'] = true
+        elsif user_perms['can_read']
+          user_perms['gitolite_permissions'] = 'R'
         end
       end
     end
+    # The response looks like
+    #   {"kind":"...",
+    #    "repositories":[r1,r2,r3,...],
+    #    "user_keys":usermap}
+    # where each of r1,r2,r3 looks like
+    #   {"uuid":"repo-uuid-1",
+    #    "name":"username/reponame",
+    #    "push_url":"...",
+    #    "user_permissions":{"user-uuid-a":{"can_read":true,"gitolite_permissions":"R"}}}
+    # and usermap looks like
+    #   {"user-uuid-a":[{"public_key":"ssh-rsa g...","authorized_key_uuid":"ak-uuid-g"},...],
+    #    "user-uuid-b":[{"public_key":"ssh-rsa h...","authorized_key_uuid":"ak-uuid-h"},...],...}
     send_json(kind: 'arvados#RepositoryPermissionSnapshot',
               repositories: @repo_info.values,
-              user_keys: @user_aks)
+              user_keys: user_aks)
   end
 end
index d5b2f871cc89a138a894753deaf9b026b0bc8443..35dd1a94c9d983b343fc6394370f03ca795ca896 100644 (file)
@@ -23,6 +23,7 @@ class ArvadosModel < ActiveRecord::Base
   after_destroy :log_destroy
   after_find :convert_serialized_symbols_to_strings
   before_validation :normalize_collection_uuids
+  before_validation :set_default_owner
   validate :ensure_serialized_attribute_type
   validate :ensure_valid_uuids
 
@@ -276,12 +277,14 @@ class ArvadosModel < ActiveRecord::Base
     true
   end
 
-  def ensure_owner_uuid_is_permitted
-    raise PermissionDeniedError if !current_user
-
-    if new_record? and respond_to? :owner_uuid=
+  def set_default_owner
+    if new_record? and current_user and respond_to? :owner_uuid=
       self.owner_uuid ||= current_user.uuid
     end
+  end
+
+  def ensure_owner_uuid_is_permitted
+    raise PermissionDeniedError if !current_user
 
     if self.owner_uuid.nil?
       errors.add :owner_uuid, "cannot be nil"
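
Splitting set_default_owner out of ensure_owner_uuid_is_permitted relies on Rails callback ordering: before_validation callbacks run before validate hooks, so owner_uuid already has its default by the time the permission check inspects it. A stripped-down sketch of that ordering (plain Ruby standing in for the ActiveRecord callback chain; everything except the two method names is illustrative):

# Illustrative only: before_validation-style setup runs first,
# validate-style checks run second.
class OwnerDefaultSketch
  attr_accessor :owner_uuid

  def initialize(current_user_uuid)
    @current_user_uuid = current_user_uuid
  end

  def valid?
    set_default_owner               # before_validation
    ensure_owner_uuid_is_permitted  # validate
  end

  def set_default_owner
    self.owner_uuid ||= @current_user_uuid
  end

  def ensure_owner_uuid_is_permitted
    !owner_uuid.nil?
  end
end

puts OwnerDefaultSketch.new('zzzzz-tpzed-examplexample1').valid?  # => true
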
index b156a1d0f697440ae0912ff352049bdf0de28c2a..452cd6967bfa7c40d22f8746cc69c2ea10c7fc89 100644 (file)
@@ -33,14 +33,14 @@ class AuthorizedKey < ArvadosModel
 
   def public_key_must_be_unique
     if self.public_key
-      #key = /^ssh-(rsa|dss) [A-Za-z0-9+\/=\+]+\b/.match(self.public_key)
       valid_key = SSHKey.valid_ssh_public_key? self.public_key
 
       if not valid_key
         errors.add(:public_key, "does not appear to be a valid ssh-rsa or dsa public key")
       else
         # Valid if no other rows have this public key
-        if self.class.where('public_key like ?', "%#{self.public_key}%").any?
+        if self.class.where('uuid != ? and public_key like ?',
+                            uuid || '', "%#{self.public_key}%").any?
           errors.add(:public_key, "already exists in the database, use a different key.")
           return false
         end
index a6b085722e90fb043a2277f7781727218e8e2559..f74e2fedc7396335e6ff537bc2e882aa5da3e177 100644 (file)
@@ -58,12 +58,22 @@ class Commit < ActiveRecord::Base
 
     # Get the commit hash for the upper bound
     max_hash = nil
-    IO.foreach("|git rev-list --max-count=1 #{maximum.shellescape} --") do |line|
+    git_max_hash_cmd = "git rev-list --max-count=1 #{maximum.shellescape} --"
+    IO.foreach("|#{git_max_hash_cmd}") do |line|
       max_hash = line.strip
     end
 
-    # If not found or string is invalid, nothing else to do
-    return [] if !max_hash or !git_check_ref_format(max_hash)
+    # If not found, nothing else to do
+    if !max_hash
+      logger.warn "no refs found looking for max_hash: `GIT_DIR=#{gitdir} #{git_max_hash_cmd}` returned no output"
+      return []
+    end
+
+    # If string is invalid, nothing else to do
+    if !git_check_ref_format(max_hash)
+      logger.warn "ref returned by `GIT_DIR=#{gitdir} #{git_max_hash_cmd}` was invalid for max_hash: #{max_hash}"
+      return []
+    end
 
     resolved_exclude = nil
     if exclude
@@ -83,12 +93,22 @@ class Commit < ActiveRecord::Base
     if minimum
       # Get the commit hash for the lower bound
       min_hash = nil
-      IO.foreach("|git rev-list --max-count=1 #{minimum.shellescape} --") do |line|
+      git_min_hash_cmd = "git rev-list --max-count=1 #{minimum.shellescape} --"
+      IO.foreach("|#{git_min_hash_cmd}") do |line|
         min_hash = line.strip
       end
 
-      # If not found or string is invalid, nothing else to do
-      return [] if !min_hash or !git_check_ref_format(min_hash)
+      # If not found, nothing else to do
+      if !min_hash
+        logger.warn "no refs found looking for min_hash: `GIT_DIR=#{gitdir} #{git_min_hash_cmd}` returned no output"
+        return []
+      end
+
+      # If string is invalid, nothing else to do
+      if !git_check_ref_format(min_hash)
+        logger.warn "ref returned by `GIT_DIR=#{gitdir} #{git_min_hash_cmd}` was invalid for min_hash: #{min_hash}"
+        return []
+      end
 
       # Now find all commits between them
       IO.foreach("|git rev-list #{min_hash.shellescape}..#{max_hash.shellescape} --") do |line|
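
For reference, the two-step pattern used here (resolve each bound to a single commit hash with `git rev-list --max-count=1`, then list everything in between with `min..max`) can be exercised on its own. A standalone sketch follows; the repository path and ref names are placeholders, and GIT_DIR is set explicitly as the log messages above assume:

require 'shellwords'

# Placeholders: point these at any local git repository and refs.
gitdir  = '/path/to/repo/.git'
minimum = 'v1.0'
maximum = 'master'

ENV['GIT_DIR'] = gitdir

# Resolve a ref (tag, branch, or hash) to a single commit hash.
resolve = lambda do |ref|
  hash = nil
  IO.foreach("|git rev-list --max-count=1 #{ref.shellescape} --") do |line|
    hash = line.strip
  end
  hash
end

min_hash = resolve.call(minimum)
max_hash = resolve.call(maximum)
abort 'could not resolve refs' unless min_hash && max_hash

# Commits reachable from max_hash but not from min_hash.
IO.foreach("|git rev-list #{min_hash.shellescape}..#{max_hash.shellescape} --") do |line|
  puts line.strip
end
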
index 434f9c768d2dbeb4a0e908fbd6d8de6d6d903f80..925e4661248279b1543052fd8f8cc563e5efc8d8 100644 (file)
@@ -405,6 +405,20 @@ admin_can_write_aproject:
   head_uuid: zzzzz-j7d0g-v955i6s2oi1cbso
   properties: {}
 
+project_viewer_member_of_all_users_group:
+  uuid: zzzzz-o0j2j-cdnq6627g0h0r2x
+  owner_uuid: zzzzz-tpzed-000000000000000
+  created_at: 2015-07-28T21:34:41.361747000Z
+  modified_by_client_uuid: zzzzz-ozdt8-brczlopd8u8d0jr
+  modified_by_user_uuid: zzzzz-tpzed-000000000000000
+  modified_at: 2015-07-28T21:34:41.361747000Z
+  updated_at: 2015-07-28T21:34:41.361747000Z
+  tail_uuid: zzzzz-tpzed-projectviewer1a
+  link_class: permission
+  name: can_read
+  head_uuid: zzzzz-j7d0g-fffffffffffffff
+  properties: {}
+
 project_viewer_can_read_project:
   uuid: zzzzz-o0j2j-projviewerreadp
   owner_uuid: zzzzz-tpzed-000000000000000
index 7ba2183d3e7c62d540ace6721fe75a65efb00809..514bb66bb2b55eaabfffd9e2494c59500c1a58bc 100644 (file)
@@ -42,6 +42,26 @@ class Arvados::V1::RepositoriesControllerTest < ActionController::TestCase
     end
   end
 
+  test "get_all_permissions takes into account is_active flag" do
+    r = nil
+    act_as_user users(:active) do
+      r = Repository.create! name: 'active/testrepo'
+    end
+    act_as_system_user do
+      u = users(:active)
+      u.is_active = false
+      u.save!
+    end
+    authorize_with :admin
+    get :get_all_permissions
+    assert_response :success
+    json_response['repositories'].each do |r|
+      r['user_permissions'].each do |user_uuid, perms|
+        refute_equal user_uuid, users(:active).uuid
+      end
+    end
+  end
+
   test "get_all_permissions does not give any access to user without permission" do
     viewer_uuid = users(:project_viewer).uuid
     assert_equal(authorized_keys(:project_viewer).authorized_user_uuid,
@@ -88,15 +108,84 @@ class Arvados::V1::RepositoriesControllerTest < ActionController::TestCase
     end
   end
 
-  test "get_all_permissions lists repos with no authorized keys" do
+  test "get_all_permissions lists all repos regardless of permissions" do
+    act_as_system_user do
+      # Create repos that could potentially be left out of the
+      # permission list by accident.
+
+      # No authorized_key, no username (this can't even be done
+      # without skipping validations)
+      r = Repository.create name: 'root/testrepo'
+      assert r.save validate: false
+
+      r = Repository.create name: 'invalid username / repo name', owner_uuid: users(:inactive).uuid
+      assert r.save validate: false
+    end
+    authorize_with :admin
+    get :get_all_permissions
+    assert_response :success
+    assert_equal(Repository.count, json_response["repositories"].size)
+  end
+
+  test "get_all_permissions lists user permissions for users with no authorized keys" do
     authorize_with :admin
     AuthorizedKey.destroy_all
     get :get_all_permissions
     assert_response :success
     assert_equal(Repository.count, json_response["repositories"].size)
-    assert(json_response["repositories"].any? do |repo|
-             repo["user_permissions"].empty?
-           end, "test is invalid - all repositories have authorized keys")
+    repos_with_perms = []
+    json_response['repositories'].each do |repo|
+      if repo['user_permissions'].any?
+        repos_with_perms << repo['uuid']
+      end
+    end
+    assert_not_empty repos_with_perms, 'permissions are missing'
+  end
+
+  # Ensure get_all_permissions correctly describes what the normal
+  # permission system would do.
+  test "get_all_permissions obeys group permissions" do
+    act_as_user system_user do
+      r = Repository.create!(name: 'admin/groupcanwrite', owner_uuid: users(:admin).uuid)
+      g = Group.create!(group_class: 'group', name: 'repo-writers')
+      u1 = users(:active)
+      u2 = users(:spectator)
+      Link.create!(tail_uuid: g.uuid, head_uuid: r.uuid, link_class: 'permission', name: 'can_manage')
+      Link.create!(tail_uuid: u1.uuid, head_uuid: g.uuid, link_class: 'permission', name: 'can_write')
+      Link.create!(tail_uuid: u2.uuid, head_uuid: g.uuid, link_class: 'permission', name: 'can_read')
+
+      r = Repository.create!(name: 'admin/groupreadonly', owner_uuid: users(:admin).uuid)
+      g = Group.create!(group_class: 'group', name: 'repo-readers')
+      u1 = users(:active)
+      u2 = users(:spectator)
+      Link.create!(tail_uuid: g.uuid, head_uuid: r.uuid, link_class: 'permission', name: 'can_read')
+      Link.create!(tail_uuid: u1.uuid, head_uuid: g.uuid, link_class: 'permission', name: 'can_write')
+      Link.create!(tail_uuid: u2.uuid, head_uuid: g.uuid, link_class: 'permission', name: 'can_read')
+    end
+    authorize_with :admin
+    get :get_all_permissions
+    assert_response :success
+    json_response['repositories'].each do |repo|
+      repo['user_permissions'].each do |user_uuid, perms|
+        u = User.find_by_uuid(user_uuid)
+        if perms['can_read']
+          assert u.can? read: repo['uuid']
+          assert_match /R/, perms['gitolite_permissions']
+        else
+          refute_match /R/, perms['gitolite_permissions']
+        end
+        if perms['can_write']
+          assert u.can? write: repo['uuid']
+          assert_match /RW/, perms['gitolite_permissions']
+        else
+          refute_match /W/, perms['gitolite_permissions']
+        end
+        if perms['can_manage']
+          assert u.can? manage: repo['uuid']
+          assert_match /RW/, perms['gitolite_permissions']
+        end
+      end
+    end
   end
 
   test "default index includes fetch_url" do
index b8d9b6786cc951a7a37f23e2126a39ac70fd2a78..5a661785bd7bef903747b5890bb135b8bacaebf1 100644 (file)
@@ -1,7 +1,47 @@
 require 'test_helper'
 
 class AuthorizedKeyTest < ActiveSupport::TestCase
-  # test "the truth" do
-  #   assert true
-  # end
+  TEST_KEY = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCf5aTI55uyWr44TckP/ELUAyPsdnf5fTZDcSDN4qiMZYAL7TYV2ixwnbPObLObM0GmHSSFLV1KqsuFICUPgkyKoHbAH6XPgmtfOLU60VkGf1v5uxQ/kXCECRCJmPb3K9dIXGEw+1DXPdOV/xG7rJNvo4a9WK9iqqZr8p+VGKM6C017b8BDLk0tuEEjZ5jXcT/ka/hTScxWkKgF6auPOVQ79OA5+0VaYm4uQLzVUdgwVUPWQQecRrtnc08XYM1htpcLDIAbWfUNK7uE6XR3/OhtrJGf05FGbtGguPgi33F9W3Q3yw6saOK5Y3TfLbskgFaEdLgzqK/QSBRk2zBF49Tj test@localhost"
+
+  test 'create and update key' do
+    u1 = users(:active)
+    act_as_user u1 do
+      ak = AuthorizedKey.new(name: "foo", public_key: TEST_KEY, authorized_user_uuid: u1.uuid)
+      assert ak.save, ak.errors.full_messages.to_s
+      ak.name = "bar"
+      assert ak.valid?, ak.errors.full_messages.to_s
+      assert ak.save, ak.errors.full_messages.to_s
+    end
+  end
+
+  test 'duplicate key not permitted' do
+    u1 = users(:active)
+    act_as_user u1 do
+      ak = AuthorizedKey.new(name: "foo", public_key: TEST_KEY, authorized_user_uuid: u1.uuid)
+      assert ak.save
+    end
+    u2 = users(:spectator)
+    act_as_user u2 do
+      ak2 = AuthorizedKey.new(name: "bar", public_key: TEST_KEY, authorized_user_uuid: u2.uuid)
+      refute ak2.valid?
+      refute ak2.save
+      assert_match /already exists/, ak2.errors.full_messages.to_s
+    end
+  end
+
+  test 'attach key to wrong user account' do
+    act_as_user users(:active) do
+      ak = AuthorizedKey.new(name: "foo", public_key: TEST_KEY)
+      ak.authorized_user_uuid = users(:spectator).uuid
+      refute ak.save
+      ak.uuid = nil
+      ak.authorized_user_uuid = users(:admin).uuid
+      refute ak.save
+      ak.uuid = nil
+      ak.authorized_user_uuid = users(:active).uuid
+      assert ak.save, ak.errors.full_messages.to_s
+      ak.authorized_user_uuid = users(:admin).uuid
+      refute ak.save
+    end
+  end
 end
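
The TEST_KEY literal above is an ordinary RSA public key. A comparable throwaway key for local experimentation can be generated with OpenSSH's ssh-keygen; the output path and comment below are arbitrary choices for the sketch:

# Generate a throwaway RSA keypair comparable to TEST_KEY above
# (no passphrase, comment matching the fixture style).
system('ssh-keygen', '-t', 'rsa', '-b', '2048',
       '-N', '', '-C', 'test@localhost', '-f', '/tmp/arvados_test_key')
puts File.read('/tmp/arvados_test_key.pub')
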