Merge branch '15138-version-our-documentation'
author Ward Vandewege <wvandewege@veritasgenetics.com>
Wed, 12 Jun 2019 18:02:08 +0000 (14:02 -0400)
committer Ward Vandewege <wvandewege@veritasgenetics.com>
Wed, 12 Jun 2019 18:02:34 +0000 (14:02 -0400)
refs #15138

Arvados-DCO-1.1-Signed-off-by: Ward Vandewege <wvandewege@veritasgenetics.com>

53 files changed:
apps/workbench/app/controllers/application_controller.rb
apps/workbench/app/controllers/projects_controller.rb
apps/workbench/app/helpers/application_helper.rb
apps/workbench/app/helpers/arvados_api_client_helper.rb
apps/workbench/app/helpers/collections_helper.rb
apps/workbench/app/helpers/pipeline_instances_helper.rb
apps/workbench/app/helpers/provenance_helper.rb
apps/workbench/app/models/arvados_base.rb
apps/workbench/app/models/arvados_resource_list.rb
apps/workbench/app/models/collection.rb
apps/workbench/app/models/container_work_unit.rb
apps/workbench/app/views/application/_breadcrumbs.html.erb
apps/workbench/app/views/layouts/application.html.erb
apps/workbench/app/views/projects/_show_dashboard.html.erb
apps/workbench/config/initializers/inflections.rb
apps/workbench/config/load_config.rb
apps/workbench/test/integration_helper.rb
apps/workbench/test/test_helper.rb
build/package-build-dockerfiles/centos7/Dockerfile
build/package-build-dockerfiles/debian8/Dockerfile
build/package-build-dockerfiles/debian9/Dockerfile
build/package-build-dockerfiles/ubuntu1604/Dockerfile
build/package-build-dockerfiles/ubuntu1804/Dockerfile
build/package-test-dockerfiles/debian8/Dockerfile
build/package-test-dockerfiles/debian9/Dockerfile
build/package-test-dockerfiles/ubuntu1604/Dockerfile
build/package-test-dockerfiles/ubuntu1804/Dockerfile
build/run-tests.sh
doc/_includes/_install_ruby_and_bundler.liquid
doc/_includes/_install_ruby_and_bundler_sso.liquid [new file with mode: 0644]
doc/install/install-sso.html.textile.liquid
doc/sdk/python/cookbook.html.textile.liquid
sdk/cwl/arvados_cwl/runner.py
sdk/cwl/setup.py
sdk/cwl/tests/secondary/wf.cwl
sdk/cwl/tests/test_container.py
sdk/cwl/tests/test_job.py
sdk/cwl/tests/test_submit.py
sdk/cwl/tests/wf-defaults/wf6.cwl
sdk/cwl/tests/wf-defaults/wf7.cwl
sdk/cwl/tests/wf/runin-reqs-wf.cwl
sdk/cwl/tests/wf/runin-reqs-wf2.cwl
sdk/cwl/tests/wf/runin-reqs-wf4.cwl
sdk/cwl/tests/wf/runin-wf.cwl
sdk/cwl/tests/wf/runin-with-ttl-wf.cwl
sdk/go/keepclient/keepclient_test.go
sdk/python/arvados/commands/put.py
sdk/python/tests/test_arv_put.py
services/api/Gemfile.lock
services/api/app/controllers/application_controller.rb
services/api/app/models/commit.rb
services/login-sync/.gitignore
services/login-sync/Gemfile.lock [new file with mode: 0644]

index 21e9b49fd800fdce05d34e3358eafc9692111e12..f913a15ffe23c997c7e5554f9755a470599c2e4c 100644 (file)
@@ -226,6 +226,7 @@ class ApplicationController < ActionController::Base
   end
 
   def index
+    @objects = nil if !defined?(@objects)
     find_objects_for_index if !@objects
     render_index
   end
@@ -322,6 +323,7 @@ class ApplicationController < ActionController::Base
   end
 
   def choose
+    @objects = nil if !defined?(@objects)
     params[:limit] ||= 40
     respond_to do |f|
       if params[:partial]
@@ -542,7 +544,7 @@ class ApplicationController < ActionController::Base
 
 
   def accept_uuid_as_id_param
-    if params[:id] and params[:id].match /\D/
+    if params[:id] and params[:id].match(/\D/)
       params[:uuid] = params.delete :id
     end
   end
@@ -812,6 +814,7 @@ class ApplicationController < ActionController::Base
 
   helper_method :user_notifications
   def user_notifications
+    @errors = nil if !defined?(@errors)
     return [] if @errors or not current_user.andand.is_active or not Rails.configuration.show_user_notifications
     @notifications ||= @@notification_tests.map do |t|
       t.call(self, current_user)
@@ -923,7 +926,7 @@ class ApplicationController < ActionController::Base
 
   helper_method :my_starred_projects
   def my_starred_projects user
-    return if @starred_projects
+    return if defined?(@starred_projects) && @starred_projects
     links = Link.filter([['tail_uuid', '=', user.uuid],
                          ['link_class', '=', 'star'],
                          ['head_uuid', 'is_a', 'arvados#group']]).select(%w(head_uuid))
@@ -938,7 +941,7 @@ class ApplicationController < ActionController::Base
   # That is: get toplevel projects under home, get subprojects of
   # these projects, and so on until we hit the limit.
   def my_wanted_projects(user, page_size=100)
-    return @my_wanted_projects if @my_wanted_projects
+    return @my_wanted_projects if defined?(@my_wanted_projects) && @my_wanted_projects
 
     from_top = []
     uuids = [user.uuid]
@@ -969,7 +972,7 @@ class ApplicationController < ActionController::Base
   end
 
   def build_my_wanted_projects_tree(user, page_size=100)
-    return @my_wanted_projects_tree if @my_wanted_projects_tree
+    return @my_wanted_projects_tree if defined?(@my_wanted_projects_tree) && @my_wanted_projects_tree
 
     parent_of = {user.uuid => 'me'}
     my_wanted_projects(user, page_size).each do |ob|
@@ -984,10 +987,10 @@ class ApplicationController < ActionController::Base
       children_of[parent_of[ob.uuid]] ||= []
       children_of[parent_of[ob.uuid]] << ob
     end
-    buildtree = lambda do |children_of, root_uuid=false|
+    buildtree = lambda do |chldrn_of, root_uuid=false|
       tree = {}
-      children_of[root_uuid].andand.each do |ob|
-        tree[ob] = buildtree.call(children_of, ob.uuid)
+      chldrn_of[root_uuid].andand.each do |ob|
+        tree[ob] = buildtree.call(chldrn_of, ob.uuid)
       end
       tree
     end
index cc657cbad92406d9887eb5f0a2173415aa8ff51e..8237dc7152c9e72dddf2ace2515bf8b80f0b1294 100644 (file)
@@ -132,7 +132,6 @@ class ProjectsController < ApplicationController
 
   def remove_items
     @removed_uuids = []
-    links = []
     params[:item_uuids].collect { |uuid| ArvadosBase.find uuid }.each do |item|
       if item.class == Collection or item.class == Group
         # Use delete API on collections and projects/groups
index 3f72d5a2aae7015f00f6a4526aed33d65c811455..83123b26c35c469e3efad7ec2833679f55445a39 100644 (file)
@@ -12,7 +12,7 @@ module ApplicationHelper
   end
 
   def current_api_host
-    Rails.configuration.arvados_v1_base.gsub /https?:\/\/|\/arvados\/v1/,''
+    Rails.configuration.arvados_v1_base.gsub(/https?:\/\/|\/arvados\/v1/, '')
   end
 
   def current_uuid_prefix
@@ -474,8 +474,8 @@ module ApplicationHelper
   def cwl_inputs_required(object, inputs_schema, set_attr_path)
     r = 0
     inputs_schema.each do |input|
-      required, primary_type, param_id = cwl_input_info(input)
-      dn, attrvalue = cwl_input_value(object, input, set_attr_path + [param_id])
+      required, _, param_id = cwl_input_info(input)
+      _, attrvalue = cwl_input_value(object, input, set_attr_path + [param_id])
       r += 1 if required and attrvalue.nil?
     end
     r
@@ -687,6 +687,6 @@ module ApplicationHelper
 
 private
   def is_textile?( object, attr )
-    is_textile = object.textile_attributes.andand.include?(attr)
+    object.textile_attributes.andand.include?(attr)
   end
 end
index 5901de40b82a0d19ca38a9e8b01c463284aaa1dc..929b64923ed16aa1d3d4178a856d3c0c7133f816 100644 (file)
@@ -11,7 +11,7 @@ end
 # For the benefit of themes that still expect $arvados_api_client to work:
 class ArvadosClientProxyHack
   def method_missing *args
-    ArvadosApiClient.new_or_current.send *args
+    ArvadosApiClient.new_or_current.send(*args)
   end
 end
 $arvados_api_client = ArvadosClientProxyHack.new
index f5f54851e78dc7161ea263cf8d03b6923cb1bf33..5eb1e8c768d927dfdc14e814143d2a945ed0c011 100644 (file)
@@ -55,7 +55,7 @@ module CollectionsHelper
     f0 = '' if f0 == '.'
     f0 = f0[2..-1] if f0[0..1] == './'
     f0 += '/' if not f0.empty?
-    file_path = "#{f0}#{file[1]}"
+    "#{f0}#{file[1]}"
   end
 
   ##
index 214237522635d1e0b1e628c1f353ba2d2f736d7a..ac0cbbcccd46451028be3d2acc7afc89c38f839f 100644 (file)
@@ -207,7 +207,7 @@ module PipelineInstancesHelper
         end
       else
         if step[:progress] and
-            (re = step[:progress].match /^(\d+)\+(\d+)\/(\d+)$/)
+            (re = step[:progress].match(/^(\d+)\+(\d+)\/(\d+)$/))
           pj[:progress] = (((re[1].to_f + re[2].to_f/2) / re[3].to_f) rescue 0.5)
         else
           pj[:progress] = 0.0
index 9b4d265dfa3a78df33f550f6309603b45dcc64e8..75261adbdaa55d6516ef92ab7e00015453864969 100644 (file)
@@ -221,13 +221,13 @@ module ProvenanceHelper
                          {label: 'output'})
             end
             # Input collection nodes
-            output_pdhs = @opts[:output_collections].values.collect{|c|
-              c[:portable_data_hash]}
+            output_pdhs = @opts[:output_collections].values.collect{|oc|
+              oc[:portable_data_hash]}
             ProvenanceHelper::cr_input_pdhs(cr).each do |pdh|
               if not output_pdhs.include?(pdh)
                 # Search for collections on the same project first
-                cols = @opts[:input_collections][pdh].andand.select{|c|
-                  c[:owner_uuid] == cr[:owner_uuid]}
+                cols = @opts[:input_collections][pdh].andand.select{|ic|
+                  ic[:owner_uuid] == cr[:owner_uuid]}
                 if not cols or cols.empty?
                   # Search for any collection with this PDH
                   cols = @opts[:input_collections][pdh]
index 9e3ea46b10b6504ee16bdc261afa32a62757e65a..b9162c2aec364bd03a34171c9981262304bc9d06 100644 (file)
@@ -107,8 +107,8 @@ class ArvadosBase
   end
 
   def self.columns
+    @discovered_columns = [] if !defined?(@discovered_columns)
     return @discovered_columns if @discovered_columns.andand.any?
-    @discovered_columns = []
     @attribute_info ||= {}
     schema = arvados_api_client.discovery[:schemas][self.to_s.to_sym]
     return @discovered_columns if schema.nil?
@@ -539,17 +539,17 @@ class ArvadosBase
     if opts[:class].is_a? Class
       return opts[:class]
     end
-    if uuid.match /^[0-9a-f]{32}(\+[^,]+)*(,[0-9a-f]{32}(\+[^,]+)*)*$/
+    if uuid.match(/^[0-9a-f]{32}(\+[^,]+)*(,[0-9a-f]{32}(\+[^,]+)*)*$/)
       return Collection
     end
     resource_class = nil
-    uuid.match /^[0-9a-z]{5}-([0-9a-z]{5})-[0-9a-z]{15}$/ do |re|
+    uuid.match(/^[0-9a-z]{5}-([0-9a-z]{5})-[0-9a-z]{15}$/) do |re|
       resource_class ||= arvados_api_client.
         kind_class(self.uuid_infix_object_kind[re[1]])
     end
     if opts[:referring_object] and
         opts[:referring_attr] and
-        opts[:referring_attr].match /_uuid$/
+        opts[:referring_attr].match(/_uuid$/)
       resource_class ||= arvados_api_client.
         kind_class(opts[:referring_object].
                    attributes[opts[:referring_attr].
index 9ba61eaba08ef1d9e008233cf4f06e6c9f8ac97f..cbd544ebb798f8413d499748b59d57555fd48d7f 100644 (file)
@@ -13,6 +13,17 @@ class ArvadosResourceList
     @fetch_multiple_pages = true
     @arvados_api_token = Thread.current[:arvados_api_token]
     @reader_tokens = Thread.current[:reader_tokens]
+    @results = nil
+    @count = nil
+    @offset = 0
+    @cond = nil
+    @eager = nil
+    @select = nil
+    @orderby_spec = nil
+    @filters = nil
+    @distinct = nil
+    @include_trash = nil
+    @limit = nil
   end
 
   def eager(bool=true)
@@ -90,7 +101,7 @@ class ArvadosResourceList
         end
       end
     end
-    @cond.keys.select { |x| x.match /_kind$/ }.each do |kind_key|
+    @cond.keys.select { |x| x.match(/_kind$/) }.each do |kind_key|
       if @cond[kind_key].is_a? Class
         @cond = @cond.merge({ kind_key => 'arvados#' + arvados_api_client.class_kind(@cond[kind_key]) })
       end
@@ -134,7 +145,7 @@ class ArvadosResourceList
 
   def each(&block)
     if not @results.nil?
-      @results.each &block
+      @results.each(&block)
     else
       self.each_page do |items|
         items.each do |i|
index f5aef841ea47a6bfc66e84bccbaa86c617267a3f..ead2c951c3f8e5fdf41589e4366d96cdc891596e 100644 (file)
@@ -86,14 +86,6 @@ class Collection < ArvadosBase
     arvados_api_client.api "collections/#{self.uuid}/", "used_by"
   end
 
-  def uuid
-    if self[:uuid].nil?
-      return self[:portable_data_hash]
-    else
-      super
-    end
-  end
-
   def friendly_link_name lookup=nil
     name || portable_data_hash
   end
index ef20a7f8f49cfd7ed72f7ff9c51774ca36ddaa74..c564dc12e4381d8f53e896b5281b0307666dc278 100644 (file)
@@ -14,6 +14,7 @@ class ContainerWorkUnit < ProxyWorkUnit
         @container = Container.find(container_uuid)
       end
     end
+    @container = nil if !defined?(@container)
     @child_proxies = child_objects
   end
 
index fb4a1462a1a36789bae8cefade8365ba16dfd0d4..7a2d08e54a1afa4faacb3041f0268eaec47f0094 100644 (file)
@@ -51,7 +51,7 @@ SPDX-License-Identifier: AGPL-3.0 %>
               } %>
             </ul>
           </li>
-          <% if @name_link or @object %>
+          <% if (defined?(@name_link) && @name_link) or (defined?(@object) && @object) %>
             <li class="nav-separator">
               <i class="fa fa-lg fa-angle-double-right"></i>
             </li>
index b59bad4ec151b6dc003e18020b36a6391fc9f609..638ee8970c35a5b5af83528d7b3e650e0e487cc3 100644 (file)
@@ -33,7 +33,7 @@ SPDX-License-Identifier: AGPL-3.0 %>
   <meta property="og:type" content="article" />
   <meta property="og:url" content="<%= request.url %>" />
   <meta property="og:site_name" content="<%= Rails.configuration.site_name %>" />
-  <% if @object %>
+  <% if defined?(@object) && @object %>
     <% if @object.respond_to?(:name) and @object.name.present? %>
       <meta property="og:title" content="<%= @object.name%>" />
     <% end %>
index 69abf04e689694c90682a7f507529d8f628642e0..d4ea2de155af7384a844d0c4593314ebd201867c 100644 (file)
@@ -10,9 +10,6 @@ SPDX-License-Identifier: AGPL-3.0 %>
   recent_cr_containers = recent_crs.map {|cr| cr.container_uuid}.compact.uniq
   preload_objects_for_dataclass(Container, recent_cr_containers) if recent_cr_containers.andand.any?
 
-  # fetch children of all the active crs in one call, if there are any
-  active_crs = recent_crs.each {|cr| cr if (cr.priority.andand > 0 and cr.state != 'Final' and cr.container_uuid)}
-
   wus = {}
   outputs = []
   recent_procs.each do |p|
index 01e71585e382fe38e66d833446a90bba2a7e758c..55399f0f39eda0bd1367112c9538779222175f54 100644 (file)
@@ -19,8 +19,8 @@
 # end
 
 ActiveSupport::Inflector.inflections do |inflect|
-  inflect.plural /^([Ss]pecimen)$/i, '\1s'
-  inflect.singular /^([Ss]pecimen)s?/i, '\1'
-  inflect.plural /^([Hh]uman)$/i, '\1s'
-  inflect.singular /^([Hh]uman)s?/i, '\1'
+  inflect.plural(/^([Ss]pecimen)$/i, '\1s')
+  inflect.singular(/^([Ss]pecimen)s?/i, '\1')
+  inflect.plural(/^([Hh]uman)$/i, '\1s')
+  inflect.singular(/^([Hh]uman)s?/i, '\1')
 end
index d8d4dff567a45e63e27e744ab8b0f9a0da36c82b..5f0d9caf96450ebceb9f9beb90324805e410a9b8 100644 (file)
@@ -9,7 +9,7 @@ $application_config = {}
 
 %w(application.default application).each do |cfgfile|
   path = "#{::Rails.root.to_s}/config/#{cfgfile}.yml"
-  if File.exists? path
+  if File.exist? path
     yaml = ERB.new(IO.read path).result(binding)
     confs = YAML.load(yaml, deserialize_symbols: true)
     $application_config.merge!(confs['common'] || {})
index 85c929fdbaad0ec20f6a2e8704188c7c9a91af7d..9337daf4eed8c1d90d31cb9537af8f5f50a4ff0c 100644 (file)
@@ -197,9 +197,9 @@ class ActionDispatch::IntegrationTest
   # exception if not found. Use this with assertions to explain that
   # the error signifies a failed test rather than an unexpected error
   # during a testing procedure.
-  def find? *args
+  def find?(*args)
     begin
-      find *args
+      find(*args)
     rescue Capybara::ElementNotFound
       false
     end
index bbd733bb47d98d345381763476c9dd48c5f6f883..a71d0b46141fd74f2497e01447506831830d83d6 100644 (file)
@@ -292,7 +292,7 @@ class ActiveSupport::TestCase
 
   def after_teardown
     if self.class.want_reset_api_fixtures[:after_each_test] and
-        @want_reset_api_fixtures != false
+        (!defined?(@want_reset_api_fixtures) or @want_reset_api_fixtures != false)
       self.class.reset_api_fixtures_now
     end
     super
index ad6f4e1e8f5051c8bb90e449eb89873ca13ea107..cc8265c3d4b3098aeff6ee46bfdeeb249cd823bc 100644 (file)
@@ -14,9 +14,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
-    /usr/local/rvm/bin/rvm-exec default gem install bundler && \
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5 && \
     /usr/local/rvm/bin/rvm-exec default gem install fpm --version 1.10.2
 
 # Install golang binary
index ec7ae07d826e440cf653dbc0ebcfb8b5bca255ca..a1a1ed6f798676be5e862551df47952d80eb0e1e 100644 (file)
@@ -21,9 +21,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
-    /usr/local/rvm/bin/rvm-exec default gem install bundler && \
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5 && \
     /usr/local/rvm/bin/rvm-exec default gem install fpm --version 1.10.2
 
 # Install golang binary
index 6f7f3faafe8c4e18f580be1f8e341a978cf38c84..770db51e7c5180bd0ba83d7e5c57d2899b269c61 100644 (file)
@@ -20,9 +20,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
-    /usr/local/rvm/bin/rvm-exec default gem install bundler && \
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5 && \
     /usr/local/rvm/bin/rvm-exec default gem install fpm --version 1.10.2
 
 # Install golang binary
index a83fc77132315651dfd432877c7fecb5b4090f4e..dac82097bd6c7b3ab4b9dd02a5e41dbb88f6fc61 100644 (file)
@@ -19,9 +19,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
-    /usr/local/rvm/bin/rvm-exec default gem install bundler && \
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5 && \
     /usr/local/rvm/bin/rvm-exec default gem install fpm --version 1.10.2
 
 # Install golang binary
index d0a099911ce91ac9c0e7892d75072310c91e710e..fdfbd04206f4a90e923034dcaee1474504b3e149 100644 (file)
@@ -19,9 +19,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
-    /usr/local/rvm/bin/rvm-exec default gem install bundler && \
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5 && \
     /usr/local/rvm/bin/rvm-exec default gem install fpm --version 1.10.2
 
 # Install golang binary
index 82d679abfba0e7321d9229280ad1e1d10995d5c1..1d3bb87f699079f20175884295bf194ceb53d864 100644 (file)
@@ -19,8 +19,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5
 
 # udev daemon can't start in a container, so don't try.
 RUN mkdir -p /etc/udev/disabled
index 9c46ef601313939d38549fbd44fa32bee44bfbfb..e759c9ce1dc1966e846f9bb6899f87b7ab52089c 100644 (file)
@@ -17,8 +17,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5
 
 # udev daemon can't start in a container, so don't try.
 RUN mkdir -p /etc/udev/disabled
index 615ab1c00e9a48ba598f88c323b0fbc69e084d46..422ee43e061eb80382368e7bb5c6ce9c850aac7f 100644 (file)
@@ -17,8 +17,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5
 
 # udev daemon can't start in a container, so don't try.
 RUN mkdir -p /etc/udev/disabled
index d530d22a292545873331b65e8d27201e1d5ab747..68babe3fd58c3fd8f308b639347ca7c0b2d12ff0 100644 (file)
@@ -17,8 +17,8 @@ ADD generated/pkuczynski.asc /tmp/
 RUN gpg --import --no-tty /tmp/mpapis.asc && \
     gpg --import --no-tty /tmp/pkuczynski.asc && \
     curl -L https://get.rvm.io | bash -s stable && \
-    /usr/local/rvm/bin/rvm install 2.3 && \
-    /usr/local/rvm/bin/rvm alias create default ruby-2.3
+    /usr/local/rvm/bin/rvm install 2.5 && \
+    /usr/local/rvm/bin/rvm alias create default ruby-2.5
 
 # udev daemon can't start in a container, so don't try.
 RUN mkdir -p /etc/udev/disabled
index 2f5060909508647df66d6d04b49ff9011b3c5fe9..fa0d8ca7fbb8fe809a93ff8153070e8d0ea969ce 100755 (executable)
@@ -904,7 +904,7 @@ install_gem() {
         && cd "$WORKSPACE/$srcpath" \
         && bundle_install_trylocal \
         && gem build "$gemname.gemspec" \
-        && with_test_gemset gem install --no-ri --no-rdoc $(ls -t "$gemname"-*.gem|head -n1)
+        && with_test_gemset gem install --no-document $(ls -t "$gemname"-*.gem|head -n1)
 }
 
 install_sdk/ruby() {
index d5a5a15362f40dfa4efae9def5ebea1a7deeb193..a8323f592d00643e90b5d6ed68e48fd4094e49d5 100644 (file)
@@ -4,13 +4,13 @@ Copyright (C) The Arvados Authors. All rights reserved.
 SPDX-License-Identifier: CC-BY-SA-3.0
 {% endcomment %}
 
-Ruby 2.3 is recommended; Ruby 2.1 is also known to work.
+Ruby 2.5 is recommended; Ruby 2.3 is also known to work.
 
 h4(#rvm). *Option 1: Install with RVM*
 
 <notextile>
-<pre><code><span class="userinput">sudo gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3
-\curl -sSL https://get.rvm.io | sudo bash -s stable --ruby=2.3
+<pre><code><span class="userinput">sudo gpg --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
+\curl -sSL https://get.rvm.io | sudo bash -s stable --ruby=2.5
 </span></code></pre></notextile>
 
 Either log out and log back in to activate RVM, or explicitly load it in all open shells like this:
@@ -59,8 +59,8 @@ Build and install Ruby:
 <notextile>
 <pre><code><span class="userinput">mkdir -p ~/src
 cd ~/src
-curl -f http://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.3.tar.gz | tar xz
-cd ruby-2.3.3
+curl -f http://cache.ruby-lang.org/pub/ruby/2.5/ruby-2.5.5.tar.gz | tar xz
+cd ruby-2.5.5
 ./configure --disable-install-rdoc
 make
 sudo make install
diff --git a/doc/_includes/_install_ruby_and_bundler_sso.liquid b/doc/_includes/_install_ruby_and_bundler_sso.liquid
new file mode 100644 (file)
index 0000000..a8d14ef
--- /dev/null
@@ -0,0 +1,69 @@
+{% comment %}
+Copyright (C) The Arvados Authors. All rights reserved.
+
+SPDX-License-Identifier: CC-BY-SA-3.0
+{% endcomment %}
+
+Ruby 2.3 is recommended; Ruby 2.1 is also known to work.
+
+h4(#rvm). *Option 1: Install with RVM*
+
+<notextile>
+<pre><code><span class="userinput">sudo gpg --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB
+\curl -sSL https://get.rvm.io | sudo bash -s stable --ruby=2.3
+</span></code></pre></notextile>
+
+Either log out and log back in to activate RVM, or explicitly load it in all open shells like this:
+
+<notextile>
+<pre><code><span class="userinput">source /usr/local/rvm/scripts/rvm
+</span></code></pre></notextile>
+
+Once RVM is activated in your shell, install Bundler:
+
+<notextile>
+<pre><code>~$ <span class="userinput">gem install bundler</span>
+</code></pre></notextile>
+
+h4(#fromsource). *Option 2: Install from source*
+
+Install prerequisites for Debian 8:
+
+<notextile>
+<pre><code><span class="userinput">sudo apt-get install \
+    bison build-essential gettext libcurl3 libcurl3-gnutls \
+    libcurl4-openssl-dev libpcre3-dev libreadline-dev \
+    libssl-dev libxslt1.1 zlib1g-dev
+</span></code></pre></notextile>
+
+Install prerequisites for CentOS 7:
+
+<notextile>
+<pre><code><span class="userinput">sudo yum install \
+    libyaml-devel glibc-headers autoconf gcc-c++ glibc-devel \
+    patch readline-devel zlib-devel libffi-devel openssl-devel \
+    make automake libtool bison sqlite-devel tar
+</span></code></pre></notextile>
+
+Install prerequisites for Ubuntu 12.04 or 14.04:
+
+<notextile>
+<pre><code><span class="userinput">sudo apt-get install \
+    gawk g++ gcc make libc6-dev libreadline6-dev zlib1g-dev libssl-dev \
+    libyaml-dev libsqlite3-dev sqlite3 autoconf libgdbm-dev \
+    libncurses5-dev automake libtool bison pkg-config libffi-dev curl
+</span></code></pre></notextile>
+
+Build and install Ruby:
+
+<notextile>
+<pre><code><span class="userinput">mkdir -p ~/src
+cd ~/src
+curl -f http://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.3.tar.gz | tar xz
+cd ruby-2.3.3
+./configure --disable-install-rdoc
+make
+sudo make install
+
+sudo -i gem install bundler</span>
+</code></pre></notextile>
index b2a4f671a3fa0e1e2484f2e2625e9631422a4144..fbed12495b6d5f541f2d01286d929ead4ff5113b 100644 (file)
@@ -15,7 +15,7 @@ The Arvados package repository includes an SSO server package that can help auto
 
 h3(#install_ruby_and_bundler). Install Ruby and Bundler
 
-{% include 'install_ruby_and_bundler' %}
+{% include 'install_ruby_and_bundler_sso' %}
 
 h3(#install_web_server). Set up a Web server
 
index 4a6c453cae89e3039a526d268382c89be28cc55e..0bc697077cf892aaad54c78c7c966316aa1c3f24 100644 (file)
@@ -48,7 +48,7 @@ h2. Get input of a CWL workflow
 import arvados
 api = arvados.api()
 container_request_uuid="qr1hi-xvhdp-zzzzzzzzzzzzzzz"
-container_request = arvados.api().container_requests().get(uuid=container_request_uuid).execute()
+container_request = api.container_requests().get(uuid=container_request_uuid).execute()
 print(container_request["mounts"]["/var/lib/cwl/cwl.input.json"])
 {% endcodeblock %}
 
@@ -59,11 +59,40 @@ import arvados
 import arvados.collection
 api = arvados.api()
 container_request_uuid="qr1hi-xvhdp-zzzzzzzzzzzzzzz"
-container_request = arvados.api().container_requests().get(uuid=container_request_uuid).execute()
+container_request = api.container_requests().get(uuid=container_request_uuid).execute()
 collection = arvados.collection.CollectionReader(container_request["output_uuid"])
 print(collection.open("cwl.output.json").read())
 {% endcodeblock %}
 
+h2. Get state of a CWL workflow
+
+{% codeblock as python %}
+import arvados
+def get_cr_state(cr_uuid):
+    api = arvados.api()
+    cr = api.container_requests().get(uuid=cr_uuid).execute()
+    if cr['container_uuid'] is None:
+        return cr['state']
+    c = api.containers().get(uuid=cr['container_uuid']).execute()
+    if cr['state'] == 'Final' and c['state'] != 'Complete':
+        return 'Cancelled'
+    elif c['state'] in ['Locked', 'Queued']:
+        if c['priority'] == 0:
+            return 'On hold'
+        else:
+            return 'Queued'
+    elif c['state'] == 'Complete' and c['exit_code'] != 0:
+        return 'Failed'
+    elif c['state'] == 'Running':
+        if c['runtime_status'].get('error', None):
+            return 'Failing'
+        elif c['runtime_status'].get('warning', None):
+            return 'Warning'
+    return c['state']
+container_request_uuid = 'qr1hi-xvhdp-zzzzzzzzzzzzzzz'
+print(get_cr_state(container_request_uuid))
+{% endcodeblock %}
+
 h2. List input of child requests
 
 {% codeblock as python %}
@@ -71,9 +100,9 @@ import arvados
 api = arvados.api()
 parent_request_uuid = "qr1hi-xvhdp-zzzzzzzzzzzzzzz"
 namefilter = "bwa%"  # the "like" filter uses SQL pattern match syntax
-container_request = arvados.api().container_requests().get(uuid=parent_request_uuid).execute()
+container_request = api.container_requests().get(uuid=parent_request_uuid).execute()
 parent_container_uuid = container_request["container_uuid"]
-child_requests = arvados.api().container_requests().list(filters=[
+child_requests = api.container_requests().list(filters=[
     ["requesting_container_uuid", "=", parent_container_uuid],
     ["name", "like", namefilter]]).execute()
 for c in child_requests["items"]:
@@ -90,13 +119,13 @@ import arvados
 api = arvados.api()
 parent_request_uuid = "qr1hi-xvhdp-zzzzzzzzzzzzzzz"
 namefilter = "bwa%"  # the "like" filter uses SQL pattern match syntax
-container_request = arvados.api().container_requests().get(uuid=parent_request_uuid).execute()
+container_request = api.container_requests().get(uuid=parent_request_uuid).execute()
 parent_container_uuid = container_request["container_uuid"]
-child_requests = arvados.api().container_requests().list(filters=[
+child_requests = api.container_requests().list(filters=[
     ["requesting_container_uuid", "=", parent_container_uuid],
     ["name", "like", namefilter]]).execute()
 output_uuids = [c["output_uuid"] for c in child_requests["items"]]
-collections = arvados.api().collections().list(filters=[["uuid", "in", output_uuids]]).execute()
+collections = api.collections().list(filters=[["uuid", "in", output_uuids]]).execute()
 uuid_to_pdh = {c["uuid"]: c["portable_data_hash"] for c in collections["items"]}
 for c in child_requests["items"]:
     print("%s -> %s" % (c["name"], uuid_to_pdh[c["output_uuid"]]))
@@ -108,12 +137,12 @@ h2. List failed child requests
 import arvados
 api = arvados.api()
 parent_request_uuid = "qr1hi-xvhdp-zzzzzzzzzzzzzzz"
-container_request = arvados.api().container_requests().get(uuid=parent_request_uuid).execute()
+container_request = api.container_requests().get(uuid=parent_request_uuid).execute()
 parent_container_uuid = container_request["container_uuid"]
-child_requests = arvados.api().container_requests().list(filters=[
+child_requests = api.container_requests().list(filters=[
     ["requesting_container_uuid", "=", parent_container_uuid]], limit=1000).execute()
 child_containers = {c["container_uuid"]: c for c in child_requests["items"]}
-cancelled_child_containers = arvados.api().containers().list(filters=[
+cancelled_child_containers = api.containers().list(filters=[
     ["exit_code", "!=", "0"],
     ["uuid", "in", child_containers.keys()]], limit=1000).execute()
 for c in cancelled_child_containers["items"]:
@@ -127,7 +156,7 @@ import arvados
 import arvados.collection
 api = arvados.api()
 container_request_uuid = "qr1hi-xvhdp-zzzzzzzzzzzzzzz"
-container_request = arvados.api().container_requests().get(uuid=container_request_uuid).execute()
+container_request = api.container_requests().get(uuid=container_request_uuid).execute()
 collection = arvados.collection.CollectionReader(container_request["log_uuid"])
 for c in collection:
     print(collection.open(c).read())
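
The new "Get state of a CWL workflow" recipe above returns a single state string; a natural follow-up is to poll it until the workflow settles. A minimal sketch, not part of the commit — the terminal-state set is inferred from the helper's possible return values:

import time

# Poll the cookbook's get_cr_state() helper (defined above) every 30
# seconds until the container request reaches a terminal state.
container_request_uuid = 'qr1hi-xvhdp-zzzzzzzzzzzzzzz'
while True:
    state = get_cr_state(container_request_uuid)
    print(state)
    if state in ('Complete', 'Failed', 'Cancelled'):
        break
    time.sleep(30)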
index 183e21cf95b01c046ea3acbbad2bc94f39fa41f5..912faf0e875b45655de56355374cca025bcc7377 100644 (file)
@@ -174,7 +174,7 @@ def set_secondary(fsaccess, builder, inputschema, secondaryspec, primary, discov
             if pattern is None:
                 continue
             sfpath = substitute(primary["location"], pattern)
-            required = builder.do_eval(sf["required"], context=primary)
+            required = builder.do_eval(sf.get("required"), context=primary)
 
             if fsaccess.exists(sfpath):
                 primary["secondaryFiles"].append({"location": sfpath, "class": "File"})
index b1c7b90b0d9c66464c81f30986c63e86c4c101d7..ceea719efe3f3f8a539dd9c9079d67bb93b588e4 100644 (file)
@@ -33,7 +33,7 @@ setup(name='arvados-cwl-runner',
       # Note that arvados/build/run-build-packages.sh looks at this
       # file to determine what version of cwltool and schema-salad to build.
       install_requires=[
-          'cwltool==1.0.20190603140227',
+          'cwltool==1.0.20190607183319',
           'schema-salad==4.2.20190417121603',
           'typing >= 3.6.4',
           'ruamel.yaml >=0.15.54, <=0.15.77',
index 5539562070ff2c226b8188f66df8a452af7f59a8..da9790e2d0f3913bed05fd87bdc17a79d6ab92e5 100644 (file)
@@ -9,6 +9,8 @@ $namespaces:
   cwltool: "http://commonwl.org/cwltool#"
 requirements:
   SubworkflowFeatureRequirement: {}
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 inputs:
   i:
     type: File
index b62229fbcafa517cbfb30ebf6bb71e39988753cb..c22ece38b62259a12d8f05842b2cf2ce0a2b02fb 100644 (file)
@@ -56,6 +56,9 @@ class CollectionMock(object):
 
 class TestContainer(unittest.TestCase):
 
+    def setUp(self):
+        cwltool.process._names = set()
+
     def helper(self, runner, enable_reuse=True):
         document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.1")
 
@@ -823,6 +826,9 @@ class TestContainer(unittest.TestCase):
 
 
 class TestWorkflow(unittest.TestCase):
+    def setUp(self):
+        cwltool.process._names = set()
+
     def helper(self, runner, enable_reuse=True):
         document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.1")
 
index d6d9b485ed16cb8fed4acf638733ad9f33d462b7..f08e14f7cdff0e056d9962408d7d7f449c13024f 100644 (file)
@@ -35,6 +35,9 @@ if not os.getenv('ARVADOS_DEBUG'):
 
 class TestJob(unittest.TestCase):
 
+    def setUp(self):
+        cwltool.process._names = set()
+
     def helper(self, runner, enable_reuse=True):
         document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.1")
 
@@ -345,6 +348,10 @@ class TestJob(unittest.TestCase):
 
 
 class TestWorkflow(unittest.TestCase):
+
+    def setUp(self):
+        cwltool.process._names = set()
+
     def helper(self, runner, enable_reuse=True):
         document_loader, avsc_names, schema_metadata, metaschema_loader = cwltool.process.get_schema("v1.1")
 
index d2c4d9c2eaff4e0255e365ae062c478f5f0fcc3f..1dbd968eaa730f738ab87c3eef8ac66b486b8bfc 100644 (file)
@@ -17,6 +17,7 @@ import logging
 import mock
 import sys
 import unittest
+import cwltool.process
 
 from io import BytesIO
 
@@ -335,6 +336,10 @@ def stubs(func):
 
 
 class TestSubmit(unittest.TestCase):
+
+    def setUp(self):
+        cwltool.process._names = set()
+
     @mock.patch("arvados_cwl.arvdocker.arv_docker_get_image")
     @mock.patch("time.sleep")
     @stubs
index 6bcf69ed7fcd50561377878d7e1da9efca7a6240..f607caa7fa7f214425e5b6559b3488bd6e83bc1a 100644 (file)
@@ -10,10 +10,13 @@ $namespaces:
   arv: "http://arvados.org/cwl#"
 requirements:
   SubworkflowFeatureRequirement: {}
+hints:
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 steps:
   step1:
     requirements:
       arv:RunInSingleContainer: {}
     in: []
     out: []
-    run: default-dir6.cwl
\ No newline at end of file
+    run: default-dir6.cwl
index 715f1efcdfb542065faf11bc208463918a9a3c88..c2d4850f3526d1b3f466235f282bd8a9e9fb1e16 100644 (file)
@@ -10,10 +10,13 @@ $namespaces:
   arv: "http://arvados.org/cwl#"
 requirements:
   SubworkflowFeatureRequirement: {}
+hints:
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 steps:
   step1:
     requirements:
       arv:RunInSingleContainer: {}
     in: []
     out: []
-    run: default-dir7.cwl
\ No newline at end of file
+    run: default-dir7.cwl
index acaebb5d58b491a390b48e3064701b8a52331c31..9d23c6a87e69d1502d073b59424a92574519cc7a 100644 (file)
@@ -22,6 +22,9 @@ requirements:
   ScatterFeatureRequirement: {}
   InlineJavascriptRequirement: {}
   StepInputExpressionRequirement: {}
+hints:
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 steps:
   substep:
     in:
index 5795759d9fcf30a33bc86f6f222d5363d34df71f..2701fd1c85991cc018b69a568fb8161dc520321f 100644 (file)
@@ -22,6 +22,9 @@ requirements:
   ScatterFeatureRequirement: {}
   InlineJavascriptRequirement: {}
   StepInputExpressionRequirement: {}
+hints:
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 steps:
   substep:
     in:
index fc06fb30a6157a5a393764029a7d9d9d22c3c8e3..9a26d01132cf07d6ae1cb11aff97a627807ddca5 100644 (file)
@@ -22,6 +22,9 @@ requirements:
   ScatterFeatureRequirement: {}
   InlineJavascriptRequirement: {}
   StepInputExpressionRequirement: {}
+hints:
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 steps:
   substep:
     in:
index a192b86488703187ec2ffb8f4e186693b0ec18df..68a26a0d361a36fe609b981af3b180901a632331 100644 (file)
@@ -35,6 +35,8 @@ steps:
     out: [out]
     hints:
       - class: arv:RunInSingleContainer
+      - class: DockerRequirement
+        dockerPull: arvados/jobs:1.4.0.20190604172024
     run:
       class: Workflow
       id: mysub
@@ -60,5 +62,5 @@ steps:
               out:
                 type: string
                 outputBinding:
-                  outputEval: "out"
+                  outputEval: $("out")
             baseCommand: cat
index 713e0c06cf10bbcad0308144f7a5dc4b56f7601e..b81ad5a22e3ab5cb8e2981fce56c4dec21341373 100644 (file)
@@ -27,6 +27,8 @@ requirements:
 hints:
   arv:IntermediateOutput:
     outputTTL: 60
+  DockerRequirement:
+    dockerPull: arvados/jobs:1.4.0.20190604172024
 steps:
   substep:
     in:
@@ -59,5 +61,5 @@ steps:
               out:
                 type: string
                 outputBinding:
-                  outputEval: "out"
+                  outputEval: $("out")
             baseCommand: cat
index 176ad65bb11bb93672e56807be3aa49822cebaf0..8fedbfc3180c4dfddcc89b6a43019a9f60ad1827 100644 (file)
@@ -695,13 +695,13 @@ func (s *StandaloneSuite) TestGetWithLocalServiceHint(c *C) {
                        "zzzzz-bi6l4-yyyyyyyyyyyyyyy": ks0.url,
                        "zzzzz-bi6l4-xxxxxxxxxxxxxxx": ks0.url,
                        "zzzzz-bi6l4-wwwwwwwwwwwwwww": ks0.url,
-                       uuid: ks.url},
+                       uuid:                          ks.url},
                nil,
                map[string]string{
                        "zzzzz-bi6l4-yyyyyyyyyyyyyyy": ks0.url,
                        "zzzzz-bi6l4-xxxxxxxxxxxxxxx": ks0.url,
                        "zzzzz-bi6l4-wwwwwwwwwwwwwww": ks0.url,
-                       uuid: ks.url},
+                       uuid:                          ks.url},
        )
 
        r, n, uri, err := kc.Get(hash + "+K@" + uuid)
index afe75b31056c5c7d872256875a74265012de3b91..5773cb4f98792354c671a3cfb3ecb90f7f92f0f9 100644 (file)
@@ -10,6 +10,7 @@ import argparse
 import arvados
 import arvados.collection
 import base64
+import ciso8601
 import copy
 import datetime
 import errno
@@ -234,6 +235,19 @@ _group.add_argument('--no-cache', action='store_false', dest='use_cache',
 Do not save upload state in a cache file for resuming.
 """)
 
+_group = upload_opts.add_mutually_exclusive_group()
+_group.add_argument('--trash-at', metavar='YYYY-MM-DDTHH:MM', default=None,
+                    help="""
+Set the trash date of the resulting collection to an absolute date in the future.
+The accepted format is defined by the ISO 8601 standard. Examples: 20090103, 2009-01-03, 20090103T181505, 2009-01-03T18:15:05.\n
+Timezone information can be added. If not, the provided date/time is assumed to be in the local system's timezone.
+""")
+_group.add_argument('--trash-after', type=int, metavar='DAYS', default=None,
+                    help="""
+Set the trash date of the resulting collection to a number of days from the
+date/time that the upload process finishes.
+""")
+
 arg_parser = argparse.ArgumentParser(
     description='Copy data from the local filesystem to Keep.',
     parents=[upload_opts, run_opts, arv_cmd.retry_opt])
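
The two mutually exclusive flags above feed the trash_at parameter added to ArvPutUploadJob later in this diff. A minimal sketch of the two value shapes involved (illustrative, not part of the commit):

import datetime

# --trash-at 2030-01-03T18:15 is parsed into a timezone-naive datetime;
# --trash-after 7 becomes a timedelta, resolved to an absolute UTC
# datetime at save time by _collection_trash_at() (added further down).
absolute = datetime.datetime(2030, 1, 3, 18, 15)
relative = datetime.timedelta(days=7)
resolved = datetime.datetime.utcnow() + relative  # relative -> absolute UTC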
@@ -430,7 +444,8 @@ class ArvPutUploadJob(object):
                  put_threads=None, replication_desired=None, filename=None,
                  update_time=60.0, update_collection=None, storage_classes=None,
                  logger=logging.getLogger('arvados.arv_put'), dry_run=False,
-                 follow_links=True, exclude_paths=[], exclude_names=None):
+                 follow_links=True, exclude_paths=[], exclude_names=None,
+                 trash_at=None):
         self.paths = paths
         self.resume = resume
         self.use_cache = use_cache
@@ -470,6 +485,13 @@ class ArvPutUploadJob(object):
         self.follow_links = follow_links
         self.exclude_paths = exclude_paths
         self.exclude_names = exclude_names
+        self._trash_at = trash_at
+
+        if self._trash_at is not None:
+            if type(self._trash_at) not in [datetime.datetime, datetime.timedelta]:
+                raise TypeError('trash_at should be None, timezone-naive datetime or timedelta')
+            if type(self._trash_at) == datetime.datetime and self._trash_at.tzinfo is not None:
+                raise TypeError('provided trash_at datetime should be timezone-naive')
 
         if not self.use_cache and self.resume:
             raise ArvPutArgumentConflict('resume cannot be True when use_cache is False')
@@ -610,6 +632,17 @@ class ArvPutUploadJob(object):
             if self.use_cache:
                 self._cache_file.close()
 
+    def _collection_trash_at(self):
+        """
+        Returns the trash date that the collection should use at save time.
+        Takes into account absolute/relative trash_at values requested
+        by the user.
+        """
+        if type(self._trash_at) == datetime.timedelta:
+            # Get an absolute datetime for trash_at
+            return datetime.datetime.utcnow() + self._trash_at
+        return self._trash_at
+
     def save_collection(self):
         if self.update:
             # Check if files should be updated on the remote collection.
@@ -625,7 +658,8 @@ class ArvPutUploadJob(object):
                     # The file already exist on remote collection, skip it.
                     pass
             self._remote_collection.save(storage_classes=self.storage_classes,
-                                         num_retries=self.num_retries)
+                                         num_retries=self.num_retries,
+                                         trash_at=self._collection_trash_at())
         else:
             if self.storage_classes is None:
                 self.storage_classes = ['default']
@@ -633,7 +667,8 @@ class ArvPutUploadJob(object):
                 name=self.name, owner_uuid=self.owner_uuid,
                 storage_classes=self.storage_classes,
                 ensure_unique_name=self.ensure_unique_name,
-                num_retries=self.num_retries)
+                num_retries=self.num_retries,
+                trash_at=self._collection_trash_at())
 
     def destroy_cache(self):
         if self.use_cache:
@@ -688,6 +723,15 @@ class ArvPutUploadJob(object):
                     self._save_state()
                 except Exception as e:
                     self.logger.error("Unexpected error trying to save cache file: {}".format(e))
+            # Keep remote collection's trash_at attribute synced when using relative expire dates
+            if self._remote_collection is not None and type(self._trash_at) == datetime.timedelta:
+                try:
+                    self._api_client.collections().update(
+                        uuid=self._remote_collection.manifest_locator(),
+                        body={'trash_at': self._collection_trash_at().strftime("%Y-%m-%dT%H:%M:%S.%fZ")}
+                    ).execute(num_retries=self.num_retries)
+                except Exception as e:
+                    self.logger.error("Unexpected error trying to update remote collection's expire date: {}".format(e))
         else:
             self.bytes_written = self.bytes_skipped
         # Call the reporter, if any
@@ -958,6 +1002,9 @@ class ArvPutUploadJob(object):
     def collection_name(self):
         return self._my_collection().api_response()['name'] if self._my_collection().api_response() else None
 
+    def collection_trash_at(self):
+        return self._my_collection().get_trash_at()
+
     def manifest_locator(self):
         return self._my_collection().manifest_locator()
 
@@ -1073,6 +1120,44 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr,
     if install_sig_handlers:
         arv_cmd.install_signal_handlers()
 
+    # Trash arguments validation
+    trash_at = None
+    if args.trash_at is not None:
+        # ciso8601 considers YYYYMM as invalid but YYYY-MM as valid, so here we
+        # make sure the user provides a complete YYYY-MM-DD date.
+        if not re.match(r'^\d{4}(?P<dash>-?)\d{2}?(?P=dash)\d{2}', args.trash_at):
+            logger.error("--trash-at argument format invalid, use --help to see examples.")
+            sys.exit(1)
+        # Check if no time information was provided. In that case, assume end-of-day.
+        if re.match(r'^\d{4}(?P<dash>-?)\d{2}?(?P=dash)\d{2}$', args.trash_at):
+            args.trash_at += 'T23:59:59'
+        try:
+            trash_at = ciso8601.parse_datetime(args.trash_at)
+        except:
+            logger.error("--trash-at argument format invalid, use --help to see examples.")
+            sys.exit(1)
+        else:
+            if trash_at.tzinfo is not None:
+                # Timezone aware datetime provided.
+                utcoffset = -trash_at.utcoffset()
+            else:
+                # Timezone-naive datetime provided. Assume it is local.
+                if time.daylight:
+                    utcoffset = datetime.timedelta(seconds=time.altzone)
+                else:
+                    utcoffset = datetime.timedelta(seconds=time.timezone)
+            # Convert to UTC timezone naive datetime.
+            trash_at = trash_at.replace(tzinfo=None) + utcoffset
+
+        if trash_at <= datetime.datetime.utcnow():
+            logger.error("--trash-at argument must be set in the future")
+            sys.exit(1)
+    if args.trash_after is not None:
+        if args.trash_after < 1:
+            logger.error("--trash-after argument must be >= 1")
+            sys.exit(1)
+        trash_at = datetime.timedelta(seconds=(args.trash_after * 24 * 60 * 60))
+
     # Determine the name to use
     if args.name:
         if args.stream or args.raw:
@@ -1178,7 +1263,8 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr,
                                  dry_run=args.dry_run,
                                  follow_links=args.follow_links,
                                  exclude_paths=exclude_paths,
-                                 exclude_names=exclude_names)
+                                 exclude_names=exclude_names,
+                                 trash_at=trash_at)
     except ResumeCacheConflict:
         logger.error("\n".join([
             "arv-put: Another process is already uploading this data.",
@@ -1192,7 +1278,7 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr,
             "         --no-resume to start a new resume cache.",
             "         --no-cache to disable resume cache."]))
         sys.exit(1)
-    except CollectionUpdateError as error:
+    except (CollectionUpdateError, PathDoesNotExistError) as error:
         logger.error("\n".join([
             "arv-put: %s" % str(error)]))
         sys.exit(1)
@@ -1202,10 +1288,6 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr,
     except ArvPutUploadNotPending:
         # No files pending for upload
         sys.exit(0)
-    except PathDoesNotExistError as error:
-        logger.error("\n".join([
-            "arv-put: %s" % str(error)]))
-        sys.exit(1)
 
     if not args.dry_run and not args.update_collection and args.resume and writer.bytes_written > 0:
         logger.warning("\n".join([
@@ -1234,10 +1316,21 @@ def main(arguments=None, stdout=sys.stdout, stderr=sys.stderr,
         output = ','.join(writer.data_locators())
     else:
         try:
+            expiration_notice = ""
+            if writer.collection_trash_at() is not None:
+                # Get the local timezone-naive version, and log it with timezone information.
+                if time.daylight:
+                    local_trash_at = writer.collection_trash_at().replace(tzinfo=None) - datetime.timedelta(seconds=time.altzone)
+                else:
+                    local_trash_at = writer.collection_trash_at().replace(tzinfo=None) - datetime.timedelta(seconds=time.timezone)
+                expiration_notice = ". It will expire on {} {}.".format(
+                    local_trash_at.strftime("%Y-%m-%d %H:%M:%S"), time.strftime("%z"))
             if args.update_collection:
-                logger.info(u"Collection updated: '{}'".format(writer.collection_name()))
+                logger.info(u"Collection updated: '{}'{}".format(
+                    writer.collection_name(), expiration_notice))
             else:
-                logger.info(u"Collection saved as '{}'".format(writer.collection_name()))
+                logger.info(u"Collection saved as '{}'{}".format(
+                    writer.collection_name(), expiration_notice))
             if args.portable_data_hash:
                 output = writer.portable_data_hash()
             else:
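
The local-to-UTC conversion in main() above is the subtle part of this change. A condensed sketch of the rule, illustrative and not part of the commit: a timezone-aware datetime supplies its own offset, while a naive one is assumed to be in the local system timezone.

import datetime
import time

def to_naive_utc(trash_at):
    # Mirrors the validation logic in main() above: subtract the
    # datetime's own offset if it has one, otherwise apply the local
    # system offset, yielding a timezone-naive UTC datetime.
    if trash_at.tzinfo is not None:
        utcoffset = -trash_at.utcoffset()
    else:
        utcoffset = datetime.timedelta(
            seconds=time.altzone if time.daylight else time.timezone)
    return trash_at.replace(tzinfo=None) + utcoffset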
index 540e06c6c6a0d571e7a269e5eae7c9e8a1989419..42adf2450d2d5a16cd499f342c3f2f243eefdb75 100644 (file)
@@ -12,6 +12,7 @@ from builtins import str
 from builtins import range
 from functools import partial
 import apiclient
+import ciso8601
 import datetime
 import hashlib
 import json
@@ -212,7 +213,7 @@ class ArvadosPutResumeCacheTest(ArvadosBaseTestCase):
 
     def test_cache_is_locked(self):
         with tempfile.NamedTemporaryFile() as cachefile:
-            cache = arv_put.ResumeCache(cachefile.name)
+            _ = arv_put.ResumeCache(cachefile.name)
             self.assertRaises(arv_put.ResumeCacheConflict,
                               arv_put.ResumeCache, cachefile.name)
 
@@ -311,7 +312,7 @@ class ArvPutUploadJobTest(run_test_server.TestCaseWithServers,
     def test_passing_nonexistant_path_raise_exception(self):
         uuid_str = str(uuid.uuid4())
         with self.assertRaises(arv_put.PathDoesNotExistError):
-            cwriter = arv_put.ArvPutUploadJob(["/this/path/does/not/exist/{}".format(uuid_str)])
+            arv_put.ArvPutUploadJob(["/this/path/does/not/exist/{}".format(uuid_str)])
 
     def test_writer_works_without_cache(self):
         cwriter = arv_put.ArvPutUploadJob(['/dev/null'], resume=False)
@@ -843,7 +844,7 @@ class ArvadosPutTest(run_test_server.TestCaseWithServers,
             fake_httplib2_response(403), b'{}')
         with mock.patch('arvados.collection.Collection.save_new',
                         new=coll_save_mock):
-            with self.assertRaises(SystemExit) as exc_test:
+            with self.assertRaises(SystemExit):
                 self.call_main_with_args(['/dev/null'])
             self.assertRegex(
                 self.main_stderr.getvalue(), matcher)
@@ -918,7 +919,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
         BAD_UUID = 'zzzzz-tpzed-zzzzzzzzzzzzzzz'
         self.authorize_with('active')
         with self.assertRaises(apiclient.errors.HttpError):
-            result = arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
+            arv_put.desired_project_uuid(arv_put.api_client, BAD_UUID,
                                                   0)
 
     def test_short_put_from_stdin(self):
@@ -979,7 +980,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
         # we're about to create is not present in our test fixture.
         manifest_uuid = "00b4e9f40ac4dd432ef89749f1c01e74+47"
         with self.assertRaises(apiclient.errors.HttpError):
-            notfound = arv_put.api_client.collections().get(
+            arv_put.api_client.collections().get(
                 uuid=manifest_uuid).execute()
 
         datadir = self.make_tmpdir()
@@ -990,7 +991,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(err.decode(), r'INFO: Collection saved as ')
         self.assertEqual(p.returncode, 0)
 
@@ -1030,7 +1031,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(err.decode(), r'INFO: Creating new cache file at ')
         self.assertEqual(p.returncode, 0)
         cache_filepath = re.search(r'INFO: Creating new cache file at (.*)',
@@ -1053,7 +1054,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(
             err.decode(),
             r'INFO: Cache expired, starting from scratch.*')
@@ -1069,7 +1070,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(err.decode(), r'INFO: Creating new cache file at ')
         self.assertEqual(p.returncode, 0)
         cache_filepath = re.search(r'INFO: Creating new cache file at (.*)',
@@ -1091,7 +1092,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(
             err.decode(),
             r'ERROR: arv-put: Resume cache contains invalid signature.*')
@@ -1111,7 +1112,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(err.decode(), r'INFO: Creating new cache file at ')
         self.assertEqual(p.returncode, 0)
         cache_filepath = re.search(r'INFO: Creating new cache file at (.*)',
@@ -1135,7 +1136,7 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=self.ENVIRON)
-        (out, err) = p.communicate()
+        (_, err) = p.communicate()
         self.assertRegex(
             err.decode(),
             r'WARNING: Uploaded file \'.*barfile.txt\' access token expired, will re-upload it from scratch')
@@ -1160,6 +1161,107 @@ class ArvPutIntegrationTest(run_test_server.TestCaseWithServers,
         c = arv_put.api_client.collections().get(uuid=updated_col['uuid']).execute()
         self.assertRegex(c['manifest_text'], r'^\..* .*:44:file2\n')
 
+    def test_put_collection_with_utc_expiring_datetime(self):
+        tmpdir = self.make_tmpdir()
+        trash_at = (datetime.datetime.utcnow() + datetime.timedelta(days=90)).strftime('%Y%m%dT%H%MZ')
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        col = self.run_and_find_collection(
+            "",
+            ['--no-progress', '--trash-at', trash_at, tmpdir])
+        self.assertNotEqual(None, col['uuid'])
+        c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+        self.assertEqual(ciso8601.parse_datetime(trash_at),
+            ciso8601.parse_datetime(c['trash_at']))
+
+    def test_put_collection_with_timezone_aware_expiring_datetime(self):
+        tmpdir = self.make_tmpdir()
+        trash_at = (datetime.datetime.utcnow() + datetime.timedelta(days=90)).strftime('%Y%m%dT%H%M-0300')
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        col = self.run_and_find_collection(
+            "",
+            ['--no-progress', '--trash-at', trash_at, tmpdir])
+        self.assertNotEqual(None, col['uuid'])
+        c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+        self.assertEqual(
+            ciso8601.parse_datetime(trash_at).replace(tzinfo=None) + datetime.timedelta(hours=3),
+            ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None))
+
+    def test_put_collection_with_timezone_naive_expiring_datetime(self):
+        tmpdir = self.make_tmpdir()
+        trash_at = (datetime.datetime.utcnow() + datetime.timedelta(days=90)).strftime('%Y%m%dT%H%M')
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        col = self.run_and_find_collection(
+            "",
+            ['--no-progress', '--trash-at', trash_at, tmpdir])
+        self.assertNotEqual(None, col['uuid'])
+        c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+        if time.daylight:
+            offset = datetime.timedelta(seconds=time.altzone)
+        else:
+            offset = datetime.timedelta(seconds=time.timezone)
+        self.assertEqual(
+            ciso8601.parse_datetime(trash_at) + offset,
+            ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None))
+
+    def test_put_collection_with_expiring_date_only(self):
+        tmpdir = self.make_tmpdir()
+        trash_at = '2140-01-01'
+        end_of_day = datetime.timedelta(hours=23, minutes=59, seconds=59)
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        col = self.run_and_find_collection(
+            "",
+            ['--no-progress', '--trash-at', trash_at, tmpdir])
+        self.assertNotEqual(None, col['uuid'])
+        c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+        if time.daylight:
+            offset = datetime.timedelta(seconds=time.altzone)
+        else:
+            offset = datetime.timedelta(seconds=time.timezone)
+        self.assertEqual(
+            ciso8601.parse_datetime(trash_at) + end_of_day + offset,
+            ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None))
+
+    def test_put_collection_with_invalid_absolute_expiring_datetimes(self):
+        cases = ['2100', '210010', '2100-10', '2100-Oct']
+        tmpdir = self.make_tmpdir()
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        for test_datetime in cases:
+            with self.assertRaises(AssertionError):
+                self.run_and_find_collection(
+                    "",
+                    ['--no-progress', '--trash-at', test_datetime, tmpdir])
+
+    def test_put_collection_with_relative_expiring_datetime(self):
+        expire_after = 7
+        dt_before = datetime.datetime.utcnow() + datetime.timedelta(days=expire_after)
+        tmpdir = self.make_tmpdir()
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        col = self.run_and_find_collection(
+            "",
+            ['--no-progress', '--trash-after', str(expire_after), tmpdir])
+        self.assertNotEqual(None, col['uuid'])
+        dt_after = datetime.datetime.utcnow() + datetime.timedelta(days=expire_after)
+        c = arv_put.api_client.collections().get(uuid=col['uuid']).execute()
+        trash_at = ciso8601.parse_datetime(c['trash_at']).replace(tzinfo=None)
+        self.assertTrue(dt_before < trash_at)
+        self.assertTrue(dt_after > trash_at)
+
+    def test_put_collection_with_invalid_relative_expiring_datetime(self):
+        expire_after = 0  # Must be >= 1
+        tmpdir = self.make_tmpdir()
+        with open(os.path.join(tmpdir, 'file1'), 'w') as f:
+            f.write('Relaxing in basins at the end of inlets terminates the endless tests from the box')
+        with self.assertRaises(AssertionError):
+            self.run_and_find_collection(
+                "",
+                ['--no-progress', '--trash-after', str(expire_after), tmpdir])
+
     def test_upload_directory_reference_without_trailing_slash(self):
         tmpdir1 = self.make_tmpdir()
         tmpdir2 = self.make_tmpdir()
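
Taken together, the new tests above pin down how an absolute --trash-at value is normalized before being stored in the collection's trash_at field: UTC and timezone-aware stamps are converted to UTC, naive stamps are interpreted as local time, and bare dates roll forward to the end of the day. A minimal sketch of that normalization, assuming a hypothetical normalize_to_utc helper (arv-put's real logic lives in sdk/python/arvados/commands/put.py, also touched by this commit):

    import datetime
    import time

    import ciso8601  # same parser the tests use

    def normalize_to_utc(value):
        # Hypothetical helper mirroring what the tests assert; not arv-put's code.
        dt = ciso8601.parse_datetime(value)
        if dt.tzinfo is not None:
            # Timezone-aware: convert to UTC and drop the tzinfo.
            return dt.astimezone(datetime.timezone.utc).replace(tzinfo=None)
        if len(value) <= len('YYYY-MM-DD'):
            # Date-only values parse to midnight; push to the end of that day.
            dt += datetime.timedelta(hours=23, minutes=59, seconds=59)
        # Naive values are taken as local time: add the local UTC offset,
        # using the same altzone/timezone choice the tests make.
        offset = time.altzone if time.daylight else time.timezone
        return dt + datetime.timedelta(seconds=offset)
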
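The relative --trash-after test uses a common technique for asserting on "now"-relative values: it brackets the computed trash_at between two timestamps sampled just before and just after the run, so the assertion holds regardless of how long the upload takes. The same pattern in isolation (compute_trash_at is a hypothetical stand-in for arv-put's behavior):

    import datetime

    def compute_trash_at(days):
        # Stand-in for what arv-put does with --trash-after N.
        return datetime.datetime.utcnow() + datetime.timedelta(days=days)

    before = datetime.datetime.utcnow() + datetime.timedelta(days=7)
    trash_at = compute_trash_at(7)
    after = datetime.datetime.utcnow() + datetime.timedelta(days=7)
    assert before <= trash_at <= after
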
index 078b2b7f418d1e94ca2b4ab424be702cdad1197b..b5ac62c9fb397dfe2d429ab365bce8ea7b5c1aba 100644 (file)
@@ -325,4 +325,4 @@ DEPENDENCIES
   uglifier (~> 2.0)
 
 BUNDLED WITH
-   1.16.6
+   1.17.3
index 30ee0787d060689ee91cb2ce136a116e02dbe18c..e07a5aca79b5a310d320c6901df9c5a82326465a 100644 (file)
@@ -248,7 +248,7 @@ class ApplicationController < ActionController::Base
               conditions[0] << " and #{ar_table_name}.#{attr} in (?)"
               conditions << value
             end
-          elsif value.is_a? String or value.is_a? Fixnum or value == true or value == false
+          elsif value.is_a? String or value.is_a? Integer or value == true or value == false
             conditions[0] << " and #{ar_table_name}.#{attr}=?"
             conditions << value
           elsif value.is_a? Hash
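
Ruby 2.4 unified Fixnum and Bignum into Integer and deprecated the old constants, so the scalar type check above swaps Fixnum for Integer with no behavior change. A loose Python analogy of the same check, in the language used elsewhere in this commit (not part of the commit itself):

    # Accept only scalar condition values: strings, integers, and booleans.
    def scalar_condition_value(value):
        return isinstance(value, (str, int, bool))

    assert scalar_condition_value('abc')
    assert scalar_condition_value(42)
    assert not scalar_condition_value({'op': '>'})
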
index a3cef64212ba04122b28148482a03c3b431470cb..2f7e9cd9345ff347c9f5bee05abe58a113dd267b 100644 (file)
@@ -235,7 +235,7 @@ class Commit < ActiveRecord::Base
     begin
       must_git gitdir, "branch"
     rescue GitError => e
-      raise unless /Not a git repository/ =~ e.to_s
+      raise unless /Not a git repository/i =~ e.to_s
       # OK, this just means we need to create a blank cache repository
       # before fetching.
       FileUtils.mkdir_p gitdir
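
Making the match case-insensitive presumably guards against git versions that capitalize the error differently (e.g. "fatal: Not a git repository" vs. "fatal: not a git repository"). A Python analogue of the same guard, with example messages only (the Ruby above is the actual implementation):

    import re

    # Example messages as emitted by different git versions.
    messages = [
        'fatal: Not a git repository (or any of the parent directories): .git',
        'fatal: not a git repository (or any of the parent directories): .git',
    ]

    pattern = re.compile(r'not a git repository', re.IGNORECASE)
    # Only "not a repository" errors are treated as recoverable.
    assert all(pattern.search(m) for m in messages)
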
index cec3cb55a9e83385313b49333523f08532e6e6d2..c111b331371ae211d3bc2e3a9e34ad2a7d6b3982 100644 (file)
@@ -1,2 +1 @@
 *.gem
-Gemfile.lock
diff --git a/services/login-sync/Gemfile.lock b/services/login-sync/Gemfile.lock
new file mode 100644 (file)
index 0000000..d03512d
--- /dev/null
@@ -0,0 +1,87 @@
+PATH
+  remote: .
+  specs:
+    arvados-login-sync (1.3.3.20190528194843)
+      arvados (~> 1.3.0, >= 1.3.0)
+
+GEM
+  remote: https://rubygems.org/
+  specs:
+    activesupport (5.0.7.2)
+      concurrent-ruby (~> 1.0, >= 1.0.2)
+      i18n (>= 0.7, < 2)
+      minitest (~> 5.1)
+      tzinfo (~> 1.1)
+    addressable (2.6.0)
+      public_suffix (>= 2.0.2, < 4.0)
+    andand (1.3.3)
+    arvados (1.3.3.20190320201707)
+      activesupport (>= 3)
+      andand (~> 1.3, >= 1.3.3)
+      arvados-google-api-client (>= 0.7, < 0.8.9)
+      i18n (~> 0)
+      json (>= 1.7.7, < 3)
+      jwt (>= 0.1.5, < 2)
+    arvados-google-api-client (0.8.7.2)
+      activesupport (>= 3.2, < 5.1)
+      addressable (~> 2.3)
+      autoparse (~> 0.3)
+      extlib (~> 0.9)
+      faraday (~> 0.9)
+      googleauth (~> 0.3)
+      launchy (~> 2.4)
+      multi_json (~> 1.10)
+      retriable (~> 1.4)
+      signet (~> 0.6)
+    autoparse (0.3.3)
+      addressable (>= 2.3.1)
+      extlib (>= 0.9.15)
+      multi_json (>= 1.0.0)
+    concurrent-ruby (1.1.5)
+    extlib (0.9.16)
+    faraday (0.15.4)
+      multipart-post (>= 1.2, < 3)
+    googleauth (0.8.1)
+      faraday (~> 0.12)
+      jwt (>= 1.4, < 3.0)
+      memoist (~> 0.16)
+      multi_json (~> 1.11)
+      os (>= 0.9, < 2.0)
+      signet (~> 0.7)
+    i18n (0.9.5)
+      concurrent-ruby (~> 1.0)
+    json (2.2.0)
+    jwt (1.5.6)
+    launchy (2.4.3)
+      addressable (~> 2.3)
+    memoist (0.16.0)
+    metaclass (0.0.4)
+    minitest (5.11.3)
+    mocha (1.8.0)
+      metaclass (~> 0.0.1)
+    multi_json (1.13.1)
+    multipart-post (2.1.1)
+    os (1.0.1)
+    public_suffix (3.0.3)
+    rake (12.3.2)
+    retriable (1.4.1)
+    signet (0.11.0)
+      addressable (~> 2.3)
+      faraday (~> 0.9)
+      jwt (>= 1.5, < 3.0)
+      multi_json (~> 1.10)
+    thread_safe (0.3.6)
+    tzinfo (1.2.5)
+      thread_safe (~> 0.1)
+
+PLATFORMS
+  ruby
+
+DEPENDENCIES
+  arvados-login-sync!
+  minitest (>= 5.0.0)
+  mocha (>= 1.5.0)
+  rake
+
+BUNDLED WITH
+   1.17.3