Merge branch 'master' of git.curoverse.com:arvados into 11876-r-sdk
author Fuad Muhic <fmuhic@capeannenterprises.com>
Wed, 17 Jan 2018 16:22:31 +0000 (17:22 +0100)
committer Fuad Muhic <fmuhic@capeannenterprises.com>
Wed, 17 Jan 2018 16:22:31 +0000 (17:22 +0100)
Arvados-DCO-1.1-Signed-off-by: Fuad Muhic <fmuhic@capeannenterprises.com>

37 files changed:
apps/workbench/app/assets/javascripts/application.js
apps/workbench/app/assets/javascripts/components/edit_tags.js [new file with mode: 0644]
apps/workbench/app/assets/javascripts/edit_collection_tags.js [deleted file]
apps/workbench/app/assets/javascripts/mithril_mount.js
apps/workbench/app/assets/javascripts/models/session_db.js
apps/workbench/app/assets/stylesheets/application.css.scss
apps/workbench/app/controllers/collections_controller.rb
apps/workbench/app/views/collections/_show_tag_rows.html.erb [deleted file]
apps/workbench/app/views/collections/_show_tags.html.erb
apps/workbench/app/views/collections/save_tags.js.erb [deleted file]
apps/workbench/app/views/collections/tags.js.erb [deleted file]
apps/workbench/npm_packages
apps/workbench/public/vocabulary-example.json [new file with mode: 0644]
apps/workbench/test/controllers/collections_controller_test.rb
apps/workbench/test/integration/collections_test.rb
build/build-dev-docker-jobs-image.sh
doc/_config.yml
doc/admin/change-account-owner.html.textile.liquid [new file with mode: 0644]
doc/admin/merge-remote-account.html.textile.liquid [new file with mode: 0644]
doc/api/methods/users.html.textile.liquid
sdk/cwl/arvados_cwl/__init__.py
sdk/cwl/arvados_cwl/arvcontainer.py
sdk/cwl/arvados_cwl/arvjob.py
sdk/cwl/arvados_cwl/arvworkflow.py
sdk/cwl/arvados_cwl/fsaccess.py
sdk/cwl/arvados_cwl/runner.py
sdk/cwl/setup.py
sdk/cwl/tests/test_container.py
sdk/cwl/tests/test_job.py
sdk/cwl/tests/test_submit.py
sdk/go/arvados/node.go [new file with mode: 0644]
services/api/app/controllers/arvados/v1/schema_controller.rb
services/api/app/models/api_client_authorization.rb
services/api/test/functional/arvados/v1/schema_controller_test.rb
services/api/test/integration/remote_user_test.rb
services/crunch-run/crunchrun.go
services/crunch-run/crunchrun_test.go

index bba2f9dcc044b8b20b42e12cac8c62af5c4846f4..b90081f46fe9d5ccdec360165e6bc2528817d7b2 100644 (file)
@@ -33,6 +33,7 @@
 //= require jquery.number.min
 //= require npm-dependencies
 //= require mithril/stream/stream
+//= require awesomplete
 //= require_tree .
 
 Es6ObjectAssign.polyfill()
diff --git a/apps/workbench/app/assets/javascripts/components/edit_tags.js b/apps/workbench/app/assets/javascripts/components/edit_tags.js
new file mode 100644 (file)
index 0000000..ac4d2df
--- /dev/null
@@ -0,0 +1,265 @@
+// Copyright (C) The Arvados Authors. All rights reserved.
+//
+// SPDX-License-Identifier: AGPL-3.0
+
+window.SimpleInput = {
+    view: function(vnode) {
+        return m("input.form-control", {
+            style: {
+                width: '100%',
+            },
+            type: 'text',
+            placeholder: 'Add ' + vnode.attrs.placeholder,
+            value: vnode.attrs.value,
+            onchange: function() {
+                if (this.value != '') {
+                    vnode.attrs.value(this.value)
+                }
+            },
+        }, vnode.attrs.value)
+    },
+}
+
+window.SelectOrAutocomplete = {
+    view: function(vnode) {
+        return m("input.form-control", {
+            style: {
+                width: '100%'
+            },
+            type: 'text',
+            value: vnode.attrs.value,
+            placeholder: (vnode.attrs.create ? 'Add or select ': 'Select ') + vnode.attrs.placeholder,
+        }, vnode.attrs.value)
+    },
+    oncreate: function(vnode) {
+        vnode.state.awesomplete = new Awesomplete(vnode.dom, {
+            list: vnode.attrs.options,
+            minChars: 0,
+            maxItems: 1000000,
+            autoFirst: true,
+            sort: false,
+        })
+        vnode.state.create = vnode.attrs.create
+        vnode.state.options = vnode.attrs.options
+        // Option is selected from the list.
+        $(vnode.dom).on('awesomplete-selectcomplete', function(event) {
+            vnode.attrs.value(this.value)
+        })
+        $(vnode.dom).on('change', function(event) {
+            if (!vnode.state.create && !(this.value in vnode.state.options)) {
+                this.value = vnode.attrs.value()
+            } else {
+                if (vnode.attrs.value() !== this.value) {
+                    vnode.attrs.value(this.value)
+                }
+            }
+        })
+        $(vnode.dom).on('focusin', function(event) {
+            if (this.value === '') {
+                vnode.state.awesomplete.evaluate()
+                vnode.state.awesomplete.open()
+            }
+        })
+    },
+    onupdate: function(vnode) {
+        vnode.state.awesomplete.list = vnode.attrs.options
+        vnode.state.create = vnode.attrs.create
+        vnode.state.options = vnode.attrs.options
+    },
+}
+
+window.TagEditorRow = {
+    view: function(vnode) {
+        var nameOpts = Object.keys(vnode.attrs.vocabulary().tags)
+        var valueOpts = []
+        var inputComponent = SelectOrAutocomplete
+        if (nameOpts.length === 0) {
+            // If there's no vocabulary defined, switch to a simple input field
+            inputComponent = SimpleInput
+        } else {
+            // Name options list
+            if (vnode.attrs.name() != '' && !(vnode.attrs.name() in vnode.attrs.vocabulary().tags)) {
+                nameOpts.push(vnode.attrs.name())
+            }
+            // Value options list
+            if (vnode.attrs.name() in vnode.attrs.vocabulary().tags &&
+                'values' in vnode.attrs.vocabulary().tags[vnode.attrs.name()]) {
+                    valueOpts = vnode.attrs.vocabulary().tags[vnode.attrs.name()].values
+            }
+        }
+        return m("tr", [
+            // Erase tag
+            m("td", [
+                vnode.attrs.editMode &&
+                m('div.text-center', m('a.btn.btn-default.btn-sm', {
+                    style: {
+                        align: 'center'
+                    },
+                    onclick: function(e) { vnode.attrs.removeTag() }
+                }, m('i.fa.fa-fw.fa-trash-o')))
+            ]),
+            // Tag key
+            m("td", [
+                vnode.attrs.editMode ?
+                m("div", {key: 'key'}, [
+                    m(inputComponent, {
+                        options: nameOpts,
+                        value: vnode.attrs.name,
+                        // Allow any tag name unless "strict" is set to true.
+                        create: !vnode.attrs.vocabulary().strict,
+                        placeholder: 'key',
+                    })
+                ])
+                : vnode.attrs.name
+            ]),
+            // Tag value
+            m("td", [
+                vnode.attrs.editMode ?
+                m("div", {key: 'value'}, [
+                    m(inputComponent, {
+                        options: valueOpts,
+                        value: vnode.attrs.value,
+                        placeholder: 'value',
+                        // Allow any value on tags not listed on the vocabulary.
+                        // Allow any value on tags without values, or the ones
+                        // that aren't explicitly declared to be strict.
+                        create: !(vnode.attrs.name() in vnode.attrs.vocabulary().tags)
+                            || !vnode.attrs.vocabulary().tags[vnode.attrs.name()].values
+                            || vnode.attrs.vocabulary().tags[vnode.attrs.name()].values.length === 0
+                            || !vnode.attrs.vocabulary().tags[vnode.attrs.name()].strict,
+                    })
+                ])
+                : vnode.attrs.value
+            ])
+        ])
+    }
+}
+
+window.TagEditorTable = {
+    view: function(vnode) {
+        return m("table.table.table-condensed.table-justforlayout", [
+            m("colgroup", [
+                m("col", {width:"5%"}),
+                m("col", {width:"25%"}),
+                m("col", {width:"70%"}),
+            ]),
+            m("thead", [
+                m("tr", [
+                    m("th"),
+                    m("th", "Key"),
+                    m("th", "Value"),
+                ])
+            ]),
+            m("tbody", [
+                vnode.attrs.tags.length > 0
+                ? vnode.attrs.tags.map(function(tag, idx) {
+                    return m(TagEditorRow, {
+                        key: tag.rowKey,
+                        removeTag: function() {
+                            vnode.attrs.tags.splice(idx, 1)
+                            vnode.attrs.dirty(true)
+                        },
+                        editMode: vnode.attrs.editMode,
+                        name: tag.name,
+                        value: tag.value,
+                        vocabulary: vnode.attrs.vocabulary
+                    })
+                })
+                : m("tr", m("td[colspan=3]", m("center", "Loading tags...")))
+            ]),
+        ])
+    }
+}
+
+var uniqueID = 1
+
+window.TagEditorApp = {
+    appendTag: function(vnode, name, value) {
+        var tag = {name: m.stream(name), value: m.stream(value), rowKey: uniqueID++}
+        vnode.state.tags.push(tag)
+        // Set dirty flag when any of name/value changes to a non-empty string
+        tag.name.map(function() { vnode.state.dirty(true) })
+        tag.value.map(function() { vnode.state.dirty(true) })
+        tag.name.map(m.redraw)
+    },
+    oninit: function(vnode) {
+        vnode.state.sessionDB = new SessionDB()
+        // Get vocabulary
+        vnode.state.vocabulary = m.stream({"strict":false, "tags":{}})
+        var vocabularyTimestamp = parseInt(Date.now() / 300000) // Bust cache every 5 minutes
+        m.request('/vocabulary.json?v=' + vocabularyTimestamp).then(vnode.state.vocabulary)
+        vnode.state.editMode = vnode.attrs.targetEditable
+        vnode.state.tags = []
+        vnode.state.dirty = m.stream(false)
+        vnode.state.dirty.map(m.redraw)
+        vnode.state.objPath = '/arvados/v1/'+vnode.attrs.targetController+'/'+vnode.attrs.targetUuid
+        // Get tags
+        vnode.state.sessionDB.request(
+            vnode.state.sessionDB.loadLocal(),
+            '/arvados/v1/'+vnode.attrs.targetController,
+            {
+                data: {
+                    filters: JSON.stringify([['uuid', '=', vnode.attrs.targetUuid]]),
+                    select: JSON.stringify(['properties'])
+                },
+            }).then(function(obj) {
+                if (obj.items.length == 1) {
+                    o = obj.items[0]
+                    Object.keys(o.properties).forEach(function(k) {
+                        vnode.state.appendTag(vnode, k, o.properties[k])
+                    })
+                    if (vnode.state.editMode) {
+                        vnode.state.appendTag(vnode, '', '')
+                    }
+                    // Data synced with server, so dirty state should be false
+                    vnode.state.dirty(false)
+                    // Add new tag row when the last one is completed
+                    vnode.state.dirty.map(function() {
+                        if (!vnode.state.editMode) { return }
+                        lastTag = vnode.state.tags.slice(-1).pop()
+                        if (lastTag === undefined || (lastTag.name() !== '' || lastTag.value() !== '')) {
+                            vnode.state.appendTag(vnode, '', '')
+                        }
+                    })
+                }
+            }
+        )
+    },
+    view: function(vnode) {
+        return [
+            vnode.state.editMode &&
+            m("div.pull-left", [
+                m("a.btn.btn-primary.btn-sm"+(vnode.state.dirty() ? '' : '.disabled'), {
+                    style: {
+                        margin: '10px 0px'
+                    },
+                    onclick: function(e) {
+                        var tags = {}
+                        vnode.state.tags.forEach(function(t) {
+                            // Only ignore tags with empty key
+                            if (t.name() != '') {
+                                tags[t.name()] = t.value()
+                            }
+                        })
+                        vnode.state.sessionDB.request(
+                            vnode.state.sessionDB.loadLocal(),
+                            vnode.state.objPath, {
+                                method: "PUT",
+                                data: {properties: JSON.stringify(tags)}
+                            }
+                        ).then(function(v) {
+                            vnode.state.dirty(false)
+                        })
+                    }
+                }, vnode.state.dirty() ? ' Save changes ' : ' Saved ')
+            ]),
+            // Tags table
+            m(TagEditorTable, {
+                editMode: vnode.state.editMode,
+                tags: vnode.state.tags,
+                vocabulary: vnode.state.vocabulary,
+                dirty: vnode.state.dirty
+            })
+        ]
+    },
+}
diff --git a/apps/workbench/app/assets/javascripts/edit_collection_tags.js b/apps/workbench/app/assets/javascripts/edit_collection_tags.js
deleted file mode 100644 (file)
index e1c1515..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright (C) The Arvados Authors. All rights reserved.
-//
-// SPDX-License-Identifier: AGPL-3.0
-
-jQuery(function($){
-  $(document).
-    on('click', '.collection-tag-save, .collection-tag-cancel', function(event) {
-        $('.edit-collection-tags').removeClass('disabled');
-        $('#edit-collection-tags').attr("title", "Edit tags");
-        $('.collection-tag-add').addClass('hide');
-        $('.collection-tag-remove').addClass('hide');
-        $('.collection-tag-save').addClass('hide');
-        $('.collection-tag-cancel').addClass('hide');
-        $('.collection-tag-field').prop("contenteditable", false);
-    }).
-    on('click', '.edit-collection-tags', function(event) {
-        $('.edit-collection-tags').addClass('disabled');
-        $('#edit-collection-tags').attr("title", "");
-        $('.collection-tag-add').removeClass('hide');
-        $('.collection-tag-remove').removeClass('hide');
-        $('.collection-tag-save').removeClass('hide');
-        $('.collection-tag-cancel').removeClass('hide');
-        $('.collection-tag-field').prop("contenteditable", true);
-        $('div').remove('.collection-tags-status-label');
-    }).
-    on('click', '.collection-tag-save', function(event) {
-      var tag_data = {};
-      var has_tags = false;
-
-      var $tags = $(".collection-tags-table");
-      $tags.find('tr').each(function (i, el) {
-        var $tds = $(this).find('td');
-        var $key = $tds.eq(1).text();
-        if ($key && $key.trim().length > 0) {
-          has_tags = true;
-          tag_data[$key.trim()] = $tds.eq(2).text().trim();
-        }
-      });
-
-      var to_send;
-      if (has_tags == false) {
-        to_send = {tag_data: "empty"}
-      } else {
-        to_send = {tag_data: tag_data}
-      }
-
-      $.ajax($(location).attr('pathname')+'/save_tags', {
-          type: 'POST',
-          data: to_send
-      }).success(function(data, status, jqxhr) {
-        $('.collection-tags-status').append('<div class="collection-tags-status-label alert alert-success"><p class="contain-align-left">Saved successfully.</p></div>');
-      }).fail(function(jqxhr, status, error) {
-        $('.collection-tags-status').append('<div class="collection-tags-status-label alert alert-danger"><p class="contain-align-left">We are sorry. There was an error saving tags. Please try again.</p></div>');
-      });
-    }).
-    on('click', '.collection-tag-cancel', function(event) {
-      $.ajax($(location).attr('pathname')+'/tags', {
-          type: 'GET'
-      });
-    }).
-    on('click', '.collection-tag-remove', function(event) {
-      $(this).parents('tr').detach();
-    }).
-    on('click', '.collection-tag-add', function(event) {
-      var $collection_tags = $(this).closest('.collection-tags-container');
-      var $clone = $collection_tags.find('tr.hide').clone(true).removeClass('hide');
-      $collection_tags.find('table').append($clone);
-    }).
-    on('keypress', '.collection-tag-field', function(event){
-      return event.which != 13;
-    });
-});
index f4689b51d7ebfc265476dfa3e0b748ac33b62b94..7995ffea6ab8a69dd97030bef774de3599dfe5e6 100644 (file)
@@ -4,6 +4,7 @@
 
 $(document).on('ready arv:pane:loaded', function() {
     $('[data-mount-mithril]').each(function() {
-        m.mount(this, window[$(this).data('mount-mithril')])
+        var data = $(this).data()
+        m.mount(this, {view: function () {return m(window[data.mountMithril], data)}})
     })
 })
index ad9ad1878417370dfd75294e9bd9cecbe25880d1..d5cd98d386499adf68f049980c036fbd1c4155e7 100644 (file)
@@ -28,6 +28,17 @@ window.SessionDB = function() {
             })
             return sessions
         },
+        loadLocal: function() {
+            var sessions = db.loadActive()
+            var s = false
+            Object.values(sessions).forEach(function(session) {
+                if (session.isFromRails) {
+                    s = session
+                    return
+                }
+            })
+            return s
+        },
         save: function(k, v) {
             var sessions = db.loadAll()
             sessions[k] = v
@@ -116,7 +127,7 @@ window.SessionDB = function() {
                     },
                 }).then(function(user) {
                     session.user = user
-                    db.save(user.uuid.slice(0, 5), session)
+                    db.save(user.owner_uuid.slice(0, 5), session)
                     db.trash(key)
                 })
             })
@@ -134,10 +145,11 @@ window.SessionDB = function() {
             // Guess workbench.{apihostport} is a Workbench... unless
             // the host part of apihostport is an IPv4 or [IPv6]
             // address.
-            if (!session.baseURL.match('://(\\[|\\d+\\.\\d+\\.\\d+\\.\\d+[:/])'))
+            if (!session.baseURL.match('://(\\[|\\d+\\.\\d+\\.\\d+\\.\\d+[:/])')) {
                 var wbUrl = session.baseURL.replace('://', '://workbench.')
                 // Remove the trailing slash, if it's there.
                 return wbUrl.slice(-1) == '/' ? wbUrl.slice(0, -1) : wbUrl
+            }
             return null
         },
         // Return a m.stream that will get fulfilled with the
index 32d80255d9fd831d2ea7f3e55ec163e53f154f06..4112b22b8ce60b0c6beb2171c7f9d834ba09bbc9 100644 (file)
@@ -16,6 +16,7 @@
  *= require bootstrap
  *= require bootstrap3-editable/bootstrap-editable
  *= require morris
+ *= require awesomplete
  *= require_tree .
  */
 
@@ -320,3 +321,13 @@ ul.nav.nav-tabs {
 {
     width: 98%!important;
 }
+
+/* Needed for awesomplete to play nice with bootstrap */
+div.awesomplete {
+    display: block;
+}
+/* Makes awesomplete listings to be scrollable */
+.awesomplete > ul {
+    max-height: 410px;
+    overflow-y: auto;
+}
\ No newline at end of file
index 5fcb2dc569ff6b2446c602dc26de61a069155ba2..0a7f22b95789edc163198fbf32ab55045317f298 100644 (file)
@@ -300,30 +300,6 @@ class CollectionsController < ApplicationController
     end
   end
 
-  def tags
-    render
-  end
-
-  def save_tags
-    tags_param = params['tag_data']
-    if tags_param
-      if tags_param.is_a?(String) && tags_param == "empty"
-        tags = {}
-      else
-        tags = tags_param
-      end
-    end
-
-    if tags
-      if @object.update_attributes properties: tags
-        @saved_tags = true
-        render
-      else
-        self.render_error status: 422
-      end
-    end
-  end
-
   protected
 
   def find_usable_token(token_list)
diff --git a/apps/workbench/app/views/collections/_show_tag_rows.html.erb b/apps/workbench/app/views/collections/_show_tag_rows.html.erb
deleted file mode 100644 (file)
index eb57913..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-<%# Copyright (C) The Arvados Authors. All rights reserved.
-
-SPDX-License-Identifier: AGPL-3.0 %>
-
-<%
-  tags = object.properties
-%>
-      <% if tags.andand.is_a?(Hash) %>
-        <% tags.each do |k, v| %>
-          <tr class="collection-tag-<%=k%>">
-            <td>
-              <% if object.editable? %>
-                <i class="glyphicon glyphicon-remove collection-tag-remove hide" style="cursor: pointer;"></i>
-              <% end %>
-            </td>
-            <td class="collection-tag-field collection-tag-field-key">
-              <%= k %>
-            </td>
-            <td class="collection-tag-field collection-tag-field-value">
-              <%= v %>
-            </td>
-          </tr>
-        <% end %>
-      <% end %>
-
-      <% if @object.editable? %>
-        <!-- A hidden row to add new tag -->
-        <tr class="collection-tag-hidden hide">
-          <td>
-            <i class="glyphicon glyphicon-remove collection-tag-remove hide" style="cursor: pointer"></i>
-          </td>
-          <td class="collection-tag-field collection-tag-field-key"></td>
-          <td class="collection-tag-field collection-tag-field-value"></td>
-        </tr>
-      <% end %>
index afab5266e9c2e46dbe5c84425ba9f42b7f41f544..3e0460a2398134955e936df58cc58622f0aa6385 100644 (file)
@@ -2,51 +2,11 @@
 
 SPDX-License-Identifier: AGPL-3.0 %>
 
-<%
-  object = @object unless object
-%>
+  <div class="arv-log-refresh-control"
+    data-load-throttle="86486400000" <%# 1001 nights (in milliseconds) %>
+    ></div>
 
   <div class="collection-tags-container" style="padding-left:2em;padding-right:2em;">
-    <% if object.editable? %>
-      <p title="Edit tags" id="edit-collection-tags">
-        <a class="btn btn-primary edit-collection-tags">Edit</a>
-      </p>
-    <% end %>
-
-    <table class="table table-condensed table-fixedlayout collection-tags-table" border="1">
-      <colgroup>
-        <col width="5%" />
-        <col width="25%" />
-        <col width="70%" />
-      </colgroup>
-
-      <thead>
-        <tr>
-          <th></th>
-          <th>Key</th>
-          <th>Value</th>
-        </tr>
-      </thead>
-
-      <tbody class="collection-tag-rows">
-        <%= render partial: 'show_tag_rows', locals: {object: object} %>
-      </tbody>
-    </table>
-    <div>
-      <% if object.editable? %>
-        <div class="pull-left">
-          <a class="btn btn-primary btn-sm collection-tag-add hide"><i class="glyphicon glyphicon-plus"></i> Add new tag </a>
-        </div>
-        <div class="pull-right">
-          <%= link_to(save_tags_collection_path, {class: 'btn btn-sm btn-primary collection-tag-save hide', :remote => true, method: 'post', return_to: request.url}) do %>
-            Save
-          <% end %>
-          <%= link_to(tags_collection_path, {class: 'btn btn-sm btn-primary collection-tag-cancel hide', :remote => true, method: 'get', return_to: request.url}) do %>
-            Cancel
-          <% end %>
-        </div>
-
-        <div><div class="collection-tags-status"/></div></div>
-      <% end %>
-    </div>
+    <div data-mount-mithril="TagEditorApp" data-target-controller="<%= controller_name %>" data-target-uuid="<%= @object.uuid %>" data-target-editable="<%= @object.editable? %>"></div>
   </div>
\ No newline at end of file
diff --git a/apps/workbench/app/views/collections/save_tags.js.erb b/apps/workbench/app/views/collections/save_tags.js.erb
deleted file mode 100644 (file)
index 073db7d..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-<%# Copyright (C) The Arvados Authors. All rights reserved.
-
-SPDX-License-Identifier: AGPL-3.0 %>
-
-<% if @saved_tags %>
-$(".collection-tag-rows").html("<%= escape_javascript(render partial: 'show_tag_rows', locals: {object: @object}) %>");
-<% end %>
diff --git a/apps/workbench/app/views/collections/tags.js.erb b/apps/workbench/app/views/collections/tags.js.erb
deleted file mode 100644 (file)
index e2154d6..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-<%# Copyright (C) The Arvados Authors. All rights reserved.
-
-SPDX-License-Identifier: AGPL-3.0 %>
-
-$(".collection-tag-rows").html("<%= escape_javascript(render partial: 'show_tag_rows', locals: {object: @object}) %>");
index 2d57573dfd53dc3ceb5b90dccfe3bef363081ecc..c126b559fb138387b27424773cd931053f6bcc86 100644 (file)
@@ -6,6 +6,8 @@
 
 # Browserify is required.
 npm 'browserify', require: false
+npm 'jquery'
+npm 'awesomplete'
 
 npm 'mithril'
 npm 'es6-object-assign'
diff --git a/apps/workbench/public/vocabulary-example.json b/apps/workbench/public/vocabulary-example.json
new file mode 100644 (file)
index 0000000..b227dc2
--- /dev/null
@@ -0,0 +1,32 @@
+{
+    "strict": false,
+    "tags": {
+        "fruit": {
+            "values": ["pineapple", "tomato", "orange", "banana", "advocado", "lemon", "apple", "peach", "strawberry"],
+            "strict": true
+        },
+        "animal": {
+            "values": ["human", "dog", "elephant", "eagle"],
+            "strict": false
+        },
+        "color": {
+            "values": ["yellow", "red", "magenta", "green"],
+            "strict": false
+        },
+        "text": {},
+        "category": {
+            "values": ["experimental", "development", "production"]
+        },
+        "comments": {},
+        "importance": {
+            "values": ["critical", "important", "low priority"]
+        },
+        "size": {
+            "values": ["x-small", "small", "medium", "large", "x-large"]
+        },
+        "country": {
+            "values": ["Afghanistan","Åland Islands","Albania","Algeria","American Samoa","AndorrA","Angola","Anguilla","Antarctica","Antigua and Barbuda","Argentina","Armenia","Aruba","Australia","Austria","Azerbaijan","Bahamas","Bahrain","Bangladesh","Barbados","Belarus","Belgium","Belize","Benin","Bermuda","Bhutan","Bolivia","Bosnia and Herzegovina","Botswana","Bouvet Island","Brazil","British Indian Ocean Territory","Brunei Darussalam","Bulgaria","Burkina Faso","Burundi","Cambodia","Cameroon","Canada","Cape Verde","Cayman Islands","Central African Republic","Chad","Chile","China","Christmas Island","Cocos (Keeling) Islands","Colombia","Comoros","Congo","Congo, The Democratic Republic of the","Cook Islands","Costa Rica","Cote D'Ivoire","Croatia","Cuba","Cyprus","Czech Republic","Denmark","Djibouti","Dominica","Dominican Republic","Ecuador","Egypt","El Salvador","Equatorial Guinea","Eritrea","Estonia","Ethiopia","Falkland Islands (Malvinas)","Faroe Islands","Fiji","Finland","France","French Guiana","French Polynesia","French Southern Territories","Gabon","Gambia","Georgia","Germany","Ghana","Gibraltar","Greece","Greenland","Grenada","Guadeloupe","Guam","Guatemala","Guernsey","Guinea","Guinea-Bissau","Guyana","Haiti","Heard Island and Mcdonald Islands","Holy See (Vatican City State)","Honduras","Hong Kong","Hungary","Iceland","India","Indonesia","Iran, Islamic Republic Of","Iraq","Ireland","Isle of Man","Israel","Italy","Jamaica","Japan","Jersey","Jordan","Kazakhstan","Kenya","Kiribati","Korea, Democratic People'S Republic of","Korea, Republic of","Kuwait","Kyrgyzstan","Lao People'S Democratic Republic","Latvia","Lebanon","Lesotho","Liberia","Libyan Arab Jamahiriya","Liechtenstein","Lithuania","Luxembourg","Macao","Macedonia, The Former Yugoslav Republic of","Madagascar","Malawi","Malaysia","Maldives","Mali","Malta","Marshall Islands","Martinique","Mauritania","Mauritius","Mayotte","Mexico","Micronesia, Federated States of","Moldova, Republic 
of","Monaco","Mongolia","Montserrat","Morocco","Mozambique","Myanmar","Namibia","Nauru","Nepal","Netherlands","Netherlands Antilles","New Caledonia","New Zealand","Nicaragua","Niger","Nigeria","Niue","Norfolk Island","Northern Mariana Islands","Norway","Oman","Pakistan","Palau","Palestinian Territory, Occupied","Panama","Papua New Guinea","Paraguay","Peru","Philippines","Pitcairn","Poland","Portugal","Puerto Rico","Qatar","Reunion","Romania","Russian Federation","RWANDA","Saint Helena","Saint Kitts and Nevis","Saint Lucia","Saint Pierre and Miquelon","Saint Vincent and the Grenadines","Samoa","San Marino","Sao Tome and Principe","Saudi Arabia","Senegal","Serbia and Montenegro","Seychelles","Sierra Leone","Singapore","Slovakia","Slovenia","Solomon Islands","Somalia","South Africa","South Georgia and the South Sandwich Islands","Spain","Sri Lanka","Sudan","Suriname","Svalbard and Jan Mayen","Swaziland","Sweden","Switzerland","Syrian Arab Republic","Taiwan, Province of China","Tajikistan","Tanzania, United Republic of","Thailand","Timor-Leste","Togo","Tokelau","Tonga","Trinidad and Tobago","Tunisia","Turkey","Turkmenistan","Turks and Caicos Islands","Tuvalu","Uganda","Ukraine","United Arab Emirates","United Kingdom","United States","United States Minor Outlying Islands","Uruguay","Uzbekistan","Vanuatu","Venezuela","Viet Nam","Virgin Islands, British","Virgin Islands, U.S.","Wallis and Futuna","Western Sahara","Yemen","Zambia","Zimbabwe"],
+            "strict": true
+        }
+    }
+}
\ No newline at end of file
index 773a4f45714b515d664ec290ecb61e32bcca5695..abe7f6af453f1c72070251ba90a889407469097a 100644 (file)
@@ -728,64 +728,4 @@ class CollectionsControllerTest < ActionController::TestCase
     assert_response 422
     assert_includes json_response['errors'], 'Duplicate file path'
   end
-
-  [
-    [:active, true],
-    [:spectator, false],
-  ].each do |user, editable|
-    test "tags tab #{editable ? 'shows' : 'does not show'} edit button to #{user}" do
-      use_token user
-
-      get :tags, {
-        id: api_fixture('collections')['collection_with_tags_owned_by_active']['uuid'],
-        format: :js,
-      }, session_for(user)
-
-      assert_response :success
-
-      found = 0
-      response.body.scan /<i[^>]+>/ do |remove_icon|
-        remove_icon.scan(/\ collection-tag-remove(.*?)\"/).each do |i,|
-          found += 1
-        end
-      end
-
-      if editable
-        assert_equal(3, found)  # two from the tags + 1 from the hidden "add tag" row
-      else
-        assert_equal(0, found)
-      end
-    end
-  end
-
-  test "save_tags and verify that 'other' properties are retained" do
-    use_token :active
-
-    collection = api_fixture('collections')['collection_with_tags_owned_by_active']
-
-    new_tags = {"new_tag1" => "new_tag1_value",
-                "new_tag2" => "new_tag2_value"}
-
-    post :save_tags, {
-      id: collection['uuid'],
-      tag_data: new_tags,
-      format: :js,
-    }, session_for(:active)
-
-    assert_response :success
-    assert_equal true, response.body.include?("new_tag1")
-    assert_equal true, response.body.include?("new_tag1_value")
-    assert_equal true, response.body.include?("new_tag2")
-    assert_equal true, response.body.include?("new_tag2_value")
-    assert_equal false, response.body.include?("existing tag 1")
-    assert_equal false, response.body.include?("value for existing tag 1")
-
-    updated_tags = Collection.find(collection['uuid']).properties
-    assert_equal true, updated_tags.keys.include?(:'new_tag1')
-    assert_equal new_tags['new_tag1'], updated_tags[:'new_tag1']
-    assert_equal true, updated_tags.keys.include?(:'new_tag2')
-    assert_equal new_tags['new_tag2'], updated_tags[:'new_tag2']
-    assert_equal false, updated_tags.keys.include?(:'existing tag 1')
-    assert_equal false, updated_tags.keys.include?(:'existing tag 2')
-  end
 end
index 71cfe38abfda32b2d5b5ce943ecdbf26f46ff52b..443130a4a92c60cd6a46a4f4ca749d9712a5a7f9 100644 (file)
@@ -434,88 +434,4 @@ class CollectionsTest < ActionDispatch::IntegrationTest
     first('.lock-collection-btn').click
     accept_alert
   end
-
-  test "collection tags tab" do
-    visit page_with_token('active', '/collections/zzzzz-4zz18-bv31uwvy3neko21')
-
-    click_link 'Tags'
-    wait_for_ajax
-
-    # verify initial state
-    assert_selector 'a', text: 'Edit'
-    assert_no_selector 'a', text: 'Add new tag'
-    assert_no_selector 'a', text: 'Save'
-    assert_no_selector 'a', text: 'Cancel'
-
-    # Verify controls in edit mode
-    first('.edit-collection-tags').click
-    assert_selector 'a.disabled', text: 'Edit'
-    assert_selector 'a', text: 'Add new tag'
-    assert_selector 'a', text: 'Save'
-    assert_selector 'a', text: 'Cancel'
-
-    # add two tags
-    first('.glyphicon-plus').click
-    first('.collection-tag-field-key').click
-    first('.collection-tag-field-key').set('key 1')
-    first('.collection-tag-field-value').click
-    first('.collection-tag-field-value').set('value 1')
-
-    first('.glyphicon-plus').click
-    editable_key_fields = page.all('.collection-tag-field-key')
-    editable_key_fields[1].click
-    editable_key_fields[1].set('key 2')
-    editable_val_fields = page.all('.collection-tag-field-value')
-    editable_val_fields[1].click
-    editable_val_fields[1].set('value 2')
-
-    click_on 'Save'
-    wait_for_ajax
-
-    # added tags; verify
-    assert_text 'key 1'
-    assert_text 'value 1'
-    assert_text 'key 2'
-    assert_text 'value 2'
-    assert_selector 'a', text: 'Edit'
-    assert_no_selector 'a', text: 'Save'
-
-    # remove first tag
-    first('.edit-collection-tags').click
-    assert_not_nil first('.glyphicon-remove')
-    first('.glyphicon-remove').click
-    click_on 'Save'
-    wait_for_ajax
-
-    assert_text 'key 2'
-    assert_text 'value 2'
-    assert_no_text 'key 1'
-    assert_no_text 'value 1'
-    assert_selector 'a', text: 'Edit'
-
-    # Click on cancel and verify
-    first('.edit-collection-tags').click
-    first('.collection-tag-field-key').click
-    first('.collection-tag-field-key').set('this key wont stick')
-    first('.collection-tag-field-value').click
-    first('.collection-tag-field-value').set('this value wont stick')
-
-    click_on 'Cancel'
-    wait_for_ajax
-
-    assert_text 'key 2'
-    assert_text 'value 2'
-    assert_no_text 'this key wont stick'
-    assert_no_text 'this value wont stick'
-
-    # remove all tags
-    first('.edit-collection-tags').click
-    first('.glyphicon-remove').click
-    click_on 'Save'
-    wait_for_ajax
-
-    assert_selector 'a', text: 'Edit'
-    assert_no_text 'key 2'
-    assert_no_text 'value 2'
-  end
 end
index 639096c9e83712f0ccf47fc111f5d500add34f0a..e1e5063f738b1c9c092a166f7877bc7525964254 100755 (executable)
@@ -64,12 +64,13 @@ fi
 python_sdk_ts=$(cd sdk/python && timestamp_from_git)
 cwl_runner_ts=$(cd sdk/cwl && timestamp_from_git)
 
+python_sdk_version=$(cd sdk/python && nohash_version_from_git 0.1)
+cwl_runner_version=$(cd sdk/cwl && nohash_version_from_git 1.0)
+
 if [[ $python_sdk_ts -gt $cwl_runner_ts ]]; then
-    gittag=$(git log --first-parent --max-count=1 --format=format:%H sdk/python)
-else
-    gittag=$(git log --first-parent --max-count=1 --format=format:%H sdk/cwl)
+    cwl_runner_version=$(cd sdk/python && nohash_version_from_git 1.0)
 fi
 
-docker build --build-arg sdk=$sdk --build-arg runner=$runner --build-arg salad=$salad --build-arg cwltool=$cwltool -f "$WORKSPACE/sdk/dev-jobs.dockerfile" -t arvados/jobs:$gittag "$WORKSPACE/sdk"
-echo arv-keepdocker arvados/jobs $gittag
-arv-keepdocker arvados/jobs $gittag
+docker build --build-arg sdk=$sdk --build-arg runner=$runner --build-arg salad=$salad --build-arg cwltool=$cwltool -f "$WORKSPACE/sdk/dev-jobs.dockerfile" -t arvados/jobs:$cwl_runner_version "$WORKSPACE/sdk"
+echo arv-keepdocker arvados/jobs $cwl_runner_version
+arv-keepdocker arvados/jobs $cwl_runner_version
index 7b992d6c9453ec87f43dd80fe044c94da2ad256b..08b581c0d71de7c3fe7ab831191735ee4e7c05b1 100644 (file)
@@ -74,7 +74,10 @@ navbar:
       - user/topics/run-command.html.textile.liquid
       - user/reference/job-pipeline-ref.html.textile.liquid
       - user/examples/crunch-examples.html.textile.liquid
+    - Admin tools:
       - user/topics/arvados-sync-groups.html.textile.liquid
+      - admin/change-account-owner.html.textile.liquid
+      - admin/merge-remote-account.html.textile.liquid
     - Query the metadata database:
       - user/topics/tutorial-trait-search.html.textile.liquid
     - Arvados License:
diff --git a/doc/admin/change-account-owner.html.textile.liquid b/doc/admin/change-account-owner.html.textile.liquid
new file mode 100644 (file)
index 0000000..d48572b
--- /dev/null
@@ -0,0 +1,41 @@
+---
+layout: default
+navsection: userguide
+title: "Changing account ownership"
+...
+{% comment %}
+Copyright (C) The Arvados Authors. All rights reserved.
+
+SPDX-License-Identifier: CC-BY-SA-3.0
+{% endcomment %}
+
+It is sometimes necessary to reassign an existing Arvados user account to a new Google account.
+
+Examples:
+* A user’s email address has changed from <code>person@old.example.com</code> to <code>person@new.example.com</code>.
+* A user who used to authenticate via LDAP is switching to Google login.
+
+This can be done by an administrator using Arvados APIs.
+
+First, determine the user’s existing UUID, e.g., @aaaaa-tpzed-abcdefghijklmno@.
+
+Ensure the new email address is not already associated with a different Arvados account. If it is, disassociate it by clearing that account’s @identity_url@ and @email@ fields.
+
+Clear the @identity_url@ field of the existing user record.
+
+Create a Link object with the following attributes (where @tail_uuid@ is the new email address, and @head_uuid@ is the existing user UUID):
+
+<notextile>
+<pre><code>{
+  "link_class":"permission",
+  "name":"can_login",
+  "tail_uuid":"<span class="userinput">person@new.example.com</span>",
+  "head_uuid":"<span class="userinput">aaaaa-tpzed-abcdefghijklmno</span>",
+  "properties":{
+    "identity_url_prefix":"https://www.google.com/"
+  }
+}
+</code></pre>
+</notextile>
+
+Have the user log in using their <code>person@new.example.com</code> Google account. You can verify this by checking that the @identity_url@ field has been populated.
diff --git a/doc/admin/merge-remote-account.html.textile.liquid b/doc/admin/merge-remote-account.html.textile.liquid
new file mode 100644 (file)
index 0000000..1ce35e9
--- /dev/null
@@ -0,0 +1,47 @@
+---
+layout: default
+navsection: userguide
+title: "Merging a remote account"
+...
+{% comment %}
+Copyright (C) The Arvados Authors. All rights reserved.
+
+SPDX-License-Identifier: CC-BY-SA-3.0
+{% endcomment %}
+
+When you use federation capabilities to connect two or more clusters that were already operating, some users might already have accounts on multiple clusters. Typically, they will want to choose a single account on one of the clusters and abandon the rest, transferring all data or permissions from their old “remote” accounts to a single “home” account.
+
+This effect can be achieved by changing the UUIDs of the user records on the remote clusters. This should be done before the user has ever used federation features to access cluster B with cluster A credentials. Otherwise, see "managing conflicting accounts" below.
+
+For example, a user might have:
+* an account A on cluster A with uuid @aaaaa-tpzed-abcdefghijklmno@, and
+* an account B on cluster B with uuid @bbbbb-tpzed-lmnopqrstuvwxyz@
+
+An administrator at cluster B can merge the two accounts by renaming account B to account A.
+
+<notextile>
+<pre><code>#!/usr/bin/env python
+import arvados
+arvados.api('v1').users().update_uuid(
+    uuid="<span class="userinput">bbbbb-tpzed-lmnopqrstuvwxyz</span>",
+    new_uuid="<span class="userinput">aaaaa-tpzed-abcdefghijklmno</span>").execute()
+</code></pre></notextile>
+
+This should be done when the user is idle, i.e., not logged in and not running any jobs or containers.
+
+h2. Managing conflicting accounts
+
+If the user has already used federation capabilities to access cluster B using account A before the above migration has been done, this will have already created a database entry for account A on cluster B, and the above program will error out. To fix this, the same "update_uuid API call":../api/methods/users.html#update_uuid can be used to move the conflicting account out of the way first.
+
+<notextile>
+<pre><code>#!/usr/bin/env python
+import arvados
+import random
+import string
+random_chars = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(15))
+arvados.api('v1').users().update_uuid(
+    uuid="<span class="userinput">aaaaa-tpzed-abcdefghijklmno</span>",
+    new_uuid="bbbbb-tpzed-"+random_chars).execute()
+</code></pre></notextile>
+
+After this is done and the migration is complete, the affected user should wait 5 minutes for the authorization cache to expire before using the remote cluster.
index 2e5dee51810f188591c7903776d1d70e3b23f289..098c2ca118c71c85a9ce56576594e2d9b90926fb 100644 (file)
@@ -112,7 +112,7 @@ table(table table-bordered table-condensed).
 {background:#ccffcc}.|uuid|string|The UUID of the User in question.|path||
 |user|object||query||
 
-h3. update_uuid
+h3(#update_uuid). update_uuid
 
 Change the UUID of an existing user, updating all database references accordingly.
 
index d15acf767fc90cc3e594be1fca0e67ae96f034ec..e82fd9feef1f3d88c4068e7d0d6cbfee6232c3f2 100644 (file)
@@ -22,7 +22,6 @@ from cwltool.errors import WorkflowException
 import cwltool.main
 import cwltool.workflow
 import cwltool.process
-import schema_salad
 from schema_salad.sourceline import SourceLine
 
 import arvados
@@ -111,8 +110,7 @@ class ArvCwlRunner(object):
         kwargs["fetcher_constructor"] = partial(CollectionFetcher,
                                                 api_client=self.api,
                                                 fs_access=CollectionFsAccess("", collection_cache=self.collection_cache),
-                                                num_retries=self.num_retries,
-                                                overrides=kwargs.get("override_tools"))
+                                                num_retries=self.num_retries)
         kwargs["resolver"] = partial(collectionResolver, self.api, num_retries=self.num_retries)
         if "class" in toolpath_object and toolpath_object["class"] == "CommandLineTool":
             return ArvadosCommandTool(self, toolpath_object, **kwargs)
@@ -367,8 +365,7 @@ class ArvCwlRunner(object):
 
         # Upload direct dependencies of workflow steps, get back mapping of files to keep references.
         # Also uploads docker images.
-        override_tools = {}
-        upload_workflow_deps(self, tool, override_tools)
+        merged_map = upload_workflow_deps(self, tool)
 
         # Reload tool object which may have been updated by
         # upload_workflow_deps
@@ -376,8 +373,7 @@ class ArvCwlRunner(object):
                                   makeTool=self.arv_make_tool,
                                   loader=tool.doc_loader,
                                   avsc_names=tool.doc_schema,
-                                  metadata=tool.metadata,
-                                  override_tools=override_tools)
+                                  metadata=tool.metadata)
 
         # Upload local file references in the job order.
         job_order = upload_job_order(self, "%s input" % kwargs["name"],
@@ -391,7 +387,8 @@ class ArvCwlRunner(object):
                                       kwargs.get("enable_reuse"),
                                       uuid=existing_uuid,
                                       submit_runner_ram=kwargs.get("submit_runner_ram"),
-                                      name=kwargs["name"])
+                                      name=kwargs["name"],
+                                      merged_map=merged_map)
                 tmpl.save()
                 # cwltool.main will write our return value to stdout.
                 return (tmpl.uuid, "success")
@@ -400,7 +397,8 @@ class ArvCwlRunner(object):
                                         self.project_uuid,
                                         uuid=existing_uuid,
                                         submit_runner_ram=kwargs.get("submit_runner_ram"),
-                                        name=kwargs["name"]),
+                                        name=kwargs["name"],
+                                        merged_map=merged_map),
                         "success")
 
         self.ignore_docker_for_reuse = kwargs.get("ignore_docker_for_reuse")
@@ -438,7 +436,8 @@ class ArvCwlRunner(object):
                                                 name=kwargs.get("name"),
                                                 on_error=kwargs.get("on_error"),
                                                 submit_runner_image=kwargs.get("submit_runner_image"),
-                                                intermediate_output_ttl=kwargs.get("intermediate_output_ttl"))
+                                                intermediate_output_ttl=kwargs.get("intermediate_output_ttl"),
+                                                merged_map=merged_map)
             elif self.work_api == "jobs":
                 runnerjob = RunnerJob(self, tool, job_order, kwargs.get("enable_reuse"),
                                       self.output_name,
@@ -446,7 +445,8 @@ class ArvCwlRunner(object):
                                       submit_runner_ram=kwargs.get("submit_runner_ram"),
                                       name=kwargs.get("name"),
                                       on_error=kwargs.get("on_error"),
-                                      submit_runner_image=kwargs.get("submit_runner_image"))
+                                      submit_runner_image=kwargs.get("submit_runner_image"),
+                                      merged_map=merged_map)
         elif "cwl_runner_job" not in kwargs and self.work_api == "jobs":
             # Create pipeline for local run
             self.pipeline = self.api.pipeline_instances().create(
@@ -554,7 +554,7 @@ def versionstring():
     arvpkg = pkg_resources.require("arvados-python-client")
     cwlpkg = pkg_resources.require("cwltool")
 
-    return "%s %s %s, %s %s, %s %s" % (sys.argv[0], __version__, arvcwlpkg[0].version,
+    return "%s %s, %s %s, %s %s" % (sys.argv[0], arvcwlpkg[0].version,
                                     "arvados-python-client", arvpkg[0].version,
                                     "cwltool", cwlpkg[0].version)
 
index e8e2a5113195174a45dea36e13f3f5bb78b1fc5d..014e1b94aae5b283ea851fd74e480dd5df926f55 100644 (file)
@@ -330,7 +330,7 @@ class RunnerContainer(Runner):
                 "portable_data_hash": "%s" % workflowcollection
             }
         else:
-            packed = packed_workflow(self.arvrunner, self.tool)
+            packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
             workflowpath = "/var/lib/cwl/workflow.json#main"
             container_req["mounts"]["/var/lib/cwl/workflow.json"] = {
                 "kind": "json",
@@ -339,6 +339,7 @@ class RunnerContainer(Runner):
             if self.tool.tool.get("id", "").startswith("arvwf:"):
                 container_req["properties"]["template_uuid"] = self.tool.tool["id"][6:33]
 
+
         command = ["arvados-cwl-runner", "--local", "--api=containers", "--no-log-timestamps"]
         if self.output_name:
             command.append("--output-name=" + self.output_name)
index 25f64ea23065f887517c2ddba5ac728f18e856b6..2731b2694422fcf8a986057266efe23354830c46 100644 (file)
@@ -280,7 +280,7 @@ class RunnerJob(Runner):
         if self.tool.tool["id"].startswith("keep:"):
             self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
         else:
-            packed = packed_workflow(self.arvrunner, self.tool)
+            packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
             wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
             self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
 
@@ -370,7 +370,7 @@ class RunnerTemplate(object):
     }
 
     def __init__(self, runner, tool, job_order, enable_reuse, uuid,
-                 submit_runner_ram=0, name=None):
+                 submit_runner_ram=0, name=None, merged_map=None):
         self.runner = runner
         self.tool = tool
         self.job = RunnerJob(
@@ -381,7 +381,8 @@ class RunnerTemplate(object):
             output_name=None,
             output_tags=None,
             submit_runner_ram=submit_runner_ram,
-            name=name)
+            name=name,
+            merged_map=merged_map)
         self.uuid = uuid
 
     def pipeline_component_spec(self):
index 79bfacd28a10daad1393bf090f2350e7c166244c..f0f9c77f40fed6c5a27d8160abdfc1d710ddb9f1 100644 (file)
@@ -27,9 +27,9 @@ logger = logging.getLogger('arvados.cwl-runner')
 metrics = logging.getLogger('arvados.cwl-runner.metrics')
 
 def upload_workflow(arvRunner, tool, job_order, project_uuid, uuid=None,
-                    submit_runner_ram=0, name=None):
+                    submit_runner_ram=0, name=None, merged_map=None):
 
-    packed = packed_workflow(arvRunner, tool)
+    packed = packed_workflow(arvRunner, tool, merged_map)
 
     adjustDirObjs(job_order, trim_listing)
     adjustFileObjs(job_order, trim_anonymous_location)
@@ -173,7 +173,8 @@ class ArvadosWorkflow(Workflow):
                         }]
                 }],
                 "hints": self.hints,
-                "arguments": ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl#main", "cwl.input.yml"]
+                "arguments": ["--no-container", "--move-outputs", "--preserve-entire-environment", "workflow.cwl#main", "cwl.input.yml"],
+                "id": "#"
             })
             kwargs["loader"] = self.doc_loader
             kwargs["avsc_names"] = self.doc_schema
index 5e3d685911b869b64ba18787291eee61c34f6bc4..69f918ead939c4a56fdbed12449c3cf2fca630ec 100644 (file)
@@ -110,7 +110,7 @@ class CollectionFsAccess(cwltool.stdfsaccess.StdFsAccess):
         if collection is not None and not rest:
             return [pattern]
         patternsegments = rest.split("/")
-        return self._match(collection, patternsegments, "keep:" + collection.manifest_locator())
+        return sorted(self._match(collection, patternsegments, "keep:" + collection.manifest_locator()))
 
     def open(self, fn, mode):
         collection, rest = self.get_collection(fn)
@@ -179,16 +179,13 @@ class CollectionFsAccess(cwltool.stdfsaccess.StdFsAccess):
             return os.path.realpath(path)
 
 class CollectionFetcher(DefaultFetcher):
-    def __init__(self, cache, session, api_client=None, fs_access=None, num_retries=4, overrides=None):
+    def __init__(self, cache, session, api_client=None, fs_access=None, num_retries=4):
         super(CollectionFetcher, self).__init__(cache, session)
         self.api_client = api_client
         self.fsaccess = fs_access
         self.num_retries = num_retries
-        self.overrides = overrides if overrides else {}
 
     def fetch_text(self, url):
-        if url in self.overrides:
-            return self.overrides[url]
         if url.startswith("keep:"):
             with self.fsaccess.open(url, "r") as f:
                 return f.read()
@@ -199,8 +196,6 @@ class CollectionFetcher(DefaultFetcher):
         return super(CollectionFetcher, self).fetch_text(url)
 
     def check_exists(self, url):
-        if url in self.overrides:
-            return True
         try:
             if url.startswith("http://arvados.org/cwl"):
                 return True
index 28de7f368a23ccbc52a0cae37fa55d358744d0e1..2ca63cfe5048a62f0a1853e2aca06be865ea1fd4 100644 (file)
@@ -172,13 +172,32 @@ def upload_docker(arvrunner, tool):
         for s in tool.steps:
             upload_docker(arvrunner, s.embedded_tool)
 
-def packed_workflow(arvrunner, tool):
+def packed_workflow(arvrunner, tool, merged_map):
     """Create a packed workflow.
 
     A "packed" workflow is one where all the components have been combined into a single document."""
 
-    return pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
-                tool.tool["id"], tool.metadata)
+    rewrites = {}
+    packed = pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
+                  tool.tool["id"], tool.metadata, rewrite_out=rewrites)
+
+    rewrite_to_orig = {}
+    for k,v in rewrites.items():
+        rewrite_to_orig[v] = k
+
+    def visit(v, cur_id):
+        if isinstance(v, dict):
+            if v.get("class") in ("CommandLineTool", "Workflow"):
+                cur_id = rewrite_to_orig.get(v["id"], v["id"])
+            if "location" in v and not v["location"].startswith("keep:"):
+                v["location"] = merged_map[cur_id][v["location"]]
+            for l in v:
+                visit(v[l], cur_id)
+        if isinstance(v, list):
+            for l in v:
+                visit(l, cur_id)
+    visit(packed, None)
+    return packed
 
 def tag_git_version(packed):
     if tool.tool["id"].startswith("file://"):
@@ -229,16 +248,18 @@ def upload_job_order(arvrunner, name, tool, job_order):
 
     return job_order
 
-def upload_workflow_deps(arvrunner, tool, override_tools):
+def upload_workflow_deps(arvrunner, tool):
     # Ensure that Docker images needed by this workflow are available
 
     upload_docker(arvrunner, tool)
 
     document_loader = tool.doc_loader
 
+    merged_map = {}
+
     def upload_tool_deps(deptool):
         if "id" in deptool:
-            upload_dependencies(arvrunner,
+            pm = upload_dependencies(arvrunner,
                                 "%s dependencies" % (shortname(deptool["id"])),
                                 document_loader,
                                 deptool,
@@ -246,10 +267,15 @@ def upload_workflow_deps(arvrunner, tool, override_tools):
                                 False,
                                 include_primary=False)
             document_loader.idx[deptool["id"]] = deptool
-            override_tools[deptool["id"]] = json.dumps(deptool)
+            toolmap = {}
+            for k,v in pm.items():
+                toolmap[k] = v.resolved
+            merged_map[deptool["id"]] = toolmap
 
     tool.visit(upload_tool_deps)
 
+    return merged_map
+
 def arvados_jobs_image(arvrunner, img):
     """Determine if the right arvados/jobs image version is available.  If not, try to pull and upload it."""
 
@@ -291,7 +317,7 @@ class Runner(object):
     def __init__(self, runner, tool, job_order, enable_reuse,
                  output_name, output_tags, submit_runner_ram=0,
                  name=None, on_error=None, submit_runner_image=None,
-                 intermediate_output_ttl=0):
+                 intermediate_output_ttl=0, merged_map=None):
         self.arvrunner = runner
         self.tool = tool
         self.job_order = job_order
@@ -320,6 +346,8 @@ class Runner(object):
         if self.submit_runner_ram <= 0:
             raise Exception("Value of --submit-runner-ram must be greater than zero")
 
+        self.merged_map = merged_map or {}
+
     def update_pipeline_component(self, record):
         pass
 
index ae487355c33700e889c2cb8d06184d9818f2b3dc..88e3d80db35293a5fab4aec7dbd4db023e26a943 100644 (file)
@@ -41,8 +41,8 @@ setup(name='arvados-cwl-runner',
       # Note that arvados/build/run-build-packages.sh looks at this
       # file to determine what version of cwltool and schema-salad to build.
       install_requires=[
-          'cwltool==1.0.20170928192020',
-          'schema-salad==2.6.20171116190026',
+          'cwltool==1.0.20180116213856',
+          'schema-salad==2.6.20171201034858',
           'typing==3.5.3.0',
           'ruamel.yaml==0.13.7',
           'arvados-python-client>=0.1.20170526013812',
index c516d7b35340b15ac1f4eaac1a7fcbc645b95eda..c20693469ad2e32b9a3f437b2f75592d861530ec 100644 (file)
@@ -44,7 +44,8 @@ class TestContainer(unittest.TestCase):
                 "inputs": [],
                 "outputs": [],
                 "baseCommand": "ls",
-                "arguments": [{"valueFrom": "$(runtime.outdir)"}]
+                "arguments": [{"valueFrom": "$(runtime.outdir)"}],
+                "id": "#"
             })
             make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                          collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
@@ -124,7 +125,8 @@ class TestContainer(unittest.TestCase):
                 "class": "http://arvados.org/cwl#ReuseRequirement",
                 "enableReuse": False
             }],
-            "baseCommand": "ls"
+            "baseCommand": "ls",
+            "id": "#"
         })
         make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                          collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
@@ -231,7 +233,8 @@ class TestContainer(unittest.TestCase):
                     "location": "keep:99999999999999999999999999999995+99/subdir"
                 }                        ]
             }],
-            "baseCommand": "ls"
+            "baseCommand": "ls",
+            "id": "#"
         })
         make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                          collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
@@ -329,7 +332,8 @@ class TestContainer(unittest.TestCase):
             "stdout": "stdout.txt",
             "stderr": "stderr.txt",
             "stdin": "/keep/99999999999999999999999999999996+99/file.txt",
-            "arguments": [{"valueFrom": "$(runtime.outdir)"}]
+            "arguments": [{"valueFrom": "$(runtime.outdir)"}],
+            "id": "#"
         })
         make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                          collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
@@ -449,7 +453,8 @@ class TestContainer(unittest.TestCase):
             ],
             "outputs": [],
             "baseCommand": "ls",
-            "arguments": [{"valueFrom": "$(runtime.outdir)"}]
+            "arguments": [{"valueFrom": "$(runtime.outdir)"}],
+            "id": "#"
         })
         make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                      collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
index faac1f77a640456b5f8671e1735629b713d04b42..309ac0996ac54cb0cbda7c7940cad937cc8ee688 100644 (file)
@@ -52,7 +52,8 @@ class TestJob(unittest.TestCase):
                 "inputs": [],
                 "outputs": [],
                 "baseCommand": "ls",
-                "arguments": [{"valueFrom": "$(runtime.outdir)"}]
+                "arguments": [{"valueFrom": "$(runtime.outdir)"}],
+                "id": "#"
             })
             make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                          collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
@@ -142,7 +143,8 @@ class TestJob(unittest.TestCase):
                 "class": "http://arvados.org/cwl#ReuseRequirement",
                 "enableReuse": False
             }],
-            "baseCommand": "ls"
+            "baseCommand": "ls",
+            "id": "#"
         }
         make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess,
                                          collection_cache=arvados_cwl.CollectionCache(runner.api, None, 0))
index 059b47275c9207279dc3aa8e0933980268e70512..9cabea0794716fa79d88752616d4e7205ae6b4eb 100644 (file)
@@ -532,6 +532,10 @@ class TestSubmit(unittest.TestCase):
                 "enableReuse": False,
             },
         ]
+        expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"]["$graph"][0]["$namespaces"] = {
+            "arv": "http://arvados.org/cwl#",
+            "cwltool": "http://commonwl.org/cwltool#"
+        }
 
         stubs.api.container_requests().create.assert_called_with(
             body=JsonDiffMatcher(expect_container))
diff --git a/sdk/go/arvados/node.go b/sdk/go/arvados/node.go
new file mode 100644 (file)
index 0000000..cc844fe
--- /dev/null
@@ -0,0 +1,44 @@
+// Copyright (C) The Arvados Authors. All rights reserved.
+//
+// SPDX-License-Identifier: Apache-2.0
+
+package arvados
+
+import "time"
+
+// Node is an arvados#node resource.
+type Node struct {
+       UUID       string         `json:"uuid"`
+       Domain     string         `json:"domain"`
+       Hostname   string         `json:"hostname"`
+       IPAddress  string         `json:"ip_address"`
+       LastPingAt *time.Time     `json:"last_ping_at,omitempty"`
+       SlotNumber int            `json:"slot_number"`
+       Status     string         `json:"status"`
+       JobUUID    string         `json:"job_uuid,omitempty"`
+       Properties NodeProperties `json:"properties"`
+}
+
+type NodeProperties struct {
+       CloudNode      NodePropertiesCloudNode `json:"cloud_node"`
+       TotalCPUCores  int                     `json:"total_cpu_cores,omitempty"`
+       TotalScratchMB int64                   `json:"total_scratch_mb,omitempty"`
+       TotalRAMMB     int64                   `json:"total_ram_mb,omitempty"`
+}
+
+type NodePropertiesCloudNode struct {
+       Size  string  `json:"size,omitempty"`
+       Price float64 `json:"price"`
+}
+
+func (c Node) resourceName() string {
+       return "node"
+}
+
+// NodeList is an arvados#nodeList resource.
+type NodeList struct {
+       Items          []Node `json:"items"`
+       ItemsAvailable int    `json:"items_available"`
+       Offset         int    `json:"offset"`
+       Limit          int    `json:"limit"`
+}
index d4be3c8093fee71692d5b1ed7b2d5fd57c96e44d..91685f59988bf1a852a7a25a2aa46f47a3f32300 100644 (file)
@@ -44,6 +44,7 @@ class Arvados::V1::SchemaController < ApplicationController
         rootUrl: root_url,
         servicePath: "arvados/v1/",
         batchPath: "batch",
+        uuidPrefix: Rails.application.config.uuid_prefix,
         defaultTrashLifetime: Rails.application.config.default_trash_lifetime,
         blobSignatureTtl: Rails.application.config.blob_signature_ttl,
         maxRequestSize: Rails.application.config.max_request_size,
index 3af206c450290cce28a914caa51b5ee385847269..b158faa272635d1cce630faf58bea0fc307fa128 100644 (file)
@@ -185,7 +185,10 @@ class ApiClientAuthorization < ArvadosModel
         # 5 minutes. TODO: Request the actual api_client_auth
         # record from the remote server in case it wants the token
         # to expire sooner.
-        auth.update_attributes!(expires_at: Time.now + 5.minutes)
+        auth.update_attributes!(user: user,
+                                api_token: secret,
+                                api_client_id: 0,
+                                expires_at: Time.now + 5.minutes)
       end
       return auth
     else
@@ -207,10 +210,8 @@ class ApiClientAuthorization < ArvadosModel
   end
 
   def permission_to_update
-    (permission_to_create and
-     not uuid_changed? and
-     not user_id_changed? and
-     not owner_uuid_changed?)
+    permission_to_create && !uuid_changed? &&
+      (current_user.andand.is_admin || !user_id_changed?)
   end
 
   def log_update
index 235b78e332f337c0e618da27709517ee9c29af3f..c15060d1a9847cf33f774399b6decf7ff8f96b45 100644 (file)
@@ -33,6 +33,7 @@ class Arvados::V1::SchemaControllerTest < ActionController::TestCase
     assert_match(/^[0-9a-f]+(-modified)?$/, discovery_doc['source_version'])
     assert_equal discovery_doc['websocketUrl'], Rails.application.config.websocket_address
     assert_equal discovery_doc['workbenchUrl'], Rails.application.config.workbench_address
+    assert_equal('zzzzz', discovery_doc['uuidPrefix'])
   end
 
   test "discovery document overrides source_version with config" do
index 591bbaf575bee1cdacab9ecb7cfc9a40f819c1c7..6d7f4a0616e4068956c050b3db84f504b2e34ef3 100644 (file)
@@ -104,6 +104,13 @@ class RemoteUsersTest < ActionDispatch::IntegrationTest
     get '/arvados/v1/users/current', {format: 'json'}, auth(remote: 'zbbbb')
     assert_response 401
 
+    # simulate cached token indicating wrong user (e.g., local user
+    # entry was migrated out of the way taking the cached token with
+    # it, or authorizing cluster reassigned auth to a different user)
+    ApiClientAuthorization.where(
+      uuid: salted_active_token(remote: 'zbbbb').split('/')[1]).
+      update_all(user_id: users(:active).id)
+
     # revive original token and re-authorize
     @stub_status = 200
     @stub_content[:username] = 'blarney'
index e5e0ea001669f21219c8f6d01717f7771b885f59..b480c068c597ecc178b08be7278af2edbd72bce9 100644 (file)
@@ -660,9 +660,12 @@ type infoCommand struct {
        cmd   []string
 }
 
-// LogNodeInfo gathers node information and store it on the log for debugging
-// purposes.
-func (runner *ContainerRunner) LogNodeInfo() (err error) {
+// LogHostInfo logs info about the current host, for debugging and
+// accounting purposes. Although it's logged as "node-info", this is
+// about the environment where crunch-run is actually running, which
+// might differ from what's described in the node record (see
+// LogNodeRecord).
+func (runner *ContainerRunner) LogHostInfo() (err error) {
        w := runner.NewLogWriter("node-info")
 
        commands := []infoCommand{
@@ -710,38 +713,71 @@ func (runner *ContainerRunner) LogNodeInfo() (err error) {
 }
 
 // LogContainerRecord gets and saves the raw JSON container record from the API server
-func (runner *ContainerRunner) LogContainerRecord() (err error) {
+func (runner *ContainerRunner) LogContainerRecord() error {
+       logged, err := runner.logAPIResponse("container", "containers", map[string]interface{}{"filters": [][]string{{"uuid", "=", runner.Container.UUID}}}, nil)
+       if !logged && err == nil {
+               err = fmt.Errorf("error: no container record found for %s", runner.Container.UUID)
+       }
+       return err
+}
+
+// LogNodeRecord logs arvados#node record corresponding to the current host.
+func (runner *ContainerRunner) LogNodeRecord() error {
+       hostname := os.Getenv("SLURMD_NODENAME")
+       if hostname == "" {
+               hostname, _ = os.Hostname()
+       }
+       _, err := runner.logAPIResponse("node", "nodes", map[string]interface{}{"filters": [][]string{{"hostname", "=", hostname}}}, func(resp interface{}) {
+               // The "info" field has admin-only info when obtained
+               // with a privileged token, and should not be logged.
+               node, ok := resp.(map[string]interface{})
+               if ok {
+                       delete(node, "info")
+               }
+       })
+       return err
+}
+
+func (runner *ContainerRunner) logAPIResponse(label, path string, params map[string]interface{}, munge func(interface{})) (logged bool, err error) {
        w := &ArvLogWriter{
                ArvClient:     runner.ArvClient,
                UUID:          runner.Container.UUID,
-               loggingStream: "container",
-               writeCloser:   runner.LogCollection.Open("container.json"),
+               loggingStream: label,
+               writeCloser:   runner.LogCollection.Open(label + ".json"),
        }
 
-       // Get Container record JSON from the API Server
-       reader, err := runner.ArvClient.CallRaw("GET", "containers", runner.Container.UUID, "", nil)
+       reader, err := runner.ArvClient.CallRaw("GET", path, "", "", arvadosclient.Dict(params))
        if err != nil {
-               return fmt.Errorf("While retrieving container record from the API server: %v", err)
+               return false, fmt.Errorf("error getting %s record: %v", label, err)
        }
        defer reader.Close()
 
        dec := json.NewDecoder(reader)
        dec.UseNumber()
-       var cr map[string]interface{}
-       if err = dec.Decode(&cr); err != nil {
-               return fmt.Errorf("While decoding the container record JSON response: %v", err)
+       var resp map[string]interface{}
+       if err = dec.Decode(&resp); err != nil {
+               return false, fmt.Errorf("error decoding %s list response: %v", label, err)
+       }
+       items, ok := resp["items"].([]interface{})
+       if !ok {
+               return false, fmt.Errorf("error decoding %s list response: no \"items\" key in API list response", label)
+       } else if len(items) < 1 {
+               return false, nil
+       }
+       if munge != nil {
+               munge(items[0])
        }
        // Re-encode it using indentation to improve readability
        enc := json.NewEncoder(w)
        enc.SetIndent("", "    ")
-       if err = enc.Encode(cr); err != nil {
-               return fmt.Errorf("While logging the JSON container record: %v", err)
+       if err = enc.Encode(items[0]); err != nil {
+               return false, fmt.Errorf("error logging %s record: %v", label, err)
        }
        err = w.Close()
        if err != nil {
-               return fmt.Errorf("While closing container.json log: %v", err)
+               return false, fmt.Errorf("error closing %s.json in log collection: %v", label, err)
        }
-       return nil
+       return true, nil
 }
 
 // AttachStreams connects the docker container stdin, stdout and stderr logs
@@ -1050,14 +1086,34 @@ func (runner *ContainerRunner) UploadOutputFile(
        relocateTo string,
        followed int) (manifestText string, err error) {
 
-       if info.Mode().IsDir() {
-               return
-       }
-
        if infoerr != nil {
                return "", infoerr
        }
 
+       if info.Mode().IsDir() {
+               // If the directory is empty, emit a .keep placeholder so the directory is preserved in the output collection
+               dir, direrr := os.Open(path)
+               if direrr != nil {
+                       return "", direrr
+               }
+               defer dir.Close()
+               names, eof := dir.Readdirnames(1)
+               if len(names) == 0 && eof == io.EOF && path != runner.HostOutputDir {
+                       containerPath := runner.OutputPath + path[len(runner.HostOutputDir):]
+                       for _, bind := range binds {
+                               mnt := runner.Container.Mounts[bind]
+                               // If this directory is covered by a bind mount
+                               // with non-empty content, no .keep file is needed
+                               if (containerPath == bind || strings.HasPrefix(containerPath, bind+"/")) && mnt.PortableDataHash != "d41d8cd98f00b204e9800998ecf8427e+0" {
+                                       return
+                               }
+                       }
+                       outputSuffix := path[len(runner.HostOutputDir)+1:]
+                       return fmt.Sprintf("./%v d41d8cd98f00b204e9800998ecf8427e+0 0:0:.keep\n", outputSuffix), nil
+               }
+               return
+       }
+
        if followed >= limitFollowSymlinks {
                // Got stuck in a loop or just a pathological number of
                // directory links, give up.
@@ -1065,9 +1121,16 @@ func (runner *ContainerRunner) UploadOutputFile(
                return
        }
 
-       // When following symlinks, the source path may need to be logically
-       // relocated to some other path within the output collection.  Remove
-       // the relocateFrom prefix and replace it with relocateTo.
+       // "path" is the actual path we are visiting
+       // "tgt" is the target of "path" (a non-symlink) after following symlinks
+       // "relocated" is the path in the output manifest where the file should be placed,
+       // but has HostOutputDir as a prefix.
+
+       // The destination path in the output manifest may need to be
+       // logically relocated to some other path in order to appear
+       // in the correct location as a result of following a symlink.
+       // Remove the relocateFrom prefix and replace it with
+       // relocateTo.
        relocated := relocateTo + path[len(relocateFrom):]
 
        tgt, readlinktgt, info, derefErr := runner.derefOutputSymlink(path, info)
@@ -1088,7 +1151,7 @@ func (runner *ContainerRunner) UploadOutputFile(
 
                        // Terminates in this keep mount, so add the
                        // manifest text at appropriate location.
-                       outputSuffix := path[len(runner.HostOutputDir):]
+                       outputSuffix := relocated[len(runner.HostOutputDir):]
                        manifestText, err = runner.getCollectionManifestForPath(adjustedMount, outputSuffix)
                        return
                }
@@ -1552,13 +1615,14 @@ func (runner *ContainerRunner) Run() (err error) {
        if err != nil {
                return
        }
-
-       // Gather and record node information
-       err = runner.LogNodeInfo()
+       err = runner.LogHostInfo()
+       if err != nil {
+               return
+       }
+       err = runner.LogNodeRecord()
        if err != nil {
                return
        }
-       // Save container.json record on log collection
        err = runner.LogContainerRecord()
        if err != nil {
                return
index ab7417e542bee44fa2346e50a97732ec3b41304b..4979cf8a0c801ee5bc56e48933e97f736dc3d00d 100644 (file)
@@ -244,8 +244,23 @@ func (client *ArvTestClient) Call(method, resourceType, uuid, action string, par
 func (client *ArvTestClient) CallRaw(method, resourceType, uuid, action string,
        parameters arvadosclient.Dict) (reader io.ReadCloser, err error) {
        var j []byte
-       if method == "GET" && resourceType == "containers" && action == "" && !client.callraw {
-               j, err = json.Marshal(client.Container)
+       if method == "GET" && resourceType == "nodes" && uuid == "" && action == "" {
+               j = []byte(`{
+                       "kind": "arvados#nodeList",
+                       "items": [{
+                               "uuid": "zzzzz-7ekkf-2z3mc76g2q73aio",
+                               "hostname": "compute2",
+                               "properties": {"total_cpu_cores": 16}
+                       }]}`)
+       } else if method == "GET" && resourceType == "containers" && action == "" && !client.callraw {
+               if uuid == "" {
+                       j, err = json.Marshal(map[string]interface{}{
+                               "items": []interface{}{client.Container},
+                               "kind":  "arvados#nodeList",
+                       })
+               } else {
+                       j, err = json.Marshal(client.Container)
+               }
        } else {
                j = []byte(`{
                        "command": ["sleep", "1"],
@@ -768,6 +783,7 @@ func (s *TestSuite) TestCrunchstat(c *C) {
 }
 
 func (s *TestSuite) TestNodeInfoLog(c *C) {
+       os.Setenv("SLURMD_NODENAME", "compute2")
        api, _, _ := FullRunHelper(c, `{
                "command": ["sleep", "1"],
                "container_image": "d4ab34d3d4f8a72f5c4973051ae69fab+122",
@@ -786,12 +802,19 @@ func (s *TestSuite) TestNodeInfoLog(c *C) {
        c.Check(api.CalledWith("container.exit_code", 0), NotNil)
        c.Check(api.CalledWith("container.state", "Complete"), NotNil)
 
+       c.Assert(api.Logs["node"], NotNil)
+       json := api.Logs["node"].String()
+       c.Check(json, Matches, `(?ms).*"uuid": *"zzzzz-7ekkf-2z3mc76g2q73aio".*`)
+       c.Check(json, Matches, `(?ms).*"total_cpu_cores": *16.*`)
+       c.Check(json, Not(Matches), `(?ms).*"info":.*`)
+
        c.Assert(api.Logs["node-info"], NotNil)
-       c.Check(api.Logs["node-info"].String(), Matches, `(?ms).*Host Information.*`)
-       c.Check(api.Logs["node-info"].String(), Matches, `(?ms).*CPU Information.*`)
-       c.Check(api.Logs["node-info"].String(), Matches, `(?ms).*Memory Information.*`)
-       c.Check(api.Logs["node-info"].String(), Matches, `(?ms).*Disk Space.*`)
-       c.Check(api.Logs["node-info"].String(), Matches, `(?ms).*Disk INodes.*`)
+       json = api.Logs["node-info"].String()
+       c.Check(json, Matches, `(?ms).*Host Information.*`)
+       c.Check(json, Matches, `(?ms).*CPU Information.*`)
+       c.Check(json, Matches, `(?ms).*Memory Information.*`)
+       c.Check(json, Matches, `(?ms).*Disk Space.*`)
+       c.Check(json, Matches, `(?ms).*Disk INodes.*`)
 }
 
 func (s *TestSuite) TestContainerRecordLog(c *C) {