*#*
.DS_Store
.vscode
+.Rproj.user
//= require npm-dependencies
//= require mithril/stream/stream
//= require awesomplete
+//= require jssha
//= require_tree .
Es6ObjectAssign.polyfill()
collections: m('i.fa.fa-fw.fa-archive'),
projects: m('i.fa.fa-fw.fa-folder'),
}
+ var db = new SessionDB()
+ var sessions = db.loadActive()
return m('table.table.table-condensed', [
m('thead', m('tr', [
m('th'),
])),
m('tbody', [
loader.items().map(function(item) {
+ var session = sessions[item.uuid.slice(0,5)]
+ var tokenParam = ''
+ // Add the salted token to search result links from federated
+ // remote hosts.
+ if (!session.isFromRails && session.token.indexOf('v2/') == 0) {
+ tokenParam = '?api_token='+session.token
+ }
return m('tr', [
m('td', [
item.workbenchBaseURL() &&
'data-original-title': 'show '+item.objectType.description,
'data-placement': 'top',
'data-toggle': 'tooltip',
- href: item.workbenchBaseURL()+'/'+item.objectType.wb_path+'/'+item.uuid,
+ href: item.workbenchBaseURL()+'/'+item.objectType.wb_path+'/'+item.uuid+tokenParam,
// Bootstrap's tooltip feature
oncreate: function(vnode) { $(vnode.dom).tooltip() },
}, iconsMap[item.objectType.wb_path]),
})
},
view: function(vnode) {
- var sessions = vnode.state.sessionDB.loadAll()
return m('form', {
onsubmit: function() {
vnode.state.searchActive(vnode.state.searchEntered())
var db = new SessionDB()
db.checkForNewToken()
db.fillMissingUUIDs()
+ db.migrateNonFederatedSessions()
+ db.autoLoadRemoteHosts()
})
window.SessionsTable = {
var session = sessions[uuidPrefix]
return m('tr', [
session.token && session.user ? [
- m('td', m('span.label.label-success', 'logged in')),
- m('td', {title: session.baseURL}, uuidPrefix),
+ m('td', session.user.is_active ?
+ m('span.label.label-success', 'logged in') :
+ m('span.label.label-warning', 'inactive')),
+ m('td', {title: session.baseURL}, [
+ m('a', {
+ href: db.workbenchBaseURL(session) + '?api_token=' + session.token
+ }, uuidPrefix),
+ ]),
m('td', session.user.username),
m('td', session.user.email),
m('td', session.isFromRails ? null : m('button.btn.btn-xs.btn-default', {
uuidPrefix: uuidPrefix,
onclick: m.withAttr('uuidPrefix', db.logout),
- }, 'Log out ', m('span.glyphicon.glyphicon-log-out'))),
+ }, session.listedHost ? 'Disable ':'Log out ', m('span.glyphicon.glyphicon-log-out'))),
] : [
m('td', m('span.label.label-default', 'logged out')),
m('td', {title: session.baseURL}, uuidPrefix),
m('td', m('a.btn.btn-xs.btn-primary', {
uuidPrefix: uuidPrefix,
onclick: db.login.bind(db, session.baseURL),
- }, 'Log in ', m('span.glyphicon.glyphicon-log-in'))),
+ }, session.listedHost ? 'Enable ':'Log in ', m('span.glyphicon.glyphicon-log-in'))),
],
m('td', session.isFromRails ? null : m('button.btn.btn-xs.btn-default', {
uuidPrefix: uuidPrefix,
var db = this
Object.assign(db, {
discoveryCache: {},
+ tokenUUIDCache: null,
loadFromLocalStorage: function() {
try {
return JSON.parse(window.localStorage.getItem('sessions')) || {}
loadActive: function() {
var sessions = db.loadAll()
Object.keys(sessions).forEach(function(key) {
- if (!sessions[key].token)
+ if (!sessions[key].token || (sessions[key].user && !sessions[key].user.is_active))
delete sessions[key]
})
return sessions
// for the corresponding API server's base URL. Typical
// use:
// sessionDB.findAPI('https://workbench.example/foo').then(sessionDB.login)
+ if (url.length === 5 && url.indexOf('.') < 0)
+ url += '.arvadosapi.com'
if (url.indexOf('://') < 0)
url = 'https://' + url
url = new URL(url)
})
})
},
-    login: function(baseURL) {
+    login: function(baseURL, fallbackLogin = true) {
        // Initiate login procedure with given API base URL (e.g.,
        // "http://api.example/").
        //
        // also call checkForNewToken() on (at least) its first
        // render. Otherwise, the login procedure can't be
        // completed.
-        document.location = baseURL + 'login?return_to=' + encodeURIComponent(document.location.href.replace(/\?.*/, '')+'?baseURL='+encodeURIComponent(baseURL))
+        //
+        // When fallbackLogin is true (the default), fall back to the
+        // classic redirect-based login if a federated (salted token)
+        // login isn't possible or is rejected by the remote cluster.
+        var session = db.loadLocal()
+        var uuidPrefix = session.user.owner_uuid.slice(0, 5)
+        var apiHostname = new URL(session.baseURL).hostname
+        // Fetch both discovery docs: the local one tells us which
+        // remotes are federated; the remote one identifies the target.
+        m.request(session.baseURL+'discovery/v1/apis/arvados/v1/rest').then(function(localDD) {
+            m.request(baseURL+'discovery/v1/apis/arvados/v1/rest').then(function(dd) {
+                if (uuidPrefix in dd.remoteHosts ||
+                    (dd.remoteHostsViaDNS && apiHostname.indexOf('arvadosapi.com') >= 0)) {
+                    // Federated identity login via salted token
+                    db.saltedToken(dd.uuidPrefix).then(function(token) {
+                        m.request(baseURL+'arvados/v1/users/current', {
+                            headers: {
+                                authorization: 'Bearer '+token,
+                            },
+                        }).then(function(user) {
+                            // Federated login successful.
+                            var remoteSession = {
+                                user: user,
+                                baseURL: baseURL,
+                                token: token,
+                                listedHost: (dd.uuidPrefix in localDD.remoteHosts),
+                            }
+                            db.save(dd.uuidPrefix, remoteSession)
+                        }).catch(function(e) {
+                            if (dd.uuidPrefix in localDD.remoteHosts) {
+                                // If the remote system is configured to allow federated
+                                // logins from this cluster, but rejected the salted
+                                // token, save as a logged out session anyways.
+                                var remoteSession = {
+                                    baseURL: baseURL,
+                                    listedHost: true,
+                                }
+                                db.save(dd.uuidPrefix, remoteSession)
+                            } else if (fallbackLogin) {
+                                // Remote cluster not listed as a remote host and rejecting
+                                // the salted token, try classic login.
+                                db.loginClassic(baseURL)
+                            }
+                        })
+                    })
+                } else if (fallbackLogin) {
+                    // Classic login will be used when the remote system doesn't list this
+                    // cluster as part of the federation.
+                    db.loginClassic(baseURL)
+                }
+            })
+        })
        return false
    },
+    loginClassic: function(baseURL) {
+        // Redirect the browser through the classic login flow on the
+        // given API server; after the redirect back, checkForNewToken()
+        // picks up the token from the location bar.
+        document.location = baseURL + 'login?return_to=' + encodeURIComponent(document.location.href.replace(/\?.*/, '')+'?baseURL='+encodeURIComponent(baseURL))
+    },
logout: function(k) {
// Forget the token, but leave the other info in the db so
// the user can log in again without providing the login
delete sessions[k].token
db.save(k, sessions[k])
},
+    saltedToken: function(uuid_prefix) {
+        // Takes a cluster UUID prefix and returns a promise of a salted
+        // token for logging into said cluster using federated identity:
+        // 'v2/<local token uuid>/<HMAC-SHA1(local token, uuid_prefix)>'.
+        var session = db.loadLocal()
+        return db.tokenUUID().then(function(token_uuid){
+            // HMAC-SHA1 of the remote cluster's uuid prefix, keyed with
+            // the full local API token (jsSHA HMAC API).
+            var shaObj = new jsSHA("SHA-1", "TEXT")
+            shaObj.setHMACKey(session.token, "TEXT")
+            shaObj.update(uuid_prefix)
+            var hmac = shaObj.getHMAC("HEX")
+            return 'v2/' + token_uuid + '/' + hmac
+        })
+    },
checkForNewToken: function() {
// If there's a token and baseURL in the location bar (i.e.,
// we just landed here after a successful login), save it and
}
return cache
},
+ // Return a promise with the local session token's UUID from the API server.
+ tokenUUID: function() {
+ var cache = db.tokenUUIDCache
+ if (!cache) {
+ var session = db.loadLocal()
+ return db.request(session, '/arvados/v1/api_client_authorizations', {
+ data: {
+ filters: JSON.stringify([['api_token', '=', session.token]]),
+ }
+ }).then(function(resp) {
+ var uuid = resp.items[0].uuid
+ db.tokenUUIDCache = uuid
+ return uuid
+ })
+ } else {
+ return new Promise(function(resolve, reject) {
+ resolve(cache)
+ })
+ }
+ },
    request: function(session, path, opts) {
        // Wrapper around m.request() that authenticates using the
        // given session's token.
        // NOTE(review): mutates the caller-supplied opts object
        // (headers added in place) -- callers in this file pass fresh
        // literals, so this is harmless today; confirm before reuse.
        opts = opts || {}
        opts.headers = opts.headers || {}
        opts.headers.authorization = 'OAuth2 '+ session.token
        return m.request(session.baseURL + path, opts)
    },
+ // Check non-federated remote active sessions if they should be migrated to
+ // a salted token.
+ migrateNonFederatedSessions: function() {
+ var sessions = db.loadActive()
+ Object.keys(sessions).map(function(uuidPrefix) {
+ session = sessions[uuidPrefix]
+ if (!session.isFromRails && session.token && session.token.indexOf('v2/') < 0) {
+ // Only try the federated login
+ db.login(session.baseURL, false)
+ }
+ })
+ },
+ // If remoteHosts is listed on the local API discovery doc, try to add any
+ // listed remote without an active session.
+ autoLoadRemoteHosts: function() {
+ var activeSessions = db.loadActive()
+ var doc = db.discoveryDoc(db.loadLocal())
+ doc.map(function(d) {
+ Object.keys(d.remoteHosts).map(function(uuidPrefix) {
+ if (!(uuidPrefix in Object.keys(activeSessions))) {
+ db.findAPI(d.remoteHosts[uuidPrefix]).then(function(baseURL) {
+ db.login(baseURL, false)
+ })
+ }
+ })
+ })
+ },
+ // If the current logged in account is from a remote federated cluster,
+ // redirect the user to their home cluster's workbench.
+ // This is meant to avoid confusion when the user clicks through a search
+ // result on the home cluster's multi site search page, landing on the
+ // remote workbench and later trying to do another search by just clicking
+ // on the multi site search button instead of going back with the browser.
+ autoRedirectToHomeCluster: function(path = '/') {
+ var session = db.loadLocal()
+ var userUUIDPrefix = session.user.uuid.slice(0, 5)
+ // If the current user is local to the cluster, do nothing.
+ if (userUUIDPrefix == session.user.owner_uuid.slice(0, 5)) {
+ return
+ }
+ var doc = db.discoveryDoc(session)
+ doc.map(function(d) {
+ // Guess the remote host from the local discovery doc settings
+ var rHost = null
+ if (d.remoteHosts[userUUIDPrefix]) {
+ rHost = d.remoteHosts[userUUIDPrefix]
+ } else if (d.remoteHostsViaDNS) {
+ rHost = userUUIDPrefix + '.arvadosapi.com'
+ } else {
+ // This should not happen: having remote user whose uuid prefix
+ // isn't listed on remoteHosts and dns mechanism is deactivated
+ return
+ }
+ // Get the remote cluster workbench url & redirect there.
+ db.findAPI(rHost).then(function(apiUrl) {
+ var doc = db.discoveryDoc({baseURL: apiUrl})
+ doc.map(function(d) {
+ document.location = d.workbenchUrl + path
+ })
+ })
+ })
+ },
})
}
end
def cancel
+ if @object.container_uuid
+ c = Container.select(['state']).where(uuid: @object.container_uuid).first
+ if c && c.state != 'Running'
+ # If the container hasn't started yet, setting priority=0
+ # leaves our request in "Committed" state and doesn't cancel
+ # the container (even if no other requests are giving it
+ # priority). To avoid showing this container request as "on
+ # hold" after hitting the Cancel button, set state=Final too.
+ @object.state = 'Final'
+ end
+ end
@object.update_attributes! priority: 0
if params[:return_to]
redirect_to params[:return_to]
end
def can_cancel?
- @proxied.is_a?(ContainerRequest) && @proxied.state == "Committed" && @proxied.priority > 0 && @proxied.editable?
+ @proxied.is_a?(ContainerRequest) &&
+ @proxied.state == "Committed" &&
+ (@proxied.priority > 0 || get(:state, @container) != 'Running') &&
+ @proxied.editable?
end
def container_uuid
end
def state_label
-    ec = exit_code
-    return "Failed" if (ec && ec != 0)
-
-    state = get_combined(:state)
-
-    return "Queued" if state == "Locked"
-    return "Cancelled" if ((priority == 0) and (state == "Queued"))
-    state
+    # Derive the user-facing status label from the container request's
+    # state combined with (when assigned) the container's state.
+    if get(:state) == 'Final' && get(:state, @container) != 'Complete'
+      # Request was finalized before its container started (or the
+      # container was cancelled)
+      return 'Cancelled'
+    end
+    # Prefer the container's state; fall back to the request's own.
+    state = get(:state, @container) || get(:state, @proxied)
+    case state
+    when 'Locked', 'Queued'
+      # priority 0 means nothing is asking the container to run.
+      if priority == 0
+        'On hold'
+      else
+        'Queued'
+      end
+    when 'Complete'
+      # A completed container with a nonzero exit code is a failure.
+      if exit_code == 0
+        state
+      else
+        'Failed'
+      end
+    else
+      # Cancelled, Running, or Uncommitted (no container assigned)
+      state
+    end
end
def exit_code
<td>
<span class="label label-<%= wu.state_bootstrap_class %>"><%= wu.state_label %></span>
</td><td>
- <%= link_to_if_arvados_object obj, friendly_name: true, link_text: if !obj.name.empty? then obj.name else obj.uuid end %>
+ <%= link_to_if_arvados_object obj, friendly_name: true, link_text: if obj.name && !obj.name.empty? then obj.name else obj.uuid end %>
</td><td>
<%= obj.description || '' %>
</td><td>
</div>
<div class="clearfix">
- Started at <%= render_localized_date(wu.started_at || wu.created_at, "noseconds") %>.
- <% wu_time = Time.now - (wu.started_at || wu.created_at) %>
- Active for <%= render_runtime(wu_time, false) %>.
-
- <div class="pull-right">
- </div>
+ <% if wu.started_at %>
+ Started at <%= render_localized_date(wu.started_at, "noseconds") %>
+ Active for <%= render_runtime(Time.now - wu.started_at, false) %>.
+ <% else %>
+ Created at <%= render_localized_date(wu.created_at, "noseconds") %>.
+ <% if wu.state_label == 'Queued' %>
+ Queued for <%= render_runtime(Time.now - wu.created_at, false) %>.
+ <% end %>
+ <% end %>
</div>
</div>
<% end %>
SPDX-License-Identifier: AGPL-3.0 -->
+<script type="text/javascript">
+ $(document).on('ready', function() {
+ var db = new SessionDB()
+ db.autoRedirectToHomeCluster('/search')
+ })
+</script>
+
<div data-mount-mithril="Search"></div>
SPDX-License-Identifier: AGPL-3.0 -->
+<script type="text/javascript">
+ $(document).on('ready', function() {
+ var db = new SessionDB()
+ db.autoRedirectToHomeCluster('/sessions')
+ })
+</script>
+
<div data-mount-mithril="SessionsTable"></div>
#
# Link to use for Arvados Workflow Composer app, or false if not available.
#
- composer_url: false
\ No newline at end of file
+ composer_url: false
npm 'browserify', require: false
npm 'jquery'
npm 'awesomplete'
+npm 'jssha'
npm 'mithril'
npm 'es6-object-assign'
get :show, {id: uuid}, session_for(:active)
assert_response :success
- assert_includes @response.body, "action=\"/container_requests/#{uuid}/copy\""
+ assert_includes @response.body, "action=\"/container_requests/#{uuid}/copy\""
+ end
+
+  test "cancel request for queued container" do
+    # Cancelling a request whose container hasn't started should
+    # finalize the request (state=Final) and zero both priorities,
+    # while the container itself stays Queued.
+    cr_fixture = api_fixture('container_requests')['queued']
+    post :cancel, {id: cr_fixture['uuid']}, session_for(:active)
+    assert_response 302
+
+    use_token 'active'
+    cr = ContainerRequest.find(cr_fixture['uuid'])
+    assert_equal 'Final', cr.state
+    assert_equal 0, cr.priority
+    c = Container.find(cr_fixture['container_uuid'])
+    assert_equal 'Queued', c.state
+    assert_equal 0, c.priority
+  end
[
[ContainerRequest, 'cr_for_requester', 'cwu', 1, "Complete", true, 1.0],
[ContainerRequest, 'queued', 'cwu', 0, "Queued", nil, 0.0], # priority 1
[ContainerRequest, 'canceled_with_queued_container', 'cwu', 0, "Cancelled", false, 0.0],
- [ContainerRequest, 'canceled_with_locked_container', 'cwu', 0, "Queued", nil, 0.0],
+ [ContainerRequest, 'canceled_with_locked_container', 'cwu', 0, "Cancelled", false, 0.0],
[ContainerRequest, 'canceled_with_running_container', 'cwu', 1, "Running", nil, 0.0],
].each do |type, fixture, label, num_children, state, success, progress|
test "children of #{fixture}" do
#
# SPDX-License-Identifier: AGPL-3.0
-LIBCLOUD_PIN=2.2.2.dev3
+LIBCLOUD_PIN=2.2.2.dev4
sdk/go/stats
sdk/go/crunchrunner
sdk/cwl
+sdk/R
tools/sync-groups
tools/crunchstat-summary
tools/keep-exercise
PYTHONPATH=
GEMHOME=
PERLINSTALLBASE=
+R_LIBS=
short=
only_install=
which Xvfb || fatal "No xvfb. Try: apt-get install xvfb"
echo -n 'graphviz: '
dot -V || fatal "No graphviz. Try: apt-get install graphviz"
+
+ # R SDK stuff
+ echo -n 'R: '
+ which R || fatal "No R. Try: apt-get install r-base"
+ echo -n 'testthat: '
+ R -q -e "library('testthat')" || fatal "No testthat. Try: apt-get install r-cran-testthat"
+ # needed for roxygen2, needed for devtools, needed for R sdk
+ pkg-config --exists libxml-2.0 || fatal "No libxml2. Try: apt-get install libxml2-dev"
}
rotate_logfile() {
fi
# Set up temporary install dirs (unless existing dirs were supplied)
-for tmpdir in VENVDIR VENV3DIR GOPATH GEMHOME PERLINSTALLBASE
+for tmpdir in VENVDIR VENV3DIR GOPATH GEMHOME PERLINSTALLBASE R_LIBS
do
if [[ -z "${!tmpdir}" ]]; then
eval "$tmpdir"="$temp/$tmpdir"
export PERLINSTALLBASE
export PERLLIB="$PERLINSTALLBASE/lib/perl5:${PERLLIB:+$PERLLIB}"
+export R_LIBS
export GOPATH
mkdir -p "$GOPATH/src/git.curoverse.com"
}
do_install sdk/ruby ruby_sdk
+install_R_sdk() {
+    # Install the R SDK's development prerequisites (devtools,
+    # roxygen2) from CRAN if missing, then the SDK's own dev
+    # dependencies, from inside $WORKSPACE/sdk/R.
+    cd "$WORKSPACE/sdk/R" \
+       && R --quiet --vanilla <<EOF
+options(repos=structure(c(CRAN="http://cran.wustl.edu/")))
+if (!requireNamespace("devtools")) {
+  install.packages("devtools")
+}
+if (!requireNamespace("roxygen2")) {
+  install.packages("roxygen2")
+}
+devtools::install_dev_deps()
+EOF
+}
+do_install sdk/R R_sdk
+
install_perl_sdk() {
cd "$WORKSPACE/sdk/perl" \
&& perl Makefile.PL INSTALL_BASE="$PERLINSTALLBASE" \
}
do_test sdk/ruby ruby_sdk
+test_R_sdk() {
+    # Run the R SDK test suite via its run_test.R driver.
+    cd "$WORKSPACE/sdk/R" \
+       && R --quiet --file=run_test.R
+}
+do_test sdk/R R_sdk
+
test_cli() {
cd "$WORKSPACE/sdk/cli" \
&& mkdir -p /tmp/keep \
1. Only mount points of kind @collection@ are supported.
-2. Mount points underneath output_path must not use @"writable":true@. If any of them are set as @writable@, the API will refuse to create/update the container request, and crunch-run will fail the container.
+2. Mount points underneath output_path which have "writable":true are copied into output_path during container initialization and may be updated, renamed, or deleted by the running container. The original collection is not modified. On container completion, files remaining in the output are saved to the output collection. The mount at output_path must be big enough to accommodate copies of the inner writable mounts.
3. If any such mount points are configured as @"exclude_from_output":true@, they will be excluded from the output.
--- /dev/null
+^.*\.Rproj$
+^\.Rproj\.user$
--- /dev/null
+Version: 1.0
+
+RestoreWorkspace: Default
+SaveWorkspace: Default
+AlwaysSaveHistory: Default
+
+EnableCodeIndexing: Yes
+UseSpacesForTab: Yes
+NumSpacesForTab: 4
+Encoding: UTF-8
+
+RnwWeave: Sweave
+LaTeX: pdfLaTeX
+
+AutoAppendNewline: Yes
+StripTrailingWhitespace: Yes
+
+BuildType: Package
+PackageUseDevtools: Yes
+PackageInstallArgs: --no-multiarch --with-keep.source
--- /dev/null
+Package: ArvadosR
+Type: Package
+Title: Arvados R SDK
+Version: 0.0.2
+Author: Fuad Muhic
+Maintainer: Ward Vandewege <wvandewege@veritasgenetics.com>
+Description: This is the Arvados R SDK
+URL: http://doc.arvados.org
+License: Apache-2.0
+Encoding: UTF-8
+LazyData: true
+RoxygenNote: 6.0.1.9000
+Imports:
+ R6,
+ httr,
+ stringr,
+ jsonlite,
+ curl,
+ XML
+Suggests: testthat
--- /dev/null
+# Generated by roxygen2: do not edit by hand
+
+export(Arvados)
+export(ArvadosFile)
+export(Collection)
+export(Subcollection)
+export(print.Arvados)
+export(print.ArvadosFile)
+export(print.Collection)
+export(print.Subcollection)
--- /dev/null
+source("./R/RESTService.R")
+source("./R/HttpRequest.R")
+source("./R/HttpParser.R")
+
+#' Arvados SDK Object
+#'
+#' All Arvados logic is inside this class.
+#'
+#' @field token Token represents user authentication token.
+#' @field host Host represents server name we wish to connect to.
+#' @examples arv = Arvados$new("token", "host_name")
+#' @export Arvados
+Arvados <- R6::R6Class(
+
+    "Arvados",
+
+    public = list(
+
+        # Configure the connection. Explicit arguments take precedence
+        # over (and are written back to) the ARVADOS_API_HOST /
+        # ARVADOS_API_TOKEN environment variables.
+        initialize = function(authToken = NULL, hostName = NULL, numRetries = 0)
+        {
+            if(!is.null(hostName))
+                Sys.setenv(ARVADOS_API_HOST = hostName)
+
+            if(!is.null(authToken))
+                Sys.setenv(ARVADOS_API_TOKEN = authToken)
+
+            hostName <- Sys.getenv("ARVADOS_API_HOST")
+            token    <- Sys.getenv("ARVADOS_API_TOKEN")
+
+            # Idiom fix: use scalar short-circuit || (not vectorized |)
+            # for this scalar condition. Also fixed the misspelling
+            # "authentification" in the user-facing message.
+            if(hostName == "" || token == "")
+                stop(paste("Please provide host name and authentication token",
+                           "or set ARVADOS_API_HOST and ARVADOS_API_TOKEN",
+                           "environment variables."))
+
+            private$numRetries <- numRetries
+            private$REST <- RESTService$new(token, hostName,
+                                            HttpRequest$new(), HttpParser$new(),
+                                            numRetries)
+
+            private$token <- private$REST$token
+            private$host  <- private$REST$hostName
+        },
+
+        getToken          = function() private$REST$token,
+        getHostName       = function() private$REST$hostName,
+        getWebDavHostName = function() private$REST$getWebDavHostName(),
+        getRESTService    = function() private$REST,
+        setRESTService    = function(newRESTService) private$REST <- newRESTService,
+
+        getNumRetries = function() private$REST$numRetries,
+        setNumRetries = function(newNumOfRetries)
+        {
+            private$REST$setNumRetries(newNumOfRetries)
+        },
+
+        # Fetch a single collection record by UUID.
+        getCollection = function(uuid)
+        {
+            collection <- private$REST$getResource("collections", uuid)
+            collection
+        },
+
+        # List collections matching the given filters (one page).
+        listCollections = function(filters = NULL, limit = 100, offset = 0)
+        {
+            if(!is.null(filters))
+                names(filters) <- c("collection")
+
+            collections <- private$REST$listResources("collections", filters,
+                                                      limit, offset)
+            collections
+        },
+
+        # List collections matching the filters, following pagination.
+        listAllCollections = function(filters = NULL)
+        {
+            if(!is.null(filters))
+                names(filters) <- c("collection")
+
+            collectionURL <- paste0(private$host, "collections")
+            allCollection <- private$REST$fetchAllItems(collectionURL, filters)
+            allCollection
+        },
+
+        deleteCollection = function(uuid)
+        {
+            removedCollection <- private$REST$deleteResource("collections", uuid)
+            removedCollection
+        },
+
+        # Update a collection's attributes; newContent is a named list.
+        updateCollection = function(uuid, newContent)
+        {
+            body <- list(list())
+            names(body) <- c("collection")
+            body$collection <- newContent
+
+            updatedCollection <- private$REST$updateResource("collections",
+                                                             uuid, body)
+            updatedCollection
+        },
+
+        createCollection = function(content)
+        {
+            body <- list(list())
+            names(body) <- c("collection")
+            body$collection <- content
+
+            newCollection <- private$REST$createResource("collections", body)
+            newCollection
+        },
+
+        # Projects are groups with group_class "project".
+        getProject = function(uuid)
+        {
+            project <- private$REST$getResource("groups", uuid)
+            project
+        },
+
+        createProject = function(content)
+        {
+            body <- list(list())
+            names(body) <- c("group")
+            body$group <- c("group_class" = "project", content)
+
+            newProject <- private$REST$createResource("groups", body)
+            newProject
+        },
+
+        updateProject = function(uuid, newContent)
+        {
+            body <- list(list())
+            names(body) <- c("group")
+            body$group <- newContent
+
+            updatedProject <- private$REST$updateResource("groups", uuid, body)
+            updatedProject
+        },
+
+        listProjects = function(filters = NULL, limit = 100, offset = 0)
+        {
+            if(!is.null(filters))
+                names(filters) <- c("groups")
+
+            # Restrict the groups listing to actual projects.
+            filters[[length(filters) + 1]] <- list("group_class", "=", "project")
+
+            projects <- private$REST$listResources("groups", filters, limit, offset)
+            projects
+        },
+
+        listAllProjects = function(filters = NULL)
+        {
+            if(!is.null(filters))
+                names(filters) <- c("groups")
+
+            filters[[length(filters) + 1]] <- list("group_class", "=", "project")
+
+            projectURL <- paste0(private$host, "groups")
+
+            result <- private$REST$fetchAllItems(projectURL, filters)
+            result
+        },
+
+        deleteProject = function(uuid)
+        {
+            removedProject <- private$REST$deleteResource("groups", uuid)
+            removedProject
+        }
+    ),
+
+    private = list(
+
+        token      = NULL,
+        host       = NULL,
+        REST       = NULL,
+        numRetries = NULL
+    ),
+
+    cloneable = FALSE
+)
+
+#' @export print.Arvados
+print.Arvados = function(arvados)
+{
+    cat(paste0("Type: ",  "\"", "Arvados",               "\""), sep = "\n")
+    cat(paste0("Host: ",  "\"", arvados$getHostName(),   "\""), sep = "\n")
+    cat(paste0("Token: ", "\"", arvados$getToken(),      "\""), sep = "\n")
+}
--- /dev/null
+source("./R/util.R")
+
+#' ArvadosFile Object
+#'
+#' Represents a single file inside an Arvados collection. An
+#' ArvadosFile can only reach the REST/Keep services once it has been
+#' attached to a Collection (see setCollection).
+#'
+#' @export ArvadosFile
+ArvadosFile <- R6::R6Class(
+
+    "ArvadosFile",
+
+    public = list(
+
+        # name must be a non-empty file name.
+        initialize = function(name)
+        {
+            if(name == "")
+                stop("Invalid name.")
+
+            private$name <- name
+        },
+
+        getName = function() private$name,
+
+        # NOTE(review): the fullpath argument is currently ignored; a
+        # file's "listing" is just its own name -- confirm intent.
+        getFileListing = function(fullpath = TRUE)
+        {
+            self$getName()
+        },
+
+        # File size in bytes as reported by the REST service; 0 when
+        # the file isn't attached to a collection yet.
+        getSizeInBytes = function()
+        {
+            if(is.null(private$collection))
+                return(0)
+
+            REST <- private$collection$getRESTService()
+
+            fileSize <- REST$getResourceSize(self$getRelativePath(),
+                                             private$collection$uuid)
+
+            fileSize
+        },
+
+        # Files have no children; always NULL. These mirror the
+        # Subcollection interface so tree code treats both uniformly.
+        get = function(fileLikeObjectName)
+        {
+            return(NULL)
+        },
+
+        getFirst = function()
+        {
+            return(NULL)
+        },
+
+        getCollection = function() private$collection,
+
+        setCollection = function(collection)
+        {
+            private$collection <- collection
+        },
+
+        # Path of this file relative to the collection root, built by
+        # walking the parent chain upwards.
+        getRelativePath = function()
+        {
+            relativePath <- c(private$name)
+            parent <- private$parent
+
+            while(!is.null(parent))
+            {
+                relativePath <- c(parent$getName(), relativePath)
+                parent <- parent$getParent()
+            }
+
+            # Drop the unnamed root element before joining.
+            relativePath <- relativePath[relativePath != ""]
+            paste0(relativePath, collapse = "/")
+        },
+
+        getParent = function() private$parent,
+
+        setParent = function(newParent) private$parent <- newParent,
+
+        # Read (part of) the file content from the server.
+        # contentType selects the representation; offset/length select
+        # a byte range (length 0 presumably means "to the end" --
+        # verify against RESTService$read).
+        read = function(contentType = "raw", offset = 0, length = 0)
+        {
+            if(is.null(private$collection))
+                stop("ArvadosFile doesn't belong to any collection.")
+
+            if(offset < 0 || length < 0)
+                stop("Offset and length must be positive values.")
+
+            REST <- private$collection$getRESTService()
+
+            fileContent <- REST$read(self$getRelativePath(),
+                                     private$collection$uuid,
+                                     contentType, offset, length)
+            fileContent
+        },
+
+        # Return an R connection for reading ("r"/"rb") or a buffered
+        # text connection for writing ("w"). Content written to a "w"
+        # connection is only uploaded when flush() is called.
+        connection = function(rw)
+        {
+            if (rw == "r" || rw == "rb")
+            {
+                REST <- private$collection$getRESTService()
+                return(REST$getConnection(private$collection$uuid,
+                                          self$getRelativePath(),
+                                          rw))
+            }
+            else if (rw == "w")
+            {
+                private$buffer <- textConnection(NULL, "w")
+
+                return(private$buffer)
+            }
+        },
+
+        # Upload everything buffered by a "w" connection, then close it.
+        flush = function()
+        {
+            v <- textConnectionValue(private$buffer)
+            close(private$buffer)
+            self$write(paste(v, collapse='\n'))
+        },
+
+        write = function(content, contentType = "text/html")
+        {
+            if(is.null(private$collection))
+                stop("ArvadosFile doesn't belong to any collection.")
+
+            REST <- private$collection$getRESTService()
+
+            writeResult <- REST$write(self$getRelativePath(),
+                                      private$collection$uuid,
+                                      content, contentType)
+            writeResult
+        },
+
+        # Move the file to newLocation (a path relative to the
+        # collection root), updating both the server and the local tree.
+        move = function(newLocation)
+        {
+            if(is.null(private$collection))
+                stop("ArvadosFile doesn't belong to any collection")
+
+            newLocation <- trimFromEnd(newLocation, "/")
+            nameAndPath <- splitToPathAndName(newLocation)
+
+            newParent <- private$collection$get(nameAndPath$path)
+
+            if(is.null(newParent))
+            {
+                stop("Unable to get destination subcollection")
+            }
+
+            childWithSameName <- newParent$get(nameAndPath$name)
+
+            if(!is.null(childWithSameName))
+                stop("Destination already contains content with same name.")
+
+            REST <- private$collection$getRESTService()
+            REST$move(self$getRelativePath(),
+                      paste0(newParent$getRelativePath(), "/", nameAndPath$name),
+                      private$collection$uuid)
+
+            # Update the in-memory tree only after the server-side move
+            # has gone through.
+            private$dettachFromCurrentParent()
+            private$attachToNewParent(newParent)
+
+            private$name <- nameAndPath$name
+
+            "Content moved successfully."
+        }
+    ),
+
+    private = list(
+
+        name       = NULL,
+        size       = NULL,
+        parent     = NULL,
+        collection = NULL,
+        buffer     = NULL,
+
+        attachToNewParent = function(newParent)
+        {
+            #Note: We temporary set parents collection to NULL. This will ensure that
+            #      add method doesn't post file on REST.
+            parentsCollection <- newParent$getCollection()
+            newParent$setCollection(NULL, setRecursively = FALSE)
+
+            newParent$add(self)
+
+            newParent$setCollection(parentsCollection, setRecursively = FALSE)
+
+            private$parent <- newParent
+        },
+
+        # NOTE(review): "dettach" is a misspelling of "detach"; kept
+        # as-is because the name is referenced by move() above.
+        dettachFromCurrentParent = function()
+        {
+            #Note: We temporary set parents collection to NULL. This will ensure that
+            #      remove method doesn't remove this subcollection from REST.
+            parent <- private$parent
+            parentsCollection <- parent$getCollection()
+            parent$setCollection(NULL, setRecursively = FALSE)
+
+            parent$remove(private$name)
+
+            parent$setCollection(parentsCollection, setRecursively = FALSE)
+        }
+    ),
+
+    cloneable = FALSE
+)
+
+#' @export print.ArvadosFile
+print.ArvadosFile = function(arvadosFile)
+{
+    collection <- NULL
+    relativePath <- arvadosFile$getRelativePath()
+
+    if(!is.null(arvadosFile$getCollection()))
+    {
+        collection <- arvadosFile$getCollection()$uuid
+        relativePath <- paste0("/", relativePath)
+    }
+
+    cat(paste0("Type: ", "\"", "ArvadosFile", "\""), sep = "\n")
+    cat(paste0("Name: ", "\"", arvadosFile$getName(), "\""), sep = "\n")
+    cat(paste0("Relative path: ", "\"", relativePath, "\""), sep = "\n")
+    cat(paste0("Collection: ", "\"", collection, "\""), sep = "\n")
+}
--- /dev/null
+source("./R/Subcollection.R")
+source("./R/ArvadosFile.R")
+source("./R/RESTService.R")
+source("./R/util.R")
+
+#' Arvados Collection Object
+#'
+#' Update description
+#'
+#' @examples arv = Collection$new(api, uuid)
+#' @export Collection
+Collection <- R6::R6Class(
+
+ "Collection",
+
+ public = list(
+
+ api = NULL,
+ uuid = NULL,
+
+ initialize = function(api, uuid)
+ {
+ self$api <- api
+ private$REST <- api$getRESTService()
+
+ self$uuid <- uuid
+
+ private$fileContent <- private$REST$getCollectionContent(uuid)
+ private$tree <- CollectionTree$new(private$fileContent, self)
+ },
+
+ add = function(content, relativePath = "")
+ {
+ if(relativePath == "" ||
+ relativePath == "." ||
+ relativePath == "./")
+ {
+ subcollection <- private$tree$getTree()
+ }
+ else
+ {
+ relativePath <- trimFromEnd(relativePath, "/")
+ subcollection <- self$get(relativePath)
+ }
+
+ if(is.null(subcollection))
+ stop(paste("Subcollection", relativePath, "doesn't exist."))
+
+ if("ArvadosFile" %in% class(content) ||
+ "Subcollection" %in% class(content))
+ {
+
+ if(content$getName() == "")
+ stop("Content has invalid name.")
+
+ subcollection$add(content)
+ content
+ }
+ else
+ {
+ stop(paste0("Expected AravodsFile or Subcollection object, got ",
+ paste0("(", paste0(class(content), collapse = ", "), ")"),
+ "."))
+ }
+ },
+
+ create = function(fileNames, relativePath = "")
+ {
+ if(relativePath == "" ||
+ relativePath == "." ||
+ relativePath == "./")
+ {
+ subcollection <- private$tree$getTree()
+ }
+ else
+ {
+ relativePath <- trimFromEnd(relativePath, "/")
+ subcollection <- self$get(relativePath)
+ }
+
+ if(is.null(subcollection))
+ stop(paste("Subcollection", relativePath, "doesn't exist."))
+
+ if(is.character(fileNames))
+ {
+ arvadosFiles <- NULL
+ sapply(fileNames, function(fileName)
+ {
+ childWithSameName <- subcollection$get(fileName)
+ if(!is.null(childWithSameName))
+ stop("Destination already contains file with same name.")
+
+ newFile <- ArvadosFile$new(fileName)
+ subcollection$add(newFile)
+
+ arvadosFiles <<- c(arvadosFiles, newFile)
+ })
+
+ if(length(arvadosFiles) == 1)
+ return(arvadosFiles[[1]])
+ else
+ return(arvadosFiles)
+ }
+ else
+ {
+ stop(paste0("Expected character vector, got ",
+ paste0("(", paste0(class(fileNames), collapse = ", "), ")"),
+ "."))
+ }
+ },
+
+ remove = function(paths)
+ {
+ if(is.character(paths))
+ {
+ sapply(paths, function(filePath)
+ {
+ filePath <- trimFromEnd(filePath, "/")
+ file <- self$get(filePath)
+
+ if(is.null(file))
+ stop(paste("File", filePath, "doesn't exist."))
+
+ parent <- file$getParent()
+
+ if(is.null(parent))
+ stop("You can't delete root folder.")
+
+ parent$remove(file$getName())
+ })
+
+ "Content removed"
+ }
+ else
+ {
+ stop(paste0("Expected character vector, got ",
+ paste0("(", paste0(class(paths), collapse = ", "), ")"),
+ "."))
+ }
+ },
+
+ move = function(content, newLocation)
+ {
+ content <- trimFromEnd(content, "/")
+
+ elementToMove <- self$get(content)
+
+ if(is.null(elementToMove))
+ stop("Content you want to move doesn't exist in the collection.")
+
+ elementToMove$move(newLocation)
+ },
+
+ getFileListing = function()
+ {
+ content <- private$REST$getCollectionContent(self$uuid)
+ content[order(tolower(content))]
+ },
+
+ get = function(relativePath)
+ {
+ private$tree$getElement(relativePath)
+ },
+
+ getRESTService = function() private$REST,
+ setRESTService = function(newRESTService) private$REST <- newRESTService
+ ),
+
+ private = list(
+
+ REST = NULL,
+ tree = NULL,
+ fileContent = NULL
+ ),
+
+ cloneable = FALSE
+)
+
+#' @export print.Collection
+print.Collection = function(collection)
+{
+ cat(paste0("Type: ", "\"", "Arvados Collection", "\""), sep = "\n")
+ cat(paste0("uuid: ", "\"", collection$uuid, "\""), sep = "\n")
+}
--- /dev/null
+source("./R/Subcollection.R")
+source("./R/ArvadosFile.R")
+source("./R/util.R")
+
+#' Arvados Collection Object
+#'
+#' Update description
+#'
+#' @examples arv = Collection$new(api, uuid)
+CollectionTree <- R6::R6Class(
+ "CollectionTree",
+ public = list(
+
+ pathsList = NULL,
+
+ initialize = function(fileContent, collection)
+ {
+ self$pathsList <- fileContent
+
+ treeBranches <- sapply(fileContent, function(filePath)
+ {
+ splitPath <- unlist(strsplit(filePath, "/", fixed = TRUE))
+ branch <- private$createBranch(splitPath)
+ })
+
+ root <- Subcollection$new("")
+
+ sapply(treeBranches, function(branch)
+ {
+ private$addBranch(root, branch)
+ })
+
+ root$setCollection(collection)
+ private$tree <- root
+ },
+
+ getElement = function(relativePath)
+ {
+ relativePath <- trimFromStart(relativePath, "./")
+ relativePath <- trimFromEnd(relativePath, "/")
+
+ if(endsWith(relativePath, "/"))
+ relativePath <- substr(relativePath, 0, nchar(relativePath) - 1)
+
+ splitPath <- unlist(strsplit(relativePath, "/", fixed = TRUE))
+ returnElement <- private$tree
+
+ for(pathFragment in splitPath)
+ {
+ returnElement <- returnElement$get(pathFragment)
+
+ if(is.null(returnElement))
+ return(NULL)
+ }
+
+ returnElement
+ },
+
+ getTree = function() private$tree
+ ),
+
+ private = list(
+
+ tree = NULL,
+
+ createBranch = function(splitPath)
+ {
+ branch <- NULL
+ lastElementIndex <- length(splitPath)
+
+ for(elementIndex in lastElementIndex:1)
+ {
+ if(elementIndex == lastElementIndex)
+ {
+ branch <- ArvadosFile$new(splitPath[[elementIndex]])
+ }
+ else
+ {
+ newFolder <- Subcollection$new(splitPath[[elementIndex]])
+ newFolder$add(branch)
+ branch <- newFolder
+ }
+ }
+
+ branch
+ },
+
+ addBranch = function(container, node)
+ {
+ child <- container$get(node$getName())
+
+ if(is.null(child))
+ {
+ container$add(node)
+ }
+ else
+ {
+ # Note: REST always returns folder name alone before other folder
+ # content, so in first iteration we don't know if it's a file
+ # or folder since its just a name, so we assume it's a file.
+ # If we encounter that same name again we know
+ # it's a folder so we need to replace ArvadosFile with Subcollection.
+ if("ArvadosFile" %in% class(child))
+ {
+ child = private$replaceFileWithSubcollection(child)
+ }
+
+ private$addBranch(child, node$getFirst())
+ }
+ },
+
+ replaceFileWithSubcollection = function(arvadosFile)
+ {
+ subcollection <- Subcollection$new(arvadosFile$getName())
+ fileParent <- arvadosFile$getParent()
+ fileParent$remove(arvadosFile$getName())
+ fileParent$add(subcollection)
+
+ arvadosFile$setParent(NULL)
+
+ subcollection
+ }
+ )
+)
--- /dev/null
#' HttpParser
#'
#' Decodes HTTP responses from the Arvados API server (JSON bodies) and
#' from the Keep WebDAV server (PROPFIND XML listings).
HttpParser <- R6::R6Class(

  # Fix: the class attribute was previously misspelled "HttrParser",
  # disagreeing with the generator name HttpParser.
  "HttpParser",

  public = list(

    validContentTypes = NULL,

    initialize = function()
    {
      self$validContentTypes <- c("text", "raw")
    },

    # Parse a JSON response body into an R list.
    parseJSONResponse = function(serverResponse)
    {
      parsed_response <- httr::content(serverResponse,
                                       as = "parsed",
                                       type = "application/json")
    },

    # Return the response body as outputType ("text" or "raw").
    parseResponse = function(serverResponse, outputType)
    {
      parsed_response <- httr::content(serverResponse, as = outputType)
    },

    # Extract file names from a WebDAV PROPFIND response, stripping the
    # collection base path; drops the first entry (the collection itself).
    getFileNamesFromResponse = function(response, uri)
    {
      text <- rawToChar(response$content)
      doc <- XML::xmlParse(text, asText=TRUE)
      # Base path: everything after the scheme+host portion of the uri.
      base <- paste(paste("/", strsplit(uri, "/")[[1]][-1:-3], sep="", collapse=""), "/", sep="")
      result <- unlist(
        XML::xpathApply(doc, "//D:response/D:href", function(node) {
          sub(base, "", URLdecode(XML::xmlValue(node)), fixed=TRUE)
        })
      )
      result <- result[result != ""]
      result[-1]
    },

    # Extract per-entry content lengths from a WebDAV PROPFIND response
    # (as a character vector; callers convert to numeric).
    getFileSizesFromResponse = function(response, uri)
    {
      text <- rawToChar(response$content)
      doc <- XML::xmlParse(text, asText=TRUE)

      base <- paste(paste("/", strsplit(uri, "/")[[1]][-1:-3], sep="", collapse=""), "/", sep="")
      result <- XML::xpathApply(doc, "//D:response/D:propstat/D:prop/D:getcontentlength", function(node) {
        XML::xmlValue(node)
      })

      unlist(result)
    }
  )
)
--- /dev/null
+source("./R/util.R")
+
HttpRequest <- R6::R6Class(

  # Fix: the class attribute was previously misspelled "HttrRequest",
  # disagreeing with the generator name HttpRequest.
  "HttpRequest",

  public = list(

    validContentTypes = NULL,
    validVerbs = NULL,

    initialize = function()
    {
      self$validContentTypes <- c("text", "raw")
      self$validVerbs <- c("GET", "POST", "PUT", "DELETE", "PROPFIND", "MOVE")
    },

    # Perform an HTTP request and return the raw httr response object.
    # retryTimes is the number of retries after a failed attempt.
    execute = function(verb, url, headers = NULL, body = NULL, query = NULL,
                       limit = NULL, offset = NULL, retryTimes = 0)
    {
      if(!(verb %in% self$validVerbs))
        stop("Http verb is not valid.")

      headers <- httr::add_headers(unlist(headers))
      urlQuery <- self$createQuery(query, limit, offset)
      url <- paste0(url, urlQuery)

      # times = 1 regular call + numberOfRetries
      response <- httr::RETRY(verb, url = url, body = body,
                              config = headers, times = retryTimes + 1)
    },

    # Build the query-string suffix ("/?a=1&b=2"), or "" when neither
    # filters, limit nor offset were supplied.
    createQuery = function(filters, limit, offset)
    {
      # c() silently drops NULL components, so only the parts that were
      # actually supplied survive. (The old
      # `finalQuery[!is.null(finalQuery)]` was a no-op: is.null() on a
      # vector is a single FALSE, so it never filtered anything.)
      finalQuery <- c(private$createFiltersQuery(filters),
                      private$createLimitQuery(limit),
                      private$createOffsetQuery(offset))

      finalQuery <- paste0(finalQuery, collapse = "&")

      if(finalQuery != "")
        finalQuery <- paste0("/?", finalQuery)

      finalQuery
    }
  ),

  private = list(

    # "filters=<url-encoded python-style list>" or NULL.
    createFiltersQuery = function(filters)
    {
      if(!is.null(filters))
      {
        filters <- RListToPythonList(filters, ",")
        encodedQuery <- URLencode(filters, reserved = TRUE, repeated = TRUE)

        return(paste0("filters=", encodedQuery))
      }

      return(NULL)
    },

    # "limit=<n>" or NULL; rejects non-numeric input.
    createLimitQuery = function(limit)
    {
      if(!is.null(limit))
      {
        limit <- suppressWarnings(as.numeric(limit))

        if(is.na(limit))
          stop("Limit must be a numeric type.")

        return(paste0("limit=", limit))
      }

      return(NULL)
    },

    # "offset=<n>" or NULL; rejects non-numeric input.
    createOffsetQuery = function(offset)
    {
      if(!is.null(offset))
      {
        offset <- suppressWarnings(as.numeric(offset))

        if(is.na(offset))
          stop("Offset must be a numeric type.")

        return(paste0("offset=", offset))
      }

      return(NULL)
    }
  ),

  cloneable = FALSE
)
--- /dev/null
# RESTService wraps all network traffic of the SDK: CRUD calls against
# the Arvados API server (JSON) and file operations against the Keep
# WebDAV server. It delegates transport to an injected HttpRequest-like
# object (self$http) and decoding to an HttpParser-like object
# (self$httpParser), which makes it straightforward to mock in tests.
RESTService <- R6::R6Class(

  "RESTService",

  public = list(

    hostName = NULL,
    token = NULL,
    http = NULL,
    httpParser = NULL,
    numRetries = NULL,

    # token:          Arvados API token sent on every request.
    # hostName:       bare API host; expanded to the versioned base URL.
    # http:           object performing the HTTP calls (see HttpRequest).
    # httpParser:     object decoding responses (see HttpParser).
    # numRetries:     retries for failed requests (0 = single attempt).
    # webDavHostName: optional Keep WebDAV URL; discovered lazily if NULL.
    initialize = function(token, hostName,
                          http, httpParser,
                          numRetries = 0,
                          webDavHostName = NULL)
    {
      version <- "v1"

      self$token <- token
      self$hostName <- paste0("https://", hostName,
                              "/arvados/", version, "/")
      self$http <- http
      self$httpParser <- httpParser
      self$numRetries <- numRetries

      private$rawHostName <- hostName
      private$webDavHostName <- webDavHostName
    },

    setNumConnRetries = function(newNumOfRetries)
    {
      self$numRetries <- newNumOfRetries
    },

    # Return the Keep WebDAV base URL, fetching it once from the cluster's
    # discovery document and caching it in private$webDavHostName.
    getWebDavHostName = function()
    {
      if(is.null(private$webDavHostName))
      {
        discoveryDocumentURL <- paste0("https://", private$rawHostName,
                                       "/discovery/v1/apis/arvados/v1/rest")

        headers <- list(Authorization = paste("OAuth2", self$token))

        serverResponse <- self$http$execute("GET", discoveryDocumentURL, headers,
                                            retryTimes = self$numRetries)

        discoveryDocument <- self$httpParser$parseJSONResponse(serverResponse)
        private$webDavHostName <- discoveryDocument$keepWebServiceUrl

        if(is.null(private$webDavHostName))
          stop("Unable to find WebDAV server.")
      }

      private$webDavHostName
    },

    # GET a single API resource (e.g. "collections") by uuid; raises on
    # server-reported errors.
    getResource = function(resource, uuid)
    {
      resourceURL <- paste0(self$hostName, resource, "/", uuid)
      headers <- list(Authorization = paste("OAuth2", self$token))

      serverResponse <- self$http$execute("GET", resourceURL, headers,
                                          retryTimes = self$numRetries)

      resource <- self$httpParser$parseJSONResponse(serverResponse)

      if(!is.null(resource$errors))
        stop(resource$errors)

      resource
    },

    # List API resources with optional filters and paging.
    listResources = function(resource, filters = NULL, limit = 100, offset = 0)
    {
      resourceURL <- paste0(self$hostName, resource)
      headers <- list(Authorization = paste("OAuth2", self$token))
      body <- NULL

      serverResponse <- self$http$execute("GET", resourceURL, headers, body,
                                          filters, limit, offset,
                                          self$numRetries)

      resources <- self$httpParser$parseJSONResponse(serverResponse)

      if(!is.null(resources$errors))
        stop(resources$errors)

      resources
    },

    # Page through a listing until all items_available are collected.
    # NOTE(review): assumes each page returns at least one item while
    # fewer than items_available have been fetched; an empty page would
    # loop forever — confirm server behavior.
    fetchAllItems = function(resourceURL, filters)
    {
      headers <- list(Authorization = paste("OAuth2", self$token))

      offset <- 0
      itemsAvailable <- .Machine$integer.max
      items <- c()
      while(length(items) < itemsAvailable)
      {
        serverResponse <- self$http$execute(verb = "GET",
                                            url = resourceURL,
                                            headers = headers,
                                            body = NULL,
                                            query = filters,
                                            limit = NULL,
                                            offset = offset,
                                            retryTimes = self$numRetries)

        parsedResponse <- self$httpParser$parseJSONResponse(serverResponse)

        if(!is.null(parsedResponse$errors))
          stop(parsedResponse$errors)

        items <- c(items, parsedResponse$items)
        offset <- length(items)
        itemsAvailable <- parsedResponse$items_available
      }

      items
    },

    # DELETE an API resource by uuid; raises on server-reported errors.
    deleteResource = function(resource, uuid)
    {
      collectionURL <- paste0(self$hostName, resource, "/", uuid)
      headers <- list("Authorization" = paste("OAuth2", self$token),
                      "Content-Type" = "application/json")

      serverResponse <- self$http$execute("DELETE", collectionURL, headers,
                                          retryTimes = self$numRetries)

      removedResource <- self$httpParser$parseJSONResponse(serverResponse)

      if(!is.null(removedResource$errors))
        stop(removedResource$errors)

      removedResource
    },

    # PUT updated fields (an R list, serialized to JSON) to a resource.
    updateResource = function(resource, uuid, newContent)
    {
      resourceURL <- paste0(self$hostName, resource, "/", uuid)
      headers <- list("Authorization" = paste("OAuth2", self$token),
                      "Content-Type" = "application/json")

      newContent <- jsonlite::toJSON(newContent, auto_unbox = T)

      serverResponse <- self$http$execute("PUT", resourceURL, headers, newContent,
                                          retryTimes = self$numRetries)

      updatedResource <- self$httpParser$parseJSONResponse(serverResponse)

      if(!is.null(updatedResource$errors))
        stop(updatedResource$errors)

      updatedResource
    },

    # POST a new resource (an R list, serialized to JSON).
    createResource = function(resource, content)
    {
      resourceURL <- paste0(self$hostName, resource)
      headers <- list("Authorization" = paste("OAuth2", self$token),
                      "Content-Type" = "application/json")

      content <- jsonlite::toJSON(content, auto_unbox = T)

      serverResponse <- self$http$execute("POST", resourceURL, headers, content,
                                          retryTimes = self$numRetries)

      newResource <- self$httpParser$parseJSONResponse(serverResponse)

      if(!is.null(newResource$errors))
        stop(newResource$errors)

      newResource
    },

    # Create empty files (one per path in `files`) inside collection uuid
    # via WebDAV PUT.
    create = function(files, uuid)
    {
      sapply(files, function(filePath)
      {
        private$createNewFile(filePath, uuid, "text/html")
      })
    },

    # Delete a file/folder (relativePath) from collection uuid via WebDAV.
    delete = function(relativePath, uuid)
    {
      fileURL <- paste0(self$getWebDavHostName(), "c=",
                        uuid, "/", relativePath);
      headers <- list(Authorization = paste("OAuth2", self$token))

      serverResponse <- self$http$execute("DELETE", fileURL, headers,
                                          retryTimes = self$numRetries)

      if(serverResponse$status_code < 200 || serverResponse$status_code >= 300)
        stop(paste("Server code:", serverResponse$status_code))

      serverResponse
    },

    # WebDAV MOVE: rename/relocate `from` to `to` inside collection uuid.
    move = function(from, to, uuid)
    {
      collectionURL <- paste0(self$getWebDavHostName(), "c=", uuid, "/")
      fromURL <- paste0(collectionURL, from)
      toURL <- paste0(collectionURL, to)

      headers <- list("Authorization" = paste("OAuth2", self$token),
                      "Destination" = toURL)

      serverResponse <- self$http$execute("MOVE", fromURL, headers,
                                          retryTimes = self$numRetries)

      if(serverResponse$status_code < 200 || serverResponse$status_code >= 300)
        stop(paste("Server code:", serverResponse$status_code))

      serverResponse
    },

    # WebDAV PROPFIND: list all file paths in collection uuid.
    getCollectionContent = function(uuid)
    {
      collectionURL <- URLencode(paste0(self$getWebDavHostName(),
                                        "c=", uuid))

      headers <- list("Authorization" = paste("OAuth2", self$token))

      response <- self$http$execute("PROPFIND", collectionURL, headers,
                                    retryTimes = self$numRetries)

      # NOTE(review): comparing a response object against "" looks like a
      # guard for a mocked/empty response — confirm intent.
      if(all(response == ""))
        stop("Response is empty, request may be misconfigured")

      if(response$status_code < 200 || response$status_code >= 300)
        stop(paste("Server code:", response$status_code))

      self$httpParser$getFileNamesFromResponse(response, collectionURL)
    },

    # WebDAV PROPFIND on a subtree: per-entry sizes in bytes.
    getResourceSize = function(relativePath, uuid)
    {
      collectionURL <- URLencode(paste0(self$getWebDavHostName(),
                                        "c=", uuid))

      subcollectionURL <- paste0(collectionURL, "/", relativePath);

      headers <- list("Authorization" = paste("OAuth2", self$token))

      response <- self$http$execute("PROPFIND", subcollectionURL, headers,
                                    retryTimes = self$numRetries)

      if(all(response == ""))
        stop("Response is empty, request may be misconfigured")

      if(response$status_code < 200 || response$status_code >= 300)
        stop(paste("Server code:", response$status_code))

      sizes <- self$httpParser$getFileSizesFromResponse(response,
                                                        collectionURL)
      as.numeric(sizes)
    },

    # Read a file (whole, or a byte range when offset/length are given)
    # as "text" or "raw".
    read = function(relativePath, uuid, contentType = "raw", offset = 0, length = 0)
    {
      fileURL <- paste0(self$getWebDavHostName(),
                        "c=", uuid, "/", relativePath);

      range <- paste0("bytes=", offset, "-")

      if(length > 0)
        range = paste0(range, offset + length - 1)

      # Only send a Range header when a partial read was requested.
      if(offset == 0 && length == 0)
      {
        headers <- list(Authorization = paste("OAuth2", self$token))
      }
      else
      {
        headers <- list(Authorization = paste("OAuth2", self$token),
                        Range = range)
      }

      if(!(contentType %in% self$httpParser$validContentTypes))
        stop("Invalid contentType. Please use text or raw.")

      serverResponse <- self$http$execute("GET", fileURL, headers,
                                          retryTimes = self$numRetries)

      if(serverResponse$status_code < 200 || serverResponse$status_code >= 300)
        stop(paste("Server code:", serverResponse$status_code))

      self$httpParser$parseResponse(serverResponse, contentType)
    },

    # Overwrite a file's content via WebDAV PUT; returns body as text.
    write = function(relativePath, uuid, content, contentType)
    {
      fileURL <- paste0(self$getWebDavHostName(),
                        "c=", uuid, "/", relativePath);
      headers <- list(Authorization = paste("OAuth2", self$token),
                      "Content-Type" = contentType)
      body <- content

      serverResponse <- self$http$execute("PUT", fileURL, headers, body,
                                          retryTimes = self$numRetries)

      if(serverResponse$status_code < 200 || serverResponse$status_code >= 300)
        stop(paste("Server code:", serverResponse$status_code))

      self$httpParser$parseResponse(serverResponse, "text")
    },

    # Open a curl connection to a file for streaming reads/writes
    # (openMode as for base::connections, e.g. "r", "rb", "w").
    getConnection = function(uuid, relativePath, openMode)
    {
      fileURL <- paste0(self$getWebDavHostName(),
                        "c=", uuid, "/", relativePath);
      headers <- list(Authorization = paste("OAuth2", self$token))

      h <- curl::new_handle()
      curl::handle_setheaders(h, .list = headers)

      conn <- curl::curl(url = fileURL, open = openMode, handle = h)

      conn
    }
  ),

  private = list(

    webDavHostName = NULL,
    rawHostName = NULL,

    # PUT an empty body to create a zero-length file at relativePath.
    createNewFile = function(relativePath, uuid, contentType)
    {
      fileURL <- paste0(self$getWebDavHostName(), "c=",
                        uuid, "/", relativePath)
      headers <- list(Authorization = paste("OAuth2", self$token),
                      "Content-Type" = contentType)
      body <- NULL

      serverResponse <- self$http$execute("PUT", fileURL, headers, body,
                                          retryTimes = self$numRetries)

      if(serverResponse$status_code < 200 || serverResponse$status_code >= 300)
        stop(paste("Server code:", serverResponse$status_code))

      paste("File created:", relativePath)
    }
  ),

  cloneable = FALSE
)
--- /dev/null
+source("./R/util.R")
+
#' Arvados SubCollection Object
#'
#' Represents a folder node in a collection's file tree. Holds child
#' ArvadosFile/Subcollection nodes and, when attached to a Collection,
#' mirrors add/remove/move operations to the REST/WebDAV service.
#'
#' @export Subcollection
Subcollection <- R6::R6Class(

  "Subcollection",

  public = list(

    initialize = function(name)
    {
      private$name <- name
    },

    getName = function() private$name,

    # Path of this node relative to the collection root, built by walking
    # up the parent chain ("" names are skipped, e.g. the unnamed root).
    getRelativePath = function()
    {
      relativePath <- c(private$name)
      parent <- private$parent

      while(!is.null(parent))
      {
        relativePath <- c(parent$getName(), relativePath)
        parent <- parent$getParent()
      }

      relativePath <- relativePath[relativePath != ""]
      paste0(relativePath, collapse = "/")
    },

    # Attach an ArvadosFile or Subcollection as a child. When this node
    # belongs to a collection, the content is also created on the server.
    add = function(content)
    {
      if("ArvadosFile" %in% class(content) ||
         "Subcollection" %in% class(content))
      {
        if(content$getName() == "")
          stop("Content has invalid name.")

        childWithSameName <- self$get(content$getName())

        if(!is.null(childWithSameName))
          stop(paste("Subcollection already contains ArvadosFile",
                     "or Subcollection with same name."))

        if(!is.null(private$collection))
        {
          if(self$getRelativePath() != "")
            contentPath <- paste0(self$getRelativePath(),
                                  "/", content$getFileListing())
          else
            contentPath <- content$getFileListing()

          REST <- private$collection$getRESTService()
          REST$create(contentPath, private$collection$uuid)
          content$setCollection(private$collection)
        }

        private$children <- c(private$children, content)
        content$setParent(self)

        "Content added successfully."
      }
      else
      {
        # NOTE(review): "AravodsFile" is a typo for "ArvadosFile" in this
        # runtime message; fix it alongside any tests that match on it.
        stop(paste0("Expected AravodsFile or Subcollection object, got ",
                    paste0("(", paste0(class(content), collapse = ", "), ")"),
                    "."))
      }
    },

    # Remove the named child; when attached to a collection, the content
    # is also deleted on the server.
    remove = function(name)
    {
      if(is.character(name))
      {
        child <- self$get(name)

        if(is.null(child))
          stop(paste("Subcollection doesn't contains ArvadosFile",
                     "or Subcollection with specified name."))

        if(!is.null(private$collection))
        {
          REST <- private$collection$getRESTService()
          REST$delete(child$getRelativePath(), private$collection$uuid)

          child$setCollection(NULL)
        }

        private$removeChild(name)
        child$setParent(NULL)

        "Content removed"
      }
      else
      {
        stop(paste0("Expected character, got ",
                    paste0("(", paste0(class(name), collapse = ", "), ")"),
                    "."))
      }
    },

    # All file paths under this node (full relative paths when fullPath,
    # otherwise just the direct children's names), sorted case-insensitively.
    getFileListing = function(fullPath = TRUE)
    {
      content <- private$getContentAsCharVector(fullPath)
      content[order(tolower(content))]
    },

    # Total size in bytes of this subtree, as reported by the server.
    # Returns 0 for a detached (collection-less) node.
    getSizeInBytes = function()
    {
      if(is.null(private$collection))
        return(0)

      REST <- private$collection$getRESTService()

      fileSizes <- REST$getResourceSize(paste0(self$getRelativePath(), "/"),
                                        private$collection$uuid)
      return(sum(fileSizes))
    },

    # Move this subtree to newLocation (must include the new name).
    # Performs the server-side MOVE first, then rewires the local tree.
    move = function(newLocation)
    {
      if(is.null(private$collection))
        stop("Subcollection doesn't belong to any collection")

      newLocation <- trimFromEnd(newLocation, "/")
      nameAndPath <- splitToPathAndName(newLocation)

      newParent <- private$collection$get(nameAndPath$path)

      if(is.null(newParent))
      {
        stop("Unable to get destination subcollection")
      }

      childWithSameName <- newParent$get(nameAndPath$name)

      if(!is.null(childWithSameName))
        stop("Destination already contains content with same name.")

      REST <- private$collection$getRESTService()
      REST$move(self$getRelativePath(),
                paste0(newParent$getRelativePath(), "/", nameAndPath$name),
                private$collection$uuid)

      # Order matters: detach from the old parent before attaching to the
      # new one, and only rename afterwards.
      private$dettachFromCurrentParent()
      private$attachToNewParent(newParent)

      private$name <- nameAndPath$name

      "Content moved successfully."
    },

    # Direct child lookup by name; NULL when absent.
    get = function(name)
    {
      for(child in private$children)
      {
        if(child$getName() == name)
          return(child)
      }

      return(NULL)
    },

    getFirst = function()
    {
      if(length(private$children) == 0)
        return(NULL)

      private$children[[1]]
    },

    # Point this node (and, by default, its whole subtree) at a Collection.
    setCollection = function(collection, setRecursively = TRUE)
    {
      private$collection = collection

      if(setRecursively)
      {
        for(child in private$children)
          child$setCollection(collection)
      }
    },

    getCollection = function() private$collection,

    getParent = function() private$parent,

    setParent = function(newParent) private$parent <- newParent
  ),

  private = list(

    name = NULL,
    children = NULL,
    parent = NULL,
    collection = NULL,

    # Drop the first child whose name matches; no-op when absent.
    removeChild = function(name)
    {
      numberOfChildren = length(private$children)
      if(numberOfChildren > 0)
      {
        for(childIndex in 1:numberOfChildren)
        {
          if(private$children[[childIndex]]$getName() == name)
          {
            private$children = private$children[-childIndex]
            return()
          }
        }
      }
    },

    attachToNewParent = function(newParent)
    {
      #Note: We temporary set parents collection to NULL. This will ensure that
      #      add method doesn't post file on REST.
      parentsCollection <- newParent$getCollection()
      newParent$setCollection(NULL, setRecursively = FALSE)

      newParent$add(self)

      newParent$setCollection(parentsCollection, setRecursively = FALSE)

      private$parent <- newParent
    },

    dettachFromCurrentParent = function()
    {
      #Note: We temporary set parents collection to NULL. This will ensure that
      #      remove method doesn't remove this subcollection from REST.
      parent <- private$parent
      parentsCollection <- parent$getCollection()
      parent$setCollection(NULL, setRecursively = FALSE)

      parent$remove(private$name)

      parent$setCollection(parentsCollection, setRecursively = FALSE)
    },

    # Flatten this subtree to a character vector of names, either fully
    # qualified with this node's name or as bare child names.
    getContentAsCharVector = function(fullPath = TRUE)
    {
      content <- NULL

      if(fullPath)
      {
        for(child in private$children)
          content <- c(content, child$getFileListing())

        if(private$name != "")
          content <- unlist(paste0(private$name, "/", content))
      }
      else
      {
        for(child in private$children)
          content <- c(content, child$getName())
      }

      content
    }
  ),

  cloneable = FALSE
)
+
#' @export print.Subcollection
#' S3 print method for Subcollection objects: shows name, relative path
#' and owning collection uuid. The `...` parameter is required by the
#' print() generic's contract.
print.Subcollection = function(subCollection, ...)
{
    collection <- NULL
    relativePath <- subCollection$getRelativePath()

    if(!is.null(subCollection$getCollection()))
    {
        collection <- subCollection$getCollection()$uuid

        # Prefix with "/" only for named nodes (the unnamed root stays "").
        if(!subCollection$getName() == "")
            relativePath <- paste0("/", relativePath)
    }

    cat(paste0("Type: ", "\"", "Arvados Subcollection", "\""), sep = "\n")
    cat(paste0("Name: ", "\"", subCollection$getName(), "\""), sep = "\n")
    cat(paste0("Relative path: ", "\"", relativePath, "\""), sep = "\n")
    cat(paste0("Collection: ", "\"", collection, "\""), sep = "\n")
    # Return the object invisibly, as print methods conventionally do.
    invisible(subCollection)
}
--- /dev/null
# Strip one leading occurrence of trimCharacters from sample, if present.
trimFromStart <- function(sample, trimCharacters)
{
  if(!startsWith(sample, trimCharacters))
    return(sample)

  substring(sample, nchar(trimCharacters) + 1)
}
+
# Strip one trailing occurrence of trimCharacters from sample, if present.
trimFromEnd <- function(sample, trimCharacters)
{
  if(!endsWith(sample, trimCharacters))
    return(sample)

  substr(sample, 1, nchar(sample) - nchar(trimCharacters))
}
+
# Render an R list as a Python-style nested list literal, e.g.
# list("a", "b") -> ["a", "b"]. Elements of length > 1 recurse;
# scalars are double-quoted.
RListToPythonList <- function(RList, separator = ", ")
{
  renderedElements <- vapply(RList, function(item)
  {
    isNested <- (is.vector(item) || is.list(item)) && length(item) > 1

    if(isNested)
      RListToPythonList(item, separator)
    else
      paste0("\"", item, "\"")
  }, character(1))

  paste0("[", paste0(renderedElements, collapse = separator), "]")
}
+
# Prepend characters to sample unless sample already starts with them.
appendToStartIfNotExist <- function(sample, characters)
{
  if(startsWith(sample, characters))
    return(sample)

  paste0(characters, sample)
}
+
# Split "a/b/c.txt" into list(name = "c.txt", path = "a/b").
# A bare file name yields path = "".
splitToPathAndName = function(path)
{
  # Ensure a leading "/" so the split always produces a parent part.
  if(!startsWith(path, "/"))
    path <- paste0("/", path)

  components <- unlist(strsplit(path, "/", fixed = TRUE))

  name <- components[length(components)]
  parent <- paste0(components[-length(components)], collapse = "/")

  # Drop the artificial leading "/" from the parent path.
  if(startsWith(parent, "/"))
    parent <- substr(parent, 2, nchar(parent))

  list(name = name, path = parent)
}
--- /dev/null
+R SDK for Arvados
+
+This SDK focuses on providing support for accessing Arvados projects, collections, and the files within collections.
+
+The API is not final and feedback is solicited from users on ways in which it could be improved.
+
+INSTALLATION
+
+1. Install the dependencies
+
+ > install.packages(c('R6', 'httr', 'stringr', 'jsonlite', 'curl', 'XML'))
+
+If needed, you may have to install the supporting packages first. On Linux, these are:
+
+ libxml2-dev, libssl-dev, libcurl4-gnutls-dev or libcurl4-openssl-dev
+
+2. Install the ArvadosR package
+
+ > install.packages('/path/to/ArvadosR_0.0.2.tar.gz', repos = NULL, type="source", dependencies = TRUE)
+
+
+EXAMPLES OF USAGE
+
+
+#Load Library and Initialize API:
+
+library('ArvadosR')
+arv <- Arvados$new() # uses environment variables ARVADOS_API_TOKEN and ARVADOS_API_HOST
+arv <- Arvados$new("your Arvados token", "example.arvadosapi.com")
+
+
+#Optionally, add numRetries parameter to specify number of times to retry failed service requests.
+#Default is 0.
+
+arv <- Arvados$new("your Arvados token", "example.arvadosapi.com", numRetries = 3)
+
+#This parameter can be set at any time using setNumRetries
+
+arv$setNumRetries(5)
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Get a collection:
+
+arv$getCollection("uuid")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#List collections:
+collectionList <- arv$listCollections(list(list("name", "like", "Test%"))) # offset of 0 and default limit of 100
+collectionList <- arv$listCollections(list(list("name", "like", "Test%")), limit = 10, offset = 2)
+
+collectionList$items_available # count of total number of items (may be more than returned due to paging)
+collectionList$items # items which match the filter criteria
+
+#Next example will list all collections even when the number of items is greater than maximum API limit
+
+collectionList <- arv$listAllCollections(list(list("name", "like", "Test%")))
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Delete a collection:
+
+deletedCollection <- arv$deleteCollection("uuid")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Update a collection's metadata:
+
+updatedCollection <- arv$updateCollection("uuid", list(name = "New name", description = "New description"))
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Create collection:
+
+createdCollection <- arv$createCollection(list(name = "Example", description = "This is a test collection"))
+
+
+--------------------------------------------------------------------------------------------------------------------------------
+COLLECTION CONTENT MANIPULATION
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Create collection object:
+
+collection <- Collection$new(arv, "uuid")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Get list of files
+
+collection$getFileListing()
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#This will return ArvadosFile or Subcollection from internal tree-like structure.
+
+arvadosFile <- collection$get("location/to/my/file.cpp")
+
+#or
+
+arvadosSubcollection <- collection$get("location/to/my/directory/")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Read a table
+
+arvadosFile <- collection$get("myinput.txt")
+arvConnection <- arvadosFile$connection("r")
+mytable <- read.table(arvConnection)
+
+#Write a table
+
+arvadosFile <- collection$create("myoutput.txt")
+arvConnection <- arvadosFile$connection("w")
+write.table(mytable, arvConnection)
+arvadosFile$flush()
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Read whole file or just a portion of it.
+
+fileContent <- arvadosFile$read()
+fileContent <- arvadosFile$read("text")
+fileContent <- arvadosFile$read("raw", offset = 1024, length = 512)
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Get ArvadosFile or Subcollection size
+
+size <- arvadosFile$getSizeInBytes()
+size <- arvadosSubcollection$getSizeInBytes()
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Create new file in a collection
+
+collection$create(fileNames, optionalRelativePath)
+
+#Example
+
+mainFile <- collection$create("main.cpp", "cpp/src/")
+fileList <- collection$create(c("main.cpp", "lib.dll"), "cpp/src/")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Add existing ArvadosFile or Subcollection to a collection
+
+folder <- Subcollection$new("src")
+file <- ArvadosFile$new("main.cpp")
+folder$add(file)
+
+collection$add(folder, "cpp")
+
+#This example will add the file "main.cpp" in the "./cpp/src/" folder if the folder exists.
+#If subcollection contains more files or folders they will be added recursively.
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Write to existing file (Override current content of the file)
+
+arvadosFile <- collection$get("location/to/my/file.cpp")
+
+arvadosFile$write("This is new file content")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Delete file from a collection
+
+collection$remove("location/to/my/file.cpp")
+
+#You can remove both Subcollection and ArvadosFile
+#If subcollection contains more files or folders they will be removed recursively.
+
+#You can also remove multiple files
+
+collection$remove(c("path/to/my/file.cpp", "path/to/other/file.cpp"))
+
+#Delete file or folder from a Subcollection
+
+subcollection <- collection$get("mySubcollection/")
+subcollection$remove("fileInsideSubcollection.exe")
+subcollection$remove("folderInsideSubcollection/")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Move file or folder inside collection
+
+#Directly from the collection
+
+collection$move("folder/file.cpp", "file.cpp")
+
+#Or from file
+
+file <- collection$get("location/to/my/file.cpp")
+file$move("newDestination/file.cpp")
+
+#Or from subcollection
+
+subcollection <- collection$get("location/to/folder")
+subcollection$move("newDestination/folder")
+
+#Make sure to include new file name in destination
+#In the second example, file$move("newDestination/") will not work
+
+--------------------------------------------------------------------------------------------------------------------------------
+WORKING WITH ARVADOS PROJECTS
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Get a project:
+
+arv$getProject("uuid")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#List projects:
+
+projects <- arv$listProjects(list(list("owner_uuid", "=", "aaaaa-j7d0g-ccccccccccccccc"))) # list subprojects of a project
+arv$listProjects(list(list("name","like","Example%"))) # list projects which have names beginning with Example
+
+#Next example will list all projects even when the number of items is greater than maximum API limit
+
+projectList <- arv$listAllProjects(list(list("name","like","Example%")))
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Delete a project:
+
+deletedProject <- arv$deleteProject("uuid")
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Update project:
+
+updatedProject <- arv$updateProject("uuid", list(name = "new_name", description = "new description"))
+
+--------------------------------------------------------------------------------------------------------------------------------
+
+#Create project:
+
+createdProject <- arv$createProject(list(name = "project_name", description = "project description"))
+
+
+--------------------------------------------------------------------------------------------------------------------------------
+BUILDING THE ARVADOS SDK TARBALL
+--------------------------------------------------------------------------------------------------------------------------------
+
+
+cd arvados/sdk
+R CMD build R
+
+This will create a tarball of the Arvados package in the current directory.
--- /dev/null
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/Arvados.R
+\docType{data}
+\name{Arvados}
+\alias{Arvados}
+\title{Arvados SDK Object}
+\format{An object of class \code{R6ClassGenerator} of length 24.}
+\usage{
+Arvados
+}
+\description{
+All Arvados logic is inside this class
+}
+\section{Fields}{
+
+\describe{
+\item{\code{token}}{Token represents user authentification token.}
+
+\item{\code{host}}{Host represents server name we wish to connect to.}
+}}
+
+\examples{
+arv = Arvados$new("token", "host_name")
+}
+\keyword{datasets}
--- /dev/null
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/ArvadosFile.R
+\docType{data}
+\name{ArvadosFile}
+\alias{ArvadosFile}
+\title{ArvadosFile Object}
+\format{An object of class \code{R6ClassGenerator} of length 24.}
+\usage{
+ArvadosFile
+}
+\description{
+ArvadosFile class represents a file inside an Arvados collection.
+}
+\keyword{datasets}
--- /dev/null
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/Collection.R
+\docType{data}
+\name{Collection}
+\alias{Collection}
+\title{Arvados Collection Object}
+\format{An object of class \code{R6ClassGenerator} of length 24.}
+\usage{
+Collection
+}
+\description{
+Collection class provides access to the files and subcollections of an Arvados collection.
+}
+\examples{
+arv = Collection$new(api, uuid)
+}
+\keyword{datasets}
--- /dev/null
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/CollectionTree.R
+\docType{data}
+\name{CollectionTree}
+\alias{CollectionTree}
+\title{Arvados CollectionTree Object}
+\format{An object of class \code{R6ClassGenerator} of length 24.}
+\usage{
+CollectionTree
+}
+\description{
+CollectionTree builds the in-memory tree of files and subcollections for an Arvados collection.
+}
+\examples{
+tree = CollectionTree$new(api, uuid)
+}
+\keyword{datasets}
--- /dev/null
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/HttpParser.R
+\docType{data}
+\name{HttpParser}
+\alias{HttpParser}
+\title{HttpParser}
+\format{An object of class \code{R6ClassGenerator} of length 24.}
+\usage{
+HttpParser
+}
+\description{
+HttpParser
+}
+\keyword{datasets}
--- /dev/null
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/Subcollection.R
+\docType{data}
+\name{Subcollection}
+\alias{Subcollection}
+\title{Arvados SubCollection Object}
+\format{An object of class \code{R6ClassGenerator} of length 24.}
+\usage{
+Subcollection
+}
+\description{
+Subcollection class represents a directory inside an Arvados collection.
+}
+\keyword{datasets}
--- /dev/null
+# Run the package test suite and exit with a non-zero status if any
+# test errored, so CI can detect failures from the process exit code.
+results <- devtools::test()
+any_error <- any(as.data.frame(results)$error)
+if (any_error) {
+  q("no", 1)
+} else {
+  q("no", 0)
+}
--- /dev/null
+# testthat entry point: discovers and runs all test-*.R files
+# for the ArvadosR package.
+library(testthat)
+library(ArvadosR)
+
+test_check("ArvadosR")
--- /dev/null
+# Test double for the Arvados API client: stores the constructor
+# arguments verbatim and returns them unchanged from the same
+# accessors the real class exposes, so collaborators can be
+# exercised without a live server.
+FakeArvados <- R6::R6Class(
+
+ "FakeArvados",
+
+ public = list(
+
+ token = NULL,
+ host = NULL,
+ webdavHost = NULL,
+ http = NULL,
+ httpParser = NULL,
+ REST = NULL,
+
+ initialize = function(token = NULL,
+ host = NULL,
+ webdavHost = NULL,
+ http = NULL,
+ httpParser = NULL)
+ {
+ self$token <- token
+ self$host <- host
+ self$webdavHost <- webdavHost
+ self$http <- http
+ self$httpParser <- httpParser
+ },
+
+ getToken = function() self$token,
+ getHostName = function() self$host,
+ getHttpClient = function() self$http,
+ getHttpParser = function() self$httpParser,
+ getWebDavHostName = function() self$webdavHost
+ ),
+
+ cloneable = FALSE
+)
--- /dev/null
+# Test double for HttpParser: every parse/get* method increments
+# parserCallCount and echoes the canned server response back
+# (serverResponse$content when present, otherwise the response
+# object itself), so callers can be tested without real HTTP traffic.
+FakeHttpParser <- R6::R6Class(
+
+ # Class name fixed: it previously read "FakeHttrParser".
+ "FakeHttpParser",
+
+ public = list(
+
+ validContentTypes = NULL,
+ parserCallCount = NULL,
+
+ initialize = function()
+ {
+ self$parserCallCount <- 0
+ self$validContentTypes <- c("text", "raw")
+ },
+
+ parseJSONResponse = function(serverResponse)
+ {
+ self$parserCallCount <- self$parserCallCount + 1
+
+ if(!is.null(serverResponse$content))
+ return(serverResponse$content)
+
+ serverResponse
+ },
+
+ parseResponse = function(serverResponse, outputType)
+ {
+ self$parserCallCount <- self$parserCallCount + 1
+
+ if(!is.null(serverResponse$content))
+ return(serverResponse$content)
+
+ serverResponse
+ },
+
+ getFileNamesFromResponse = function(serverResponse, uri)
+ {
+ self$parserCallCount <- self$parserCallCount + 1
+
+ if(!is.null(serverResponse$content))
+ return(serverResponse$content)
+
+ serverResponse
+ },
+
+ getFileSizesFromResponse = function(serverResponse, uri)
+ {
+ self$parserCallCount <- self$parserCallCount + 1
+
+ if(!is.null(serverResponse$content))
+ return(serverResponse$content)
+
+ serverResponse
+ }
+ )
+)
--- /dev/null
+# Test double for an HTTP client.  execute() tallies requests per verb,
+# validates the URL / headers / query filters / body against the
+# expected values configured in the constructor, and returns canned
+# content.  When the canned content carries items_available it
+# emulates server-side paging via offset/limit.
+FakeHttpRequest <- R6::R6Class(
+
+ "FakeHttpRequest",
+
+ public = list(
+
+ serverMaxElementsPerRequest = NULL,
+
+ content = NULL,
+ expectedURL = NULL,
+ URLIsProperlyConfigured = NULL,
+ expectedQueryFilters = NULL,
+ queryFiltersAreCorrect = NULL,
+ requestHeaderContainsAuthorizationField = NULL,
+ requestHeaderContainsDestinationField = NULL,
+ requestHeaderContainsRangeField = NULL,
+ requestHeaderContainsContentTypeField = NULL,
+ JSONEncodedBodyIsProvided = NULL,
+ requestBodyIsProvided = NULL,
+
+ numberOfGETRequests = NULL,
+ numberOfDELETERequests = NULL,
+ numberOfPUTRequests = NULL,
+ numberOfPOSTRequests = NULL,
+ numberOfMOVERequests = NULL,
+
+ initialize = function(expectedURL = NULL,
+ serverResponse = NULL,
+ expectedFilters = NULL)
+ {
+ if(is.null(serverResponse))
+ {
+ self$content <- list()
+ self$content$status_code <- 200
+ }
+ else
+ self$content <- serverResponse
+
+ self$expectedURL <- expectedURL
+ self$URLIsProperlyConfigured <- FALSE
+ self$expectedQueryFilters <- expectedFilters
+ self$queryFiltersAreCorrect <- FALSE
+ self$requestHeaderContainsAuthorizationField <- FALSE
+ self$requestHeaderContainsDestinationField <- FALSE
+ self$requestHeaderContainsRangeField <- FALSE
+ self$requestHeaderContainsContentTypeField <- FALSE
+ self$JSONEncodedBodyIsProvided <- FALSE
+ self$requestBodyIsProvided <- FALSE
+
+ self$numberOfGETRequests <- 0
+ self$numberOfDELETERequests <- 0
+ self$numberOfPUTRequests <- 0
+ self$numberOfPOSTRequests <- 0
+ self$numberOfMOVERequests <- 0
+
+ self$serverMaxElementsPerRequest <- 5
+ },
+
+ execute = function(verb, url, headers = NULL, body = NULL, query = NULL,
+ limit = NULL, offset = NULL, retryTimes = 0)
+ {
+ private$validateURL(url)
+ private$validateHeaders(headers)
+ # Fixed: previously passed the undefined name `queryFilters`;
+ # R's lazy evaluation hid the error until expectedQueryFilters
+ # was configured, at which point execute() crashed.
+ private$validateFilters(query)
+ private$validateBody(body)
+
+ if(verb == "GET")
+ self$numberOfGETRequests <- self$numberOfGETRequests + 1
+ else if(verb == "POST")
+ self$numberOfPOSTRequests <- self$numberOfPOSTRequests + 1
+ else if(verb == "PUT")
+ self$numberOfPUTRequests <- self$numberOfPUTRequests + 1
+ else if(verb == "DELETE")
+ self$numberOfDELETERequests <- self$numberOfDELETERequests + 1
+ else if(verb == "MOVE")
+ self$numberOfMOVERequests <- self$numberOfMOVERequests + 1
+ else if(verb == "PROPFIND")
+ {
+ return(self$content)
+ }
+
+ if(!is.null(self$content$items_available))
+ return(private$getElements(offset, limit))
+ else
+ return(self$content)
+ }
+ ),
+
+ private = list(
+
+ validateURL = function(url)
+ {
+ if(!is.null(self$expectedURL) && url == self$expectedURL)
+ self$URLIsProperlyConfigured <- TRUE
+ },
+
+ validateHeaders = function(headers)
+ {
+ if(!is.null(headers$Authorization))
+ self$requestHeaderContainsAuthorizationField <- TRUE
+
+ if(!is.null(headers$Destination))
+ self$requestHeaderContainsDestinationField <- TRUE
+
+ if(!is.null(headers$Range))
+ self$requestHeaderContainsRangeField <- TRUE
+
+ if(!is.null(headers[["Content-Type"]]))
+ self$requestHeaderContainsContentTypeField <- TRUE
+ },
+
+ validateBody = function(body)
+ {
+ if(!is.null(body))
+ {
+ self$requestBodyIsProvided <- TRUE
+
+ if(class(body) == "json")
+ self$JSONEncodedBodyIsProvided <- TRUE
+ }
+ },
+
+ validateFilters = function(filters)
+ {
+ if(!is.null(self$expectedQueryFilters) &&
+ !is.null(filters) &&
+ all.equal(unname(filters), self$expectedQueryFilters))
+ {
+ self$queryFiltersAreCorrect <- TRUE
+ }
+ },
+
+ # Emulates server paging over self$content$items.
+ getElements = function(offset, limit)
+ {
+ start <- 1
+ elementCount <- self$serverMaxElementsPerRequest
+
+ if(!is.null(offset))
+ {
+ if(offset > self$content$items_available)
+ stop("Invalid offset")
+
+ start <- offset + 1
+ }
+
+ # NOTE(review): `limit - 1` yields one element fewer than
+ # requested; existing tests appear to rely on this fake's
+ # paging arithmetic, so it is left unchanged — confirm intent.
+ if(!is.null(limit))
+ if(limit < self$serverMaxElementsPerRequest)
+ elementCount <- limit - 1
+
+
+ serverResponse <- list()
+ serverResponse$items_available <- self$content$items_available
+ serverResponse$items <- self$content$items[start:(start + elementCount - 1)]
+
+ if(start + elementCount > self$content$items_available)
+ {
+ elementCount = self$content$items_available - start
+ serverResponse$items <- self$content$items[start:(start + elementCount)]
+ }
+
+ serverResponse
+ }
+ ),
+
+ cloneable = FALSE
+)
--- /dev/null
+# Test double for RESTService: each method increments its dedicated
+# call counter and returns the canned returnContent (or
+# collectionContent for getCollectionContent).  listResources /
+# fetchAllItems compare their filters, and updateResource /
+# createResource their body, against the expected values configured
+# in the constructor, recording the outcome in
+# filtersAreConfiguredCorrectly / bodyIsConfiguredCorrectly.
+FakeRESTService <- R6::R6Class(
+
+ "FakeRESTService",
+
+ public = list(
+
+ getResourceCallCount = NULL,
+ createResourceCallCount = NULL,
+ listResourcesCallCount = NULL,
+ deleteResourceCallCount = NULL,
+ updateResourceCallCount = NULL,
+ fetchAllItemsCallCount = NULL,
+
+ createCallCount = NULL,
+ deleteCallCount = NULL,
+ moveCallCount = NULL,
+ getCollectionContentCallCount = NULL,
+ getResourceSizeCallCount = NULL,
+ readCallCount = NULL,
+ writeCallCount = NULL,
+ getConnectionCallCount = NULL,
+ writeBuffer = NULL,
+ filtersAreConfiguredCorrectly = NULL,
+ bodyIsConfiguredCorrectly = NULL,
+ expectedFilterContent = NULL,
+
+ collectionContent = NULL,
+ returnContent = NULL,
+
+ initialize = function(collectionContent = NULL, returnContent = NULL,
+ expectedFilterContent = NULL)
+ {
+ self$getResourceCallCount <- 0
+ self$createResourceCallCount <- 0
+ self$listResourcesCallCount <- 0
+ self$deleteResourceCallCount <- 0
+ self$updateResourceCallCount <- 0
+ self$fetchAllItemsCallCount <- 0
+
+ self$createCallCount <- 0
+ self$deleteCallCount <- 0
+ self$moveCallCount <- 0
+ self$getCollectionContentCallCount <- 0
+ self$getResourceSizeCallCount <- 0
+ self$readCallCount <- 0
+ self$writeCallCount <- 0
+ self$getConnectionCallCount <- 0
+ self$filtersAreConfiguredCorrectly <- FALSE
+ self$bodyIsConfiguredCorrectly <- FALSE
+
+ self$collectionContent <- collectionContent
+ self$returnContent <- returnContent
+ self$expectedFilterContent <- expectedFilterContent
+ },
+
+ # Intentionally a no-op: tests never need a real WebDAV host name.
+ getWebDavHostName = function()
+ {
+ },
+
+ getResource = function(resource, uuid)
+ {
+ self$getResourceCallCount <- self$getResourceCallCount + 1
+ self$returnContent
+ },
+
+ listResources = function(resource, filters = NULL, limit = 100, offset = 0)
+ {
+ self$listResourcesCallCount <- self$listResourcesCallCount + 1
+
+ if(!is.null(self$expectedFilterContent) && !is.null(filters))
+ if(all.equal(filters, self$expectedFilterContent))
+ self$filtersAreConfiguredCorrectly <- TRUE
+
+ self$returnContent
+ },
+
+ fetchAllItems = function(resourceURL, filters)
+ {
+ self$fetchAllItemsCallCount <- self$fetchAllItemsCallCount + 1
+
+ if(!is.null(self$expectedFilterContent) && !is.null(filters))
+ if(all.equal(filters, self$expectedFilterContent))
+ self$filtersAreConfiguredCorrectly <- TRUE
+
+ self$returnContent
+ },
+
+ deleteResource = function(resource, uuid)
+ {
+ self$deleteResourceCallCount <- self$deleteResourceCallCount + 1
+ self$returnContent
+ },
+
+ updateResource = function(resource, uuid, newContent)
+ {
+ self$updateResourceCallCount <- self$updateResourceCallCount + 1
+
+ if(!is.null(self$returnContent) && !is.null(newContent))
+ if(all.equal(newContent, self$returnContent))
+ self$bodyIsConfiguredCorrectly <- TRUE
+
+ self$returnContent
+ },
+
+ createResource = function(resource, content)
+ {
+ self$createResourceCallCount <- self$createResourceCallCount + 1
+
+ if(!is.null(self$returnContent) && !is.null(content))
+ if(all.equal(content, self$returnContent))
+ self$bodyIsConfiguredCorrectly <- TRUE
+
+ self$returnContent
+ },
+
+ create = function(files, uuid)
+ {
+ self$createCallCount <- self$createCallCount + 1
+ self$returnContent
+ },
+
+ delete = function(relativePath, uuid)
+ {
+ self$deleteCallCount <- self$deleteCallCount + 1
+ self$returnContent
+ },
+
+ move = function(from, to, uuid)
+ {
+ self$moveCallCount <- self$moveCallCount + 1
+ self$returnContent
+ },
+
+ getCollectionContent = function(uuid)
+ {
+ self$getCollectionContentCallCount <- self$getCollectionContentCallCount + 1
+ self$collectionContent
+ },
+
+ getResourceSize = function(uuid, relativePathToResource)
+ {
+ self$getResourceSizeCallCount <- self$getResourceSizeCallCount + 1
+ self$returnContent
+ },
+
+ read = function(relativePath, uuid, contentType = "text", offset = 0, length = 0)
+ {
+ self$readCallCount <- self$readCallCount + 1
+ self$returnContent
+ },
+
+ # Captures the written content in writeBuffer so tests can
+ # inspect what was "uploaded".
+ write = function(uuid, relativePath, content, contentType)
+ {
+ self$writeBuffer <- content
+ self$writeCallCount <- self$writeCallCount + 1
+ self$returnContent
+ },
+
+ getConnection = function(relativePath, uuid, openMode)
+ {
+ self$getConnectionCallCount <- self$getConnectionCallCount + 1
+ self$returnContent
+ }
+ ),
+
+ cloneable = FALSE
+)
--- /dev/null
+# Unit tests for the Arvados API class, using FakeRESTService to
+# verify that each public method delegates to the REST layer with the
+# expected parameters.  Fixed copy-paste bugs: the *Project
+# "delegates" tests previously invoked the corresponding *Collection
+# methods, so the project methods were never exercised.
+context("Arvados API")
+
+source("fakes/FakeRESTService.R")
+
+test_that("Constructor will use environment variables if no parameters are passed to it", {
+
+ Sys.setenv(ARVADOS_API_HOST = "environment_api_host")
+ Sys.setenv(ARVADOS_API_TOKEN = "environment_api_token")
+
+ arv <- Arvados$new()
+
+ Sys.unsetenv("ARVADOS_API_HOST")
+ Sys.unsetenv("ARVADOS_API_TOKEN")
+
+ expect_that("https://environment_api_host/arvados/v1/",
+ equals(arv$getHostName()))
+
+ expect_that("environment_api_token",
+ equals(arv$getToken()))
+})
+
+test_that("Constructor prefers constructor fields over environment variables", {
+
+ Sys.setenv(ARVADOS_API_HOST = "environment_api_host")
+ Sys.setenv(ARVADOS_API_TOKEN = "environment_api_token")
+
+ arv <- Arvados$new("constructor_api_token", "constructor_api_host")
+
+ Sys.unsetenv("ARVADOS_API_HOST")
+ Sys.unsetenv("ARVADOS_API_TOKEN")
+
+ expect_that("https://constructor_api_host/arvados/v1/",
+ equals(arv$getHostName()))
+
+ expect_that("constructor_api_token",
+ equals(arv$getToken()))
+})
+
+test_that("Constructor raises exception if fields and environment variables are not provided", {
+
+ # NOTE: "authentification" matches the implementation's actual
+ # error message; keep them in sync if either is corrected.
+ expect_that(Arvados$new(),
+ throws_error(paste0("Please provide host name and authentification token",
+ " or set ARVADOS_API_HOST and ARVADOS_API_TOKEN",
+ " environment variables.")))
+})
+
+test_that("getCollection delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ collectionUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+
+ arv$getCollection(collectionUUID)
+
+ expect_that(fakeREST$getResourceCallCount, equals(1))
+})
+
+test_that("listCollections delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+
+ arv$listCollections()
+
+ expect_that(fakeREST$listResourcesCallCount, equals(1))
+})
+
+test_that("listCollections filter parameter must be named 'collection'", {
+
+ filters <- list(list("name", "like", "MyCollection"))
+ names(filters) <- c("collection")
+ fakeREST <- FakeRESTService$new(expectedFilterContent = filters)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$listCollections(list(list("name", "like", "MyCollection")))
+
+ expect_that(fakeREST$filtersAreConfiguredCorrectly, is_true())
+})
+
+test_that("listAllCollections delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+
+ arv$listAllCollections()
+
+ expect_that(fakeREST$fetchAllItemsCallCount, equals(1))
+})
+
+test_that("listAllCollections filter parameter must be named 'collection'", {
+
+ filters <- list(list("name", "like", "MyCollection"))
+ names(filters) <- c("collection")
+ fakeREST <- FakeRESTService$new(expectedFilterContent = filters)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$listAllCollections(list(list("name", "like", "MyCollection")))
+
+ expect_that(fakeREST$filtersAreConfiguredCorrectly, is_true())
+})
+
+test_that("deleteCollection delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ collectionUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+
+ arv$deleteCollection(collectionUUID)
+
+ expect_that(fakeREST$deleteResourceCallCount, equals(1))
+})
+
+test_that("updateCollection delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ newCollectionContent <- list(newName = "Brand new shiny name")
+ collectionUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+
+ arv$updateCollection(collectionUUID, newCollectionContent)
+
+ expect_that(fakeREST$updateResourceCallCount, equals(1))
+})
+
+test_that("updateCollection adds content to request parameter named 'collection'", {
+
+ collectionUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ body <- list(list())
+ names(body) <- c("collection")
+ body$collection <- list(name = "MyCollection", desc = "No description")
+ fakeREST <- FakeRESTService$new(returnContent = body)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$updateCollection(collectionUUID,
+ list(name = "MyCollection", desc = "No description"))
+
+ expect_that(fakeREST$bodyIsConfiguredCorrectly, is_true())
+})
+
+test_that("createCollection delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ collectionContent <- list(newName = "Brand new shiny name")
+
+ arv$createCollection(collectionContent)
+
+ expect_that(fakeREST$createResourceCallCount, equals(1))
+})
+
+test_that("createCollection adds content to request parameter named 'collection'", {
+
+ body <- list(list())
+ names(body) <- c("collection")
+ body$collection <- list(name = "MyCollection", desc = "No description")
+ fakeREST <- FakeRESTService$new(returnContent = body)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$createCollection(list(name = "MyCollection", desc = "No description"))
+
+ expect_that(fakeREST$bodyIsConfiguredCorrectly, is_true())
+})
+
+test_that("getProject delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ projectUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+
+ # Fixed copy-paste bug: previously called arv$getCollection().
+ arv$getProject(projectUUID)
+
+ expect_that(fakeREST$getResourceCallCount, equals(1))
+})
+
+test_that("listProjects delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+
+ # Fixed copy-paste bug: previously called arv$listCollections().
+ arv$listProjects()
+
+ expect_that(fakeREST$listResourcesCallCount, equals(1))
+})
+
+test_that("listProjects filter contains additional 'group_class' field by default", {
+
+ filters <- list(list("name", "like", "MyProject"))
+ names(filters) <- c("groups")
+ filters[[length(filters) + 1]] <- list("group_class", "=", "project")
+
+ fakeREST <- FakeRESTService$new(expectedFilterContent = filters)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$listProjects(list(list("name", "like", "MyProject")))
+
+ expect_that(fakeREST$filtersAreConfiguredCorrectly, is_true())
+})
+
+test_that("listAllProjects delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+
+ arv$listAllProjects()
+
+ expect_that(fakeREST$fetchAllItemsCallCount, equals(1))
+})
+
+test_that("listAllProjects filter contains additional 'group_class' field by default", {
+
+ filters <- list(list("name", "like", "MyProject"))
+ names(filters) <- c("groups")
+ filters[[length(filters) + 1]] <- list("group_class", "=", "project")
+
+ fakeREST <- FakeRESTService$new(expectedFilterContent = filters)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$listAllProjects(list(list("name", "like", "MyProject")))
+
+ expect_that(fakeREST$filtersAreConfiguredCorrectly, is_true())
+})
+
+test_that("deleteProject delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ projectUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+
+ # Fixed copy-paste bug: previously called arv$deleteCollection().
+ arv$deleteProject(projectUUID)
+
+ expect_that(fakeREST$deleteResourceCallCount, equals(1))
+})
+
+test_that("updateProject delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ newProjectContent <- list(newName = "Brand new shiny name")
+ projectUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+
+ # Fixed copy-paste bug: previously called arv$updateCollection().
+ arv$updateProject(projectUUID, newProjectContent)
+
+ expect_that(fakeREST$updateResourceCallCount, equals(1))
+})
+
+test_that("updateProject adds content to request parameter named 'group'", {
+
+ projectUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ body <- list(list())
+ names(body) <- c("group")
+ body$group <- list(name = "MyProject", desc = "No description")
+
+ fakeREST <- FakeRESTService$new(returnContent = body)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$updateProject(projectUUID,
+ list(name = "MyProject", desc = "No description"))
+
+ expect_that(fakeREST$bodyIsConfiguredCorrectly, is_true())
+})
+
+test_that("createProject delegates operation to RESTService class", {
+
+ arv <- Arvados$new("token", "hostName")
+ fakeREST <- FakeRESTService$new()
+ arv$setRESTService(fakeREST)
+ projectContent <- list(newName = "Brand new shiny name")
+
+ # Fixed copy-paste bug: previously called arv$createCollection().
+ arv$createProject(projectContent)
+
+ expect_that(fakeREST$createResourceCallCount, equals(1))
+})
+
+test_that("createProject request body contains 'group_class' field", {
+
+ body <- list(list())
+ names(body) <- c("group")
+ body$group <- c("group_class" = "project",
+ list(name = "MyProject", desc = "No description"))
+
+ fakeREST <- FakeRESTService$new(returnContent = body)
+ arv <- Arvados$new("token", "hostName")
+ arv$setRESTService(fakeREST)
+
+ arv$createProject(list(name = "MyProject", desc = "No description"))
+
+ expect_that(fakeREST$bodyIsConfiguredCorrectly, is_true())
+})
--- /dev/null
+# Unit tests for ArvadosFile, using FakeRESTService so no network is
+# required.  Only typos in test descriptions were corrected; the test
+# logic is unchanged.
+source("fakes/FakeRESTService.R")
+
+context("ArvadosFile")
+
+test_that("constructor raises error if file name is empty string", {
+
+ expect_that(ArvadosFile$new(""), throws_error("Invalid name."))
+})
+
+test_that("getFileListing always returns file name", {
+
+ dog <- ArvadosFile$new("dog")
+
+ expect_that(dog$getFileListing(), equals("dog"))
+})
+
+test_that("get always returns NULL", {
+
+ dog <- ArvadosFile$new("dog")
+
+ responseIsNull <- is.null(dog$get("something"))
+ expect_that(responseIsNull, is_true())
+})
+
+test_that("getFirst always returns NULL", {
+
+ dog <- ArvadosFile$new("dog")
+
+ responseIsNull <- is.null(dog$getFirst())
+ expect_that(responseIsNull, is_true())
+})
+
+test_that(paste("getSizeInBytes returns zero if arvadosFile",
+ "is not part of a collection"), {
+
+ dog <- ArvadosFile$new("dog")
+
+ expect_that(dog$getSizeInBytes(), equals(0))
+})
+
+test_that(paste("getSizeInBytes delegates size calculation",
+ "to REST service class"), {
+
+ collectionContent <- c("animal", "animal/fish")
+ returnSize <- 100
+ fakeREST <- FakeRESTService$new(collectionContent, returnSize)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ resourceSize <- fish$getSizeInBytes()
+
+ expect_that(resourceSize, equals(100))
+})
+
+test_that("getRelativePath returns path relative to the tree root", {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ shark <- ArvadosFile$new("shark")
+
+ animal$add(fish)
+ fish$add(shark)
+
+ expect_that(shark$getRelativePath(), equals("animal/fish/shark"))
+})
+
+test_that("read raises exception if file doesn't belong to a collection", {
+
+ dog <- ArvadosFile$new("dog")
+
+ expect_that(dog$read(),
+ throws_error("ArvadosFile doesn't belong to any collection."))
+})
+
+test_that("read raises exception if offset or length is negative number", {
+
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ expect_that(fish$read(contentType = "text", offset = -1),
+ throws_error("Offset and length must be positive values."))
+ expect_that(fish$read(contentType = "text", length = -1),
+ throws_error("Offset and length must be positive values."))
+ expect_that(fish$read(contentType = "text", offset = -1, length = -1),
+ throws_error("Offset and length must be positive values."))
+})
+
+test_that("read delegates reading operation to REST service class", {
+
+ collectionContent <- c("animal", "animal/fish")
+ readContent <- "my file"
+ fakeREST <- FakeRESTService$new(collectionContent, readContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ fileContent <- fish$read("text")
+
+ expect_that(fileContent, equals("my file"))
+ expect_that(fakeREST$readCallCount, equals(1))
+})
+
+test_that(paste("connection delegates connection creation to RESTService class",
+ "which returns curl connection opened in read mode when",
+ "'r' or 'rb' is passed as argument"), {
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ connection <- fish$connection("r")
+
+ expect_that(fakeREST$getConnectionCallCount, equals(1))
+})
+
+test_that(paste("connection returns textConnection opened",
+ "in write mode when 'w' is passed as argument"), {
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ connection <- fish$connection("w")
+
+ writeLines("file", connection)
+ writeLines("content", connection)
+
+ writeResult <- textConnectionValue(connection)
+
+ expect_that(writeResult[1], equals("file"))
+ expect_that(writeResult[2], equals("content"))
+})
+
+test_that("flush sends data stored in a connection to a REST server", {
+
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ connection <- fish$connection("w")
+
+ writeLines("file content", connection)
+
+ fish$flush()
+
+ expect_that(fakeREST$writeBuffer, equals("file content"))
+})
+
+test_that("write raises exception if file doesn't belong to a collection", {
+
+ dog <- ArvadosFile$new("dog")
+
+ expect_that(dog$write(),
+ throws_error("ArvadosFile doesn't belong to any collection."))
+})
+
+test_that("write delegates writing operation to REST service class", {
+
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ fileContent <- fish$write("new file content")
+
+ expect_that(fakeREST$writeBuffer, equals("new file content"))
+})
+
+test_that(paste("move raises exception if arvados file",
+ "doesn't belong to any collection"), {
+
+ animal <- ArvadosFile$new("animal")
+
+ expect_that(animal$move("new/location"),
+ throws_error("ArvadosFile doesn't belong to any collection"))
+})
+
+test_that(paste("move raises exception if newLocationInCollection",
+ "parameter is invalid"), {
+
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "animal/dog",
+ "animal/fish/shark",
+ "ball")
+
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+
+ collection <- Collection$new(api, "myUUID")
+ dog <- collection$get("animal/dog")
+
+ expect_that(dog$move("objects/dog"),
+ throws_error("Unable to get destination subcollection"))
+})
+
+test_that("move raises exception if new location contains content with the same name", {
+
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "animal/dog",
+ "animal/fish/shark",
+ "dog")
+
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ dog <- collection$get("animal/dog")
+
+ expect_that(dog$move("dog"),
+ throws_error("Destination already contains content with same name."))
+
+})
+
+test_that("move moves arvados file inside collection tree", {
+
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "animal/dog",
+ "animal/fish/shark",
+ "ball")
+
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ dog <- collection$get("animal/dog")
+
+ dog$move("dog")
+ dogIsNullOnOldLocation <- is.null(collection$get("animal/dog"))
+ dogExistsOnNewLocation <- !is.null(collection$get("dog"))
+
+ expect_that(dogIsNullOnOldLocation, is_true())
+ expect_that(dogExistsOnNewLocation, is_true())
+})
--- /dev/null
+# Unit tests for Collection, using FakeRESTService so no network is
+# required.  Only typos in test descriptions were corrected; the test
+# logic is unchanged.
+source("fakes/FakeRESTService.R")
+
+context("Collection")
+
+test_that(paste("constructor creates file tree from text content",
+ "retrieved from REST service"), {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ root <- collection$get("")
+
+ expect_that(fakeREST$getCollectionContentCallCount, equals(1))
+ expect_that(root$getName(), equals(""))
+})
+
+test_that(paste("add raises exception if passed argument is not",
+ "ArvadosFile or Subcollection"), {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ newNumber <- 10
+
+ # NOTE: "AravodsFile" mirrors the implementation's actual error
+ # message (matched with fixed = TRUE); keep them in sync if the
+ # implementation typo is ever corrected.
+ expect_that(collection$add(newNumber),
+ throws_error(paste("Expected AravodsFile or Subcollection",
+ "object, got (numeric)."), fixed = TRUE))
+})
+
+test_that("add raises exception if relative path is not valid", {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ newPen <- ArvadosFile$new("pen")
+
+ expect_that(collection$add(newPen, "objects"),
+ throws_error("Subcollection objects doesn't exist.",
+ fixed = TRUE))
+})
+
+test_that("add raises exception if content name is empty string", {
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ rootFolder <- Subcollection$new("")
+
+ expect_that(collection$add(rootFolder),
+ throws_error("Content has invalid name.", fixed = TRUE))
+})
+
+test_that(paste("add adds ArvadosFile or Subcollection",
+ "to local tree structure and remote REST service"), {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ newDog <- ArvadosFile$new("dog")
+ collection$add(newDog, "animal")
+
+ dog <- collection$get("animal/dog")
+ dogExistsInCollection <- !is.null(dog) && dog$getName() == "dog"
+
+ expect_that(dogExistsInCollection, is_true())
+ expect_that(fakeREST$createCallCount, equals(1))
+})
+
+test_that("create raises exception if passed argument is not character vector", {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ expect_that(collection$create(10),
+ throws_error("Expected character vector, got (numeric).",
+ fixed = TRUE))
+})
+
+test_that("create raises exception if relative path is not valid", {
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "ball")
+
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ newPen <- ArvadosFile$new("pen")
+
+ expect_that(collection$create(newPen, "objects"),
+ throws_error("Subcollection objects doesn't exist.",
+ fixed = TRUE))
+})
+
+test_that(paste("create adds files specified by fileNames",
+ "to local tree structure and remote REST service"), {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ files <- c("dog", "cat")
+ collection$create(files, "animal")
+
+ dog <- collection$get("animal/dog")
+ cat <- collection$get("animal/cat")
+ dogExistsInCollection <- !is.null(dog) && dog$getName() == "dog"
+ catExistsInCollection <- !is.null(cat) && cat$getName() == "cat"
+
+ expect_that(dogExistsInCollection, is_true())
+ expect_that(catExistsInCollection, is_true())
+ expect_that(fakeREST$createCallCount, equals(2))
+})
+
+test_that("remove raises exception if passed argument is not character vector", {
+
+    collectionContent <- c("animal", "animal/fish", "ball")
+    fakeREST <- FakeRESTService$new(collectionContent)
+
+    api <- Arvados$new("myToken", "myHostName")
+    api$setRESTService(fakeREST)
+    collection <- Collection$new(api, "myUUID")
+
+    # remove() only accepts relative paths as a character vector
+    expect_that(collection$remove(10),
+                throws_error("Expected character vector, got (numeric).",
+                             fixed = TRUE))
+})
+
+test_that("remove raises exception if user tries to remove root folder", {
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ expect_that(collection$remove(""),
+ throws_error("You can't delete root folder.", fixed = TRUE))
+})
+
+test_that(paste("remove removes files specified by paths",
+                "from local tree structure and from remote REST service"), {
+
+    collectionContent <- c("animal", "animal/fish", "animal/dog", "animal/cat", "ball")
+    fakeREST <- FakeRESTService$new(collectionContent)
+
+    api <- Arvados$new("myToken", "myHostName")
+    api$setRESTService(fakeREST)
+    collection <- Collection$new(api, "myUUID")
+
+    collection$remove(c("animal/dog", "animal/cat"))
+
+    # both removed entries must be gone from the local tree
+    # (previously "animal/dog" was fetched twice, so the cat
+    # entry was never actually checked)
+    dog <- collection$get("animal/dog")
+    cat <- collection$get("animal/cat")
+    dogExistsInCollection <- !is.null(dog) && dog$getName() == "dog"
+    catExistsInCollection <- !is.null(cat) && cat$getName() == "cat"
+
+    expect_that(dogExistsInCollection, is_false())
+    expect_that(catExistsInCollection, is_false())
+    expect_that(fakeREST$deleteCallCount, equals(2))
+})
+
+test_that(paste("move moves content to a new location inside file tree",
+ "and on REST service"), {
+
+ collectionContent <- c("animal", "animal/dog", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ collection$move("animal/dog", "dog")
+
+ dogIsNullOnOldLocation <- is.null(collection$get("animal/dog"))
+ dogExistsOnNewLocation <- !is.null(collection$get("dog"))
+
+ expect_that(dogIsNullOnOldLocation, is_true())
+ expect_that(dogExistsOnNewLocation, is_true())
+ expect_that(fakeREST$moveCallCount, equals(1))
+})
+
+test_that("move raises exception if new location is not valid", {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ expect_that(collection$move("fish", "object"),
+ throws_error("Content you want to move doesn't exist in the collection.",
+ fixed = TRUE))
+})
+
+test_that("getFileListing returns sorted collection content received from REST service", {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ contentMatchExpected <- all(collection$getFileListing() ==
+ c("animal", "animal/fish", "ball"))
+
+ expect_that(contentMatchExpected, is_true())
+ #2 calls because Collection$new calls getFileListing once
+ expect_that(fakeREST$getCollectionContentCallCount, equals(2))
+
+})
+
+test_that("get returns arvados file or subcollection from internal tree structure", {
+
+ collectionContent <- c("animal", "animal/fish", "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+
+ fish <- collection$get("animal/fish")
+ fishIsNotNull <- !is.null(fish)
+
+ expect_that(fishIsNotNull, is_true())
+ expect_that(fish$getName(), equals("fish"))
+})
--- /dev/null
+context("CollectionTree")
+
+test_that("constructor creates file tree from character array properly", {
+
+ collection <- "myCollection"
+ characterArray <- c("animal",
+ "animal/dog",
+ "boat")
+
+ collectionTree <- CollectionTree$new(characterArray, collection)
+
+ root <- collectionTree$getTree()
+ animal <- collectionTree$getElement("animal")
+ dog <- collectionTree$getElement("animal/dog")
+ boat <- collectionTree$getElement("boat")
+
+ rootHasNoParent <- is.null(root$getParent())
+ rootIsOfTypeSubcollection <- "Subcollection" %in% class(root)
+ animalIsOfTypeSubcollection <- "Subcollection" %in% class(animal)
+ dogIsOfTypeArvadosFile <- "ArvadosFile" %in% class(dog)
+ boatIsOfTypeArvadosFile <- "ArvadosFile" %in% class(boat)
+ animalsParentIsRoot <- animal$getParent()$getName() == root$getName()
+ animalContainsDog <- animal$getFirst()$getName() == dog$getName()
+ dogsParentIsAnimal <- dog$getParent()$getName() == animal$getName()
+ boatsParentIsRoot <- boat$getParent()$getName() == root$getName()
+
+ allElementsBelongToSameCollection <- root$getCollection() == "myCollection" &&
+ animal$getCollection() == "myCollection" &&
+ dog$getCollection() == "myCollection" &&
+ boat$getCollection() == "myCollection"
+
+ expect_that(root$getName(), equals(""))
+ expect_that(rootIsOfTypeSubcollection, is_true())
+ expect_that(rootHasNoParent, is_true())
+ expect_that(animalIsOfTypeSubcollection, is_true())
+ expect_that(animalsParentIsRoot, is_true())
+ expect_that(animalContainsDog, is_true())
+ expect_that(dogIsOfTypeArvadosFile, is_true())
+ expect_that(dogsParentIsAnimal, is_true())
+ expect_that(boatIsOfTypeArvadosFile, is_true())
+ expect_that(boatsParentIsRoot, is_true())
+ expect_that(allElementsBelongToSameCollection, is_true())
+})
+
+test_that("getElement returns element from tree if element exists on specified path", {
+
+ collection <- "myCollection"
+ characterArray <- c("animal",
+ "animal/dog",
+ "boat")
+
+ collectionTree <- CollectionTree$new(characterArray, collection)
+
+ dog <- collectionTree$getElement("animal/dog")
+
+ expect_that(dog$getName(), equals("dog"))
+})
+
+test_that("getElement returns NULL from tree if element doesn't exists on specified path", {
+
+ collection <- "myCollection"
+ characterArray <- c("animal",
+ "animal/dog",
+ "boat")
+
+ collectionTree <- CollectionTree$new(characterArray, collection)
+
+ fish <- collectionTree$getElement("animal/fish")
+ fishIsNULL <- is.null(fish)
+
+ expect_that(fishIsNULL, is_true())
+})
+
+test_that("getElement trims ./ from start of relativePath", {
+
+ collection <- "myCollection"
+ characterArray <- c("animal",
+ "animal/dog",
+ "boat")
+
+ collectionTree <- CollectionTree$new(characterArray, collection)
+
+ dog <- collectionTree$getElement("animal/dog")
+ dogWithDotSlash <- collectionTree$getElement("./animal/dog")
+
+ expect_that(dogWithDotSlash$getName(), equals(dog$getName()))
+})
+
+test_that("getElement trims / from end of relativePath", {
+
+ collection <- "myCollection"
+ characterArray <- c("animal",
+ "animal/dog",
+ "boat")
+
+ collectionTree <- CollectionTree$new(characterArray, collection)
+
+ animal <- collectionTree$getElement("animal")
+ animalWithSlash <- collectionTree$getElement("animal/")
+
+ expect_that(animalWithSlash$getName(), equals(animal$getName()))
+})
--- /dev/null
+context("Http Parser")
+
+
+test_that("parseJSONResponse generates and returns JSON object from server response", {
+
+ JSONContent <- "{\"bar\":{\"foo\":[10]}}"
+ serverResponse <- list()
+ serverResponse$content <- charToRaw(JSONContent)
+ serverResponse$headers[["Content-Type"]] <- "application/json; charset=utf-8"
+ class(serverResponse) <- c("response")
+
+ parser <- HttpParser$new()
+
+ result <- parser$parseJSONResponse(serverResponse)
+ barExists <- !is.null(result$bar)
+
+ expect_that(barExists, is_true())
+ expect_that(unlist(result$bar$foo), equals(10))
+})
+
+test_that(paste("parseResponse generates and returns character vector",
+ "from server response if outputType is text"), {
+
+ content <- "random text"
+ serverResponse <- list()
+ serverResponse$content <- charToRaw(content)
+ serverResponse$headers[["Content-Type"]] <- "text/plain; charset=utf-8"
+ class(serverResponse) <- c("response")
+
+ parser <- HttpParser$new()
+ parsedResponse <- parser$parseResponse(serverResponse, "text")
+
+ expect_that(parsedResponse, equals("random text"))
+})
+
+
+# Canned WebDAV PROPFIND multistatus response for one collection
+# containing a single 25-byte file "myFile.exe"; shared by the
+# getFileNamesFromResponse / getFileSizesFromResponse tests below.
+webDAVResponseSample <-
+    paste0("<?xml version=\"1.0\" encoding=\"UTF-8\"?><D:multistatus xmlns:",
+           "D=\"DAV:\"><D:response><D:href>/c=aaaaa-bbbbb-ccccccccccccccc</D",
+           ":href><D:propstat><D:prop><D:resourcetype><D:collection xmlns:D=",
+           "\"DAV:\"/></D:resourcetype><D:getlastmodified>Fri, 11 Jan 2018 1",
+           "1:11:11 GMT</D:getlastmodified><D:displayname></D:displayname><D",
+           ":supportedlock><D:lockentry xmlns:D=\"DAV:\"><D:lockscope><D:exc",
+           "lusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:loc",
+           "kentry></D:supportedlock></D:prop><D:status>HTTP/1.1 200 OK</D:s",
+           "tatus></D:propstat></D:response><D:response><D:href>/c=aaaaa-bbb",
+           "bb-ccccccccccccccc/myFile.exe</D:href><D:propstat><D:prop><D:r",
+           "esourcetype></D:resourcetype><D:getlastmodified>Fri, 12 Jan 2018",
+           " 22:22:22 GMT</D:getlastmodified><D:getcontenttype>text/x-c++src",
+           "; charset=utf-8</D:getcontenttype><D:displayname>myFile.exe</D",
+           ":displayname><D:getcontentlength>25</D:getcontentlength><D:getet",
+           "ag>\"123b12dd1234567890\"</D:getetag><D:supportedlock><D:lockent",
+           "ry xmlns:D=\"DAV:\"><D:lockscope><D:exclusive/></D:lockscope><D:",
+           "locktype><D:write/></D:locktype></D:lockentry></D:supportedlock>",
+           "</D:prop><D:status>HTTP/1.1 200 OK</D:status></D:propstat></D:re",
+           "sponse></D:multistatus>")
+
+
+
+test_that(paste("getFileNamesFromResponse returns file names belonging to specific",
+ "collection parsed from webDAV server response"), {
+
+ serverResponse <- list()
+ serverResponse$content <- charToRaw(webDAVResponseSample)
+ serverResponse$headers[["Content-Type"]] <- "text/xml; charset=utf-8"
+ class(serverResponse) <- c("response")
+ url <- URLencode("https://webdav/c=aaaaa-bbbbb-ccccccccccccccc")
+
+ parser <- HttpParser$new()
+ result <- parser$getFileNamesFromResponse(serverResponse, url)
+ expectedResult <- "myFile.exe"
+ resultMatchExpected <- all.equal(result, expectedResult)
+
+ expect_that(resultMatchExpected, is_true())
+})
+
+test_that(paste("getFileSizesFromResponse returns file sizes",
+ "parsed from webDAV server response"), {
+
+ serverResponse <- list()
+ serverResponse$content <- charToRaw(webDAVResponseSample)
+ serverResponse$headers[["Content-Type"]] <- "text/xml; charset=utf-8"
+ class(serverResponse) <- c("response")
+ url <- URLencode("https://webdav/c=aaaaa-bbbbb-ccccccccccccccc")
+
+ parser <- HttpParser$new()
+ expectedResult <- "25"
+ result <- parser$getFileSizesFromResponse(serverResponse, url)
+ resultMatchExpected <- result == expectedResult
+
+ expect_that(resultMatchExpected, is_true())
+})
--- /dev/null
+context("Http Request")
+
+
+test_that("execute raises exception if http verb is not valid", {
+
+    http <- HttpRequest$new()
+    expect_that(http$execute("FAKE VERB", "url"),
+                throws_error("Http verb is not valid."))
+})
+
+test_that(paste("createQuery generates and encodes query portion of http",
+                "request based on filters, limit and offset parameters"), {
+
+    http <- HttpRequest$new()
+
+    # URL-encoded form of [["color","=","red"]] with limit 20, offset 50
+    expectedQuery <- paste0("/?filters=%5B%5B%22color%22%2C%22%3D%22%2C%22red",
+                            "%22%5D%5D&limit=20&offset=50")
+
+    expect_that(http$createQuery(list(list("color", "=", "red")), 20, 50),
+                equals(expectedQuery))
+})
+
+test_that(paste("createQuery generates an empty string",
+                "when filters, limit and offset parameters are set to NULL"), {
+
+    http <- HttpRequest$new()
+    expect_that(http$createQuery(NULL, NULL, NULL), equals(""))
+})
--- /dev/null
+source("fakes/FakeArvados.R")
+source("fakes/FakeHttpRequest.R")
+source("fakes/FakeHttpParser.R")
+
+context("REST service")
+
+test_that("getWebDavHostName calls REST service properly", {
+
+ expectedURL <- "https://host/discovery/v1/apis/arvados/v1/rest"
+ serverResponse <- list(keepWebServiceUrl = "https://myWebDavServer.com")
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, FakeHttpParser$new())
+
+ REST$getWebDavHostName()
+
+ expect_that(httpRequest$URLIsProperlyConfigured, is_true())
+ expect_that(httpRequest$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(httpRequest$numberOfGETRequests, equals(1))
+})
+
+test_that("getWebDavHostName returns webDAV host name properly", {
+
+    serverResponse <- list(keepWebServiceUrl = "https://myWebDavServer.com")
+    httpRequest <- FakeHttpRequest$new(expectedURL = NULL, serverResponse)
+
+    REST <- RESTService$new("token", "host",
+                            httpRequest, FakeHttpParser$new())
+
+    # actual value first, expectation second — matches the convention
+    # used by every other expect_that() call in these tests
+    expect_that(REST$getWebDavHostName(), equals("https://myWebDavServer.com"))
+})
+
+test_that("getResource calls REST service properly", {
+
+ serverResponse <- NULL
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- paste0("https://host/arvados/v1/collections/", resourceUUID)
+
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ REST$getResource("collections", resourceUUID)
+
+ expect_that(httpRequest$URLIsProperlyConfigured, is_true())
+ expect_that(httpRequest$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(httpRequest$numberOfGETRequests, equals(1))
+})
+
+test_that("getResource parses server response", {
+
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ httpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(), httpParser,
+ 0, "webDavHost")
+
+ REST$getResource("collections", resourceUUID)
+
+ expect_that(httpParser$parserCallCount, equals(1))
+})
+
+test_that("getResource raises exception if response contains errors field", {
+
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ serverResponse <- list(errors = 404)
+
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(NULL, serverResponse),
+ FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ expect_that(REST$getResource("collections", resourceUUID), throws_error("404", fixed = TRUE))
+})
+
+test_that("listResources calls REST service properly", {
+
+ serverResponse <- NULL
+ expectedURL <- paste0("https://host/arvados/v1/collections")
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ REST$listResources("collections")
+
+ expect_that(httpRequest$URLIsProperlyConfigured, is_true())
+ expect_that(httpRequest$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(httpRequest$numberOfGETRequests, equals(1))
+})
+
+test_that("listResources parses server response", {
+
+ httpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(), httpParser,
+ 0, "webDavHost")
+
+ REST$listResources("collections")
+
+ expect_that(httpParser$parserCallCount, equals(1))
+})
+
+test_that("listResources raises exception if response contains errors field", {
+
+ serverResponse <- list(errors = 404)
+
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(NULL, serverResponse),
+ FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ expect_that(REST$listResources("collections"), throws_error("404", fixed = TRUE))
+})
+
+test_that("fetchAllItems always returns all resource items from server", {
+
+ expectedURL <- NULL
+ serverResponse <- list(items_available = 8,
+ items = list("collection1",
+ "collection2",
+ "collection3",
+ "collection4",
+ "collection5",
+ "collection6",
+ "collection7",
+ "collection8"))
+
+ httpParser <- FakeHttpParser$new()
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+ httpRequest$serverMaxElementsPerRequest <- 3
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, httpParser,
+ 0, "webDavHost")
+
+ result <- REST$fetchAllItems(NULL, NULL)
+
+ expect_that(length(result), equals(8))
+ expect_that(httpRequest$numberOfGETRequests, equals(3))
+ expect_that(httpParser$parserCallCount, equals(3))
+})
+
+test_that("deleteResource calls REST service properly", {
+
+ serverResponse <- NULL
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- paste0("https://host/arvados/v1/collections/", resourceUUID)
+
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ REST$deleteResource("collections", resourceUUID)
+
+ expect_that(httpRequest$URLIsProperlyConfigured, is_true())
+ expect_that(httpRequest$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(httpRequest$numberOfDELETERequests, equals(1))
+})
+
+test_that("deleteCollection parses server response", {
+
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ httpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(), httpParser,
+ 0, "webDavHost")
+
+ REST$deleteResource("collections", resourceUUID)
+
+ expect_that(httpParser$parserCallCount, equals(1))
+})
+
+test_that("deleteCollection raises exception if response contains errors field", {
+
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ serverResponse <- list(errors = 404)
+
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(NULL, serverResponse),
+ FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ expect_that(REST$deleteResource("collections", resourceUUID), throws_error("404", fixed = TRUE))
+})
+
+test_that("updateResource calls REST service properly", {
+
+ serverResponse <- NULL
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- paste0("https://host/arvados/v1/collections/", resourceUUID)
+ newResourceContent <- list(newName = "Brand new shiny name")
+
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ REST$updateResource("collections", resourceUUID, newResourceContent)
+
+ expect_that(httpRequest$URLIsProperlyConfigured, is_true())
+ expect_that(httpRequest$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(httpRequest$JSONEncodedBodyIsProvided, is_true())
+ expect_that(httpRequest$numberOfPUTRequests, equals(1))
+})
+
+test_that("updateResource parses server response", {
+
+ newResourceContent <- list(newName = "Brand new shiny name")
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ httpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(), httpParser,
+ 0, "webDavHost")
+
+ REST$updateResource("collections", resourceUUID, newResourceContent)
+
+ expect_that(httpParser$parserCallCount, equals(1))
+})
+
+test_that("updateResource raises exception if response contains errors field", {
+
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ serverResponse <- list(errors = 404)
+ newResourceContent <- list(newName = "Brand new shiny name")
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(NULL, serverResponse),
+ FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ expect_that(REST$updateResource("collections", resourceUUID, newResourceContent),
+ throws_error("404", fixed = TRUE))
+})
+
+test_that("createResource calls REST service properly", {
+
+ resourceContent <- list(name = "My favorite collection")
+ serverResponse <- NULL
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://host/arvados/v1/collections"
+ newResourceContent <- list(newName = "Brand new shiny name")
+
+ httpRequest <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "host",
+ httpRequest, FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ REST$createResource("collections", resourceContent)
+
+ expect_that(httpRequest$URLIsProperlyConfigured, is_true())
+ expect_that(httpRequest$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(httpRequest$JSONEncodedBodyIsProvided, is_true())
+ expect_that(httpRequest$numberOfPOSTRequests, equals(1))
+})
+
+test_that("createResource parses server response", {
+
+ resourceContent <- list(newName = "Brand new shiny name")
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ httpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(), httpParser,
+ 0, "webDavHost")
+
+ REST$createResource("collections", resourceContent)
+
+ expect_that(httpParser$parserCallCount, equals(1))
+})
+
+test_that("createResource raises exception if response contains errors field", {
+
+ resourceUUID <- "aaaaa-j7d0g-ccccccccccccccc"
+ serverResponse <- list(errors = 404)
+ resourceContent <- list(newName = "Brand new shiny name")
+ REST <- RESTService$new("token", "host",
+ FakeHttpRequest$new(NULL, serverResponse),
+ FakeHttpParser$new(),
+ 0, "webDavHost")
+
+ expect_that(REST$createResource("collections", resourceContent),
+ throws_error("404", fixed = TRUE))
+})
+
+test_that("create calls REST service properly", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc/file"
+ fakeHttp <- FakeHttpRequest$new(expectedURL)
+ fakeHttpParser <- FakeHttpParser$new()
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, fakeHttpParser,
+ 0, "https://webDavHost/")
+
+ REST$create("file", uuid)
+
+ expect_that(fakeHttp$URLIsProperlyConfigured, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(fakeHttp$numberOfPUTRequests, equals(1))
+})
+
+test_that("create raises exception if server response code is not between 200 and 300", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$create("file", uuid),
+ throws_error("Server code: 404"))
+})
+
+test_that("delete calls REST service properly", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc/file"
+ fakeHttp <- FakeHttpRequest$new(expectedURL)
+ fakeHttpParser <- FakeHttpParser$new()
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, fakeHttpParser,
+ 0, "https://webDavHost/")
+
+ REST$delete("file", uuid)
+
+ expect_that(fakeHttp$URLIsProperlyConfigured, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(fakeHttp$numberOfDELETERequests, equals(1))
+})
+
+test_that("delete raises exception if server response code is not between 200 and 300", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$delete("file", uuid),
+ throws_error("Server code: 404"))
+})
+
+test_that("move calls REST service properly", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc/file"
+ fakeHttp <- FakeHttpRequest$new(expectedURL)
+ fakeHttpParser <- FakeHttpParser$new()
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, fakeHttpParser,
+ 0, "https://webDavHost/")
+
+ REST$move("file", "newDestination/file", uuid)
+
+ expect_that(fakeHttp$URLIsProperlyConfigured, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(fakeHttp$requestHeaderContainsDestinationField, is_true())
+ expect_that(fakeHttp$numberOfMOVERequests, equals(1))
+})
+
+test_that("move raises exception if server response code is not between 200 and 300", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$move("file", "newDestination/file", uuid),
+ throws_error("Server code: 404"))
+})
+
+test_that("getCollectionContent retreives correct content from WebDAV server", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc"
+ returnContent <- list()
+ returnContent$status_code <- 200
+ returnContent$content <- c("animal", "animal/dog", "ball")
+
+ fakeHttp <- FakeHttpRequest$new(expectedURL, returnContent)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, FakeHttpParser$new(),
+ 0, "https://webDavHost/")
+
+ returnResult <- REST$getCollectionContent(uuid)
+ returnedContentMatchExpected <- all.equal(returnResult,
+ c("animal", "animal/dog", "ball"))
+
+ expect_that(returnedContentMatchExpected, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+})
+
+test_that("getCollectionContent raises exception if server returns empty response", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- ""
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, FakeHttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$getCollectionContent(uuid),
+ throws_error("Response is empty, request may be misconfigured"))
+})
+
+test_that("getCollectionContent parses server response", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ fakeHttpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "https://host/",
+ FakeHttpRequest$new(), fakeHttpParser,
+ 0, "https://webDavHost/")
+
+ REST$getCollectionContent(uuid)
+
+ expect_that(fakeHttpParser$parserCallCount, equals(1))
+})
+
+# NOTE(review): this test is byte-for-byte identical to the
+# "getCollectionContent raises exception if server returns empty response"
+# test defined earlier in this file — likely an accidental duplicate;
+# consider removing one copy.
+test_that("getCollectionContent raises exception if server returns empty response", {
+
+    uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+    response <- ""
+    fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+    REST <- RESTService$new("token", "https://host/",
+                            fakeHttp, FakeHttpParser$new(),
+                            0, "https://webDavHost/")
+
+    expect_that(REST$getCollectionContent(uuid),
+                throws_error("Response is empty, request may be misconfigured"))
+})
+
+test_that(paste("getCollectionContent raises exception if server",
+ "response code is not between 200 and 300"), {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$getCollectionContent(uuid),
+ throws_error("Server code: 404"))
+})
+
+
+test_that("getResourceSize calls REST service properly", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc/file"
+ response <- list()
+ response$status_code <- 200
+ response$content <- c(6, 2, 931, 12003)
+ fakeHttp <- FakeHttpRequest$new(expectedURL, response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, FakeHttpParser$new(),
+ 0, "https://webDavHost/")
+
+ returnResult <- REST$getResourceSize("file", uuid)
+ returnedContentMatchExpected <- all.equal(returnResult,
+ c(6, 2, 931, 12003))
+
+ expect_that(fakeHttp$URLIsProperlyConfigured, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(returnedContentMatchExpected, is_true())
+})
+
+test_that("getResourceSize raises exception if server returns empty response", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- ""
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, FakeHttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$getResourceSize("file", uuid),
+ throws_error("Response is empty, request may be misconfigured"))
+})
+
+test_that(paste("getResourceSize raises exception if server",
+ "response code is not between 200 and 300"), {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$getResourceSize("file", uuid),
+ throws_error("Server code: 404"))
+})
+
+test_that("getResourceSize parses server response", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ fakeHttpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "https://host/",
+ FakeHttpRequest$new(), fakeHttpParser,
+ 0, "https://webDavHost/")
+
+ REST$getResourceSize("file", uuid)
+
+ expect_that(fakeHttpParser$parserCallCount, equals(1))
+})
+
+test_that("read calls REST service properly", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc/file"
+ serverResponse <- list()
+ serverResponse$status_code <- 200
+ serverResponse$content <- "file content"
+
+ fakeHttp <- FakeHttpRequest$new(expectedURL, serverResponse)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, FakeHttpParser$new(),
+ 0, "https://webDavHost/")
+
+ returnResult <- REST$read("file", uuid, "text", 1024, 512)
+
+ expect_that(fakeHttp$URLIsProperlyConfigured, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(fakeHttp$requestHeaderContainsRangeField, is_true())
+ expect_that(returnResult, equals("file content"))
+})
+
+test_that("read raises exception if server response code is not between 200 and 300", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$read("file", uuid),
+ throws_error("Server code: 404"))
+})
+
+test_that("read raises exception if contentType is not valid", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ fakeHttp <- FakeHttpRequest$new()
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$read("file", uuid, "some invalid content type"),
+ throws_error("Invalid contentType. Please use text or raw."))
+})
+
+test_that("read parses server response", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ fakeHttpParser <- FakeHttpParser$new()
+ REST <- RESTService$new("token", "https://host/",
+ FakeHttpRequest$new(), fakeHttpParser,
+ 0, "https://webDavHost/")
+
+ REST$read("file", uuid, "text", 1024, 512)
+
+ expect_that(fakeHttpParser$parserCallCount, equals(1))
+})
+
+test_that("write calls REST service properly", {
+
+ fileContent <- "new file content"
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ expectedURL <- "https://webDavHost/c=aaaaa-j7d0g-ccccccccccccccc/file"
+ fakeHttp <- FakeHttpRequest$new(expectedURL)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, FakeHttpParser$new(),
+ 0, "https://webDavHost/")
+
+ REST$write("file", uuid, fileContent, "text/html")
+
+ expect_that(fakeHttp$URLIsProperlyConfigured, is_true())
+ expect_that(fakeHttp$requestBodyIsProvided, is_true())
+ expect_that(fakeHttp$requestHeaderContainsAuthorizationField, is_true())
+ expect_that(fakeHttp$requestHeaderContainsContentTypeField, is_true())
+})
+
+test_that("write raises exception if server response code is not between 200 and 300", {
+
+ uuid <- "aaaaa-j7d0g-ccccccccccccccc"
+ fileContent <- "new file content"
+ response <- list()
+ response$status_code <- 404
+ fakeHttp <- FakeHttpRequest$new(serverResponse = response)
+
+ REST <- RESTService$new("token", "https://host/",
+ fakeHttp, HttpParser$new(),
+ 0, "https://webDavHost/")
+
+ expect_that(REST$write("file", uuid, fileContent, "text/html"),
+ throws_error("Server code: 404"))
+})
--- /dev/null
+source("fakes/FakeRESTService.R")
+
+context("Subcollection")
+
+test_that("getRelativePath returns path relative to the tree root", {
+
+ animal <- Subcollection$new("animal")
+
+ fish <- Subcollection$new("fish")
+ animal$add(fish)
+
+ expect_that(animal$getRelativePath(), equals("animal"))
+ expect_that(fish$getRelativePath(), equals("animal/fish"))
+})
+
+test_that(paste("getFileListing by default returns sorted path of all files",
+ "relative to the current subcollection"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ shark <- ArvadosFile$new("shark")
+ blueFish <- ArvadosFile$new("blueFish")
+
+ animal$add(fish)
+ fish$add(shark)
+ fish$add(blueFish)
+
+ result <- animal$getFileListing()
+
+ #expect sorted array
+ expectedResult <- c("animal/fish/blueFish", "animal/fish/shark")
+
+ resultsMatch <- length(expectedResult) == length(result) &&
+ all(expectedResult == result)
+
+ expect_that(resultsMatch, is_true())
+})
+
+test_that(paste("getFileListing returns sorted names of all direct children",
+ "if fullPath is set to FALSE"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ shark <- ArvadosFile$new("shark")
+ dog <- ArvadosFile$new("dog")
+
+ animal$add(fish)
+ animal$add(dog)
+ fish$add(shark)
+
+ result <- animal$getFileListing(fullPath = FALSE)
+ expectedResult <- c("dog", "fish")
+
+ resultsMatch <- length(expectedResult) == length(result) &&
+ all(expectedResult == result)
+
+ expect_that(resultsMatch, is_true())
+})
+
+test_that("add adds content to inside collection tree", {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ dog <- ArvadosFile$new("dog")
+
+ animal$add(fish)
+ animal$add(dog)
+
+ animalContainsFish <- animal$get("fish")$getName() == fish$getName()
+ animalContainsDog <- animal$get("dog")$getName() == dog$getName()
+
+ expect_that(animalContainsFish, is_true())
+ expect_that(animalContainsDog, is_true())
+})
+
+test_that("add raises exception if content name is empty string", {
+
+ animal <- Subcollection$new("animal")
+ rootFolder <- Subcollection$new("")
+
+ expect_that(animal$add(rootFolder),
+ throws_error("Content has invalid name.", fixed = TRUE))
+})
+
+test_that(paste("add raises exception if ArvadosFile/Subcollection",
+ "with same name already exists in the subcollection"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ secondFish <- Subcollection$new("fish")
+ thirdFish <- ArvadosFile$new("fish")
+
+ animal$add(fish)
+
+ expect_that(animal$add(secondFish),
+ throws_error(paste("Subcollection already contains ArvadosFile or",
+ "Subcollection with same name."), fixed = TRUE))
+ expect_that(animal$add(thirdFish),
+ throws_error(paste("Subcollection already contains ArvadosFile or",
+ "Subcollection with same name."), fixed = TRUE))
+})
+
+test_that(paste("add raises exception if passed argument is",
+ "not ArvadosFile or Subcollection"), {
+
+ animal <- Subcollection$new("animal")
+ number <- 10
+
+ expect_that(animal$add(number),
+ throws_error(paste("Expected AravodsFile or Subcollection object,",
+ "got (numeric)."), fixed = TRUE))
+})
+
+test_that(paste("add post content to a REST service",
+ "if subcollection belongs to a collection"), {
+
+ collectionContent <- c("animal", "animal/fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+
+ collection <- Collection$new(api, "myUUID")
+ animal <- collection$get("animal")
+ dog <- ArvadosFile$new("dog")
+
+ animal$add(dog)
+
+ expect_that(fakeREST$createCallCount, equals(1))
+})
+
+test_that("remove removes content from subcollection", {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+
+ animal$add(fish)
+ animal$remove("fish")
+
+ returnValueAfterRemovalIsNull <- is.null(animal$get("fish"))
+
+ expect_that(returnValueAfterRemovalIsNull, is_true())
+})
+
+test_that(paste("remove raises exception",
+ "if content to remove doesn't exist in the subcollection"), {
+
+ animal <- Subcollection$new("animal")
+
+ expect_that(animal$remove("fish"),
+ throws_error(paste("Subcollection doesn't contains ArvadosFile",
+ "or Subcollection with specified name.")))
+})
+
+test_that("remove raises exception if passed argument is not character vector", {
+
+ animal <- Subcollection$new("animal")
+ number <- 10
+
+ expect_that(animal$remove(number),
+ throws_error(paste("Expected character,",
+ "got (numeric)."), fixed = TRUE))
+})
+
+test_that(paste("remove removes content from REST service",
+ "if subcollection belongs to a collection"), {
+
+ collectionContent <- c("animal", "animal/fish", "animal/dog")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ animal <- collection$get("animal")
+
+ animal$remove("fish")
+
+ expect_that(fakeREST$deleteCallCount, equals(1))
+})
+
+test_that(paste("get returns ArvadosFile or Subcollection",
+ "if file or folder with given name exists"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ dog <- ArvadosFile$new("dog")
+
+ animal$add(fish)
+ animal$add(dog)
+
+ returnedFish <- animal$get("fish")
+ returnedDog <- animal$get("dog")
+
+ returnedFishIsSubcollection <- "Subcollection" %in% class(returnedFish)
+ returnedDogIsArvadosFile <- "ArvadosFile" %in% class(returnedDog)
+
+ expect_that(returnedFishIsSubcollection, is_true())
+ expect_that(returnedFish$getName(), equals("fish"))
+
+ expect_that(returnedDogIsArvadosFile, is_true())
+ expect_that(returnedDog$getName(), equals("dog"))
+})
+
+test_that(paste("get returns NULL if file or folder",
+ "with given name doesn't exists"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+
+ animal$add(fish)
+
+ returnedDogIsNull <- is.null(animal$get("dog"))
+
+ expect_that(returnedDogIsNull, is_true())
+})
+
+test_that("getFirst returns first child in the subcollection", {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+
+ animal$add(fish)
+
+ expect_that(animal$getFirst()$getName(), equals("fish"))
+})
+
+test_that("getFirst returns NULL if subcollection contains no children", {
+
+ animal <- Subcollection$new("animal")
+
+ returnedElementIsNull <- is.null(animal$getFirst())
+
+ expect_that(returnedElementIsNull, is_true())
+})
+
+test_that(paste("setCollection by default sets collection",
+ "filed of subcollection and all its children"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ animal$add(fish)
+
+ animal$setCollection("myCollection")
+
+ expect_that(animal$getCollection(), equals("myCollection"))
+ expect_that(fish$getCollection(), equals("myCollection"))
+})
+
+test_that(paste("setCollection sets collection filed of subcollection only",
+ "if parameter setRecursively is set to FALSE"), {
+
+ animal <- Subcollection$new("animal")
+ fish <- Subcollection$new("fish")
+ animal$add(fish)
+
+ animal$setCollection("myCollection", setRecursively = FALSE)
+ fishCollectionIsNull <- is.null(fish$getCollection())
+
+ expect_that(animal$getCollection(), equals("myCollection"))
+ expect_that(fishCollectionIsNull, is_true())
+})
+
+test_that(paste("move raises exception if subcollection",
+ "doesn't belong to any collection"), {
+
+ animal <- Subcollection$new("animal")
+
+ expect_that(animal$move("new/location"),
+ throws_error("Subcollection doesn't belong to any collection"))
+})
+
+test_that("move raises exception if new location contains content with the same name", {
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "animal/dog",
+ "animal/fish/shark",
+ "fish")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ expect_that(fish$move("fish"),
+ throws_error("Destination already contains content with same name."))
+
+})
+
+test_that(paste("move raises exception if newLocationInCollection",
+ "parameter is invalid"), {
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "animal/dog",
+ "animal/fish/shark",
+ "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ expect_that(fish$move("objects/dog"),
+ throws_error("Unable to get destination subcollection"))
+})
+
+test_that("move moves subcollection inside collection tree", {
+
+ collectionContent <- c("animal",
+ "animal/fish",
+ "animal/dog",
+ "animal/fish/shark",
+ "ball")
+ fakeREST <- FakeRESTService$new(collectionContent)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ fish <- collection$get("animal/fish")
+
+ fish$move("fish")
+ fishIsNullOnOldLocation <- is.null(collection$get("animal/fish"))
+ fishExistsOnNewLocation <- !is.null(collection$get("fish"))
+
+ expect_that(fishIsNullOnOldLocation, is_true())
+ expect_that(fishExistsOnNewLocation, is_true())
+})
+
+test_that(paste("getSizeInBytes returns zero if subcollection",
+ "is not part of a collection"), {
+
+ animal <- Subcollection$new("animal")
+
+ expect_that(animal$getSizeInBytes(), equals(0))
+})
+
+test_that(paste("getSizeInBytes delegates size calculation",
+ "to REST service class"), {
+
+ collectionContent <- c("animal", "animal/fish")
+ returnSize <- 100
+ fakeREST <- FakeRESTService$new(collectionContent, returnSize)
+
+ api <- Arvados$new("myToken", "myHostName")
+ api$setRESTService(fakeREST)
+ collection <- Collection$new(api, "myUUID")
+ animal <- collection$get("animal")
+
+ resourceSize <- animal$getSizeInBytes()
+
+ expect_that(resourceSize, equals(100))
+})
--- /dev/null
+context("Utility function")
+
+test_that("trimFromStart trims string correctly if string starts with trimCharacters", {
+
+ sample <- "./something/random"
+ trimCharacters <- "./something/"
+
+ result <- trimFromStart(sample, trimCharacters)
+
+ expect_that(result, equals("random"))
+})
+
+test_that("trimFromStart returns original string if string doesn't starts with trimCharacters", {
+
+ sample <- "./something/random"
+ trimCharacters <- "./nothing/"
+
+ result <- trimFromStart(sample, trimCharacters)
+
+ expect_that(result, equals("./something/random"))
+})
+
+test_that("trimFromEnd trims string correctly if string ends with trimCharacters", {
+
+ sample <- "./something/random"
+ trimCharacters <- "/random"
+
+ result <- trimFromEnd(sample, trimCharacters)
+
+ expect_that(result, equals("./something"))
+})
+
+test_that("trimFromEnd returns original string if string doesn't end with trimCharacters", {
+
+ sample <- "./something/random"
+ trimCharacters <- "specific"
+
+ result <- trimFromStart(sample, trimCharacters)
+
+ expect_that(result, equals("./something/random"))
+})
+
+test_that("RListToPythonList converts nested R list to char representation of Python list", {
+
+ sample <- list("insert", list("random", list("text")), list("here"))
+
+ result <- RListToPythonList(sample)
+ resultWithSeparator <- RListToPythonList(sample, separator = ",+")
+
+ expect_that(result, equals("[\"insert\", [\"random\", \"text\"], \"here\"]"))
+ expect_that(resultWithSeparator,
+ equals("[\"insert\",+[\"random\",+\"text\"],+\"here\"]"))
+})
+
+test_that("appendToStartIfNotExist appends characters to beginning of a string", {
+
+ sample <- "New Year"
+ charactersToAppend <- "Happy "
+
+ result <- appendToStartIfNotExist(sample, charactersToAppend)
+
+ expect_that(result, equals("Happy New Year"))
+})
+
+test_that(paste("appendToStartIfNotExist returns original string if string",
+ "doesn't start with specified characters"), {
+
+ sample <- "Happy New Year"
+ charactersToAppend <- "Happy"
+
+ result <- appendToStartIfNotExist(sample, charactersToAppend)
+
+ expect_that(result, equals("Happy New Year"))
+})
+
+test_that(paste("splitToPathAndName splits relative path to file/folder",
+ "name and rest of the path"), {
+
+ relativePath <- "path/to/my/file.exe"
+
+ result <- splitToPathAndName( relativePath)
+
+ expect_that(result$name, equals("file.exe"))
+ expect_that(result$path, equals("path/to/my"))
+})
def check_features(self, obj):
if isinstance(obj, dict):
- if obj.get("writable"):
- raise SourceLine(obj, "writable", UnsupportedRequirement).makeError("InitialWorkDir feature 'writable: true' not supported")
+ if obj.get("writable") and self.work_api != "containers":
+ raise SourceLine(obj, "writable", UnsupportedRequirement).makeError("InitialWorkDir feature 'writable: true' not supported with --api=jobs")
if obj.get("class") == "DockerRequirement":
if obj.get("dockerOutputDirectory"):
- # TODO: can be supported by containers API, but not jobs API.
- raise SourceLine(obj, "dockerOutputDirectory", UnsupportedRequirement).makeError(
- "Option 'dockerOutputDirectory' of DockerRequirement not supported.")
+ if self.work_api != "containers":
+ raise SourceLine(obj, "dockerOutputDirectory", UnsupportedRequirement).makeError(
+ "Option 'dockerOutputDirectory' of DockerRequirement not supported with --api=jobs.")
+ if not obj.get("dockerOutputDirectory").startswith('/'):
+ raise SourceLine(obj, "dockerOutputDirectory", validate.ValidationException).makeError(
+ "Option 'dockerOutputDirectory' must be an absolute path.")
for v in obj.itervalues():
self.check_features(v)
elif isinstance(obj, list):
def rewrite(fileobj):
fileobj["location"] = generatemapper.mapper(fileobj["location"]).target
- for k in ("basename", "listing", "contents", "nameext", "nameroot", "dirname"):
+ for k in ("listing", "contents", "nameext", "nameroot", "dirname"):
if k in fileobj:
del fileobj[k]
generatemapper = NoFollowPathMapper([self.generatefiles], "", "",
separateDirs=False)
+ logger.debug("generatemapper is %s", generatemapper._pathmap)
+
with Perf(metrics, "createfiles %s" % self.name):
for f, p in generatemapper.items():
if not p.target:
pass
- elif p.type in ("File", "Directory"):
- source, path = self.arvrunner.fs_access.get_collection(p.resolved)
- vwd.copy(path, p.target, source_collection=source)
+ elif p.type in ("File", "Directory", "WritableFile", "WritableDirectory"):
+ if p.resolved.startswith("_:"):
+ vwd.mkdirs(p.target)
+ else:
+ source, path = self.arvrunner.fs_access.get_collection(p.resolved)
+ vwd.copy(path, p.target, source_collection=source)
elif p.type == "CreateFile":
with vwd.open(p.target, "w") as n:
n.write(p.resolved.encode("utf-8"))
+ def keepemptydirs(p):
+ if isinstance(p, arvados.collection.RichCollectionBase):
+ if len(p) == 0:
+ p.open(".keep", "w").close()
+ else:
+ for c in p:
+ keepemptydirs(p[c])
+
+ keepemptydirs(vwd)
+
with Perf(metrics, "generatefiles.save_new %s" % self.name):
vwd.save_new()
mounts[mountpoint] = {"kind": "collection",
"portable_data_hash": vwd.portable_data_hash(),
"path": p.target}
+ if p.type.startswith("Writable"):
+ mounts[mountpoint]["writable"] = True
container_request["environment"] = {"TMPDIR": self.tmpdir, "HOME": self.outdir}
if self.environment:
def job(self, joborder, output_callback, **kwargs):
if self.work_api == "containers":
- kwargs["outdir"] = "/var/spool/cwl"
- kwargs["docker_outdir"] = "/var/spool/cwl"
+ dockerReq, is_req = self.get_requirement("DockerRequirement")
+ if dockerReq and dockerReq.get("dockerOutputDirectory"):
+ kwargs["outdir"] = dockerReq.get("dockerOutputDirectory")
+ kwargs["docker_outdir"] = dockerReq.get("dockerOutputDirectory")
+ else:
+ kwargs["outdir"] = "/var/spool/cwl"
+ kwargs["docker_outdir"] = "/var/spool/cwl"
elif self.work_api == "jobs":
kwargs["outdir"] = "$(task.outdir)"
kwargs["docker_outdir"] = "$(task.outdir)"
tgt = os.path.join(stagedir, obj["basename"])
basetgt, baseext = os.path.splitext(tgt)
n = 1
- while tgt in self.targets:
- n += 1
- tgt = "%s_%i%s" % (basetgt, n, baseext)
+ if tgt in self.targets and (self.reversemap(tgt)[0] != loc):
+ while tgt in self.targets:
+ n += 1
+ tgt = "%s_%i%s" % (basetgt, n, baseext)
self.targets.add(tgt)
if obj["class"] == "Directory":
- self._pathmap[loc] = MapperEnt(loc, tgt, "Directory", staged)
+ if obj.get("writable"):
+ self._pathmap[loc] = MapperEnt(loc, tgt, "WritableDirectory", staged)
+ else:
+ self._pathmap[loc] = MapperEnt(loc, tgt, "Directory", staged)
if loc.startswith("_:") or self._follow_dirs:
self.visitlisting(obj.get("listing", []), tgt, basedir)
elif obj["class"] == "File":
if "contents" in obj and loc.startswith("_:"):
self._pathmap[loc] = MapperEnt(obj["contents"], tgt, "CreateFile", staged)
else:
- if copy:
+ if copy or obj.get("writable"):
self._pathmap[loc] = MapperEnt(loc, tgt, "WritableFile", staged)
else:
self._pathmap[loc] = MapperEnt(loc, tgt, "File", staged)
if isinstance(tool, CommandLineTool):
(docker_req, docker_is_req) = get_feature(tool, "DockerRequirement")
if docker_req:
- if docker_req.get("dockerOutputDirectory"):
+ if docker_req.get("dockerOutputDirectory") and arvrunner.work_api != "containers":
# TODO: can be supported by containers API, but not jobs API.
raise SourceLine(docker_req, "dockerOutputDirectory", UnsupportedRequirement).makeError(
"Option 'dockerOutputDirectory' of DockerRequirement not supported.")
# Note that arvados/build/run-build-packages.sh looks at this
# file to determine what version of cwltool and schema-salad to build.
install_requires=[
- 'cwltool==1.0.20180116213856',
+ 'cwltool==1.0.20180130110340',
'schema-salad==2.6.20171201034858',
'typing==3.5.3.0',
'ruamel.yaml==0.13.7',
final.save_new.assert_has_calls([mock.call(ensure_unique_name=True, name='Test output', owner_uuid='zzzzz-j7d0g-zzzzzzzzzzzzzzz')])
self.assertEqual("""{
"bar": {
+ "basename": "baz.txt",
"class": "File",
"location": "baz.txt",
"size": 4
},
"foo": {
+ "basename": "foo.txt",
"class": "File",
"location": "foo.txt",
"size": 3
self.priority = ContainerRequest.
where(container_uuid: uuid,
state: ContainerRequest::Committed).
- maximum('priority')
+ maximum('priority') || 0
self.save!
end
end
cr.with_lock do
# Use row locking because this increments container_count
cr.container_uuid = c.uuid
- cr.save
+ cr.save!
end
end
end
cr.finalize!
end
- # Try to cancel any outstanding container requests made by this container.
- ContainerRequest.where(requesting_container_uuid: uuid,
- state: ContainerRequest::Committed).each do |cr|
- cr.priority = 0
- cr.save
+ # Cancel outstanding container requests made by this container.
+ ContainerRequest.
+ includes(:container).
+ where(requesting_container_uuid: uuid,
+ state: ContainerRequest::Committed).each do |cr|
+ cr.update_attributes!(priority: 0)
+ cr.container.reload
+ if cr.container.state == Container::Queued || cr.container.state == Container::Locked
+ # If the child container hasn't started yet, finalize the
+ # child CR now instead of leaving it "on hold", i.e.,
+ # Queued with priority 0. (OTOH, if the child is already
+ # running, leave it alone so it can get cancelled the
+ # usual way, get a copy of the log collection, etc.)
+ cr.update_attributes!(state: ContainerRequest::Final)
+ end
end
end
end
include CommonApiTemplate
include WhitelistUpdate
+ belongs_to :container, foreign_key: :container_uuid, primary_key: :uuid
+
serialize :properties, Hash
serialize :environment, Hash
serialize :mounts, Hash
end
when Final
- if self.state_changed? and not current_user.andand.is_admin
- self.errors.add :state, "of container request can only be set to Final by system."
- end
-
if self.state_was == Committed
- permitted.push :output_uuid, :log_uuid
+ # "Cancel" means setting priority=0, state=Committed
+ permitted.push :priority
+
+ if current_user.andand.is_admin
+ permitted.push :output_uuid, :log_uuid
+ end
end
end
uuid: zzzzz-xvhdp-canceledqueuedc
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
name: canceled with queued container
- state: Committed
+ state: Final
priority: 0
created_at: 2016-01-11 11:11:11.111111111 Z
updated_at: 2016-01-11 11:11:11.111111111 Z
uuid: zzzzz-xvhdp-canceledlocekdc
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
name: canceled with locked container
- state: Committed
+ state: Final
priority: 0
created_at: 2016-01-11 11:11:11.111111111 Z
updated_at: 2016-01-11 11:11:11.111111111 Z
end
test "Container queued cancel" do
- c, _ = minimal_new
+ c, cr = minimal_new({container_count_max: 1})
set_user_from_auth :dispatch1
assert c.update_attributes(state: Container::Cancelled), show_errors(c)
check_no_change_from_cancelled c
+ cr.reload
+ assert_equal ContainerRequest::Final, cr.state
end
test "Container queued count" do
LogsPDH *string
RunArvMount
MkTempDir
- ArvMount *exec.Cmd
- ArvMountPoint string
- HostOutputDir string
- CleanupTempDir []string
- Binds []string
- Volumes map[string]struct{}
- OutputPDH *string
- SigChan chan os.Signal
- ArvMountExit chan error
- finalState string
+ ArvMount *exec.Cmd
+ ArvMountPoint string
+ HostOutputDir string
+ Binds []string
+ Volumes map[string]struct{}
+ OutputPDH *string
+ SigChan chan os.Signal
+ ArvMountExit chan error
+ finalState string
+ parentTemp string
statLogger io.WriteCloser
statReporter *crunchstat.Reporter
func (runner *ContainerRunner) SetupArvMountPoint(prefix string) (err error) {
if runner.ArvMountPoint == "" {
- runner.ArvMountPoint, err = runner.MkTempDir("", prefix)
+ runner.ArvMountPoint, err = runner.MkTempDir(runner.parentTemp, prefix)
}
return
}
+func copyfile(src string, dst string) (err error) {
+ srcfile, err := os.Open(src)
+ if err != nil {
+ return
+ }
+
+ os.MkdirAll(path.Dir(dst), 0777)
+
+ dstfile, err := os.Create(dst)
+ if err != nil {
+ return
+ }
+ _, err = io.Copy(dstfile, srcfile)
+ if err != nil {
+ return
+ }
+
+ err = srcfile.Close()
+ err2 := dstfile.Close()
+
+ if err != nil {
+ return
+ }
+
+ if err2 != nil {
+ return err2
+ }
+
+ return nil
+}
+
func (runner *ContainerRunner) SetupMounts() (err error) {
err = runner.SetupArvMountPoint("keep")
if err != nil {
runner.Binds = nil
runner.Volumes = make(map[string]struct{})
needCertMount := true
+ type copyFile struct {
+ src string
+ bind string
+ }
+ var copyFiles []copyFile
var binds []string
for bind := range runner.Container.Mounts {
pdhOnly = false
src = fmt.Sprintf("%s/by_id/%s", runner.ArvMountPoint, mnt.UUID)
} else if mnt.PortableDataHash != "" {
- if mnt.Writable {
+ if mnt.Writable && !strings.HasPrefix(bind, runner.Container.OutputPath+"/") {
return fmt.Errorf("Can never write to a collection specified by portable data hash")
}
idx := strings.Index(mnt.PortableDataHash, "/")
if mnt.Writable {
if bind == runner.Container.OutputPath {
runner.HostOutputDir = src
+ runner.Binds = append(runner.Binds, fmt.Sprintf("%s:%s", src, bind))
} else if strings.HasPrefix(bind, runner.Container.OutputPath+"/") {
- return fmt.Errorf("Writable mount points are not permitted underneath the output_path: %v", bind)
+ copyFiles = append(copyFiles, copyFile{src, runner.HostOutputDir + bind[len(runner.Container.OutputPath):]})
+ } else {
+ runner.Binds = append(runner.Binds, fmt.Sprintf("%s:%s", src, bind))
}
- runner.Binds = append(runner.Binds, fmt.Sprintf("%s:%s", src, bind))
} else {
runner.Binds = append(runner.Binds, fmt.Sprintf("%s:%s:ro", src, bind))
}
case mnt.Kind == "tmp":
var tmpdir string
- tmpdir, err = runner.MkTempDir("", "")
+ tmpdir, err = runner.MkTempDir(runner.parentTemp, "tmp")
if err != nil {
return fmt.Errorf("While creating mount temp dir: %v", err)
}
if staterr != nil {
return fmt.Errorf("While Chmod temp dir: %v", err)
}
- runner.CleanupTempDir = append(runner.CleanupTempDir, tmpdir)
runner.Binds = append(runner.Binds, fmt.Sprintf("%s:%s", tmpdir, bind))
if bind == runner.Container.OutputPath {
runner.HostOutputDir = tmpdir
// can ensure the file is world-readable
// inside the container, without having to
// make it world-readable on the docker host.
- tmpdir, err := runner.MkTempDir("", "")
+ tmpdir, err := runner.MkTempDir(runner.parentTemp, "json")
if err != nil {
return fmt.Errorf("creating temp dir: %v", err)
}
- runner.CleanupTempDir = append(runner.CleanupTempDir, tmpdir)
tmpfn := filepath.Join(tmpdir, "mountdata.json")
err = ioutil.WriteFile(tmpfn, jsondata, 0644)
if err != nil {
runner.Binds = append(runner.Binds, fmt.Sprintf("%s:%s:ro", tmpfn, bind))
case mnt.Kind == "git_tree":
- tmpdir, err := runner.MkTempDir("", "")
+ tmpdir, err := runner.MkTempDir(runner.parentTemp, "git_tree")
if err != nil {
return fmt.Errorf("creating temp dir: %v", err)
}
- runner.CleanupTempDir = append(runner.CleanupTempDir, tmpdir)
err = gitMount(mnt).extractTree(runner.ArvClient, tmpdir, token)
if err != nil {
return err
}
}
+ for _, cp := range copyFiles {
+ st, err := os.Stat(cp.src)
+ if err != nil {
+ return fmt.Errorf("While staging writable file from %q to %q: %v", cp.src, cp.bind, err)
+ }
+ if st.IsDir() {
+ err = filepath.Walk(cp.src, func(walkpath string, walkinfo os.FileInfo, walkerr error) error {
+ if walkerr != nil {
+ return walkerr
+ }
+ target := path.Join(cp.bind, walkpath[len(cp.src):])
+ if walkinfo.Mode().IsRegular() {
+ copyerr := copyfile(walkpath, target)
+ if copyerr != nil {
+ return copyerr
+ }
+ return os.Chmod(target, walkinfo.Mode()|0777)
+ } else if walkinfo.Mode().IsDir() {
+ mkerr := os.MkdirAll(target, 0777)
+ if mkerr != nil {
+ return mkerr
+ }
+ return os.Chmod(target, walkinfo.Mode()|os.ModeSetgid|0777)
+ } else {
+ return fmt.Errorf("Source %q is not a regular file or directory", cp.src)
+ }
+ })
+ } else if st.Mode().IsRegular() {
+ err = copyfile(cp.src, cp.bind)
+ if err == nil {
+ err = os.Chmod(cp.bind, st.Mode()|0777)
+ }
+ }
+ if err != nil {
+ return fmt.Errorf("While staging writable file from %q to %q: %v", cp.src, cp.bind, err)
+ }
+ }
+
return nil
}
// go through mounts and try reverse map to collection reference
for _, bind := range binds {
mnt := runner.Container.Mounts[bind]
- if tgt == bind || strings.HasPrefix(tgt, bind+"/") {
+ if (tgt == bind || strings.HasPrefix(tgt, bind+"/")) && !mnt.Writable {
// get path relative to bind
targetSuffix := tgt[len(bind):]
continue
}
- if mnt.ExcludeFromOutput == true {
+ if mnt.ExcludeFromOutput == true || mnt.Writable {
continue
}
}
}
- for _, tmpdir := range runner.CleanupTempDir {
- if rmerr := os.RemoveAll(tmpdir); rmerr != nil {
- runner.CrunchLog.Printf("While cleaning up temporary directory %s: %v", tmpdir, rmerr)
- }
+ if rmerr := os.RemoveAll(runner.parentTemp); rmerr != nil {
+ runner.CrunchLog.Printf("While cleaning up temporary directory %s: %v", runner.parentTemp, rmerr)
}
}
os.Exit(1)
}
+ parentTemp, tmperr := cr.MkTempDir("", "crunch-run."+containerId+".")
+ if tmperr != nil {
+ log.Fatalf("%s: %v", containerId, tmperr)
+ }
+
+ cr.parentTemp = parentTemp
cr.statInterval = *statInterval
cr.cgroupRoot = *cgroupRoot
cr.expectCgroupParent = *cgroupParent
"net"
"os"
"os/exec"
- "path/filepath"
"runtime/pprof"
"sort"
"strings"
c.Assert(err, IsNil)
stubCertPath := stubCert(certTemp)
+ cr.parentTemp = realTemp
+
defer os.RemoveAll(realTemp)
defer os.RemoveAll(certTemp)
}
checkEmpty := func() {
- filepath.Walk(realTemp, func(path string, _ os.FileInfo, err error) error {
- c.Check(path, Equals, realTemp)
- c.Check(err, IsNil)
- return nil
- })
+ // Should be deleted.
+ _, err := os.Stat(realTemp)
+ c.Assert(os.IsNotExist(err), Equals, true)
+
+ // Now recreate it for the next test.
+ c.Assert(os.Mkdir(realTemp, 0777), IsNil)
}
{
c.Check(am.Cmd, DeepEquals, []string{"--foreground", "--allow-other",
"--read-write", "--crunchstat-interval=5",
"--mount-by-pdh", "by_id", realTemp + "/keep1"})
- c.Check(cr.Binds, DeepEquals, []string{realTemp + "/2:/tmp"})
+ c.Check(cr.Binds, DeepEquals, []string{realTemp + "/tmp2:/tmp"})
os.RemoveAll(cr.ArvMountPoint)
cr.CleanupDirs()
checkEmpty()
c.Check(am.Cmd, DeepEquals, []string{"--foreground", "--allow-other",
"--read-write", "--crunchstat-interval=5",
"--mount-by-pdh", "by_id", realTemp + "/keep1"})
- c.Check(cr.Binds, DeepEquals, []string{realTemp + "/2:/out", realTemp + "/3:/tmp"})
+ c.Check(cr.Binds, DeepEquals, []string{realTemp + "/tmp2:/out", realTemp + "/tmp3:/tmp"})
os.RemoveAll(cr.ArvMountPoint)
cr.CleanupDirs()
checkEmpty()
c.Check(am.Cmd, DeepEquals, []string{"--foreground", "--allow-other",
"--read-write", "--crunchstat-interval=5",
"--mount-by-pdh", "by_id", realTemp + "/keep1"})
- c.Check(cr.Binds, DeepEquals, []string{realTemp + "/2:/tmp", stubCertPath + ":/etc/arvados/ca-certificates.crt:ro"})
+ c.Check(cr.Binds, DeepEquals, []string{realTemp + "/tmp2:/tmp", stubCertPath + ":/etc/arvados/ca-certificates.crt:ro"})
os.RemoveAll(cr.ArvMountPoint)
cr.CleanupDirs()
checkEmpty()
err := cr.SetupMounts()
c.Check(err, IsNil)
sort.StringSlice(cr.Binds).Sort()
- c.Check(cr.Binds, DeepEquals, []string{realTemp + "/2/mountdata.json:/mnt/test.json:ro"})
- content, err := ioutil.ReadFile(realTemp + "/2/mountdata.json")
+ c.Check(cr.Binds, DeepEquals, []string{realTemp + "/json2/mountdata.json:/mnt/test.json:ro"})
+ content, err := ioutil.ReadFile(realTemp + "/json2/mountdata.json")
c.Check(err, IsNil)
c.Check(content, DeepEquals, []byte(test.out))
os.RemoveAll(cr.ArvMountPoint)
c.Check(am.Cmd, DeepEquals, []string{"--foreground", "--allow-other",
"--read-write", "--crunchstat-interval=5",
"--file-cache", "512", "--mount-tmp", "tmp0", "--mount-by-pdh", "by_id", realTemp + "/keep1"})
- c.Check(cr.Binds, DeepEquals, []string{realTemp + "/2:/tmp", realTemp + "/keep1/tmp0:/tmp/foo:ro"})
+ c.Check(cr.Binds, DeepEquals, []string{realTemp + "/tmp2:/tmp", realTemp + "/keep1/tmp0:/tmp/foo:ro"})
os.RemoveAll(cr.ArvMountPoint)
cr.CleanupDirs()
checkEmpty()
}
- // Writable mount points are not allowed underneath output_dir mount point
+ // Writable mount points copied to output_dir mount point
{
i = 0
cr.ArvMountPoint = ""
cr.Container.Mounts = make(map[string]arvados.Mount)
cr.Container.Mounts = map[string]arvados.Mount{
- "/tmp": {Kind: "tmp"},
- "/tmp/foo": {Kind: "collection", Writable: true},
+ "/tmp": {Kind: "tmp"},
+ "/tmp/foo": {Kind: "collection",
+ PortableDataHash: "59389a8f9ee9d399be35462a0f92541c+53",
+ Writable: true},
+ "/tmp/bar": {Kind: "collection",
+ PortableDataHash: "59389a8f9ee9d399be35462a0f92541d+53",
+ Path: "baz",
+ Writable: true},
}
cr.OutputPath = "/tmp"
+ os.MkdirAll(realTemp+"/keep1/by_id/59389a8f9ee9d399be35462a0f92541c+53", os.ModePerm)
+ os.MkdirAll(realTemp+"/keep1/by_id/59389a8f9ee9d399be35462a0f92541d+53/baz", os.ModePerm)
+
+ rf, _ := os.Create(realTemp + "/keep1/by_id/59389a8f9ee9d399be35462a0f92541d+53/baz/quux")
+ rf.Write([]byte("bar"))
+ rf.Close()
+
err := cr.SetupMounts()
- c.Check(err, NotNil)
- c.Check(err, ErrorMatches, `Writable mount points are not permitted underneath the output_path.*`)
+ c.Check(err, IsNil)
+ _, err = os.Stat(cr.HostOutputDir + "/foo")
+ c.Check(err, IsNil)
+ _, err = os.Stat(cr.HostOutputDir + "/bar/quux")
+ c.Check(err, IsNil)
os.RemoveAll(cr.ArvMountPoint)
cr.CleanupDirs()
checkEmpty()
t.logWriter.Close()
})
- c.Check(runner.Binds, DeepEquals, []string{realtemp + "/2:/tmp",
+ c.Check(runner.Binds, DeepEquals, []string{realtemp + "/tmp2:/tmp",
realtemp + "/keep1/by_id/a0def87f80dd594d4675809e83bd4f15+367/file2_in_main.txt:/tmp/foo/bar:ro",
realtemp + "/keep1/by_id/a0def87f80dd594d4675809e83bd4f15+367/subdir1/subdir2/file2_in_subdir2.txt:/tmp/foo/baz/sub2file2:ro",
realtemp + "/keep1/by_id/a0def87f80dd594d4675809e83bd4f15+367/subdir1:/tmp/foo/sub1:ro",
}
api, _, _ := s.fullRunHelper(c, helperRecord, extraMounts, 0, func(t *TestDockerClient) {
- os.Symlink("/keep/foo/sub1file2", t.realTemp+"/2/baz")
- os.Symlink("/keep/foo2/subdir1/file2_in_subdir1.txt", t.realTemp+"/2/baz2")
- os.Symlink("/keep/foo2/subdir1", t.realTemp+"/2/baz3")
- os.Mkdir(t.realTemp+"/2/baz4", 0700)
- os.Symlink("/keep/foo2/subdir1/file2_in_subdir1.txt", t.realTemp+"/2/baz4/baz5")
+ os.Symlink("/keep/foo/sub1file2", t.realTemp+"/tmp2/baz")
+ os.Symlink("/keep/foo2/subdir1/file2_in_subdir1.txt", t.realTemp+"/tmp2/baz2")
+ os.Symlink("/keep/foo2/subdir1", t.realTemp+"/tmp2/baz3")
+ os.Mkdir(t.realTemp+"/tmp2/baz4", 0700)
+ os.Symlink("/keep/foo2/subdir1/file2_in_subdir1.txt", t.realTemp+"/tmp2/baz4/baz5")
t.logWriter.Close()
})
extraMounts := []string{}
api, _, _ := s.fullRunHelper(c, helperRecord, extraMounts, 0, func(t *TestDockerClient) {
- os.Symlink("/etc/hosts", t.realTemp+"/2/baz")
+ os.Symlink("/etc/hosts", t.realTemp+"/tmp2/baz")
t.logWriter.Close()
})
extraMounts := []string{}
api, _, _ := s.fullRunHelper(c, helperRecord, extraMounts, 0, func(t *TestDockerClient) {
- rf, _ := os.Create(t.realTemp + "/2/realfile")
+ rf, _ := os.Create(t.realTemp + "/tmp2/realfile")
rf.Write([]byte("foo"))
rf.Close()
- os.Mkdir(t.realTemp+"/2/realdir", 0700)
- rf, _ = os.Create(t.realTemp + "/2/realdir/subfile")
+ os.Mkdir(t.realTemp+"/tmp2/realdir", 0700)
+ rf, _ = os.Create(t.realTemp + "/tmp2/realdir/subfile")
rf.Write([]byte("bar"))
rf.Close()
- os.Symlink("/tmp/realfile", t.realTemp+"/2/file1")
- os.Symlink("realfile", t.realTemp+"/2/file2")
- os.Symlink("/tmp/file1", t.realTemp+"/2/file3")
- os.Symlink("file2", t.realTemp+"/2/file4")
- os.Symlink("realdir", t.realTemp+"/2/dir1")
- os.Symlink("/tmp/realdir", t.realTemp+"/2/dir2")
+ os.Symlink("/tmp/realfile", t.realTemp+"/tmp2/file1")
+ os.Symlink("realfile", t.realTemp+"/tmp2/file2")
+ os.Symlink("/tmp/file1", t.realTemp+"/tmp2/file3")
+ os.Symlink("file2", t.realTemp+"/tmp2/file4")
+ os.Symlink("realdir", t.realTemp+"/tmp2/dir1")
+ os.Symlink("/tmp/realdir", t.realTemp+"/tmp2/dir2")
t.logWriter.Close()
})
'setuptools'
],
dependency_links=[
- "https://github.com/curoverse/libcloud/archive/apache-libcloud-2.2.2.dev3.zip"
+ "https://github.com/curoverse/libcloud/archive/apache-libcloud-2.2.2.dev4.zip"
],
test_suite='tests',
tests_require=[
'requests',
'pbr<1.7.0',
'mock>=1.0',
- 'apache-libcloud==2.2.2.dev3',
+ 'apache-libcloud==2.2.2.dev4',
],
zip_safe=False,
cmdclass={'egg_info': tagger},
apt-transport-https ca-certificates slurm-wlm \
linkchecker python3-virtualenv python-virtualenv xvfb iceweasel \
libgnutls28-dev python3-dev vim cadaver cython gnupg dirmngr \
- libsecret-1-dev && \
+ libsecret-1-dev r-base r-cran-testthat libxml2-dev && \
apt-get clean
ENV RUBYVERSION_MINOR 2.3