*agpl-3.0.html
*agpl-3.0.txt
+apache-2.0.txt
apps/workbench/app/assets/javascripts/list.js
apps/workbench/public/webshell/*
+AUTHORS
*/bootstrap.css
*/bootstrap.js
*bootstrap-theme.css
--- /dev/null
+# Names should be added to this file with this pattern:
+#
+# For individuals:
+# Name <email address>
+#
+# For organizations:
+# Organization <fnmatch pattern>
+#
+# See the Python fnmatch module documentation for more information.
+
+Curoverse, Inc. <*@curoverse.com>
+Adam Savitzky <adam.savitzky@gmail.com>
+Colin Nolan <colin.nolan@sanger.ac.uk>
+David <davide.fiorentino.loregio@gmail.com>
+Guillermo Carrasco <guille.ch.88@gmail.com>
+Joshua Randall <joshua.randall@sanger.ac.uk>
+President and Fellows of Harvard College <*@harvard.edu>
+Thomas Mooney <tmooney@genome.wustl.edu>
-Server-side components of Arvados contained in the apps/ and services/
-directories, including the API Server, Workbench, and Crunch, are licensed
-under the GNU Affero General Public License version 3 (see agpl-3.0.txt).
+Unless indicated otherwise in the header of the file, the files in this
+repository are distributed under one of three different licenses: AGPL-3.0,
+Apache-2.0 or CC-BY-SA-3.0.
-The files and directories under the build/, lib/ and tools/ directories are
-licensed under the GNU Affero General Public License version 3 (see
-agpl-3.0.txt).
+Individual files contain an SPDX tag that indicates the license for the file.
+These are the three tags in use:
-The Arvados client Software Development Kits contained in the sdk/ directory,
-example scripts in the crunch_scripts/ directory, the files and directories
-under backports/ and docker/, and code samples in the Aravados documentation
-are licensed under the Apache License, Version 2.0 (see LICENSE-2.0.txt).
+ SPDX-License-Identifier: AGPL-3.0
+ SPDX-License-Identifier: Apache-2.0
+ SPDX-License-Identifier: CC-BY-SA-3.0
-The Arvados Documentation located in the doc/ directory is licensed under the
-Creative Commons Attribution-Share Alike 3.0 United States (see by-sa-3.0.txt).
+This enables machine processing of license information based on the SPDX
+License Identifiers that are available here: http://spdx.org/licenses/
+
+The full license text for each license is available in this directory:
+
+ AGPL-3.0: agpl-3.0.txt
+ Apache-2.0: apache-2.0.txt
+ CC-BY-SA-3.0: cc-by-sa-3.0.txt
end
def show_file_links
+ if Rails.configuration.keep_web_url || Rails.configuration.keep_web_download_url
+ # show_file will redirect to keep-web's directory listing
+ return show_file
+ end
Thread.current[:reader_tokens] = [params[:reader_token]]
return if false.equal?(find_object_by_uuid)
render layout: false
helper_method :download_link
def download_link
- collections_url + "/download/#{@object.uuid}/#{@search_sharing.first.api_token}/"
+ token = @search_sharing.first.api_token
+ if Rails.configuration.keep_web_url || Rails.configuration.keep_web_download_url
+ keep_web_url(@object.uuid, nil, {path_token: token})
+ else
+ collections_url + "/download/#{@object.uuid}/#{token}/"
+ end
end
def share
uri.path += 't=' + opts[:path_token] + '/'
end
uri.path += '_/'
- uri.path += URI.escape(file)
+ uri.path += URI.escape(file) if file
query = Hash[URI.decode_www_form(uri.query || '')]
{ query_token: 'api_token',
<div class="collection-tags-container" style="padding-left:2em;padding-right:2em;">
<% if object.editable? %>
<p title="Edit tags" id="edit-collection-tags">
- <a type="button" class="btn btn-primary edit-collection-tags">Edit</a>
+ <a class="btn btn-primary edit-collection-tags">Edit</a>
</p>
<% end %>
test "Report network error" do
need_selenium "to make file uploads work"
use_token :admin do
- # Even if you somehow do port>2^16, surely nx.example.net won't
+ # Even if port 0 is a thing, surely nx.example.net won't
# respond
KeepService.where(service_type: 'proxy').first.
update_attributes(service_host: 'nx.example.net',
- service_port: 99999)
+ service_port: 0)
end
visit page_with_token 'active', sandbox_path
end
test "collection tags tab" do
- need_selenium
-
visit page_with_token('active', '/collections/zzzzz-4zz18-bv31uwvy3neko21')
click_link 'Tags'
assert_selector 'a', text: 'Cancel'
# add two tags
- first('.edit-collection-tags').click
-
first('.glyphicon-plus').click
first('.collection-tag-field-key').click
first('.collection-tag-field-key').set('key 1')
class DownloadTest < ActionDispatch::IntegrationTest
include KeepWebConfig
+ @@wrote_test_data = false
+
setup do
use_keep_web_config
# Keep data isn't populated by fixtures, so we have to write any
# data we expect to read.
- ['foo', 'w a z', "Hello world\n"].each do |data|
- md5 = `echo -n #{data.shellescape} | arv-put --no-progress --raw -`
- assert_match /^#{Digest::MD5.hexdigest(data)}/, md5
- assert $?.success?, $?
+ if !@@wrote_test_data
+ ['foo', 'w a z', "Hello world\n"].each do |data|
+ md5 = `echo -n #{data.shellescape} | arv-put --no-progress --raw -`
+ assert_match /^#{Digest::MD5.hexdigest(data)}/, md5
+ assert $?.success?, $?
+ end
+ @@wrote_test_data = true
end
end
uuid_or_pdh = api_fixture('collections')['foo_file'][id_type]
token = api_fixture('api_client_authorizations')['active_all_collections']['api_token']
visit "/collections/download/#{uuid_or_pdh}/#{token}/"
- within "#collection_files" do
+ within 'ul' do
click_link 'foo'
end
assert_no_selector 'a'
${cc}${cc:+ }SPDX-License-Identifier: CC-BY-SA-3.0${ce}"
found=$(head -n20 "$fnm" | egrep -A${grepAfter} -B${grepBefore} 'Copyright.*Arvados' || true)
case ${fnm} in
- Makefile | build/* | lib/* | tools/* | apps/* | services/*)
+ Makefile | build/* | lib/* | tools/* | apps/* | services/* | sdk/cli/bin/crunch-job)
want=${wantGPL}
;;
crunch_scripts/* | backports/* | docker/* | sdk/*)
[]
end
end
+
+ # create() returns [job, exception]. If both job and exception are
+ # nil, there was a non-retryable error and the call should not be
+ # attempted again.
def self.create(pipeline, component, job, create_params)
@cache ||= {}
body = {job: no_nil_values(job)}.merge(no_nil_values(create_params))
- result = $client.execute(:api_method => $arvados.jobs.create,
- :body_object => body,
- :authenticated => false,
- :headers => {
- authorization: 'OAuth2 '+$arv.config['ARVADOS_API_TOKEN']
- })
- j = JSON.parse result.body, :symbolize_names => true
- if j.is_a? Hash and j[:uuid]
+ result = nil
+ begin
+ result = $client.execute(
+ :api_method => $arvados.jobs.create,
+ :body_object => body,
+ :authenticated => false,
+ :headers => {
+ authorization: 'OAuth2 '+$arv.config['ARVADOS_API_TOKEN']
+ })
+ if result.status == 429 || result.status >= 500
+ raise Exception.new("HTTP status #{result.status}")
+ end
+ rescue Exception => e
+ return nil, e
+ end
+ j = JSON.parse(result.body, :symbolize_names => true) rescue nil
+ if result.status == 200 && j.is_a?(Hash) && j[:uuid]
@cache[j[:uuid]] = j
+ return j, nil
else
- debuglog "create job: #{j[:errors] rescue nil} with attributes #{body}", 0
+ errors = j[:errors] rescue []
+ debuglog "create job: [#{result.status}] #{errors.inspect} with attributes #{body}", 0
msg = ""
- j[:errors].each do |err|
+ errors.each do |err|
msg += "Error creating job for component #{component}: #{err}\n"
end
msg += "Job submission was: #{body.to_json}"
pipeline.log_stderr(msg)
- nil
+ return nil, nil
end
end
end
end
if !errors.empty?
- abort "\n#{Time.now} -- pipeline_template #{@template[:uuid]}\nErrors:\n#{errors.collect { |c,p,e| "#{c}::#{p} - #{e}\n" }.join ""}"
+ all_errors = errors.collect do |c,p,e|
+ "#{c}::#{p} - #{e}\n"
+ end.join("")
+ abort "\n#{Time.now} -- pipeline_template #{@template[:uuid]}\nErrors:\n#{all_errors}"
end
debuglog "options=" + @options.pretty_inspect
self
# are fully specified (any output_of script_parameters are resolved
# to real value)
my_submit_id = "instance #{@instance[:uuid]} rand #{rand(2**64).to_s(36)}"
- job = JobCache.create(@instance, cname, {
+ job, err = JobCache.create(@instance, cname, {
:script => c[:script],
:script_parameters => Hash[c[:script_parameters].map do |key, spec|
[key, spec[:value]]
c[:job] = job
c[:run_in_process] = (@options[:run_jobs_here] and
job[:submit_id] == my_submit_id)
- else
+ elsif err.nil?
debuglog "component #{cname} new job failed", 0
job_creation_failed += 1
+ else
+ debuglog "component #{cname} new job failed, err=#{err}", 0
end
end
@instance[:state] = 'Complete'
else
@instance[:state] = 'Paused'
- end
+ end
else
if ended == @components.length or failed > 0
@instance[:state] = success ? 'Complete' : 'Failed'
#!/usr/bin/env perl
# Copyright (C) The Arvados Authors. All rights reserved.
#
-# SPDX-License-Identifier: Apache-2.0
+# SPDX-License-Identifier: AGPL-3.0
# -*- mode: perl; perl-indent-level: 2; indent-tabs-mode: nil; -*-
if kwargs.get("submit"):
# Submit a runner job to run the workflow for us.
if self.work_api == "containers":
- if tool.tool["class"] == "CommandLineTool":
+ if tool.tool["class"] == "CommandLineTool" and kwargs.get("wait"):
kwargs["runnerjob"] = tool.tool["id"]
upload_dependencies(self,
kwargs["name"],
fi
if test $reset_container = 1 ; then
+ arvbox stop
+ docker rm $ARVBOX_CONTAINER
arvbox reset -f
fi
if test "$tag" = "latest" ; then
arv-keepdocker --pull arvados/jobs $tag
else
- jobsimg=$(curl http://versions.arvados.org/v1/commit/$tag | python -c "import json; import sys; sys.stdout.write(json.load(sys.stdin)['Versions']['Docker']['arvados/jobs'])")
+ jobsimg=\$(curl http://versions.arvados.org/v1/commit/$tag | python -c "import json; import sys; sys.stdout.write(json.load(sys.stdin)['Versions']['Docker']['arvados/jobs'])")
arv-keepdocker --pull arvados/jobs $jobsimg
docker tag -f arvados/jobs:$jobsimg arvados/jobs:latest
arv-keepdocker arvados/jobs latest
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
cwlVersion: v1.0
class: Workflow
$namespaces:
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
cwlVersion: v1.0
class: CommandLineTool
baseCommand: echo
'keep_disk': {'keep_service_uuid': svc['uuid'] }
}).execute()
- # If keepproxy is running, send SIGHUP to make it discover the new
- # keepstore services.
- proxypidfile = _pidfile('keepproxy')
- if os.path.exists(proxypidfile):
- try:
- os.kill(int(open(proxypidfile).read()), signal.SIGHUP)
- except OSError:
- os.remove(proxypidfile)
+ # If keepproxy and/or keep-web is running, send SIGHUP to make
+ # them discover the new keepstore services.
+ for svc in ('keepproxy', 'keep-web'):
+        pidfile = _pidfile(svc)
+ if os.path.exists(pidfile):
+ try:
+ os.kill(int(open(pidfile).read()), signal.SIGHUP)
+ except OSError:
+ os.remove(pidfile)
def _stop_keep(n):
kill_server_pid(_pidfile('keep{}'.format(n)))
origfile = File.new origfnm
origfile.each_line do |line|
if !copyright_done
- if !/Copyright .* Arvados/
+ if !/Copyright .* Arvados/.match(line)
tmpfile.write "-- Copyright (C) The Arvados Authors. All rights reserved.\n--\n-- SPDX-License-Identifier: AGPL-3.0\n\n"
end
copyright_done = true
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
require 'migrate_yaml_to_json'
class JobsYamlToJson < ActiveRecord::Migration
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
module MigrateYAMLToJSON
def self.migrate(table, column)
conn = ActiveRecord::Base.connection
self.collection.stop_threads()
def clear(self):
+ if self.collection is not None:
+ self.collection.stop_threads()
super(CollectionDirectory, self).clear()
self._manifest_size = 0
self.num_retries = num_retries
self._poll = True
self._poll_time = poll_time
+ self._extra = set()
def want_event_subscribe(self):
return True
def update(self):
with llfuse.lock_released:
tags = self.api.links().list(
- filters=[['link_class', '=', 'tag']],
- select=['name'], distinct=True
+ filters=[['link_class', '=', 'tag'], ["name", "!=", ""]],
+ select=['name'], distinct=True, limit=1000
).execute(num_retries=self.num_retries)
if "items" in tags:
- self.merge(tags['items'],
+ self.merge(tags['items']+[{"name": n} for n in self._extra],
lambda i: i['name'],
lambda a, i: a.tag == i['name'],
lambda i: TagDirectory(self.inode, self.inodes, self.api, self.num_retries, i['name'], poll=self._poll, poll_time=self._poll_time))
+ @use_counter
+ @check_update
+ def __getitem__(self, item):
+ if super(TagsDirectory, self).__contains__(item):
+ return super(TagsDirectory, self).__getitem__(item)
+ with llfuse.lock_released:
+ tags = self.api.links().list(
+ filters=[['link_class', '=', 'tag'], ['name', '=', item]], limit=1
+ ).execute(num_retries=self.num_retries)
+ if tags["items"]:
+ self._extra.add(item)
+ self.update()
+ return super(TagsDirectory, self).__getitem__(item)
+
+ @use_counter
+ @check_update
+ def __contains__(self, k):
+ if super(TagsDirectory, self).__contains__(k):
+ return True
+ try:
+ self[k]
+ return True
+ except KeyError:
+ pass
+ return False
+
class TagDirectory(Directory):
"""A special directory that contains as subdirectories all collections visible
expire: exp,
pdh: collection.PortableDataHash,
})
- c.collections.Add(collection.PortableDataHash, &cachedCollection{
- expire: exp,
- collection: collection,
- })
- if int64(len(collection.ManifestText)) > c.MaxCollectionBytes/int64(c.MaxCollectionEntries) {
- go c.pruneCollections()
- }
+ // Disabled, see #11945
+ // c.collections.Add(collection.PortableDataHash, &cachedCollection{
+ // expire: exp,
+ // collection: collection,
+ // })
+ // if int64(len(collection.ManifestText)) > c.MaxCollectionBytes/int64(c.MaxCollectionEntries) {
+ // go c.pruneCollections()
+ // }
return collection, nil
}
)
func (s *UnitSuite) TestCache(c *check.C) {
+ c.Skip("see #11945")
+
arv, err := arvadosclient.MakeArvadosClient()
c.Assert(err, check.Equals, nil)
}
func (s *UnitSuite) TestCacheForceReloadByPDH(c *check.C) {
+ c.Skip("see #11945")
+
arv, err := arvadosclient.MakeArvadosClient()
c.Assert(err, check.Equals, nil)
}
func (s *UnitSuite) TestCacheForceReloadByUUID(c *check.C) {
+ c.Skip("see #11945")
+
arv, err := arvadosclient.MakeArvadosClient()
c.Assert(err, check.Equals, nil)