pane_list <<
{
:name => 'Pipeline_templates',
- :filters => [%w(uuid is_a arvados#pipelineTemplate)]
+ :filters => [%w(uuid is_a) + [%w(arvados#pipelineTemplate arvados#workflow)]]
}
pane_list <<
{
class WorkflowsController < ApplicationController
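+  # When an anonymous user token is configured, let logged-out users
+  # view the workflow 'show' page without an API token.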
+ skip_around_filter :require_thread_api_token, if: proc { |ctrl|
+ Rails.configuration.anonymous_user_token and
+ 'show' == ctrl.action_name
+ }
end
def template_uuid
properties = get(:properties)
if properties
- properties[:workflow_uuid]
+ properties[:template_uuid]
end
end
-<% if @object.respond_to? :name %>
- <h2>
- <%= render_editable_attribute @object, 'name', nil, { 'data-emptytext' => "New #{controller.model_class.to_s.underscore.gsub("_"," ")}" } %>
- </h2>
-<% end %>
-
-<% if @object.respond_to? :description %>
- <div class="arv-description-as-subtitle">
- <%= render_editable_attribute @object, 'description', nil, { 'data-emptytext' => "(No description provided)", 'data-toggle' => 'manual' } %>
- </div>
-<% end %>
+<%= render partial: 'object_name' %>
+<%= render partial: 'object_description' %>
--- /dev/null
+<% if @object.respond_to? :description %>
+ <div class="arv-description-as-subtitle">
+ <%= render_editable_attribute @object, 'description', nil, { 'data-emptytext' => "(No description provided)", 'data-toggle' => 'manual' } %>
+ </div>
+<% end %>
--- /dev/null
+<% if @object.respond_to? :name %>
+ <h2>
+ <%= render_editable_attribute @object, 'name', nil, { 'data-emptytext' => "New #{controller.model_class.to_s.underscore.gsub("_"," ")}" } %>
+ </h2>
+<% end %>
--- /dev/null
+<%
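+  # Look up the workflow this container request was created from (if
+  # any) so we can link to it and note later modifications.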
+ wu = @object.work_unit
+ template_uuid = wu.template_uuid
+ template = Workflow.find?(template_uuid) if template_uuid
+ div_class = "col-sm-12"
+ div_class = "col-sm-6" if template
+%>
+
+<div class="<%=div_class%>">
+ <%= render partial: 'object_name' %>
+ <%= render partial: 'object_description' %>
+</div>
+
+<% if template %>
+ <div class="alert alert-info <%=div_class%>">
+ This container request was created from the workflow <%= link_to_if_arvados_object template, friendly_name: true %><br />
+ <% if template.modified_at && (template.modified_at > @object.created_at) %>
+ Note: This workflow has been modified since this container request was created.
+ <% end %>
+ </div>
+<% end %>
<%= render_pane 'tab_contents', to_string: true, locals: {
- filters: [['uuid', 'is_a', ["arvados#pipelineTemplate"]]],
- sortable_columns: { 'name' => 'pipeline_templates.name', 'description' => 'pipeline_templates.description' }
+ limit: 50,
+ filters: [['uuid', 'is_a', ["arvados#pipelineTemplate", "arvados#workflow"]]],
+ sortable_columns: { 'name' => 'pipeline_templates.name, workflows.name', 'description' => 'pipeline_templates.description, workflows.description' }
}.merge(local_assigns) %>
assert_no_selector 'a', text: 'Re-run options'
end
- test "anonymous user accesses pipeline templates tab in shared project" do
- visit PUBLIC_PROJECT
- click_link 'Data collections'
- assert_text 'GNU General Public License'
+ [
+ 'pipelineTemplate',
+ 'workflow'
+ ].each do |type|
+    test "anonymous user accesses pipeline templates tab in shared project and clicks on #{type}" do
+ visit PUBLIC_PROJECT
+ click_link 'Data collections'
+ assert_text 'GNU General Public License'
- assert_selector 'a', text: 'Pipeline templates'
+ assert_selector 'a', text: 'Pipeline templates'
- click_link 'Pipeline templates'
- assert_text 'Pipeline template in publicly accessible project'
+ click_link 'Pipeline templates'
+ assert_text 'Pipeline template in publicly accessible project'
+ assert_text 'Workflow with input specifications'
- within first('tr[data-kind="arvados#pipelineTemplate"]') do
- click_link 'Show'
- end
+ if type == 'pipelineTemplate'
+ within first('tr[data-kind="arvados#pipelineTemplate"]') do
+ click_link 'Show'
+ end
- # in template page
- assert_text 'Public Projects Unrestricted public data'
- assert_text 'script version'
- assert_no_selector 'a', text: 'Run this pipeline'
+ # in template page
+ assert_text 'Public Projects Unrestricted public data'
+ assert_text 'script version'
+ assert_no_selector 'a', text: 'Run this pipeline'
+ else
+ within first('tr[data-kind="arvados#workflow"]') do
+ click_link 'Show'
+ end
+
+ # in workflow page
+ assert_text 'Public Projects Unrestricted public data'
+ assert_text 'this workflow has inputs specified'
+ end
+ end
end
test "anonymous user accesses subprojects tab in shared project" do
# in the process page now
assert_text process_txt
+ assert_selector 'a', text: template_name
end
end
end
require 'yaml'
def available_port for_what
- Addrinfo.tcp("0.0.0.0", 0).listen do |srv|
- port = srv.connect_address.ip_port
- STDERR.puts "Using port #{port} for #{for_what}"
- return port
+ begin
+ Addrinfo.tcp("0.0.0.0", 0).listen do |srv|
+ port = srv.connect_address.ip_port
+ # Selenium needs an additional locking port, check if it's available
+ # and retry if necessary.
+ if for_what == 'selenium'
+ locking_port = port - 1
+ Addrinfo.tcp("0.0.0.0", locking_port).listen.close
+ end
+ STDERR.puts "Using port #{port} for #{for_what}"
+ return port
+ end
+ rescue Errno::EADDRINUSE, Errno::EACCES
+ retry
end
end
--- /dev/null
+Scripts in this directory:
+
+run-tests.sh Run unit and integration test suite.
+
+run-build-test-packages-one-target.sh Entry point, wraps
+ run-build-packages-one-target.sh to
+ perform package building and testing
+ inside Docker.
+
+run-build-packages-one-target.sh Build packages for one target inside Docker.
+
+run-build-packages-all-targets.sh Run run-build-packages-one-target.sh
+ for every target.
+
+run-build-packages.sh Actually build packages. Intended to run
+                                       inside a Docker container with the
+                                       proper build environment.
+
+run-build-packages-sso.sh Build single-sign-on server packages.
+
+run-build-packages-python-and-ruby.sh Build Python and Ruby packages suitable
+                                       for upload to PyPI and RubyGems.
+
+run-build-docker-images.sh Build arvbox Docker images.
+
+run-build-docker-jobs-image.sh Build arvados/jobs Docker image.
+
+run-library.sh A library of functions shared by the
+ various scripts in this
+ directory.
\ No newline at end of file
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
ciso8601 pycrypto backports.ssl_match_hostname llfuse==0.41.1 \
'pycurl<7.21.5' contextlib2 pyyaml 'rdflib>=4.2.0' \
- shellescape mistune typing avro ruamel.ordereddict)
+ shellescape mistune typing avro ruamel.ordereddict
+ cachecontrol cwltest)
PYTHON3_BACKPORTS=(docker-py==1.7.2 six requests websocket-client)
;;
debian8)
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
ciso8601 pycrypto backports.ssl_match_hostname llfuse==0.41.1 \
'pycurl<7.21.5' pyyaml 'rdflib>=4.2.0' \
- shellescape mistune typing avro ruamel.ordereddict)
+ shellescape mistune typing avro ruamel.ordereddict
+ cachecontrol cwltest)
PYTHON3_BACKPORTS=(docker-py==1.7.2 six requests websocket-client)
;;
ubuntu1204)
rsa uritemplate httplib2 ws4py pykka six pyexecjs jsonschema \
ciso8601 pycrypto backports.ssl_match_hostname llfuse==0.41.1 \
contextlib2 'pycurl<7.21.5' pyyaml 'rdflib>=4.2.0' \
- shellescape mistune typing avro isodate ruamel.ordereddict)
+ shellescape mistune typing avro isodate ruamel.ordereddict
+ cachecontrol cwltest)
PYTHON3_BACKPORTS=(docker-py==1.7.2 six requests websocket-client)
;;
ubuntu1404)
PYTHON_BACKPORTS=(pyasn1==0.1.7 pyasn1-modules==0.0.5 llfuse==0.41.1 ciso8601 \
google-api-python-client==1.4.2 six uritemplate oauth2client==1.5.2 httplib2 \
rsa 'pycurl<7.21.5' backports.ssl_match_hostname pyyaml 'rdflib>=4.2.0' \
- shellescape mistune typing avro ruamel.ordereddict)
+ shellescape mistune typing avro ruamel.ordereddict
+ cachecontrol cwltest)
PYTHON3_BACKPORTS=(docker-py==1.7.2 requests websocket-client)
;;
centos6)
python-daemon lockfile llfuse==0.41.1 'pbr<1.0' pyyaml \
'rdflib>=4.2.0' shellescape mistune typing avro requests \
isodate pyparsing sparqlwrapper html5lib==0.9999999 keepalive \
- ruamel.ordereddict)
+ ruamel.ordereddict cachecontrol cwltest)
PYTHON3_BACKPORTS=(docker-py==1.7.2 six requests websocket-client)
export PYCURL_SSL_LIBRARY=nss
;;
python-daemon llfuse==0.41.1 'pbr<1.0' pyyaml \
'rdflib>=4.2.0' shellescape mistune typing avro \
isodate pyparsing sparqlwrapper html5lib==0.9999999 keepalive \
- ruamel.ordereddict)
+ ruamel.ordereddict cachecontrol cwltest)
PYTHON3_BACKPORTS=(docker-py==1.7.2 six requests websocket-client)
export PYCURL_SSL_LIBRARY=nss
;;
# So we build this thing separately.
#
# Ward, 2016-03-17
-fpm_build schema_salad "" "" python 1.16.20160810195039
+fpm_build schema_salad "" "" python 1.17.20160820171034
# And schema_salad now depends on ruamel-yaml, which apparently has a braindead setup.py that requires special arguments to build (otherwise, it aborts with 'error: you have to install with "pip install ."'). Sigh.
# Ward, 2016-05-26
-# ...and schema_salad 1.12.20160610104117 doesn't work with ruamel-yaml > 0.11.11.
-fpm_build ruamel.yaml "" "" python 0.11.11 --python-setup-py-arguments "--single-version-externally-managed"
+fpm_build ruamel.yaml "" "" python 0.12.4 --python-setup-py-arguments "--single-version-externally-managed"
# And for cwltool we have the same problem as for schema_salad. Ward, 2016-03-17
-fpm_build cwltool "" "" python 1.0.20160811184335
+fpm_build cwltool "" "" python 1.0.20160901133827
# FPM eats the trailing .0 in the python-rdflib-jsonld package when built with 'rdflib-jsonld>=0.3.0'. Force the version. Ward, 2016-03-25
fpm_build rdflib-jsonld "" "" python 0.3.0
<code class="userinput">$ARVADOS_HOME/sdk/java/ArvadosSDKJavaExampleWithPrompt.java</code> can be
used to make calls to API server interactively.
-Please use these implementations to see how you would want use the SDK from your java program.
+Please use these implementations to see how you would use the SDK from your java program.
Also, refer to <code class="userinput">$ARVADOS_HOME/arvados/sdk/java/src/test/java/org/arvados/sdk/java/ArvadosTest.java</code>
for more sample API invocation examples.
* To compile the examples
<notextile>
<pre>
-$ <code class="userinput">javac -cp $ARVADOS_HOME/sdk/java/target/arvados-sdk-1.0-jar-with-dependencies.jar \
+$ <code class="userinput">javac -cp $ARVADOS_HOME/sdk/java/target/arvados-sdk-1.1-jar-with-dependencies.jar \
ArvadosSDKJavaExample*.java</code>
This results in the generation of the ArvadosSDKJavaExample*.class files
in the same directory as the .java files.
* To run the samples
<notextile>
<pre>
-$ <code class="userinput">java -cp .:$ARVADOS_HOME/sdk/java/target/arvados-sdk-1.0-jar-with-dependencies.jar \
+$ <code class="userinput">java -cp .:$ARVADOS_HOME/sdk/java/target/arvados-sdk-1.1-jar-with-dependencies.jar \
ArvadosSDKJavaExample</code>
-$ <code class="userinput">java -cp .:$ARVADOS_HOME/sdk/java/target/arvados-sdk-1.0-jar-with-dependencies.jar \
+$ <code class="userinput">java -cp .:$ARVADOS_HOME/sdk/java/target/arvados-sdk-1.1-jar-with-dependencies.jar \
ArvadosSDKJavaExampleWithPrompt</code>
</pre>
</notextile>
collection, rest = self.get_collection(fn)
if collection:
if rest:
- return isinstance(collection.find(rest), arvados.collection.Collection)
+ return isinstance(collection.find(rest), arvados.collection.RichCollectionBase)
else:
return True
else:
dir = collection
if dir is None:
raise IOError(errno.ENOENT, "Directory '%s' in '%s' not found" % (rest, collection.portable_data_hash()))
- if not isinstance(dir, arvados.collection.Collection):
+ if not isinstance(dir, arvados.collection.RichCollectionBase):
raise IOError(errno.ENOENT, "Path '%s' in '%s' is not a Directory" % (rest, collection.portable_data_hash()))
return [abspath(l, fn) for l in dir.keys()]
else:
# Make sure to update arvados/build/run-build-packages.sh as well
# when updating the cwltool version pin.
install_requires=[
- 'cwltool==1.0.20160811184335',
+ 'cwltool==1.0.20160901133827',
'arvados-python-client>=0.1.20160714204738',
],
data_files=[
import functools
import cwltool.process
+from schema_salad.ref_resolver import Loader
+
if not os.getenv('ARVADOS_DEBUG'):
logging.getLogger('arvados.cwl-runner').setLevel(logging.WARN)
logging.getLogger('arvados.arv-run').setLevel(logging.WARN)
}
make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="containers", avsc_names=avsc_names,
- basedir="", make_fs_access=make_fs_access)
+ basedir="", make_fs_access=make_fs_access, loader=Loader({}))
arvtool.formatgraph = None
for j in arvtool.job({}, mock.MagicMock(), basedir="", name="test_run",
make_fs_access=make_fs_access, tmpdir="/tmp"):
}
make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="containers",
- avsc_names=avsc_names, make_fs_access=make_fs_access)
+ avsc_names=avsc_names, make_fs_access=make_fs_access,
+ loader=Loader({}))
arvtool.formatgraph = None
for j in arvtool.job({}, mock.MagicMock(), basedir="", name="test_resource_requirements",
make_fs_access=make_fs_access, tmpdir="/tmp"):
import functools
import cwltool.process
+from schema_salad.ref_resolver import Loader
+
if not os.getenv('ARVADOS_DEBUG'):
logging.getLogger('arvados.cwl-runner').setLevel(logging.WARN)
logging.getLogger('arvados.arv-run').setLevel(logging.WARN)
"arguments": [{"valueFrom": "$(runtime.outdir)"}]
}
make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
- arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names, basedir="", make_fs_access=make_fs_access)
+ arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
+ basedir="", make_fs_access=make_fs_access, loader=Loader({}))
arvtool.formatgraph = None
for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
j.run()
"baseCommand": "ls"
}
make_fs_access=functools.partial(arvados_cwl.CollectionFsAccess, api_client=runner.api)
- arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names, make_fs_access=make_fs_access)
+ arvtool = arvados_cwl.ArvadosCommandTool(runner, tool, work_api="jobs", avsc_names=avsc_names,
+ make_fs_access=make_fs_access, loader=Loader({}))
arvtool.formatgraph = None
for j in arvtool.job({}, mock.MagicMock(), basedir="", make_fs_access=make_fs_access):
j.run()
*
*/
-import org.arvados.sdk.java.Arvados;
+import org.arvados.sdk.Arvados;
import java.io.File;
import java.util.HashMap;
}
}
}
-}
\ No newline at end of file
+}
* @author radhika
*/
-import org.arvados.sdk.java.Arvados;
+import org.arvados.sdk.Arvados;
import java.io.File;
import java.util.HashMap;
-package org.arvados.sdk.java;
+package org.arvados.sdk;
import java.io.File;
import java.io.FileInputStream;
Map<String, Object> params = new HashMap<String, Object>();
params.put("pipeline_template", new String(data));
Map response = arv.call("pipeline_templates", "create", params);
-
assertEquals("Expected kind to be arvados#pipelineTemplate", "arvados#pipelineTemplate", response.get("kind"));
String uuid = (String)response.get("uuid");
assertNotNull("Expected uuid for pipeline template", uuid);
assertTrue("Expected some optional parameters for list method for users", parameters.get("optional").contains("filters"));
}
-}
\ No newline at end of file
+}
{
- "name":"first pipeline",
"components":{
"do_hash":{
"script":"hash.py",
end
require 'load_param'
-require 'record_filters'
class ApplicationController < ActionController::Base
include CurrentApiClient
include ThemesForRails::ActionController
include LoadParam
- include RecordFilters
respond_to :json
protect_from_forgery
def apply_filters model_class=nil
model_class ||= self.model_class
- ft = record_filters @filters, model_class
- if ft[:cond_out].any?
- @objects = @objects.where('(' + ft[:cond_out].join(') AND (') + ')',
- *ft[:param_out])
- end
+ @objects = model_class.apply_filters(@objects, @filters)
end
def apply_where_limit_order_params model_class=nil
request_filters = @filters
klasses = [Group,
- Job, PipelineInstance, PipelineTemplate, ContainerRequest,
+ Job, PipelineInstance, PipelineTemplate, ContainerRequest, Workflow,
Collection,
Human, Specimen, Trait]
return super if !params[:find_or_create]
return if !load_filters_param
- if @filters.empty? # Translate older creation parameters into filters.
- @filters =
- [["repository", "=", resource_attrs[:repository]],
- ["script", "=", resource_attrs[:script]],
- ["script_version", "not in git", params[:exclude_script_versions]],
- ].reject { |filter| filter.last.nil? or filter.last.empty? }
- if !params[:minimum_script_version].blank?
- @filters << ["script_version", "in git",
- params[:minimum_script_version]]
- else
- add_default_git_filter("script_version", resource_attrs[:repository],
- resource_attrs[:script_version])
- end
- if image_search = resource_attrs[:runtime_constraints].andand["docker_image"]
- if image_tag = resource_attrs[:runtime_constraints]["docker_image_tag"]
- image_search += ":#{image_tag}"
- end
- image_locator = Collection.
- for_latest_docker_image(image_search).andand.portable_data_hash
- else
- image_locator = nil
- end
- @filters << ["docker_image_locator", "=", image_locator]
- if sdk_version = resource_attrs[:runtime_constraints].andand["arvados_sdk_version"]
- add_default_git_filter("arvados_sdk_version", "arvados", sdk_version)
- end
- begin
- load_job_specific_filters
- rescue ArgumentError => error
- return send_error(error.message)
- end
- end
-
- # Check specified filters for some reasonableness.
- filter_names = @filters.map { |f| f.first }.uniq
- ["repository", "script"].each do |req_filter|
- if not filter_names.include?(req_filter)
- return send_error("#{req_filter} filter required")
- end
- end
-
- # Search for a reusable Job, and return it if found.
- @objects = Job.
- readable_by(current_user).
- where('state = ? or (owner_uuid = ? and state in (?))',
- Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
- where('script_parameters_digest = ?', Job.sorted_hash_digest(resource_attrs[:script_parameters])).
- where('nondeterministic is distinct from ?', true).
- order('state desc, created_at') # prefer Running jobs over Queued
- apply_filters
- @object = nil
- incomplete_job = nil
- @objects.each do |j|
- if j.state != Job::Complete
- # We'll use this if we don't find a job that has completed
- incomplete_job ||= j
- next
- end
-
- if @object == false
- # We have already decided not to reuse any completed job
- next
- elsif @object
- if @object.output != j.output
- # If two matching jobs produced different outputs, run a new
- # job (or use one that's already running/queued) instead of
- # choosing one arbitrarily.
- @object = false
- end
- # ...and that's the only thing we need to do once we've chosen
- # an @object to reuse.
- elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
- # As soon as the output we will end up returning (if any) is
- # decided, check whether it will be visible to the user; if
- # not, any further investigation of reusable jobs is futile.
- return super
- else
- @object = j
- end
+ begin
+ @object = Job.find_reusable(resource_attrs, params, @filters, @read_users)
+ rescue ArgumentError => error
+ return send_error(error.message)
end
- @object ||= incomplete_job
if @object
show
else
protected
- def add_default_git_filter(attr_name, repo_name, refspec)
- # Add a filter to @filters for `attr_name` = the latest commit available
- # in `repo_name` at `refspec`. No filter is added if refspec can't be
- # resolved.
- commits = Commit.find_commit_range(repo_name, nil, refspec, nil)
- if commit_hash = commits.first
- @filters << [attr_name, "=", commit_hash]
- end
- end
-
- def load_job_specific_filters
- # Convert Job-specific @filters entries into general SQL filters.
- script_info = {"repository" => nil, "script" => nil}
- git_filters = Hash.new do |hash, key|
- hash[key] = {"max_version" => "HEAD", "exclude_versions" => []}
- end
- @filters.select! do |(attr, operator, operand)|
- if (script_info.has_key? attr) and (operator == "=")
- if script_info[attr].nil?
- script_info[attr] = operand
- elsif script_info[attr] != operand
- raise ArgumentError.new("incompatible #{attr} filters")
- end
- end
- case operator
- when "in git"
- git_filters[attr]["min_version"] = operand
- false
- when "not in git"
- git_filters[attr]["exclude_versions"] += Array.wrap(operand)
- false
- when "in docker", "not in docker"
- image_hashes = Array.wrap(operand).flat_map do |search_term|
- image_search, image_tag = search_term.split(':', 2)
- Collection.
- find_all_for_docker_image(image_search, image_tag, @read_users).
- map(&:portable_data_hash)
- end
- @filters << [attr, operator.sub(/ docker$/, ""), image_hashes]
- false
- else
- true
- end
- end
-
- # Build a real script_version filter from any "not? in git" filters.
- git_filters.each_pair do |attr, filter|
- case attr
- when "script_version"
- script_info.each_pair do |key, value|
- if value.nil?
- raise ArgumentError.new("script_version filter needs #{key} filter")
- end
- end
- filter["repository"] = script_info["repository"]
- begin
- filter["max_version"] = resource_attrs[:script_version]
- rescue
- # Using HEAD, set earlier by the hash default, is fine.
- end
- when "arvados_sdk_version"
- filter["repository"] = "arvados"
- else
- raise ArgumentError.new("unknown attribute for git filter: #{attr}")
- end
- revisions = Commit.find_commit_range(filter["repository"],
- filter["min_version"],
- filter["max_version"],
- filter["exclude_versions"])
- if revisions.empty?
- raise ArgumentError.
- new("error searching #{filter['repository']} from " +
- "'#{filter['min_version']}' to '#{filter['max_version']}', " +
- "excluding #{filter['exclude_versions']}")
- end
- @filters.append([attr, "in", revisions])
- end
- end
-
def load_filters_param
begin
super
- load_job_specific_filters
+ attrs = resource_attrs rescue {}
+ @filters = Job.load_job_specific_filters attrs, @filters, @read_users
rescue ArgumentError => error
send_error(error.message)
false
require 'has_uuid'
+require 'record_filters'
class ArvadosModel < ActiveRecord::Base
self.abstract_class = true
include CurrentApiClient # current_user, current_api_client, etc.
include DbCurrentTime
+ extend RecordFilters
attr_protected :created_at
attr_protected :modified_by_user_uuid
"to_tsvector('english', ' ' || #{parts.join(" || ' ' || ")})"
end
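+  # Narrow an ActiveRecord relation using API-style filters (e.g.
+  # [['uuid', 'is_a', [...]]]) and return the resulting relation.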
+ def self.apply_filters query, filters
+ ft = record_filters filters, self
+ if not ft[:cond_out].any?
+ return query
+ end
+ query.where('(' + ft[:cond_out].join(') AND (') + ')',
+ *ft[:param_out])
+ end
+
protected
def ensure_ownership_path_leads_to_user
include HasUuid
include KindAndEtag
include CommonApiTemplate
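+  # Class methods below (e.g. find_reusable) need current_user from
+  # CurrentApiClient.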
+ extend CurrentApiClient
serialize :components, Hash
attr_protected :arvados_sdk_version, :docker_image_locator
serialize :script_parameters, Hash
super - ["script_parameters_digest"]
end
+ def self.load_job_specific_filters attrs, orig_filters, read_users
+    # Convert Job-specific filter entries ("in git", "not in git",
+    # "in docker", "not in docker") into general SQL filters and
+    # return the resulting filter list.
+ script_info = {"repository" => nil, "script" => nil}
+ git_filters = Hash.new do |hash, key|
+ hash[key] = {"max_version" => "HEAD", "exclude_versions" => []}
+ end
+ filters = []
+ orig_filters.each do |attr, operator, operand|
+ if (script_info.has_key? attr) and (operator == "=")
+ if script_info[attr].nil?
+ script_info[attr] = operand
+ elsif script_info[attr] != operand
+ raise ArgumentError.new("incompatible #{attr} filters")
+ end
+ end
+ case operator
+ when "in git"
+ git_filters[attr]["min_version"] = operand
+ when "not in git"
+ git_filters[attr]["exclude_versions"] += Array.wrap(operand)
+ when "in docker", "not in docker"
+ image_hashes = Array.wrap(operand).flat_map do |search_term|
+ image_search, image_tag = search_term.split(':', 2)
+ Collection.
+ find_all_for_docker_image(image_search, image_tag, read_users).
+ map(&:portable_data_hash)
+ end
+ filters << [attr, operator.sub(/ docker$/, ""), image_hashes]
+ else
+ filters << [attr, operator, operand]
+ end
+ end
+
+ # Build a real script_version filter from any "not? in git" filters.
+ git_filters.each_pair do |attr, filter|
+ case attr
+ when "script_version"
+ script_info.each_pair do |key, value|
+ if value.nil?
+ raise ArgumentError.new("script_version filter needs #{key} filter")
+ end
+ end
+ filter["repository"] = script_info["repository"]
+ if attrs[:script_version]
+ filter["max_version"] = attrs[:script_version]
+ else
+ # Using HEAD, set earlier by the hash default, is fine.
+ end
+ when "arvados_sdk_version"
+ filter["repository"] = "arvados"
+ else
+ raise ArgumentError.new("unknown attribute for git filter: #{attr}")
+ end
+ revisions = Commit.find_commit_range(filter["repository"],
+ filter["min_version"],
+ filter["max_version"],
+ filter["exclude_versions"])
+ if revisions.empty?
+ raise ArgumentError.
+ new("error searching #{filter['repository']} from " +
+ "'#{filter['min_version']}' to '#{filter['max_version']}', " +
+ "excluding #{filter['exclude_versions']}")
+ end
+ filters.append([attr, "in", revisions])
+ end
+
+ filters
+ end
+
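+  # Search for an existing Job that satisfies the given attributes and
+  # filters and can be reused to answer a "create" request, preferring
+  # completed jobs over queued/running ones. Returns nil if none is
+  # found.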
+ def self.find_reusable attrs, params, filters, read_users
+ if filters.empty? # Translate older creation parameters into filters.
+ filters =
+ [["repository", "=", attrs[:repository]],
+ ["script", "=", attrs[:script]],
+ ["script_version", "not in git", params[:exclude_script_versions]],
+ ].reject { |filter| filter.last.nil? or filter.last.empty? }
+ if !params[:minimum_script_version].blank?
+ filters << ["script_version", "in git",
+ params[:minimum_script_version]]
+ else
+ filters += default_git_filters("script_version", attrs[:repository],
+ attrs[:script_version])
+ end
+ if image_search = attrs[:runtime_constraints].andand["docker_image"]
+ if image_tag = attrs[:runtime_constraints]["docker_image_tag"]
+ image_search += ":#{image_tag}"
+ end
+ image_locator = Collection.
+ for_latest_docker_image(image_search).andand.portable_data_hash
+ else
+ image_locator = nil
+ end
+ filters << ["docker_image_locator", "=", image_locator]
+ if sdk_version = attrs[:runtime_constraints].andand["arvados_sdk_version"]
+ filters += default_git_filters("arvados_sdk_version", "arvados", sdk_version)
+ end
+ filters = load_job_specific_filters(attrs, filters, read_users)
+ end
+
+ # Check specified filters for some reasonableness.
+ filter_names = filters.map { |f| f.first }.uniq
+ ["repository", "script"].each do |req_filter|
+ if not filter_names.include?(req_filter)
+        raise ArgumentError.new("#{req_filter} filter required")
+ end
+ end
+
+ # Search for a reusable Job, and return it if found.
+ candidates = Job.
+ readable_by(current_user).
+ where('state = ? or (owner_uuid = ? and state in (?))',
+ Job::Complete, current_user.uuid, [Job::Queued, Job::Running]).
+ where('script_parameters_digest = ?', Job.sorted_hash_digest(attrs[:script_parameters])).
+ where('nondeterministic is distinct from ?', true).
+ order('state desc, created_at') # prefer Running jobs over Queued
+ candidates = apply_filters candidates, filters
+ chosen = nil
+ incomplete_job = nil
+ candidates.each do |j|
+ if j.state != Job::Complete
+ # We'll use this if we don't find a job that has completed
+ incomplete_job ||= j
+ next
+ end
+
+ if chosen == false
+ # We have already decided not to reuse any completed job
+ next
+ elsif chosen
+ if chosen.output != j.output
+ # If two matching jobs produced different outputs, run a new
+ # job (or use one that's already running/queued) instead of
+ # choosing one arbitrarily.
+ chosen = false
+ end
+ # ...and that's the only thing we need to do once we've chosen
+ # a job to reuse.
+ elsif !Collection.readable_by(current_user).find_by_portable_data_hash(j.output)
+ # As soon as the output we will end up returning (if any) is
+ # decided, check whether it will be visible to the user; if
+ # not, any further investigation of reusable jobs is futile.
+ chosen = false
+ else
+ chosen = j
+ end
+ end
+ chosen || incomplete_job
+ end
+
+ def self.default_git_filters(attr_name, repo_name, refspec)
+    # Return a one-element filter list constraining `attr_name` to the
+    # latest commit available in `repo_name` at `refspec`, or an empty
+    # list if refspec can't be resolved.
+ commits = Commit.find_commit_range(repo_name, nil, refspec, nil)
+ if commit_hash = commits.first
+ [[attr_name, "=", commit_hash]]
+ else
+ []
+ end
+ end
+
protected
def self.sorted_hash_digest h
Digest::MD5.hexdigest(Oj.dump(deep_sort_hash(h)))
end
- def self.deep_sort_hash h
- return h unless h.is_a? Hash
- h.sort.collect do |k, v|
- [k, deep_sort_hash(v)]
- end.to_h
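+  # Sort hash keys recursively (descending into arrays as well) so
+  # that logically equal script_parameters produce the same digest.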
+ def self.deep_sort_hash x
+ if x.is_a? Hash
+ x.sort.collect do |k, v|
+ [k, deep_sort_hash(v)]
+ end.to_h
+ elsif x.is_a? Array
+ x.collect { |v| deep_sort_hash(v) }
+ else
+ x
+ end
end
def foreign_key_attributes
# Default lifetime for ephemeral collections: 2 weeks.
default_trash_lifetime: 1209600
+ # Maximum characters of (JSON-encoded) query parameters to include
+ # in each request log entry. When params exceed this size, they will
+ # be JSON-encoded, truncated to this size, and logged as
+ # params_truncated.
+ max_request_log_params_size: 2000
+
# Maximum size (in bytes) allowed for a single API request. This
# limit is published in the discovery document for use by clients.
# Note: You must separately configure the upstream web server or
exceptions = %w(controller action format id)
params = event.payload[:params].except(*exceptions)
params_s = Oj.dump(params)
- if params_s.length > 1000
- { params_truncated: params_s[0..1000] + "[...]" }
+ if params_s.length > Rails.configuration.max_request_log_params_size
+ { params_truncated: params_s[0..Rails.configuration.max_request_log_params_size] + "[...]" }
else
{ params: params }
end
--- /dev/null
+class RepairScriptParametersDigest < ActiveRecord::Migration
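+  # Recompute script_parameters_digest for any existing job whose
+  # stored digest no longer matches the current digest algorithm.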
+ def up
+ Job.find_each do |j|
+ have = j.script_parameters_digest
+ want = j.update_script_parameters_digest
+ if have != want
+ # where().update_all() skips validations, event logging, and
+ # timestamp updates, and just runs SQL. (This change is
+ # invisible to clients.)
+ Job.where('id=?', j.id).update_all(script_parameters_digest: want)
+ end
+ end
+ end
+
+ def down
+ end
+end
INSERT INTO schema_migrations (version) VALUES ('20160819195557');
-INSERT INTO schema_migrations (version) VALUES ('20160819195725');
\ No newline at end of file
+INSERT INTO schema_migrations (version) VALUES ('20160819195725');
+
+INSERT INTO schema_migrations (version) VALUES ('20160901210110');
\ No newline at end of file
factory :api_client do
is_trusted false
to_create do |instance|
- act_as_system_user do
+ CurrentApiClientHelper.act_as_system_user do
instance.save!
end
end
end
to_create do |instance|
- act_as_user instance.user do
+ CurrentApiClientHelper.act_as_user instance.user do
instance.save!
end
end
-include CurrentApiClient
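+# Expose act_as_system_user / act_as_user to the factories below
+# without mixing CurrentApiClient into the top-level namespace.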
+class CurrentApiClientHelper
+ extend CurrentApiClient
+end
FactoryGirl.define do
factory :user do
join_groups []
end
after :create do |user, evaluator|
- act_as_system_user do
+ CurrentApiClientHelper.act_as_system_user do
evaluator.join_groups.each do |g|
Link.create!(tail_uuid: user.uuid,
head_uuid: g.uuid,
factory :active_user do
is_active true
after :create do |user|
- act_as_system_user do
+ CurrentApiClientHelper.act_as_system_user do
Link.create!(tail_uuid: user.uuid,
head_uuid: Group.where('uuid ~ ?', '-f+$').first.uuid,
link_class: 'permission',
end
end
to_create do |instance|
- act_as_system_user do
+ CurrentApiClientHelper.act_as_system_user do
instance.save!
end
end
workflow_with_input_specifications:
uuid: zzzzz-7fd4e-validwithinputs
- owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ owner_uuid: zzzzz-j7d0g-zhxawtyetzwc5f0
name: Workflow with input specifications
description: this workflow has inputs specified
created_at: <%= 1.minute.ago.to_s(:db) %>
errors = json_response.fetch("errors", [])
assert(errors.any?, "no errors assigned from #{params}")
refute(errors.any? { |msg| msg =~ /^#<[A-Za-z]+: / },
- "errors include raw exception")
+ "errors include raw exception: #{errors.inspect}")
errors
end
fixtures :all
include ArvadosTestSupport
+ include CurrentApiClient
setup do
Rails.logger.warn "\n\n#{'=' * 70}\n#{self.class}\##{method_name}\n#{'-' * 70}\n\n"
"wrong script_parameters_digest for #{j.uuid}")
end
end
+
+ test 'deep_sort_hash on array of hashes' do
+ a = {'z' => [[{'a' => 'a', 'b' => 'b'}]]}
+ b = {'z' => [[{'b' => 'b', 'a' => 'a'}]]}
+ assert_equal Job.deep_sort_hash(a).to_json, Job.deep_sort_hash(b).to_json
+ end
end
re.search(r'\+A[0-9a-f]+@([0-9a-f]+)', got_loc).group(1),
16)
self.assertGreaterEqual(
- got_exp, want_exp-1,
+ got_exp, want_exp-2,
msg='now+2w = {:x}, but fuse fetched locator {} (old_exp {:x})'.format(
want_exp, got_loc, old_exp))
self.assertLessEqual(