owner_uuid: @object.uuid
}
})
- redirect_to root_url(api_token: resp[:api_token])
+ redirect_to root_url(api_token: "v2/#{resp[:uuid]}/#{resp[:api_token]}")
end
def home
assert_match /\/users\/welcome/, @response.redirect_url
end
+ test "'log in as user' feature uses a v2 token" do
+ post :sudo, {
+ id: api_fixture('users')['active']['uuid']
+ }, session_for('admin_trustedclient')
+ assert_response :redirect
+ assert_match /api_token=v2%2F/, @response.redirect_url
+ end
+
test "request shell access" do
user = api_fixture('users')['spectator']
#
# SPDX-License-Identifier: AGPL-3.0
+set -e
+
+arvados-cwl-runner --version
+
exec python <<EOF
import arvados_cwl
print "arvados-cwl-runner version", arvados_cwl.__version__
# clean up the docker build environment
cd "$WORKSPACE"
+if [[ -z "$ARVADOS_BUILDING_VERSION" ]] && ! [[ -z "$version_tag" ]]; then
+ ARVADOS_BUILDING_VERSION="$version_tag"
+ ARVADOS_BUILDING_ITERATION="1"
+fi
+
python_sdk_ts=$(cd sdk/python && timestamp_from_git)
cwl_runner_ts=$(cd sdk/cwl && timestamp_from_git)
echo cwl_runner_version $cwl_runner_version python_sdk_version $python_sdk_version
+if [[ "${python_sdk_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+ python_sdk_version="${python_sdk_version}-2"
+else
+ python_sdk_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
+cwl_runner_version_orig=$cwl_runner_version
+
+if [[ "${cwl_runner_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
+ cwl_runner_version="${cwl_runner_version}-4"
+else
+ cwl_runner_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
+fi
+
cd docker/jobs
docker build $NOCACHE \
- --build-arg python_sdk_version=${python_sdk_version}-2 \
- --build-arg cwl_runner_version=${cwl_runner_version}-4 \
- -t arvados/jobs:$cwl_runner_version .
+ --build-arg python_sdk_version=${python_sdk_version} \
+ --build-arg cwl_runner_version=${cwl_runner_version} \
+ -t arvados/jobs:$cwl_runner_version_orig .
ECODE=$?
FORCE=-f
fi
if ! [[ -z "$version_tag" ]]; then
- docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:"$version_tag"
+ docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:"$version_tag"
else
- docker tag $FORCE arvados/jobs:$cwl_runner_version arvados/jobs:latest
+ docker tag $FORCE arvados/jobs:$cwl_runner_version_orig arvados/jobs:latest
fi
ECODE=$?
if ! [[ -z "$version_tag" ]]; then
docker_push arvados/jobs:"$version_tag"
else
- docker_push arvados/jobs:$cwl_runner_version
+ docker_push arvados/jobs:$cwl_runner_version_orig
docker_push arvados/jobs:latest
fi
title "upload arvados images finished (`timer`)"
TODO: extract this information based on git commit messages and generate changelogs / release notes automatically.
{% endcomment %}
+h3. v1.3.0 (2018-12-05)
+
+This release includes several database migrations, which will be executed automatically as part of the API server upgrade. On large Arvados installations, these migrations will take a while. We've seen the upgrade take 30 minutes or more on installations with a lot of collections.
+
+The @arvados-controller@ component now requires the /etc/arvados/config.yml file to be present. See <a href="{{ site.baseurl }}/install/install-controller.html#configuration">the @arvados-controller@ installation instructions</a>.
+
+Support for the deprecated "jobs" API is broken in this release. Users who rely on it should not upgrade. This will be fixed in an upcoming 1.3.1 patch release, however users are "encouraged to migrate":upgrade-crunch2.html as support for the "jobs" API will be dropped in an upcoming release. Users who are already using the "containers" API are not affected.
+
h3. v1.2.1 (2018-11-26)
There are no special upgrade notes for this release.
</code></pre>
</notextile>
-h3. Configure arvados-controller
+h3(#configuration). Configure arvados-controller
Create the cluster configuration file @/etc/arvados/config.yml@ using the following template.
# apt.arvados.org
-deb http://apt.arvados.org/ jessie main
deb http://apt.arvados.org/ jessie-dev main
help=argparse.SUPPRESS)
parser.add_argument("--thread-count", type=int,
- default=4, help="Number of threads to use for job submit and output collection.")
+ default=1, help="Number of threads to use for job submit and output collection.")
parser.add_argument("--http-timeout", type=int,
default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")
"properties": {}
}
- if self.tool.tool.get("id", "").startswith("keep:"):
- sp = self.tool.tool["id"].split('/')
+ if self.embedded_tool.tool.get("id", "").startswith("keep:"):
+ sp = self.embedded_tool.tool["id"].split('/')
workflowcollection = sp[0][5:]
workflowname = "/".join(sp[1:])
workflowpath = "/var/lib/cwl/workflow/%s" % workflowname
"portable_data_hash": "%s" % workflowcollection
}
else:
- packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+ packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
workflowpath = "/var/lib/cwl/workflow.json#main"
container_req["mounts"]["/var/lib/cwl/workflow.json"] = {
"kind": "json",
"content": packed
}
- if self.tool.tool.get("id", "").startswith("arvwf:"):
- container_req["properties"]["template_uuid"] = self.tool.tool["id"][6:33]
+ if self.embedded_tool.tool.get("id", "").startswith("arvwf:"):
+ container_req["properties"]["template_uuid"] = self.embedded_tool.tool["id"][6:33]
# --local means execute the workflow instead of submitting a container request
a pipeline template or pipeline instance.
"""
- if self.tool.tool["id"].startswith("keep:"):
- self.job_order["cwl:tool"] = self.tool.tool["id"][5:]
+ if self.embedded_tool.tool["id"].startswith("keep:"):
+ self.job_order["cwl:tool"] = self.embedded_tool.tool["id"][5:]
else:
- packed = packed_workflow(self.arvrunner, self.tool, self.merged_map)
+ packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map)
wf_pdh = upload_workflow_collection(self.arvrunner, self.name, packed)
self.job_order["cwl:tool"] = "%s/workflow.cwl#main" % wf_pdh
}
def __init__(self, runner, tool, job_order, enable_reuse, uuid,
- submit_runner_ram=0, name=None, merged_map=None):
+ submit_runner_ram=0, name=None, merged_map=None,
+ loadingContext=None):
self.runner = runner
- self.tool = tool
+ self.embedded_tool = tool
self.job = RunnerJob(
runner=runner,
tool=tool,
- job_order=job_order,
enable_reuse=enable_reuse,
output_name=None,
output_tags=None,
submit_runner_ram=submit_runner_ram,
name=name,
- merged_map=merged_map)
+ merged_map=merged_map,
+ loadingContext=loadingContext)
+ self.job.job_order = job_order
self.uuid = uuid
def pipeline_component_spec(self):
job_params = spec['script_parameters']
spec['script_parameters'] = {}
- for param in self.tool.tool['inputs']:
+ for param in self.embedded_tool.tool['inputs']:
param = copy.deepcopy(param)
# Data type and "required" flag...
#
# SPDX-License-Identifier: Apache-2.0
-from cwltool.command_line_tool import CommandLineTool
+from cwltool.command_line_tool import CommandLineTool, ExpressionTool
from cwltool.builder import Builder
from .arvjob import ArvadosJob
from .arvcontainer import ArvadosContainer
runtimeContext.tmpdir = "$(task.tmpdir)"
runtimeContext.docker_tmpdir = "$(task.tmpdir)"
return super(ArvadosCommandTool, self).job(joborder, output_callback, runtimeContext)
+
+class ArvadosExpressionTool(ExpressionTool):
+ def __init__(self, arvrunner, toolpath_object, loadingContext):
+ super(ArvadosExpressionTool, self).__init__(toolpath_object, loadingContext)
+ self.arvrunner = arvrunner
+
+ def job(self,
+ job_order, # type: Mapping[Text, Text]
+ output_callback, # type: Callable[[Any, Any], Any]
+ runtimeContext # type: RuntimeContext
+ ):
+ return super(ArvadosExpressionTool, self).job(job_order, self.arvrunner.get_wrapped_callback(output_callback), runtimeContext)
raise WorkflowException("Non-top-level ResourceRequirement in single container cannot have expressions")
if not dyn:
self.static_resource_req.append(req)
+ if req["class"] == "DockerRequirement":
+ if "http://arvados.org/cwl#dockerCollectionPDH" in req:
+ del req["http://arvados.org/cwl#dockerCollectionPDH"]
visit_class(packed["$graph"], ("Workflow", "CommandLineTool"), visit)
arvargs.output_name = output_name
arvargs.output_tags = output_tags
arvargs.thread_count = 1
+ arvargs.collection_cache_size = None
runner = arvados_cwl.ArvCwlExecutor(api_client=arvados.safeapi.ThreadSafeApiCache(
api_params={"model": OrderedJsonModel()}, keep_params={"num_retries": 4}),
from .arvcontainer import RunnerContainer
from .arvjob import RunnerJob, RunnerTemplate
from .runner import Runner, upload_docker, upload_job_order, upload_workflow_deps
-from .arvtool import ArvadosCommandTool, validate_cluster_target
+from .arvtool import ArvadosCommandTool, validate_cluster_target, ArvadosExpressionTool
from .arvworkflow import ArvadosWorkflow, upload_workflow
from .fsaccess import CollectionFsAccess, CollectionFetcher, collectionResolver, CollectionCache, pdh_size
from .perf import Perf
return ArvadosCommandTool(self, toolpath_object, loadingContext)
elif "class" in toolpath_object and toolpath_object["class"] == "Workflow":
return ArvadosWorkflow(self, toolpath_object, loadingContext)
+ elif "class" in toolpath_object and toolpath_object["class"] == "ExpressionTool":
+ return ArvadosExpressionTool(self, toolpath_object, loadingContext)
else:
- return cwltool.workflow.default_make_tool(toolpath_object, loadingContext)
+ raise Exception("Unknown tool %s" % toolpath_object.get("class"))
def output_callback(self, out, processStatus):
with self.workflow_eval_lock:
uuid=existing_uuid,
submit_runner_ram=runtimeContext.submit_runner_ram,
name=runtimeContext.name,
- merged_map=merged_map)
+ merged_map=merged_map,
+ loadingContext=loadingContext)
tmpl.save()
# cwltool.main will write our return value to stdout.
return (tmpl.uuid, "success")
if self.work_api == "containers":
if tool.tool["class"] == "CommandLineTool" and runtimeContext.wait and (not runtimeContext.always_submit_runner):
runtimeContext.runnerjob = tool.tool["id"]
- runnerjob = tool.job(job_order,
- self.output_callback,
- runtimeContext).next()
else:
- runnerjob = RunnerContainer(self, tool, job_order, runtimeContext.enable_reuse,
+ tool = RunnerContainer(self, tool, loadingContext, runtimeContext.enable_reuse,
self.output_name,
self.output_tags,
submit_runner_ram=runtimeContext.submit_runner_ram,
collection_cache_size=runtimeContext.collection_cache_size,
collection_cache_is_default=self.should_estimate_cache_size)
elif self.work_api == "jobs":
- runnerjob = RunnerJob(self, tool, job_order, runtimeContext.enable_reuse,
+ tool = RunnerJob(self, tool, loadingContext, runtimeContext.enable_reuse,
self.output_name,
self.output_tags,
submit_runner_ram=runtimeContext.submit_runner_ram,
"state": "RunningOnClient"}).execute(num_retries=self.num_retries)
logger.info("Pipeline instance %s", self.pipeline["uuid"])
- if runnerjob and not runtimeContext.wait:
- submitargs = runtimeContext.copy()
- submitargs.submit = False
- runnerjob.run(submitargs)
+ if runtimeContext.cwl_runner_job is not None:
+ self.uuid = runtimeContext.cwl_runner_job.get('uuid')
+
+ jobiter = tool.job(job_order,
+ self.output_callback,
+ runtimeContext)
+
+ if runtimeContext.submit and not runtimeContext.wait:
+ runnerjob = jobiter.next()
+ runnerjob.run(runtimeContext)
return (runnerjob.uuid, "success")
current_container = arvados_cwl.util.get_current_container(self.api, self.num_retries, logger)
try:
self.workflow_eval_lock.acquire()
- if runnerjob:
- jobiter = iter((runnerjob,))
- else:
- if runtimeContext.cwl_runner_job is not None:
- self.uuid = runtimeContext.cwl_runner_job.get('uuid')
- jobiter = tool.job(job_order,
- self.output_callback,
- runtimeContext)
# Holds the lock while this code runs and releases it when
# it is safe to do so in self.workflow_eval_lock.wait(),
if self.pipeline:
self.api.pipeline_instances().update(uuid=self.pipeline["uuid"],
body={"state": "Failed"}).execute(num_retries=self.num_retries)
- if runnerjob and runnerjob.uuid and self.work_api == "containers":
- self.api.container_requests().update(uuid=runnerjob.uuid,
+ if runtimeContext.submit and isinstance(tool, Runner):
+ runnerjob = tool
+ if runnerjob.uuid and self.work_api == "containers":
+ self.api.container_requests().update(uuid=runnerjob.uuid,
body={"priority": "0"}).execute(num_retries=self.num_retries)
finally:
self.workflow_eval_lock.release()
if self.final_output is None:
raise WorkflowException("Workflow did not return a result.")
- if runtimeContext.submit and isinstance(runnerjob, Runner):
- logger.info("Final output collection %s", runnerjob.final_output)
+ if runtimeContext.submit and isinstance(tool, Runner):
+ logger.info("Final output collection %s", tool.final_output)
else:
if self.output_name is None:
self.output_name = "Output of %s" % (shortname(tool.tool["id"]))
from cwltool.command_line_tool import CommandLineTool
import cwltool.workflow
-from cwltool.process import scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
+from cwltool.process import scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname, Process
from cwltool.load_tool import fetch_document
from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
from cwltool.utils import aslist
return collection.portable_data_hash()
-class Runner(object):
+class Runner(Process):
"""Base class for runner processes, which submit an instance of
arvados-cwl-runner and wait for the final result."""
- def __init__(self, runner, tool, job_order, enable_reuse,
+ def __init__(self, runner, tool, loadingContext, enable_reuse,
output_name, output_tags, submit_runner_ram=0,
name=None, on_error=None, submit_runner_image=None,
intermediate_output_ttl=0, merged_map=None,
priority=None, secret_store=None,
collection_cache_size=256,
collection_cache_is_default=True):
+
+ super(Runner, self).__init__(tool.tool, loadingContext)
+
self.arvrunner = runner
- self.tool = tool
- self.job_order = job_order
+ self.embedded_tool = tool
+ self.job_order = None
self.running = False
if enable_reuse:
# If reuse is permitted by command line arguments but
# disabled by the workflow itself, disable it.
- reuse_req, _ = self.tool.get_requirement("http://arvados.org/cwl#ReuseRequirement")
+ reuse_req, _ = self.embedded_tool.get_requirement("http://arvados.org/cwl#ReuseRequirement")
if reuse_req:
enable_reuse = reuse_req["enableReuse"]
self.enable_reuse = enable_reuse
        self.submit_runner_ram = 1024  # default 1 GiB
self.collection_cache_size = collection_cache_size
- runner_resource_req, _ = self.tool.get_requirement("http://arvados.org/cwl#WorkflowRunnerResources")
+ runner_resource_req, _ = self.embedded_tool.get_requirement("http://arvados.org/cwl#WorkflowRunnerResources")
if runner_resource_req:
if runner_resource_req.get("coresMin"):
self.submit_runner_cores = runner_resource_req["coresMin"]
self.merged_map = merged_map or {}
+ def job(self,
+ job_order, # type: Mapping[Text, Text]
+ output_callbacks, # type: Callable[[Any, Any], Any]
+ runtimeContext # type: RuntimeContext
+ ): # type: (...) -> Generator[Any, None, None]
+ self.job_order = job_order
+ self._init_job(job_order, runtimeContext)
+ yield self
+
def update_pipeline_component(self, record):
pass
'schema-salad==2.7.20181116024232',
'typing >= 3.6.4',
'ruamel.yaml >=0.15.54, <=0.15.77',
- 'arvados-python-client>=1.1.4.20180607143841',
+ 'arvados-python-client>=1.2.1.20181130020805',
'setuptools',
'ciso8601 >=1.0.6, <2.0.0',
'subprocess32>=3.5.1',
--- /dev/null
+{
+ "x": {
+ "class": "File",
+ "path": "input/blorp.txt"
+ },
+ "y": {
+ "class": "Directory",
+ "location": "keep:99999999999999999999999999999998+99",
+ "listing": [{
+ "class": "File",
+ "location": "keep:99999999999999999999999999999998+99/file1.txt"
+ }]
+ }
+}
'state': 'Committed',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
'name': 'submit_wf.cwl',
expect_container["command"] = [
'arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--disable-reuse', "--collection-cache-size=256",
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["command"] = [
'arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--disable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["use_existing"] = False
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256",
'--debug', '--on-error=stop',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256",
"--output-name="+output_name, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256", "--debug",
"--storage-classes=foo", '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256", '--debug',
'--on-error=continue',
"--intermediate-output-ttl=3600",
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256",
'--debug', '--on-error=continue',
"--trash-intermediate",
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256",
"--output-tags="+output_tags, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
'container_image': '999999999999999999999999999999d3+99',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow/expect_arvworkflow.cwl#main', '/var/lib/cwl/cwl.input.json'],
'cwd': '/var/spool/cwl',
'container_image': "999999999999999999999999999999d3+99",
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
'cwd': '/var/spool/cwl',
stubs.expect_container_request_uuid + '\n')
+ @stubs
+ def test_submit_missing_input(self, stubs):
+ capture_stdout = cStringIO.StringIO()
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ self.assertEqual(exited, 0)
+
+ capture_stdout = cStringIO.StringIO()
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job_missing.json"],
+ capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ self.assertEqual(exited, 1)
+
+
@stubs
def test_submit_container_project(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
expect_container["owner_uuid"] = project_uuid
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- "--eval-timeout=20", "--thread-count=4",
+ "--eval-timeout=20", "--thread-count=1",
'--enable-reuse', "--collection-cache-size=256", '--debug',
'--on-error=continue',
'--project-uuid='+project_uuid,
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=60.0', '--thread-count=4',
+ '--eval-timeout=60.0', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=256",
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=500",
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
}
expect_container['command'] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
- '--eval-timeout=20', '--thread-count=4',
+ '--eval-timeout=20', '--thread-count=1',
'--enable-reuse', "--collection-cache-size=512", '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
"--no-log-timestamps",
"--disable-validate",
"--eval-timeout=20",
- '--thread-count=4',
+ '--thread-count=1',
"--enable-reuse",
"--collection-cache-size=256",
'--debug',
if !include_trash
if sql_table != "api_client_authorizations"
# Only include records where the owner is not trashed
- sql_conds = "NOT EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE trashed = 1 AND "+
- "(#{sql_table}.owner_uuid = target_uuid)) #{exclude_trashed_records}"
+ sql_conds = "#{sql_table}.owner_uuid NOT IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE trashed = 1) #{exclude_trashed_records}"
end
end
else
# see issue 13208 for details.
# Match a direct read permission link from the user to the record uuid
- direct_check = "EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.uuid)"
+ direct_check = "#{sql_table}.uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check})"
# Match a read permission link from the user to the record's owner_uuid
owner_check = ""
if sql_table != "api_client_authorizations" and sql_table != "groups" then
- owner_check = "OR EXISTS(SELECT 1 FROM #{PERMISSION_VIEW} "+
- "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_uuid = #{sql_table}.owner_uuid AND target_owner_uuid IS NOT NULL) "
+ owner_check = "OR #{sql_table}.owner_uuid IN (SELECT target_uuid FROM #{PERMISSION_VIEW} "+
+ "WHERE user_uuid IN (:user_uuids) AND perm_level >= 1 #{trashed_check} AND target_owner_uuid IS NOT NULL) "
end
links_cond = ""
cast = serialized_attributes[column] ? '::text' : ''
"coalesce(#{column}#{cast},'')"
end
- "to_tsvector('english', #{parts.join(" || ' ' || ")})"
+ "to_tsvector('english', substr(#{parts.join(" || ' ' || ")}, 0, 8000))"
end
def self.apply_filters query, filters
candidates = candidates.where_serialized(:runtime_constraints, resolve_runtime_constraints(attrs[:runtime_constraints]), md5: true)
log_reuse_info(candidates) { "after filtering on runtime_constraints #{attrs[:runtime_constraints].inspect}" }
- candidates = candidates.where('runtime_user_uuid = ? or (runtime_user_uuid is NULL and runtime_auth_scopes is NULL)',
- attrs[:runtime_user_uuid])
- log_reuse_info(candidates) { "after filtering on runtime_user_uuid #{attrs[:runtime_user_uuid].inspect}" }
-
- candidates = candidates.where('runtime_auth_scopes = ? or (runtime_user_uuid is NULL and runtime_auth_scopes is NULL)',
- SafeJSON.dump(attrs[:runtime_auth_scopes].sort))
- log_reuse_info(candidates) { "after filtering on runtime_auth_scopes #{attrs[:runtime_auth_scopes].inspect}" }
-
log_reuse_info { "checking for state=Complete with readable output and log..." }
select_readable_pdh = Collection.
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+require './db/migrate/20161213172944_full_text_search_indexes'
+
+# Drops and recreates the full-text search indexes by re-running the
+# original FullTextSearchIndexes migration, whose index expressions have
+# been updated elsewhere in this change to wrap the concatenated columns
+# in substr(..., 0, 1000000) (avoids exceeding the tsvector size limit
+# on rows with very large text columns).
+class ReplaceFullTextIndexes < ActiveRecord::Migration
+  def up
+    # Delegates to the earlier migration class; its up() rebuilds the
+    # gin indexes using the current (substr-wrapped) definitions.
+    FullTextSearchIndexes.new.up
+  end
+
+  # Intentionally a no-op: the previous index definitions are not
+  # restored on rollback.
+  def down
+  end
+end
-- Name: collections_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, (((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, (''::character varying)::text))));
+CREATE INDEX collections_full_text_search_idx ON public.collections USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((COALESCE(owner_uuid, ''::character varying))::text || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(portable_data_hash, ''::character varying))::text) || ' '::text) || (COALESCE(uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || COALESCE(file_names, ''::text)), 0, 1000000)));
--
-- Name: container_requests_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text)));
+CREATE INDEX container_requests_full_text_search_idx ON public.container_requests USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)) || ' '::text) || COALESCE((properties)::text, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(requesting_container_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(container_uuid, ''::character varying))::text) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(container_image, ''::character varying))::text) || ' '::text) || COALESCE(environment, ''::text)) || ' '::text) || (COALESCE(cwd, ''::character varying))::text) || ' '::text) || COALESCE(command, ''::text)) || ' '::text) || (COALESCE(output_path, ''::character varying))::text) || ' '::text) || COALESCE(filters, ''::text)) || ' '::text) || COALESCE(scheduling_parameters, ''::text)) || ' '::text) || (COALESCE(output_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output_name, ''::character varying))::text), 0, 1000000)));
--
-- Name: groups_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, (((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text))));
+CREATE INDEX groups_full_text_search_idx ON public.groups USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(group_class, ''::character varying))::text) || ' '::text) || COALESCE((properties)::text, ''::text)), 0, 1000000)));
--
-- Name: jobs_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text))));
+CREATE INDEX jobs_full_text_search_idx ON public.jobs USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(submit_id, ''::character varying))::text) || ' '::text) || (COALESCE(script, ''::character varying))::text) || ' '::text) || (COALESCE(script_version, ''::character varying))::text) || ' '::text) || COALESCE(script_parameters, ''::text)) || ' '::text) || (COALESCE(cancelled_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(cancelled_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(output, ''::character varying))::text) || ' '::text) || (COALESCE(is_locked_by_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(log, ''::character varying))::text) || ' '::text) || COALESCE(tasks_summary, ''::text)) || ' '::text) || COALESCE(runtime_constraints, ''::text)) || ' '::text) || (COALESCE(repository, ''::character varying))::text) || ' '::text) || (COALESCE(supplied_script_version, ''::character varying))::text) || ' '::text) || (COALESCE(docker_image_locator, ''::character varying))::text) || ' '::text) || (COALESCE(description, ''::character varying))::text) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || (COALESCE(arvados_sdk_version, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)), 0, 1000000)));
--
-- Name: pipeline_instances_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, (((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text)));
+CREATE INDEX pipeline_instances_full_text_search_idx ON public.pipeline_instances USING gin (to_tsvector('english'::regconfig, substr((((((((((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(pipeline_template_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || COALESCE(properties, ''::text)) || ' '::text) || (COALESCE(state, ''::character varying))::text) || ' '::text) || COALESCE(components_summary, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));
--
-- Name: pipeline_templates_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, (((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text)));
+CREATE INDEX pipeline_templates_full_text_search_idx ON public.pipeline_templates USING gin (to_tsvector('english'::regconfig, substr((((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(components, ''::text)) || ' '::text) || (COALESCE(description, ''::character varying))::text), 0, 1000000)));
--
-- Name: workflows_full_text_search_idx; Type: INDEX; Schema: public; Owner: -
--
-CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, (((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text))));
+CREATE INDEX workflows_full_text_search_idx ON public.workflows USING gin (to_tsvector('english'::regconfig, substr((((((((((((COALESCE(uuid, ''::character varying))::text || ' '::text) || (COALESCE(owner_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_client_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(modified_by_user_uuid, ''::character varying))::text) || ' '::text) || (COALESCE(name, ''::character varying))::text) || ' '::text) || COALESCE(description, ''::text)), 0, 1000000)));
--
INSERT INTO schema_migrations (version) VALUES ('20180915155335');
+INSERT INTO schema_migrations (version) VALUES ('20180917200000');
+
INSERT INTO schema_migrations (version) VALUES ('20180917205609');
INSERT INTO schema_migrations (version) VALUES ('20180919001158');
c1, _ = minimal_new(common_attrs.merge({runtime_token: api_client_authorizations(:active).token}))
assert_equal Container::Queued, c1.state
reused = Container.find_reusable(common_attrs.merge(runtime_token_attr(:container_runtime_token)))
- assert_nil reused
+ # See #14584
+ assert_equal c1.uuid, reused.uuid
end
test "find_reusable method with nil runtime_token, then runtime_token with different user" do
c1, _ = minimal_new(common_attrs.merge({runtime_token: nil}))
assert_equal Container::Queued, c1.state
reused = Container.find_reusable(common_attrs.merge(runtime_token_attr(:container_runtime_token)))
- assert_nil reused
+ # See #14584
+ assert_equal c1.uuid, reused.uuid
end
test "find_reusable method with different runtime_token, different scope, same user" do
c1, _ = minimal_new(common_attrs.merge({runtime_token: api_client_authorizations(:runtime_token_limited_scope).token}))
assert_equal Container::Queued, c1.state
reused = Container.find_reusable(common_attrs.merge(runtime_token_attr(:container_runtime_token)))
- assert_nil reused
+ # See #14584
+ assert_equal c1.uuid, reused.uuid
end
test "Container running" do