Joshua Randall <joshua.randall@sanger.ac.uk>
President and Fellows of Harvard College <*@harvard.edu>
Thomas Mooney <tmooney@genome.wustl.edu>
+Chen Chen <aflyhorse@gmail.com>
debian8,ubuntu1204,ubuntu1404,centos7|shellescape|3.4.1|2|python|all
debian8,ubuntu1204,ubuntu1404,ubuntu1604,centos7|mistune|0.7.3|2|python|all
debian8,ubuntu1204,ubuntu1404,ubuntu1604,centos7|typing|3.5.3.0|2|python|all
-debian8,ubuntu1204,ubuntu1404,centos7|avro|1.8.1|2|python|all
+debian8,ubuntu1204,ubuntu1404,ubuntu1604,centos7|avro|1.8.1|2|python|all
debian8,ubuntu1204,ubuntu1404,centos7|ruamel.ordereddict|0.4.9|2|python|amd64
debian8,ubuntu1204,ubuntu1404,ubuntu1604,centos7|cachecontrol|0.11.7|2|python|all
debian8,ubuntu1204,ubuntu1404,ubuntu1604,centos7|pathlib2|2.1.0|2|python|all
# SPDX-License-Identifier: AGPL-3.0
FROM centos:7
-MAINTAINER Brett Smith <brett@curoverse.com>
+MAINTAINER Ward Vandewege <ward@curoverse.com>
-# Install build dependencies provided in base distribution
+# Install dependencies.
RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git perl-ExtUtils-MakeMaker libattr-devel nss-devel libcurl-devel which tar unzip scl-utils centos-release-scl postgresql-devel python-devel python-setuptools fuse-devel xz-libs git
-# Install golang binary
-ADD generated/go1.8.3.linux-amd64.tar.gz /usr/local/
-RUN ln -s /usr/local/go/bin/go /usr/local/bin/
-
# Install RVM
RUN gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm-exec default gem install bundler && \
/usr/local/rvm/bin/rvm-exec default gem install cure-fpm --version 1.6.0b
+# Install golang binary
+ADD generated/go1.8.3.linux-amd64.tar.gz /usr/local/
+RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+
# Need to "touch" RPM database to workaround bug in interaction between
# overlayfs and yum (https://bugzilla.redhat.com/show_bug.cgi?id=1213602)
RUN touch /var/lib/rpm/* && yum -q -y install python33
RUN scl enable python33 "easy_install-3.3 pip" && easy_install-2.7 pip
+# Old versions of setuptools cannot build a schema-salad package.
+RUN pip install --upgrade setuptools
+
ENV WORKSPACE /arvados
CMD ["scl", "enable", "python33", "/usr/local/rvm/bin/rvm-exec default bash /jenkins/run-build-packages.sh --target centos7"]
FROM debian:jessie
MAINTAINER Ward Vandewege <ward@curoverse.com>
-# Install dependencies and set up system.
+ENV DEBIAN_FRONTEND noninteractive
+
+# Install dependencies.
RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip
# Install RVM
ADD generated/go1.8.3.linux-amd64.tar.gz /usr/local/
RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+# Old versions of setuptools cannot build a schema-salad package.
+RUN pip install --upgrade setuptools
+
ENV WORKSPACE /arvados
CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "debian8"]
FROM ubuntu:precise
MAINTAINER Ward Vandewege <ward@curoverse.com>
-# Install dependencies and set up system.
+ENV DEBIAN_FRONTEND noninteractive
+
+# Install dependencies.
RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip build-essential unzip
# Install RVM
ADD generated/go1.8.3.linux-amd64.tar.gz /usr/local/
RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+# Old versions of setuptools cannot build a schema-salad package.
+RUN pip install --upgrade setuptools
+
ENV WORKSPACE /arvados
CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1204"]
# SPDX-License-Identifier: AGPL-3.0
FROM ubuntu:trusty
-MAINTAINER Brett Smith <brett@curoverse.com>
+MAINTAINER Ward Vandewege <ward@curoverse.com>
-# Install dependencies and set up system.
+ENV DEBIAN_FRONTEND noninteractive
+
+# Install dependencies.
RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip
# Install RVM
ADD generated/go1.8.3.linux-amd64.tar.gz /usr/local/
RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+# Old versions of setuptools cannot build a schema-salad package.
+RUN pip install --upgrade setuptools
+
ENV WORKSPACE /arvados
CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1404"]
FROM ubuntu:xenial
MAINTAINER Ward Vandewege <ward@curoverse.com>
-# Install dependencies and set up system.
+ENV DEBIAN_FRONTEND noninteractive
+
+# Install dependencies.
RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev libgnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata
# Install RVM
ADD generated/go1.8.3.linux-amd64.tar.gz /usr/local/
RUN ln -s /usr/local/go/bin/go /usr/local/bin/
+# Old versions of setuptools cannot build a schema-salad package.
+RUN pip install --upgrade setuptools
+
ENV WORKSPACE /arvados
CMD ["/usr/local/rvm/bin/rvm-exec", "default", "bash", "/jenkins/run-build-packages.sh", "--target", "ubuntu1604"]
# SPDX-License-Identifier: AGPL-3.0
FROM centos:7
-MAINTAINER Brett Smith <brett@curoverse.com>
+MAINTAINER Ward Vandewege <ward@curoverse.com>
RUN yum -q -y install scl-utils centos-release-scl which tar
# SPDX-License-Identifier: AGPL-3.0
FROM debian:8
-MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+MAINTAINER Ward Vandewege <ward@curoverse.com>
+
+ENV DEBIAN_FRONTEND noninteractive
# Install RVM
RUN apt-get update && \
- DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends curl ca-certificates && \
+ apt-get -y install --no-install-recommends curl ca-certificates && \
gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
# SPDX-License-Identifier: AGPL-3.0
FROM ubuntu:precise
-MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+MAINTAINER Ward Vandewege <ward@curoverse.com>
+
+ENV DEBIAN_FRONTEND noninteractive
# Install RVM
RUN apt-get update && \
- DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends curl ca-certificates g++ && \
+ apt-get -y install --no-install-recommends curl ca-certificates g++ && \
gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
# SPDX-License-Identifier: AGPL-3.0
FROM ubuntu:trusty
-MAINTAINER Peter Amstutz <peter.amstutz@curoverse.com>
+MAINTAINER Ward Vandewege <ward@curoverse.com>
+
+ENV DEBIAN_FRONTEND noninteractive
# Install RVM
RUN apt-get update && \
- DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends curl ca-certificates && \
+ apt-get -y install --no-install-recommends curl ca-certificates && \
gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
FROM ubuntu:xenial
MAINTAINER Ward Vandewege <ward@curoverse.com>
+ENV DEBIAN_FRONTEND noninteractive
+
# Install RVM
RUN apt-get update && \
- DEBIAN_FRONTEND=noninteractive apt-get -y install --no-install-recommends curl ca-certificates && \
+ apt-get -y install --no-install-recommends curl ca-certificates && \
gpg --keyserver pool.sks-keyservers.net --recv-keys D39DC0E3 && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
saladversion=$(cat "$WORKSPACE/sdk/cwl/setup.py" | grep schema-salad== | sed "s/.*==\(.*\)'.*/\1/")
test_package_presence python-schema-salad "$saladversion" python
if [[ "$?" == "0" ]]; then
- fpm_build schema_salad "" "" python $saladversion --depends "${PYTHON2_PKG_PREFIX}-lockfile >= 1:0.12.2-2"
+ fpm_build schema_salad "" "" python $saladversion --depends "${PYTHON2_PKG_PREFIX}-lockfile >= 1:0.12.2-2" --depends "${PYTHON2_PKG_PREFIX}-avro = 1.8.1-2"
fi
# And for cwltool we have the same problem as for schema_salad. Ward, 2016-03-17
self.collection_cache = CollectionCache(self.api, self.keep_client, self.num_retries)
self.work_api = None
- expected_api = ["containers", "jobs"]
+ expected_api = ["jobs", "containers"]
for api in expected_api:
try:
methods = self.api._rootDesc.get('resources')[api]['methods']
if self.work_api == "containers":
if tool.tool["class"] == "CommandLineTool" and kwargs.get("wait"):
kwargs["runnerjob"] = tool.tool["id"]
- upload_dependencies(self,
- kwargs["name"],
- tool.doc_loader,
- tool.tool,
- tool.tool["id"],
- False)
runnerjob = tool.job(job_order,
self.output_callback,
**kwargs).next()
parser.add_argument("--api", type=str,
default=None, dest="work_api",
choices=("jobs", "containers"),
- help="Select work submission API. Default is 'containers' if that API is available, otherwise 'jobs'.")
+ help="Select work submission API. Default is 'jobs' if that API is available, otherwise 'containers'.")
parser.add_argument("--compute-checksum", action="store_true", default=False,
help="Compute checksum of contents while collecting outputs",
arvargs.use_container = True
arvargs.relax_path_checks = True
arvargs.validate = None
+ arvargs.print_supported_versions = False
make_fs_access = partial(CollectionFsAccess,
collection_cache=runner.collection_cache)
self.arvrunner = arvrunner
self.work_api = kwargs["work_api"]
- def makeJobRunner(self):
+ def makeJobRunner(self, use_container=True):
if self.work_api == "containers":
return ArvadosContainer(self.arvrunner)
elif self.work_api == "jobs":
def exists(self, fn):
collection, rest = self.get_collection(fn)
- if collection:
+ if collection is not None:
if rest:
return collection.exists(rest)
else:
import cwltool.workflow
from cwltool.process import get_feature, scandeps, UnsupportedRequirement, normalizeFilesDirs, shortname
from cwltool.load_tool import fetch_document
-from cwltool.pathmapper import adjustFileObjs, adjustDirObjs
+from cwltool.pathmapper import adjustFileObjs, adjustDirObjs, visit_class
from cwltool.utils import aslist
from cwltool.builder import substitute
from cwltool.pack import pack
if obj.get("location", "").startswith("_:"):
del obj["location"]
+def find_defaults(d, op):
+    # Recursively walk a JSON-like structure (nested lists/dicts) and
+    # invoke op(d) on each dict that contains a "default" key.
+    # Recursion stops at such a dict: its children are not visited.
+    # Uses itervalues(), so this is Python 2 code.
+    if isinstance(d, list):
+        for i in d:
+            find_defaults(i, op)
+    elif isinstance(d, dict):
+        if "default" in d:
+            op(d)
+        else:
+            for i in d.itervalues():
+                find_defaults(i, op)
+
def upload_dependencies(arvrunner, name, document_loader,
workflowobj, uri, loadref_run, include_primary=True):
"""Upload the dependencies of the workflowobj document to Keep.
for s in workflowobj["$schemas"]:
sc.append({"class": "File", "location": s})
+ def capture_default(obj):
+ remove = [False]
+ def add_default(f):
+ if "location" not in f and "path" in f:
+ f["location"] = f["path"]
+ del f["path"]
+ if "location" in f and not arvrunner.fs_access.exists(f["location"]):
+ # Remove from sc
+ sc[:] = [x for x in sc if x["location"] != f["location"]]
+ # Delete "default" from workflowobj
+ remove[0] = True
+ visit_class(obj["default"], ("File", "Directory"), add_default)
+ if remove[0]:
+ del obj["default"]
+
+ find_defaults(workflowobj, capture_default)
+
mapper = ArvPathMapper(arvrunner, sc, "",
"keep:%s",
"keep:%s/%s",
# Note that arvados/build/run-build-packages.sh looks at this
# file to determine what version of cwltool and schema-salad to build.
install_requires=[
- 'cwltool==1.0.20170525215327',
- 'schema-salad==2.5.20170428142041',
+ 'cwltool==1.0.20170707200431',
+ 'schema-salad==2.6.20170630075932',
'typing==3.5.3.0',
'ruamel.yaml==0.13.7',
'arvados-python-client>=0.1.20170526013812',
api._rootDesc = get_rootDesc()
runner = arvados_cwl.ArvCwlRunner(api)
- runner.work_api = 'jobs'
+ self.assertEqual(runner.work_api, 'jobs')
list_images_in_arv.return_value = [["zzzzz-4zz18-zzzzzzzzzzzzzzz"]]
runner.api.collections().get().execute.return_vaulue = {"portable_data_hash": "99999999999999999999999999999993+99"}
if uuid in (v["uuid"], v["portable_data_hash"]):
return CollectionExecute(v)
- created_collections = {}
+ created_collections = {
+ "99999999999999999999999999999998+99": {
+ "uuid": "",
+ "portable_data_hash": "99999999999999999999999999999998+99",
+ "manifest_text": ". 99999999999999999999999999999998+99 0:0:file1.txt"
+ }}
stubs.api.collections().create.side_effect = functools.partial(collection_createstub, created_collections)
stubs.api.collections().get.side_effect = functools.partial(collection_getstub, created_collections)
'runtime_constraints': {'docker_image': 'arvados/jobs:'+arvados_cwl.__version__, 'min_ram_mb_per_node': 1024},
'script_parameters': {
'y': {"value": {'basename': '99999999999999999999999999999998+99', 'location': 'keep:99999999999999999999999999999998+99', 'class': 'Directory'}},
- 'x': {"value": {'basename': 'blorp.txt', 'class': 'File', 'location': 'keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt'}},
+ 'x': {"value": {
+ 'basename': 'blorp.txt',
+ 'class': 'File',
+ 'location': 'keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt',
+ "nameext": ".txt",
+ "nameroot": "blorp"
+ }},
'z': {"value": {'basename': 'anonymous', 'class': 'Directory',
'listing': [
- {'basename': 'renamed.txt', 'class': 'File', 'location': 'keep:99999999999999999999999999999998+99/file1.txt'}
+ {
+ 'basename': 'renamed.txt',
+ 'class': 'File', 'location':
+ 'keep:99999999999999999999999999999998+99/file1.txt',
+ "nameext": ".txt",
+ "nameroot": "renamed"
+ }
]}},
'cwl:tool': '3fffdeaa75e018172e1b583425f4ebff+60/workflow.cwl#main',
'arv:enable_reuse': True,
'/var/lib/cwl/cwl.input.json': {
'kind': 'json',
'content': {
- 'y': {'basename': '99999999999999999999999999999998+99', 'location': 'keep:99999999999999999999999999999998+99', 'class': 'Directory'},
- 'x': {'basename': u'blorp.txt', 'class': 'File', 'location': u'keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt'},
+ 'y': {
+ 'basename': '99999999999999999999999999999998+99',
+ 'location': 'keep:99999999999999999999999999999998+99',
+ 'class': 'Directory'},
+ 'x': {
+ 'basename': u'blorp.txt',
+ 'class': 'File',
+ 'location': u'keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt',
+ "nameext": ".txt",
+ "nameroot": "blorp"
+ },
'z': {'basename': 'anonymous', 'class': 'Directory', 'listing': [
- {'basename': 'renamed.txt', 'class': 'File', 'location': 'keep:99999999999999999999999999999998+99/file1.txt'}
+ {'basename': 'renamed.txt',
+ 'class': 'File',
+ 'location': 'keep:99999999999999999999999999999998+99/file1.txt',
+ "nameext": ".txt",
+ "nameroot": "renamed"
+ }
]}
},
'kind': 'json'
def test_submit_runner_ram(self, stubs, tm):
capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs", "--debug", "--submit-runner-ram=2048",
+ ["--submit", "--no-wait", "--debug", "--submit-runner-ram=2048",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
def test_submit_invalid_runner_ram(self, stubs, tm):
capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs", "--debug", "--submit-runner-ram=-2048",
+ ["--submit", "--no-wait", "--debug", "--submit-runner-ram=-2048",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 1)
capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs", "--debug", "--output-name", output_name,
+ ["--submit", "--no-wait", "--debug", "--output-name", output_name,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
def test_submit_pipeline_name(self, stubs, tm):
capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs", "--debug", "--name=hello job 123",
+ ["--submit", "--no-wait", "--debug", "--name=hello job 123",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs", "--debug", "--output-tags", output_tags,
+ ["--submit", "--no-wait", "--debug", "--output-tags", output_tags,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs",
+ ["--submit", "--no-wait",
"--project-uuid", project_uuid,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
sys.stdout, sys.stderr, api_client=stubs.api)
capture_stdout = cStringIO.StringIO()
try:
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--debug",
+ ["--submit", "--no-wait", "--api=containers", "--debug",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 0)
arvados_cwl.runner.arvados_jobs_image(arvrunner, "arvados/jobs:"+arvados_cwl.__version__))
class TestCreateTemplate(unittest.TestCase):
- existing_template_uuid = "zzzzz-p5p6p-validworkfloyml"
+ existing_template_uuid = "zzzzz-d1hrv-validworkfloyml"
def _adjust_script_params(self, expect_component):
expect_component['script_parameters']['x'] = {
@stubs
def test_inputs_empty(self, stubs):
exited = arvados_cwl.main(
- ["--create-template", "--api=jobs",
+ ["--create-template",
"tests/wf/inputs_test.cwl", "tests/order/empty_order.json"],
cStringIO.StringIO(), sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
@stubs
def test_inputs(self, stubs):
exited = arvados_cwl.main(
- ["--create-template", "--api=jobs",
+ ["--create-template",
"tests/wf/inputs_test.cwl", "tests/order/inputs_test_order.json"],
cStringIO.StringIO(), sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
- id: '#main/x'
type: File
default: {class: File, location: 'keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt',
- basename: blorp.txt}
+ basename: blorp.txt, nameroot: blorp, nameext: .txt}
- id: '#main/y'
type: Directory
default: {class: Directory, location: 'keep:99999999999999999999999999999998+99',
- id: '#main/z'
type: Directory
default: {class: Directory, basename: anonymous, listing: [{basename: renamed.txt,
- class: File, location: 'keep:99999999999999999999999999999998+99/file1.txt'}]}
+ class: File, location: 'keep:99999999999999999999999999999998+99/file1.txt',
+ nameroot: renamed, nameext: .txt}]}
outputs: []
steps:
- id: '#main/step1'
after_save :update_priority
after_save :finalize_if_needed
before_create :set_requesting_container_uuid
+ before_destroy :set_priority_zero
api_accessible :user, extend: :common do |t|
t.add :command
end
end
+ def set_priority_zero
+ # before_destroy hook: cancel this request by forcing priority to 0
+ # unless it has already reached the Final state, so the associated
+ # container loses this request's priority contribution on delete.
+ self.update_attributes!(priority: 0) if self.state != Final
+ end
+
def set_requesting_container_uuid
return !new_record? if self.requesting_container_uuid # already set
vcpus: 1
ram: 123
+running_to_be_deleted:
+ uuid: zzzzz-xvhdp-cr5runningcntnr
+ owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ name: running to be deleted
+ state: Committed
+ priority: 1
+ created_at: <%= 2.days.ago.to_s(:db) %>
+ updated_at: <%= 1.days.ago.to_s(:db) %>
+ modified_at: <%= 1.days.ago.to_s(:db) %>
+ modified_by_user_uuid: zzzzz-tpzed-xurymjxw79nv3jz
+ container_image: test
+ cwd: test
+ output_path: test
+ command: ["echo", "hello"]
+ container_uuid: zzzzz-dz642-runnincntrtodel
+ runtime_constraints:
+ vcpus: 1
+ ram: 123
+
completed_with_input_mounts:
uuid: zzzzz-xvhdp-crwithinputmnts
owner_uuid: zzzzz-tpzed-xurymjxw79nv3jz
runtime_constraints:
ram: 12000000000
vcpus: 4
+
+running_to_be_deleted:
+ uuid: zzzzz-dz642-runnincntrtodel
+ owner_uuid: zzzzz-tpzed-000000000000000
+ state: Running
+ priority: 1
+ created_at: <%= 1.minute.ago.to_s(:db) %>
+ updated_at: <%= 1.minute.ago.to_s(:db) %>
+ started_at: <%= 1.minute.ago.to_s(:db) %>
+ container_image: test
+ cwd: test
+ output_path: test
+ command: ["echo", "hello"]
+ runtime_constraints:
+ ram: 12000000000
+ vcpus: 4
+ auth_uuid: zzzzz-gj3su-077z32aux8dg2s2
end
end
end
+
+ test "delete container_request and check its container's priority" do
+ act_as_user users(:active) do
+ cr = ContainerRequest.find_by_uuid container_requests(:running_to_be_deleted).uuid
+
+ # initially the cr's container has priority > 0
+ c = Container.find_by_uuid(cr.container_uuid)
+ assert_equal 1, c.priority
+
+ # destroy the cr
+ assert_nothing_raised {cr.destroy}
+
+ # the cr's container now has priority of 0
+ c = Container.find_by_uuid(cr.container_uuid)
+ assert_equal 0, c.priority
+ end
+ end
+
+ test "delete container_request in final state and expect no error due to before_destroy callback" do
+ act_as_user users(:active) do
+ cr = ContainerRequest.find_by_uuid container_requests(:completed).uuid
+ # set_priority_zero skips Final-state requests, so destroy must not raise.
+ assert_nothing_raised {cr.destroy}
+ end
+ end
end
}
func (ps *pgEventSource) DB() *sql.DB {
+	// Block until initialization has completed so the returned
+	// handle is safe to use (presumably ps.db is nil before
+	// WaitReady returns — confirm against pgEventSource setup).
+	ps.WaitReady()
	return ps.db
}
pc.nMisses++
err = pc.RequestAndDecode(&buf, "GET", path, nil, url.Values{
- "select": {`["uuid"]`},
+ "include_trash": {"true"},
+ "select": {`["uuid"]`},
})
var allowed bool
errQueueFull = errors.New("client queue full")
errFrameTooBig = errors.New("frame too big")
- sendObjectAttributes = []string{"state", "name", "owner_uuid", "portable_data_hash"}
+ // Send clients only these keys from the
+ // log.properties.old_attributes and
+ // log.properties.new_attributes hashes.
+ sendObjectAttributes = []string{
+ "is_trashed",
+ "name",
+ "owner_uuid",
+ "portable_data_hash",
+ "state",
+ }
v0subscribeOK = []byte(`{"status":200}`)
v0subscribeFail = []byte(`{"status":400}`)
return nil, nil
}
- ok, err := sess.permChecker.Check(detail.ObjectUUID)
+ var permTarget string
+ if detail.EventType == "delete" {
+ // It's pointless to check permission by reading
+ // ObjectUUID if it has just been deleted, but if the
+ // client has permission on the parent project then
+ // it's OK to send the event.
+ permTarget = detail.ObjectOwnerUUID
+ } else {
+ permTarget = detail.ObjectUUID
+ }
+ ok, err := sess.permChecker.Check(permTarget)
if err != nil || !ok {
return nil, err
}
if sub.LastLogID == 0 {
return
}
- sess.log.WithField("LastLogID", sub.LastLogID).Debug("getOldEvents")
+ sess.log.WithField("LastLogID", sub.LastLogID).Debug("sendOldEvents")
// Here we do a "select id" query and queue an event for every
// log since the given ID, then use (*event)Detail() to
// retrieve the whole row and decide whether to send it. This
sub.LastLogID,
time.Now().UTC().Add(-10*time.Minute).Format(time.RFC3339Nano))
if err != nil {
- sess.log.WithError(err).Error("db.Query failed")
+ sess.log.WithError(err).Error("sendOldEvents db.Query failed")
return
}
- defer rows.Close()
+
+ var ids []uint64
for rows.Next() {
var id uint64
err := rows.Scan(&id)
if err != nil {
- sess.log.WithError(err).Error("row Scan failed")
+ sess.log.WithError(err).Error("sendOldEvents row Scan failed")
continue
}
+ ids = append(ids, id)
+ }
+ if err := rows.Err(); err != nil {
+ sess.log.WithError(err).Error("sendOldEvents db.Query failed")
+ }
+ rows.Close()
+
+ for _, id := range ids {
for len(sess.sendq)*2 > cap(sess.sendq) {
// Ugly... but if we fill up the whole client
// queue with a backlog of old events, a
}
}
}
- if err := rows.Err(); err != nil {
- sess.log.WithError(err).Error("db.Query failed")
- }
}
type v0subscribe struct {
"io"
"net/url"
"os"
+ "sync"
"time"
"git.curoverse.com/arvados.git/sdk/go/arvados"
serverSuite serverSuite
token string
toDelete []string
+ wg sync.WaitGroup
+ ignoreLogID uint64
}
func (s *v0Suite) SetUpTest(c *check.C) {
s.serverSuite.SetUpTest(c)
+ go s.serverSuite.srv.Run()
+ s.serverSuite.srv.WaitReady()
+
s.token = arvadostest.ActiveToken
+ s.ignoreLogID = s.lastLogID(c)
+}
+
+func (s *v0Suite) TearDownTest(c *check.C) {
+ s.wg.Wait()
+ s.serverSuite.srv.Close()
}
func (s *v0Suite) TearDownSuite(c *check.C) {
+ s.deleteTestObjects(c)
+}
+
+func (s *v0Suite) deleteTestObjects(c *check.C) {
ac := arvados.NewClientFromEnv()
ac.AuthToken = arvadostest.AdminToken
for _, path := range s.toDelete {
panic(err)
}
}
+ s.toDelete = nil
}
func (s *v0Suite) TestFilters(c *check.C) {
- srv, conn, r, w := s.testClient()
- defer srv.Close()
+ conn, r, w := s.testClient()
defer conn.Close()
c.Check(w.Encode(map[string]interface{}{
}
func (s *v0Suite) TestLastLogID(c *check.C) {
- var lastID uint64
- c.Assert(testDB().QueryRow(`SELECT MAX(id) FROM logs`).Scan(&lastID), check.IsNil)
+ lastID := s.lastLogID(c)
- srv, conn, r, w := s.testClient()
- defer srv.Close()
- defer conn.Close()
+ checkLogs := func(r *json.Decoder, uuid string) {
+ for _, etype := range []string{"create", "blip", "update"} {
+ lg := s.expectLog(c, r)
+ for lg.ObjectUUID != uuid {
+ lg = s.expectLog(c, r)
+ }
+ c.Check(lg.EventType, check.Equals, etype)
+ }
+ }
- uuidChan := make(chan string, 2)
+ // Connecting connEarly (before sending the early events) lets
+ // us confirm all of the "early" events have already passed
+ // through the server.
+ connEarly, rEarly, wEarly := s.testClient()
+ defer connEarly.Close()
+ c.Check(wEarly.Encode(map[string]interface{}{
+ "method": "subscribe",
+ }), check.IsNil)
+ s.expectStatus(c, rEarly, 200)
+
+ // Send the early events.
+ uuidChan := make(chan string, 1)
s.emitEvents(uuidChan)
+ uuidEarly := <-uuidChan
+
+ // Wait for the early events to pass through.
+ checkLogs(rEarly, uuidEarly)
+
+ // Connect the client that wants to get old events via
+ // last_log_id.
+ conn, r, w := s.testClient()
+ defer conn.Close()
c.Check(w.Encode(map[string]interface{}{
"method": "subscribe",
}), check.IsNil)
s.expectStatus(c, r, 200)
- avoidRace := make(chan struct{}, cap(uuidChan))
- go func() {
- // When last_log_id is given, although v0session sends
- // old events in order, and sends new events in order,
- // it doesn't necessarily finish sending all old
- // events before sending any new events. To avoid
- // hitting this bug in the test, we wait for the old
- // events to arrive before emitting any new events.
- <-avoidRace
- s.emitEvents(uuidChan)
- close(uuidChan)
- }()
-
- go func() {
- for uuid := range uuidChan {
- for _, etype := range []string{"create", "blip", "update"} {
- lg := s.expectLog(c, r)
- for lg.ObjectUUID != uuid {
- lg = s.expectLog(c, r)
- }
- c.Check(lg.EventType, check.Equals, etype)
- }
- avoidRace <- struct{}{}
- }
- }()
+ checkLogs(r, uuidEarly)
+ s.emitEvents(uuidChan)
+ checkLogs(r, <-uuidChan)
}
func (s *v0Suite) TestPermission(c *check.C) {
- srv, conn, r, w := s.testClient()
- defer srv.Close()
+ conn, r, w := s.testClient()
defer conn.Close()
c.Check(w.Encode(map[string]interface{}{
}
}
+// Two users create private objects; admin deletes both objects; each
+// user receives a "delete" event for their own object (not for the
+// other user's object).
+func (s *v0Suite) TestEventTypeDelete(c *check.C) {
+	// One subscriber per token; each creates its own private object.
+	clients := []struct {
+		token string
+		uuid  string
+		conn  *websocket.Conn
+		r     *json.Decoder
+		w     *json.Encoder
+	}{{token: arvadostest.ActiveToken}, {token: arvadostest.SpectatorToken}}
+	for i := range clients {
+		uuidChan := make(chan string, 1)
+		s.token = clients[i].token
+		s.emitEvents(uuidChan)
+		clients[i].uuid = <-uuidChan
+		clients[i].conn, clients[i].r, clients[i].w = s.testClient()
+
+		c.Check(clients[i].w.Encode(map[string]interface{}{
+			"method": "subscribe",
+		}), check.IsNil)
+		s.expectStatus(c, clients[i].r, 200)
+	}
+
+	// Skip the create/update events emitted above; watch only deletes.
+	s.ignoreLogID = s.lastLogID(c)
+	s.deleteTestObjects(c)
+
+	// Each client must receive the delete event for its own object only.
+	for _, client := range clients {
+		lg := s.expectLog(c, client.r)
+		c.Check(lg.ObjectUUID, check.Equals, client.uuid)
+		c.Check(lg.EventType, check.Equals, "delete")
+	}
+}
+
+// Trashing/deleting a collection produces an "update" event with
+// properties["new_attributes"]["is_trashed"] == true.
+func (s *v0Suite) TestTrashedCollection(c *check.C) {
+	ac := arvados.NewClientFromEnv()
+	ac.AuthToken = s.token
+
+	coll := &arvados.Collection{ManifestText: ""}
+	err := ac.RequestAndDecode(coll, "POST", "arvados/v1/collections", s.jsonBody("collection", coll), map[string]interface{}{"ensure_unique_name": true})
+	c.Assert(err, check.IsNil)
+	// Ignore the create event; we only want the trash/update event below.
+	s.ignoreLogID = s.lastLogID(c)
+
+	conn, r, w := s.testClient()
+	defer conn.Close()
+
+	c.Check(w.Encode(map[string]interface{}{
+		"method": "subscribe",
+	}), check.IsNil)
+	s.expectStatus(c, r, 200)
+
+	err = ac.RequestAndDecode(nil, "DELETE", "arvados/v1/collections/"+coll.UUID, nil, nil)
+	c.Assert(err, check.IsNil)
+
+	// DELETE on a collection trashes it: clients see an "update" event
+	// whose new_attributes report is_trashed flipping false -> true.
+	lg := s.expectLog(c, r)
+	c.Check(lg.ObjectUUID, check.Equals, coll.UUID)
+	c.Check(lg.EventType, check.Equals, "update")
+	c.Check(lg.Properties["old_attributes"].(map[string]interface{})["is_trashed"], check.Equals, false)
+	c.Check(lg.Properties["new_attributes"].(map[string]interface{})["is_trashed"], check.Equals, true)
+}
+
func (s *v0Suite) TestSendBadJSON(c *check.C) {
- srv, conn, r, w := s.testClient()
- defer srv.Close()
+ conn, r, w := s.testClient()
defer conn.Close()
c.Check(w.Encode(map[string]interface{}{
}
func (s *v0Suite) TestSubscribe(c *check.C) {
- srv, conn, r, w := s.testClient()
- defer srv.Close()
+ conn, r, w := s.testClient()
defer conn.Close()
s.emitEvents(nil)
// created workflow. If uuidChan is not nil, send the new workflow
// UUID to uuidChan as soon as it's known.
func (s *v0Suite) emitEvents(uuidChan chan<- string) {
+ s.wg.Add(1)
+ defer s.wg.Done()
+
ac := arvados.NewClientFromEnv()
ac.AuthToken = s.token
wf := &arvados.Workflow{
lg := &arvados.Log{}
ok := make(chan struct{})
go func() {
- c.Check(r.Decode(lg), check.IsNil)
+ for lg.ID <= s.ignoreLogID {
+ c.Check(r.Decode(lg), check.IsNil)
+ }
close(ok)
}()
select {
}
}
-func (s *v0Suite) testClient() (*server, *websocket.Conn, *json.Decoder, *json.Encoder) {
- go s.serverSuite.srv.Run()
- s.serverSuite.srv.WaitReady()
+func (s *v0Suite) testClient() (*websocket.Conn, *json.Decoder, *json.Encoder) {
srv := s.serverSuite.srv
conn, err := websocket.Dial("ws://"+srv.listener.Addr().String()+"/websocket?api_token="+s.token, "", "http://"+srv.listener.Addr().String())
if err != nil {
}
w := json.NewEncoder(conn)
r := json.NewDecoder(conn)
- return srv, conn, r, w
+ return conn, r, w
+}
+
+// lastLogID returns the current maximum id in the logs table, used as
+// a baseline so tests can ignore events emitted before they start.
+func (s *v0Suite) lastLogID(c *check.C) uint64 {
+	var lastID uint64
+	c.Assert(testDB().QueryRow(`SELECT MAX(id) FROM logs`).Scan(&lastID), check.IsNil)
+	return lastID
 }