.DS_Store
.vscode
.Rproj.user
-_version.py
\ No newline at end of file
+_version.py
+*.bak
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-case "$TARGET" in
- ubuntu1204)
- fpm_depends+=('libfuse2 = 2.9.2-5')
- ;;
-esac
+++ /dev/null
-#!/bin/bash
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-case $TARGET in
- centos7)
- # fpm incorrectly transforms the dependency name in this case.
- fpm_depends+=(python-backports-ssl_match_hostname)
- fpm_args+=(--python-disable-dependency backports.ssl-match-hostname)
- ;;
-esac
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-case "$TARGET" in
- centos*)
- fpm_depends+=(glibc)
- ;;
- debian* | ubuntu*)
- fpm_depends+=(libc6)
- ;;
-esac
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-case "$TARGET" in
- centos*)
- build_depends+=('fuse-devel')
- fpm_depends+=(glibc fuse-libs)
- ;;
- ubuntu1204)
- build_depends+=(libfuse2 libfuse-dev)
- fpm_depends+=(libc6 python-contextlib2 'libfuse2 = 2.9.2-5' 'fuse = 2.9.2-5')
- ;;
- debian* | ubuntu*)
- build_depends+=('libfuse-dev')
- fpm_depends+=(libc6 'libfuse2 > 2.9.0' 'fuse > 2.9.0')
- ;;
-esac
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-case "$TARGET" in
- debian8)
- fpm_depends+=(
- libc6
- libcomerr2
- libcurl3-gnutls
- libffi6
- libgcrypt20
- libgmp10
- libgnutls-deb0-28
- libgpg-error0
- libgssapi-krb5-2
- libhogweed2
- libidn11
- libk5crypto3
- libkeyutils1
- libkrb5-3
- libkrb5support0
- libldap-2.4-2
- libnettle4
- libp11-kit0
- librtmp1
- libsasl2-2
- libssh2-1
- libtasn1-6
- zlib1g
- ) ;;
- ubuntu1204)
- fpm_depends+=(
- libasn1-8-heimdal
- libc6
- libcomerr2
- libcurl3-gnutls
- libgcrypt11
- libgnutls26
- libgpg-error0
- libgssapi-krb5-2
- libgssapi3-heimdal
- libhcrypto4-heimdal
- libheimbase1-heimdal
- libheimntlm0-heimdal
- libhx509-5-heimdal
- libidn11
- libk5crypto3
- libkeyutils1
- libkrb5-26-heimdal
- libkrb5-3
- libkrb5support0
- libldap-2.4-2
- libp11-kit0
- libroken18-heimdal
- librtmp0
- libsasl2-2
- libsqlite3-0
- libtasn1-3
- libwind0-heimdal
- zlib1g
- ) ;;
- ubuntu1404)
- fpm_depends+=(
- libasn1-8-heimdal
- libc6
- libcomerr2
- libcurl3-gnutls
- libffi6
- libgcrypt11
- libgnutls26
- libgpg-error0
- libgssapi-krb5-2
- libgssapi3-heimdal
- libhcrypto4-heimdal
- libheimbase1-heimdal
- libheimntlm0-heimdal
- libhx509-5-heimdal
- libidn11
- libk5crypto3
- libkeyutils1
- libkrb5-26-heimdal
- libkrb5-3
- libkrb5support0
- libldap-2.4-2
- libp11-kit0
- libroken18-heimdal
- librtmp0
- libsasl2-2
- libsqlite3-0
- libtasn1-6
- libwind0-heimdal
- zlib1g
- ) ;;
-esac
WORKSPACE=path Path to the Arvados source tree to build packages from
CWLTOOL=path (optional) Path to cwltool git repository.
SALAD=path (optional) Path to schema_salad git repository.
+PYCMD=pythonexec (optional) Specify the python executable to use in the docker image. Defaults to "python".
EOF
cd "$WORKSPACE"
+py=python
+if [[ -n "$PYCMD" ]] ; then
+ py="$PYCMD" ;
+fi
+
(cd sdk/python && python setup.py sdist)
sdk=$(cd sdk/python/dist && ls -t arvados-python-client-*.tar.gz | head -n1)
cwl_runner_version=$(cd sdk/python && nohash_version_from_git 1.0)
fi
-docker build --build-arg sdk=$sdk --build-arg runner=$runner --build-arg salad=$salad --build-arg cwltool=$cwltool -f "$WORKSPACE/sdk/dev-jobs.dockerfile" -t arvados/jobs:$cwl_runner_version "$WORKSPACE/sdk"
+docker build --build-arg sdk=$sdk --build-arg runner=$runner --build-arg salad=$salad --build-arg cwltool=$cwltool --build-arg pythoncmd=$py -f "$WORKSPACE/sdk/dev-jobs.dockerfile" -t arvados/jobs:$cwl_runner_version "$WORKSPACE/sdk"
echo arv-keepdocker arvados/jobs $cwl_runner_version
arv-keepdocker arvados/jobs $cwl_runner_version
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: AGPL-3.0
-
-#distribution(s)|name|version|iteration|type|architecture|extra fpm arguments
-debian8,debian9,centos7|python-gflags|2.0|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|google-api-python-client|1.6.2|2|python|all
-debian8,debian9,ubuntu1404,centos7|oauth2client|1.5.2|2|python|all
-debian8,debian9,ubuntu1404,centos7|pyasn1|0.1.7|2|python|all
-debian8,debian9,ubuntu1404,centos7|pyasn1-modules|0.0.5|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|rsa|3.4.2|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|uritemplate|3.0.0|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|httplib2|0.9.2|3|python|all
-debian8,debian9,centos7,ubuntu1404,ubuntu1604|ws4py|0.4.2|2|python|all
-debian8,debian9,centos7|pykka|1.2.1|2|python|all
-debian8,debian9,ubuntu1404,centos7|six|1.10.0|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|ciso8601|1.0.6|3|python|amd64
-debian8,debian9,centos7|pycrypto|2.6.1|3|python|amd64
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804|backports.ssl_match_hostname|3.5.0.1|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|llfuse|1.2|3|python|amd64
-debian8,debian9,ubuntu1404,centos7|pycurl|7.19.5.3|3|python|amd64
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|pyyaml|3.12|2|python|amd64
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|rdflib|4.2.2|2|python|all
-debian8,debian9,ubuntu1404,centos7|shellescape|3.4.1|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|mistune|0.8.1|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|typing|3.6.4|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|avro|1.8.1|2|python|all
-debian8,debian9,ubuntu1404,centos7|ruamel.ordereddict|0.4.9|2|python|amd64
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|cachecontrol|0.11.7|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|pathlib2|2.3.2|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|scandir|1.7|2|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|docker-py|1.7.2|2|python3|all
-debian8,debian9,centos7|six|1.10.0|2|python3|all
-debian8,debian9,ubuntu1404,centos7|requests|2.12.4|2|python3|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|websocket-client|0.37.0|2|python3|all
-debian8,ubuntu1404,centos7|requests|2.6.1|2|python|all
-centos7|contextlib2|0.5.4|2|python|all
-centos7|isodate|0.5.4|2|python|all
-centos7|python-daemon|2.1.2|1|python|all
-centos7|pbr|0.11.1|2|python|all
-centos7|pyparsing|2.1.10|2|python|all
-centos7|keepalive|0.5|2|python|all
-centos7|networkx|1.11|0|python|all
-centos7|psutil|5.0.1|0|python|all
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|lockfile|0.12.2|2|python|all|--epoch 1
-debian8,debian9,ubuntu1404,ubuntu1604,ubuntu1804,centos7|subprocess32|3.5.1|2|python|all
-all|ruamel.yaml|0.15.77|1|python|amd64|--python-setup-py-arguments --single-version-externally-managed --depends 'python-ruamel.ordereddict >= 0.4.9'
-all|cwltest|1.0.20180518074130|4|python|all|--depends 'python-futures >= 3.0.5' --depends 'python-subprocess32 >= 3.5.0'
-all|junit-xml|1.8|3|python|all
-all|rdflib-jsonld|0.4.0|2|python|all
-all|futures|3.0.5|2|python|all
-all|future|0.16.0|2|python|all
-all|future|0.16.0|2|python3|all
-all|mypy-extensions|0.3.0|1|python|all
-all|prov|1.5.1|0|python|all
-all|bagit|1.6.4|0|python|all
-all|typing-extensions|3.6.5|0|python|all
Makefile | build/* | lib/* | tools/* | apps/* | services/* | sdk/cli/bin/crunch-job)
want=${wantGPL}
;;
- crunch_scripts/* | backports/* | docker/* | sdk/*)
+ crunch_scripts/* | docker/* | sdk/*)
want=${wantApache}
;;
doc/*)
GOTARBALL=go1.10.1.linux-amd64.tar.gz
NODETARBALL=node-v6.11.2-linux-x64.tar.xz
-RVMKEY=rvm.asc
+RVMKEY1=mpapis.asc
+RVMKEY2=pkuczynski.asc
-common-generated-all: common-generated/$(GOTARBALL) common-generated/$(NODETARBALL) common-generated/$(RVMKEY)
+common-generated-all: common-generated/$(GOTARBALL) common-generated/$(NODETARBALL) common-generated/$(RVMKEY1) common-generated/$(RVMKEY2)
common-generated/$(GOTARBALL): common-generated
wget -cqO common-generated/$(GOTARBALL) http://storage.googleapis.com/golang/$(GOTARBALL)
common-generated/$(NODETARBALL): common-generated
wget -cqO common-generated/$(NODETARBALL) https://nodejs.org/dist/v6.11.2/$(NODETARBALL)
-common-generated/$(RVMKEY): common-generated
- wget -cqO common-generated/$(RVMKEY) https://rvm.io/pkuczynski.asc
+common-generated/$(RVMKEY1): common-generated
+ wget -cqO common-generated/$(RVMKEY1) https://rvm.io/mpapis.asc
+
+common-generated/$(RVMKEY2): common-generated
+ wget -cqO common-generated/$(RVMKEY2) https://rvm.io/pkuczynski.asc
common-generated:
mkdir common-generated
MAINTAINER Ward Vandewege <ward@curoverse.com>
# Install dependencies.
-RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git perl-ExtUtils-MakeMaker libattr-devel nss-devel libcurl-devel which tar unzip scl-utils centos-release-scl postgresql-devel python-devel python-setuptools fuse-devel xz-libs git
+RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git perl-ExtUtils-MakeMaker libattr-devel nss-devel libcurl-devel which tar unzip scl-utils centos-release-scl postgresql-devel python-devel python-setuptools fuse-devel xz-libs git python-virtualenv wget
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
# Need to "touch" RPM database to workaround bug in interaction between
# overlayfs and yum (https://bugzilla.redhat.com/show_bug.cgi?id=1213602)
-RUN touch /var/lib/rpm/* && yum -q -y install python33
-RUN scl enable python33 "easy_install-3.3 pip" && easy_install-2.7 pip
+RUN touch /var/lib/rpm/* && yum -q -y install rh-python35
+RUN scl enable rh-python35 "easy_install-3.5 pip" && easy_install-2.7 pip
# Old versions of setuptools cannot build a schema-salad package.
RUN pip install --upgrade setuptools
+# Add epel, we need it for the python-pam dependency
+RUN wget http://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
+RUN rpm -ivh epel-release-latest-7.noarch.rpm
+
RUN git clone --depth 1 git://git.curoverse.com/arvados.git /tmp/arvados && cd /tmp/arvados/services/api && /usr/local/rvm/bin/rvm-exec default bundle && cd /tmp/arvados/apps/workbench && /usr/local/rvm/bin/rvm-exec default bundle && rm -rf /tmp/arvados
ENV WORKSPACE /arvados
-CMD ["scl", "enable", "python33", "/usr/local/rvm/bin/rvm-exec default bash /jenkins/run-build-packages.sh --target centos7"]
+CMD ["scl", "enable", "rh-python35", "/usr/local/rvm/bin/rvm-exec default bash /jenkins/run-build-packages.sh --target centos7"]
ENV DEBIAN_FRONTEND noninteractive
# Install dependencies.
-RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip python3-venv python3-dev
+
+# Install virtualenv
+RUN /usr/bin/pip install virtualenv
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
ENV DEBIAN_FRONTEND noninteractive
# Install dependencies.
-RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip python3-venv python3-dev
+
+# Install virtualenv
+RUN /usr/bin/pip install virtualenv
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --no-tty --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
ENV DEBIAN_FRONTEND noninteractive
# Install dependencies.
-RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip python3.4-venv python3.4-dev
+
+# Install virtualenv
+RUN /usr/bin/pip install virtualenv
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
ENV DEBIAN_FRONTEND noninteractive
# Install dependencies.
-RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev libgnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev libgnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata python3-venv python3-dev
+
+# Install virtualenv
+RUN /usr/bin/pip install virtualenv
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
ENV DEBIAN_FRONTEND noninteractive
# Install dependencies.
-RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-pip libcurl4-gnutls-dev libgnutls28-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata
+RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-pip libcurl4-gnutls-dev libgnutls28-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata python3-venv python3-dev
+
+# Install virtualenv
+RUN /usr/bin/pip install virtualenv
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
test -d ubuntu1804/generated || mkdir ubuntu1804/generated
cp -rlt ubuntu1804/generated common-generated/*
-RVMKEY=rvm.asc
+RVMKEY1=mpapis.asc
+RVMKEY2=pkuczynski.asc
-common-generated-all: common-generated/$(RVMKEY)
+common-generated-all: common-generated/$(RVMKEY1) common-generated/$(RVMKEY2)
-common-generated/$(RVMKEY): common-generated
- wget -cqO common-generated/$(RVMKEY) https://rvm.io/pkuczynski.asc
+common-generated/$(RVMKEY1): common-generated
+ wget -cqO common-generated/$(RVMKEY1) https://rvm.io/mpapis.asc
+
+common-generated/$(RVMKEY2): common-generated
+ wget -cqO common-generated/$(RVMKEY2) https://rvm.io/pkuczynski.asc
common-generated:
mkdir common-generated
MAINTAINER Ward Vandewege <wvandewege@veritasgenetics.com>
# Install dependencies.
-RUN yum -q -y install scl-utils centos-release-scl which tar
+RUN yum -q -y install scl-utils centos-release-scl which tar wget
# Install RVM
-ADD generated/rvm.asc /tmp/
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
RUN touch /var/lib/rpm/* && \
- gpg --import /tmp/rvm.asc && \
+ gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
- /usr/local/rvm/bin/rvm alias create default ruby-2.3 && \
- /usr/local/rvm/bin/rvm-exec default gem install bundle && \
- /usr/local/rvm/bin/rvm-exec default gem install cure-fpm --version 1.6.0b
+ /usr/local/rvm/bin/rvm alias create default ruby-2.3
+
+# Add epel, we need it for the python-pam dependency
+RUN wget http://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
+RUN rpm -ivh epel-release-latest-7.noarch.rpm
COPY localrepo.repo /etc/yum.repos.d/localrepo.repo
apt-get -y install --no-install-recommends curl ca-certificates
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3
apt-get -y install --no-install-recommends curl ca-certificates gpg procps
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --no-tty --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3
apt-get -y install --no-install-recommends curl ca-certificates python2.7-dev python3 python-setuptools python3-setuptools libcurl4-gnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip binutils build-essential ca-certificates
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3
apt-get -y install --no-install-recommends curl ca-certificates
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3
apt-get -y install --no-install-recommends curl ca-certificates gnupg2
# Install RVM
-ADD generated/rvm.asc /tmp/
-RUN gpg --import /tmp/rvm.asc && \
+ADD generated/mpapis.asc /tmp/
+ADD generated/pkuczynski.asc /tmp/
+RUN gpg --import --no-tty /tmp/mpapis.asc && \
+ gpg --import --no-tty /tmp/pkuczynski.asc && \
curl -L https://get.rvm.io | bash -s stable && \
/usr/local/rvm/bin/rvm install 2.3 && \
/usr/local/rvm/bin/rvm alias create default ruby-2.3
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: AGPL-3.0
-
set -eu
+# Set up
+DEBUG=${ARVADOS_DEBUG:-0}
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+DASHQQ_UNLESS_DEBUG=-qq
+if [[ "$DEBUG" != "0" ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQQ_UNLESS_DEBUG=
+fi
+
# Multiple .deb based distros symlink to this script, so extract the target
# from the invocation path.
target=$(echo $0 | sed 's/.*test-packages-\([^.]*\)\.sh.*/\1/')
dpkg-query --show > "$ARV_PACKAGES_DIR/$1.before"
-apt-get -qq update
-apt-get --assume-yes --allow-unauthenticated install "$1"
+apt-get $DASHQQ_UNLESS_DEBUG update
+
+apt-get $DASHQQ_UNLESS_DEBUG -y --allow-unauthenticated install "$1" >"$STDOUT_IF_DEBUG" 2>"$STDERR_IF_DEBUG"
dpkg-query --show > "$ARV_PACKAGES_DIR/$1.after"
dpkg-deb -x $debpkg .
-while read so && [ -n "$so" ]; do
- echo
- echo "== Packages dependencies for $so =="
- ldd "$so" | awk '($3 ~ /^\//){print $3}' | sort -u | xargs dpkg -S | cut -d: -f1 | sort -u
-done <<EOF
+if [[ "$DEBUG" != "0" ]]; then
+ while read so && [ -n "$so" ]; do
+ echo
+ echo "== Packages dependencies for $so =="
+ ldd "$so" | awk '($3 ~ /^\//){print $3}' | sort -u | xargs dpkg -S | cut -d: -f1 | sort -u
+ done <<EOF
$(find -name '*.so')
EOF
+fi
exec /jenkins/package-testing/common-test-packages.sh "$1"
set -eu
+# Set up
+DEBUG=${ARVADOS_DEBUG:-0}
+STDOUT_IF_DEBUG=/dev/null
+STDERR_IF_DEBUG=/dev/null
+if [[ "$DEBUG" != "0" ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+fi
+
target=$(basename "$0" | grep -Eo '\bcentos[[:digit:]]+\b')
yum -q clean all
rpm -qa | sort > "$ARV_PACKAGES_DIR/$1.before"
-yum install --assumeyes $1
+yum install --assumeyes -e 0 $1
rpm -qa | sort > "$ARV_PACKAGES_DIR/$1.after"
rpm2cpio $(ls -t "$ARV_PACKAGES_DIR/$1"-*.rpm | head -n1) | cpio -idm 2>/dev/null
-find -name '*.so' | while read so; do
- echo -e "\n== Packages dependencies for $so =="
- ldd "$so" \
- | awk '($3 ~ /^\//){print $3}' | sort -u | xargs rpm -qf | sort -u
-done
+if [[ "$DEBUG" != "0" ]]; then
+ find -name '*.so' | while read so; do
+ echo -e "\n== Packages dependencies for $so =="
+ ldd "$so" \
+ | awk '($3 ~ /^\//){print $3}' | sort -u | xargs rpm -qf | sort -u
+ done
+fi
exec /jenkins/package-testing/common-test-packages.sh "$1"
#
# SPDX-License-Identifier: AGPL-3.0
-exec python <<EOF
+set -e
+
+arvados-node-manager --version
+
+exec /usr/share/python2.7/dist/arvados-node-manager/bin/python2.7 <<EOF
import libcloud.compute.types
import libcloud.compute.providers
libcloud.compute.providers.get_driver(libcloud.compute.types.Provider.AZURE_ARM)
set -e
arvados-cwl-runner --version
-
-exec python <<EOF
-import arvados_cwl
-print "arvados-cwl-runner version", arvados_cwl.__version__
-EOF
#
# SPDX-License-Identifier: AGPL-3.0
-exec python <<EOF
-import arvados_fuse
-print "Successfully imported arvados_fuse"
-EOF
+set -e
+
+arv-mount --version
#
# SPDX-License-Identifier: AGPL-3.0
-exec python2.7 <<EOF
+set -e
+
+arv-put --version
+
+/usr/share/python2.7/dist/python-arvados-python-client/bin/python2.7 << EOF
import arvados
print "Successfully imported arvados"
EOF
echo cwl_runner_version $cwl_runner_version python_sdk_version $python_sdk_version
if [[ "${python_sdk_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
- python_sdk_version="${python_sdk_version}-2"
+ python_sdk_version="${python_sdk_version}-1"
else
python_sdk_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
fi
cwl_runner_version_orig=$cwl_runner_version
if [[ "${cwl_runner_version}" != "${ARVADOS_BUILDING_VERSION}" ]]; then
- cwl_runner_version="${cwl_runner_version}-4"
+ cwl_runner_version="${cwl_runner_version}-1"
else
cwl_runner_version="${ARVADOS_BUILDING_VERSION}-${ARVADOS_BUILDING_ITERATION}"
fi
;;
--only-test)
test_packages=1
+ testing_one_package=1
packages="$2"; shift
;;
--force-test)
fi
if [[ -n "$test_packages" ]]; then
- if [[ -n "$(find $WORKSPACE/packages/$TARGET -name '*.rpm')" ]] ; then
- set +e
- /usr/bin/which createrepo >/dev/null
- if [[ "$?" != "0" ]]; then
- echo >&2
- echo >&2 "Error: please install createrepo. E.g. sudo apt-get install createrepo"
- echo >&2
- exit 1
- fi
- set -e
- createrepo $WORKSPACE/packages/$TARGET
+ if [[ -n "$(find $WORKSPACE/packages/$TARGET -name '*.rpm')" ]] ; then
+ set +e
+ /usr/bin/which createrepo >/dev/null
+ if [[ "$?" != "0" ]]; then
+ echo >&2
+ echo >&2 "Error: please install createrepo. E.g. sudo apt-get install createrepo"
+ echo >&2
+ exit 1
fi
+ set -e
+ createrepo $WORKSPACE/packages/$TARGET
+ fi
- if [[ -n "$(find $WORKSPACE/packages/$TARGET -name '*.deb')" ]] ; then
- (cd $WORKSPACE/packages/$TARGET
- dpkg-scanpackages . 2> >(grep -v 'warning' 1>&2) | tee Packages | gzip -c > Packages.gz
- apt-ftparchive -o APT::FTPArchive::Release::Origin=Arvados release . > Release
- )
- fi
+ if [[ -n "$(find $WORKSPACE/packages/$TARGET -name '*.deb')" ]] ; then
+ (cd $WORKSPACE/packages/$TARGET
+ dpkg-scanpackages . 2> >(grep -v 'warning' 1>&2) | tee Packages | gzip -c > Packages.gz
+ apt-ftparchive -o APT::FTPArchive::Release::Origin=Arvados release . > Release
+ )
+ fi
- COMMAND="/jenkins/package-testing/test-packages-$TARGET.sh"
- IMAGE="arvados/package-test:$TARGET"
+ COMMAND="/jenkins/package-testing/test-packages-$TARGET.sh"
+ IMAGE="arvados/package-test:$TARGET"
else
- IMAGE="arvados/build:$TARGET"
- if [[ "$COMMAND" != "" ]]; then
- COMMAND="/usr/local/rvm/bin/rvm-exec default bash /jenkins/$COMMAND --target $TARGET$DEBUG"
- fi
+ IMAGE="arvados/build:$TARGET"
+ if [[ "$COMMAND" != "" ]]; then
+ COMMAND="/usr/local/rvm/bin/rvm-exec default bash /jenkins/$COMMAND --target $TARGET$DEBUG"
+ fi
fi
JENKINS_DIR=$(dirname "$(readlink -e "$0")")
continue
fi
fi
+ # If we're testing all packages, we should not error out on packages that don't exist.
+ # If we are testing one specific package only (i.e. --only-test was given), we should
+ # error out if that package does not exist.
+ if [[ -z "$testing_one_package" ]]; then
+ MATCH=`find ${WORKSPACE}/packages/ -regextype posix-extended -regex .*${TARGET}/$p.*\\(deb\\|rpm\\)`
+ if [[ "$MATCH" == "" ]]; then
+ # No new package has been built that needs testing
+ echo "Skipping $p test because no package file is available to test."
+ continue
+ fi
+ fi
echo
echo "START: $p test on $IMAGE" >&2
# ulimit option can be removed when debian8 and ubuntu1404 are retired
fi
done
- touch ${WORKSPACE}/packages/.last_test_${TARGET}
+ if [[ "$FINAL_EXITCODE" == "0" ]]; then
+ touch ${WORKSPACE}/packages/.last_test_${TARGET}
+ fi
else
echo
echo "START: build packages on $IMAGE" >&2
EOF
-EXITCODE=0
+# Begin of user configuration
+
+# set to --no-cache-dir to disable pip caching
+CACHE_FLAG=
+
+MAINTAINER="Ward Vandewege <wvandewege@veritasgenetics.com>"
+VENDOR="Veritas Genetics, Inc."
+
+# End of user configuration
+
DEBUG=${ARVADOS_DEBUG:-0}
+EXITCODE=0
TARGET=debian8
COMMAND=
PYTHON2_INSTALL_LIB=lib/python$PYTHON2_VERSION/site-packages
PYTHON3_PACKAGE=$(rpm -qf "$(which python$PYTHON3_VERSION)" --queryformat '%{NAME}\n')
PYTHON3_PKG_PREFIX=$PYTHON3_PACKAGE
- PYTHON3_PREFIX=/opt/rh/python33/root/usr
+ PYTHON3_PREFIX=/opt/rh/rh-python35/root/usr
PYTHON3_INSTALL_LIB=lib/python$PYTHON3_VERSION/site-packages
export PYCURL_SSL_LIBRARY=nss
;;
perl Makefile.PL INSTALL_BASE=install >"$STDOUT_IF_DEBUG" && \
make install INSTALLDIRS=perl >"$STDOUT_IF_DEBUG" && \
fpm_build install/lib/=/usr/share libarvados-perl \
- "Curoverse, Inc." dir "$(version_from_git)" install/man/=/usr/share/man \
+ dir "$(version_from_git)" install/man/=/usr/share/man \
"$WORKSPACE/apache-2.0.txt=/usr/share/doc/libarvados-perl/apache-2.0.txt" && \
mv --no-clobber libarvados-perl*.$FORMAT "$WORKSPACE/packages/$TARGET/"
fi
cd "$SRC_BUILD_DIR"
PKG_VERSION=$(version_from_git)
cd $WORKSPACE/packages/$TARGET
- fpm_build $SRC_BUILD_DIR/=/usr/local/arvados/src arvados-src 'Curoverse, Inc.' 'dir' "$PKG_VERSION" "--exclude=usr/local/arvados/src/.git" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=The Arvados source code" "--architecture=all"
+ fpm_build $SRC_BUILD_DIR/=/usr/local/arvados/src arvados-src 'dir' "$PKG_VERSION" "--exclude=usr/local/arvados/src/.git" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=The Arvados source code" "--architecture=all"
rm -rf "$SRC_BUILD_DIR"
-
fi
)
package_go_binary tools/keep-exercise keep-exercise \
"Performance testing tool for Arvados Keep"
-
-# we need explicit debian_revision values in the dependencies for ruamel.yaml, because we have a package iteration
-# greater than zero. So we parse setup.py, get the ruamel.yaml dependencies, tell fpm not to automatically include
-# them in the package being built, and re-add them manually with an appropriate debian_revision value.
-# See #14552 for the reason for this (nasty) workaround. We use ${ruamel_depends[@]} in a few places further down
-# in this script.
-# Ward, 2018-11-28
-IFS=', ' read -r -a deps <<< `grep ruamel.yaml $WORKSPACE/sdk/python/setup.py |cut -f 3 -dl |sed -e "s/'//g"`
-declare -a ruamel_depends=()
-for i in ${deps[@]}; do
- i=`echo "$i" | sed -e 's!\([0-9]\)! \1!'`
- if [[ $i =~ .*\>.* ]]; then
- ruamel_depends+=(--depends "python-ruamel.yaml $i-1")
- elif [[ $i =~ .*\<.* ]]; then
- ruamel_depends+=(--depends "python-ruamel.yaml $i-9")
- else
- echo "Encountered ruamel dependency that I can't parse. Aborting..."
- exit 1
- fi
-done
-
-
# The Python SDK
-# Please resist the temptation to add --no-python-fix-name to the fpm call here
-# (which would remove the python- prefix from the package name), because this
-# package is a dependency of arvados-fuse, and fpm can not omit the python-
-# prefix from only one of the dependencies of a package... Maybe I could
-# whip up a patch and send it upstream, but that will be for another day. Ward,
-# 2014-05-15
-cd $WORKSPACE/packages/$TARGET
-rm -rf "$WORKSPACE/sdk/python/build"
-arvados_python_client_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/python/arvados_python_client.egg-info/PKG-INFO)}
-test_package_presence ${PYTHON2_PKG_PREFIX}-arvados-python-client "$arvados_python_client_version" python
-if [[ "$?" == "0" ]]; then
+fpm_build_virtualenv "arvados-python-client" "sdk/python"
+fpm_build_virtualenv "arvados-python-client" "sdk/python" "python3"
- fpm_build $WORKSPACE/sdk/python "${PYTHON2_PKG_PREFIX}-arvados-python-client" 'Curoverse, Inc.' 'python' "$arvados_python_client_version" "--url=https://arvados.org" "--description=The Arvados Python SDK" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --deb-recommends=git --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}"
-fi
-
-# cwl-runner
-cd $WORKSPACE/packages/$TARGET
-rm -rf "$WORKSPACE/sdk/cwl/build"
-arvados_cwl_runner_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/cwl/arvados_cwl_runner.egg-info/PKG-INFO)}
-declare -a iterargs=()
-if [[ -z "$ARVADOS_BUILDING_VERSION" ]]; then
- arvados_cwl_runner_iteration=4
- iterargs+=(--iteration $arvados_cwl_runner_iteration)
-else
- arvados_cwl_runner_iteration=
-fi
-test_package_presence ${PYTHON2_PKG_PREFIX}-arvados-cwl-runner "$arvados_cwl_runner_version" python "$arvados_cwl_runner_iteration"
-if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/sdk/cwl "${PYTHON2_PKG_PREFIX}-arvados-cwl-runner" 'Curoverse, Inc.' 'python' "$arvados_cwl_runner_version" "--url=https://arvados.org" "--description=The Arvados CWL runner" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --depends "${PYTHON2_PKG_PREFIX}-subprocess32 >= 3.5.0" --depends "${PYTHON2_PKG_PREFIX}-pathlib2" --depends "${PYTHON2_PKG_PREFIX}-scandir" --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}" "${iterargs[@]}"
-fi
-
-# schema_salad. This is a python dependency of arvados-cwl-runner,
-# but we can't use the usual PYTHONPACKAGES way to build this package due to the
-# intricacies of how version numbers get generated in setup.py: we need a specific version,
-# e.g. 1.7.20160316203940. If we don't explicitly list that version with the -v
-# argument to fpm, and instead specify it as schema_salad==1.7.20160316203940, we get
-# a package with version 1.7. That's because our gittagger hack is not being
-# picked up by self.distribution.get_version(), which is called from
-# https://github.com/jordansissel/fpm/blob/master/lib/fpm/package/pyfpm/get_metadata.py
-# by means of this command:
-#
-# python2.7 setup.py --command-packages=pyfpm get_metadata --output=metadata.json
-#
-# So we build this thing separately.
-#
-# Ward, 2016-03-17
-saladversion=$(cat "$WORKSPACE/sdk/cwl/setup.py" | grep schema-salad== | sed "s/.*==\(.*\)'.*/\1/")
-test_package_presence python-schema-salad "$saladversion" python 2
-if [[ "$?" == "0" ]]; then
- fpm_build schema_salad "" "" python $saladversion --depends "${PYTHON2_PKG_PREFIX}-lockfile >= 1:0.12.2-2" --depends "${PYTHON2_PKG_PREFIX}-avro = 1.8.1-2" --iteration 2
-fi
-
-# And for cwltool we have the same problem as for schema_salad. Ward, 2016-03-17
-cwltoolversion=$(cat "$WORKSPACE/sdk/cwl/setup.py" | grep cwltool== | sed "s/.*==\(.*\)'.*/\1/")
-test_package_presence python-cwltool "$cwltoolversion" python 3
-if [[ "$?" == "0" ]]; then
- fpm_build cwltool "" "" python $cwltoolversion --iteration 3 --python-disable-dependency ruamel.yaml "${ruamel_depends[@]}"
-fi
+# Arvados cwl runner
+fpm_build_virtualenv "arvados-cwl-runner" "sdk/cwl"
# The PAM module
-if [[ $TARGET =~ debian|ubuntu ]]; then
- cd $WORKSPACE/packages/$TARGET
- rm -rf "$WORKSPACE/sdk/pam/build"
- libpam_arvados_version=$(awk '($1 == "Version:"){print $2}' $WORKSPACE/sdk/pam/arvados_pam.egg-info/PKG-INFO)
- test_package_presence libpam-arvados "$libpam_arvados_version" python
- if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/sdk/pam libpam-arvados 'Curoverse, Inc.' 'python' "$libpam_arvados_version" "--url=https://arvados.org" "--description=PAM module for authenticating shell logins using Arvados API tokens" --depends libpam-python
- fi
-fi
+fpm_build_virtualenv "libpam-arvados" "sdk/pam"
# The FUSE driver
-# Please see comment about --no-python-fix-name above; we stay consistent and do
-# not omit the python- prefix first.
-cd $WORKSPACE/packages/$TARGET
-rm -rf "$WORKSPACE/services/fuse/build"
-arvados_fuse_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/fuse/arvados_fuse.egg-info/PKG-INFO)}
-test_package_presence "${PYTHON2_PKG_PREFIX}-arvados-fuse" "$arvados_fuse_version" python
-if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/services/fuse "${PYTHON2_PKG_PREFIX}-arvados-fuse" 'Curoverse, Inc.' 'python' "$arvados_fuse_version" "--url=https://arvados.org" "--description=The Keep FUSE driver" --depends "${PYTHON2_PKG_PREFIX}-setuptools"
-fi
+fpm_build_virtualenv "arvados-fuse" "services/fuse"
# The node manager
-cd $WORKSPACE/packages/$TARGET
-rm -rf "$WORKSPACE/services/nodemanager/build"
-nodemanager_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/nodemanager/arvados_node_manager.egg-info/PKG-INFO)}
-iteration="${ARVADOS_BUILDING_ITERATION:-1}"
-test_package_presence arvados-node-manager "$nodemanager_version" python "$iteration"
-if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/services/nodemanager arvados-node-manager 'Curoverse, Inc.' 'python' "$nodemanager_version" "--url=https://arvados.org" "--description=The Arvados node manager" --depends "${PYTHON2_PKG_PREFIX}-setuptools" --iteration "$iteration"
-fi
+fpm_build_virtualenv "arvados-node-manager" "services/nodemanager"
# The Docker image cleaner
-cd $WORKSPACE/packages/$TARGET
-rm -rf "$WORKSPACE/services/dockercleaner/build"
-dockercleaner_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/services/dockercleaner/arvados_docker_cleaner.egg-info/PKG-INFO)}
-iteration="${ARVADOS_BUILDING_ITERATION:-4}"
-test_package_presence arvados-docker-cleaner "$dockercleaner_version" python "$iteration"
-if [[ "$?" == "0" ]]; then
- fpm_build $WORKSPACE/services/dockercleaner arvados-docker-cleaner 'Curoverse, Inc.' 'python3' "$dockercleaner_version" "--url=https://arvados.org" "--description=The Arvados Docker image cleaner" --depends "${PYTHON3_PKG_PREFIX}-websocket-client = 0.37.0" --iteration "$iteration"
-fi
+fpm_build_virtualenv "arvados-docker-cleaner" "services/dockercleaner" "python3"
# The Arvados crunchstat-summary tool
-cd $WORKSPACE/packages/$TARGET
-crunchstat_summary_version=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' $WORKSPACE/tools/crunchstat-summary/crunchstat_summary.egg-info/PKG-INFO)}
-iteration="${ARVADOS_BUILDING_ITERATION:-2}"
-test_package_presence "$PYTHON2_PKG_PREFIX"-crunchstat-summary "$crunchstat_summary_version" python "$iteration"
-if [[ "$?" == "0" ]]; then
- rm -rf "$WORKSPACE/tools/crunchstat-summary/build"
- fpm_build $WORKSPACE/tools/crunchstat-summary ${PYTHON2_PKG_PREFIX}-crunchstat-summary 'Curoverse, Inc.' 'python' "$crunchstat_summary_version" "--url=https://arvados.org" "--description=Crunchstat-summary reads Arvados Crunch log files and summarize resource usage" --iteration "$iteration"
-fi
-
-# Forked libcloud
-if test_package_presence "$PYTHON2_PKG_PREFIX"-apache-libcloud "$LIBCLOUD_PIN" python 2
-then
- LIBCLOUD_DIR=$(mktemp -d)
- (
- cd $LIBCLOUD_DIR
- git clone $DASHQ_UNLESS_DEBUG https://github.com/curoverse/libcloud.git .
- git checkout $DASHQ_UNLESS_DEBUG apache-libcloud-$LIBCLOUD_PIN
- # libcloud is absurdly noisy without -q, so force -q here
- OLD_DASHQ_UNLESS_DEBUG=$DASHQ_UNLESS_DEBUG
- DASHQ_UNLESS_DEBUG=-q
- handle_python_package
- DASHQ_UNLESS_DEBUG=$OLD_DASHQ_UNLESS_DEBUG
- )
-
- # libcloud >= 2.3.0 now requires python-requests 2.4.3 or higher, otherwise
- # it throws
- # ImportError: No module named packages.urllib3.poolmanager
- # when loaded. We only see this problem on ubuntu1404, because that is our
- # only supported distribution that ships with a python-requests older than
- # 2.4.3.
- fpm_build $LIBCLOUD_DIR "$PYTHON2_PKG_PREFIX"-apache-libcloud "" python "" --iteration 2 --depends 'python-requests >= 2.4.3'
- rm -rf $LIBCLOUD_DIR
-fi
-
-# Python 2 dependencies
-declare -a PIP_DOWNLOAD_SWITCHES=(--no-deps)
-# Add --no-use-wheel if this pip knows it.
-pip install --no-use-wheel >/dev/null 2>&1
-case "$?" in
- 0) PIP_DOWNLOAD_SWITCHES+=(--no-use-wheel) ;;
- 1) ;;
- 2) ;;
- *) echo "WARNING: 'pip install --no-use-wheel' test returned unknown exit code $?" ;;
-esac
-
-while read -r line || [[ -n "$line" ]]; do
-# echo "Text read from file: $line"
- if [[ "$line" =~ ^# ]]; then
- continue
- fi
- IFS='|'; arr=($line); unset IFS
-
- dist=${arr[0]}
-
- IFS=',';dists=($dist); unset IFS
-
- MATCH=0
- for d in "${dists[@]}"; do
- if [[ "$d" == "$TARGET" ]] || [[ "$d" == "all" ]]; then
- MATCH=1
- fi
- done
-
- if [[ "$MATCH" != "1" ]]; then
- continue
- fi
- name=${arr[1]}
- version=${arr[2]}
- iteration=${arr[3]}
- pkgtype=${arr[4]}
- arch=${arr[5]}
- extra=${arr[6]}
- declare -a 'extra_arr=('"$extra"')'
-
- if [[ "$FORMAT" == "rpm" ]]; then
- if [[ "$arch" == "all" ]]; then
- arch="noarch"
- fi
- if [[ "$arch" == "amd64" ]]; then
- arch="x86_64"
- fi
- fi
-
- if [[ "$pkgtype" == "python" ]]; then
- outname=$(echo "$name" | sed -e 's/^python-//' -e 's/_/-/g' -e "s/^/${PYTHON2_PKG_PREFIX}-/")
- else
- outname=$(echo "$name" | sed -e 's/^python-//' -e 's/_/-/g' -e "s/^/${PYTHON3_PKG_PREFIX}-/")
- fi
-
- if [[ -n "$ONLY_BUILD" ]] && [[ "$outname" != "$ONLY_BUILD" ]] ; then
- continue
- fi
-
- case "$name" in
- httplib2|google-api-python-client)
- test_package_presence $outname $version $pkgtype $iteration $arch
- if [[ "$?" == "0" ]]; then
- # Work around 0640 permissions on some package files.
- # See #7591 and #7991.
- pyfpm_workdir=$(mktemp --tmpdir -d pyfpm-XXXXXX) && (
- set -e
- cd "$pyfpm_workdir"
- PIP_VERSION=`python$PYTHON2_VERSION -c "import pip; print(pip.__version__)" |cut -f1 -d.`
- if (( $PIP_VERSION < 8 )); then
- pip install "${PIP_DOWNLOAD_SWITCHES[@]}" --download . "$name==$version"
- else
- pip download --no-deps --no-binary :all: "$name==$version"
- fi
- # Sometimes pip gives us a tarball, sometimes a zip file...
- DOWNLOADED=`ls $name-*`
- [[ "$DOWNLOADED" =~ ".tar" ]] && tar -xf $DOWNLOADED
- [[ "$DOWNLOADED" =~ ".zip" ]] && unzip $DOWNLOADED
- cd "$name"-*/
- "python$PYTHON2_VERSION" setup.py $DASHQ_UNLESS_DEBUG egg_info build
- chmod -R go+rX .
- set +e
- fpm_build . "$outname" "" "$pkgtype" "$version" --iteration "$iteration" "${extra_arr[@]}"
- # The upload step uses the package timestamp to determine
- # if it is new. --no-clobber plays nice with that.
- mv --no-clobber "$outname"*.$FORMAT "$WORKSPACE/packages/$TARGET"
- )
- if [ 0 != "$?" ]; then
- echo "ERROR: $name build process failed"
- EXITCODE=1
- fi
- if [ -n "$pyfpm_workdir" ]; then
- rm -rf "$pyfpm_workdir"
- fi
- fi
- ;;
- *)
- test_package_presence $outname $version $pkgtype $iteration $arch
- if [[ "$?" == "0" ]]; then
- fpm_build "$name" "$outname" "" "$pkgtype" "$version" --iteration "$iteration" "${extra_arr[@]}"
- fi
- ;;
- esac
-
-done <`dirname "$(readlink -f "$0")"`"/build.list"
+fpm_build_virtualenv "crunchstat-summary" "tools/crunchstat-summary"
# Build the API server package
test_rails_package_presence arvados-api-server "$WORKSPACE/services/api"
fi
switches+=("$WORKSPACE/${license_file}=/usr/share/doc/$prog/${license_file}")
- fpm_build "$GOPATH/bin/${basename}=/usr/bin/${prog}" "${prog}" 'Curoverse, Inc.' dir "${version}" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=${description}" "${switches[@]}"
+ fpm_build "$GOPATH/bin/${basename}=/usr/bin/${prog}" "${prog}" dir "${version}" "--url=https://arvados.org" "--license=GNU Affero General Public License, version 3.0" "--description=${description}" "${switches[@]}"
}
default_iteration() {
rpm_architecture="x86_64"
deb_architecture="amd64"
- if [[ "$pkgtype" =~ ^(python|python3)$ ]]; then
- rpm_architecture="noarch"
- deb_architecture="all"
- fi
-
if [[ "$pkgtype" =~ ^(src)$ ]]; then
rpm_architecture="noarch"
deb_architecture="all"
echo "Package $complete_pkgname exists, not rebuilding!"
curl -o ./${complete_pkgname} http://apt.arvados.org/pool/${D}/main/${repo_subdir}/${complete_pkgname}
return 1
- elif test -f "$WORKSPACE/packages/$TARGET/processed/${complete_pkgname}" ; then
+ elif test -f "$WORKSPACE/packages/$TARGET/processed/${complete_pkgname}" ; then
echo "Package $complete_pkgname exists, not rebuilding!"
return 1
else
echo "Package $complete_pkgname exists, not rebuilding!"
curl -o ./${complete_pkgname} ${centos_repo}${complete_pkgname}
return 1
+ elif test -f "$WORKSPACE/packages/$TARGET/processed/${complete_pkgname}" ; then
+ echo "Package $complete_pkgname exists, not rebuilding!"
+ return 1
else
echo "Package $complete_pkgname not found, building"
return 0
return 1
fi
local railsdir="/var/www/${pkgname%-server}/current"
- local -a pos_args=("$srcdir/=$railsdir" "$pkgname" "Curoverse, Inc." dir "$version")
+ local -a pos_args=("$srcdir/=$railsdir" "$pkgname" dir "$version")
local license_arg="$license_path=$railsdir/$(basename "$license_path")"
local -a switches=(--after-install "$scripts_dir/postinst"
--before-remove "$scripts_dir/prerm"
rm -rf "$scripts_dir"
}
+# Build python packages with a virtualenv built-in
+fpm_build_virtualenv () {
+ PKG=$1
+ shift
+ PKG_DIR=$1
+ shift
+ PACKAGE_TYPE=${1:-python}
+ shift
+
+ # Set up
+ STDOUT_IF_DEBUG=/dev/null
+ STDERR_IF_DEBUG=/dev/null
+ DASHQ_UNLESS_DEBUG=-q
+ if [[ "$DEBUG" != "0" ]]; then
+ STDOUT_IF_DEBUG=/dev/stdout
+ STDERR_IF_DEBUG=/dev/stderr
+ DASHQ_UNLESS_DEBUG=
+ fi
+ if [[ "$ARVADOS_BUILDING_ITERATION" == "" ]]; then
+ ARVADOS_BUILDING_ITERATION=1
+ fi
+
+ local python=""
+ case "$PACKAGE_TYPE" in
+ python)
+ # All Arvados Python2 packages depend on Python 2.7.
+ # Make sure we build with that for consistency.
+ python=python2.7
+ PACKAGE_PREFIX=$PYTHON2_PKG_PREFIX
+ ;;
+ python3)
+ PACKAGE_PREFIX=$PYTHON3_PKG_PREFIX
+ python=python3
+ ;;
+ esac
+
+ if [[ "$PKG" != "libpam-arvados" ]] &&
+ [[ "$PKG" != "arvados-node-manager" ]] &&
+ [[ "$PKG" != "arvados-docker-cleaner" ]]; then
+ PYTHON_PKG=$PACKAGE_PREFIX-$PKG
+ else
+ # Exception to our package naming convention
+ PYTHON_PKG=$PKG
+ fi
+
+ if [[ -n "$ONLY_BUILD" ]] && [[ "$PYTHON_PKG" != "$ONLY_BUILD" ]] && [[ "$PKG" != "$ONLY_BUILD" ]]; then
+ return 0
+ fi
+
+ cd $WORKSPACE/$PKG_DIR
+
+ rm -rf dist/*
+
+ if ! $python setup.py $DASHQ_UNLESS_DEBUG sdist; then
+ echo "Error, unable to run python setup.py sdist for $PKG"
+ exit 1
+ fi
+
+ PACKAGE_PATH=`(cd dist; ls *tar.gz)`
+
+ # Determine the package version from the generated sdist archive
+ PYTHON_VERSION=${ARVADOS_BUILDING_VERSION:-$(awk '($1 == "Version:"){print $2}' *.egg-info/PKG-INFO)}
+
+ # See if we actually need to build this package; does it exist already?
+ # We can't do this earlier than here, because we need PYTHON_VERSION...
+ # This isn't so bad; the sdist call above is pretty quick compared to
+ # the invocation of virtualenv and fpm, below.
+ if ! test_package_presence "$PYTHON_PKG" $PYTHON_VERSION $PACKAGE_TYPE $ARVADOS_BUILDING_ITERATION; then
+ return 0
+ fi
+
+ echo "Building $FORMAT package for $PKG from $PKG_DIR"
+
+ # Package the sdist in a virtualenv
+ echo "Creating virtualenv..."
+
+ cd dist
+
+ rm -rf build
+ rm -f $PYTHON_PKG*deb
+
+ virtualenv_command="virtualenv --python `which $python` $DASHQ_UNLESS_DEBUG build/usr/share/$python/dist/$PYTHON_PKG"
+
+ if ! $virtualenv_command; then
+ echo "Error, unable to run"
+ echo " $virtualenv_command"
+ exit 1
+ fi
+
+ if ! build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U pip; then
+ echo "Error, unable to upgrade pip with"
+ echo " build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U pip"
+ exit 1
+ fi
+ if ! build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U wheel; then
+ echo "Error, unable to upgrade wheel with"
+ echo " build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U wheel"
+ exit 1
+ fi
+
+ if [[ "$TARGET" != "centos7" ]] || [[ "$PYTHON_PKG" != "python-arvados-fuse" ]]; then
+ build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG $PACKAGE_PATH
+ else
+ # centos7 needs these special tweaks to install python-arvados-fuse
+ build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG docutils
+ PYCURL_SSL_LIBRARY=nss build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG $PACKAGE_PATH
+ fi
+
+ if [[ "$?" != "0" ]]; then
+ echo "Error, unable to run"
+ echo " build/usr/share/$python/dist/$PYTHON_PKG/bin/pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG $PACKAGE_PATH"
+ exit 1
+ fi
+
+ cd build/usr/share/$python/dist/$PYTHON_PKG/
+
+ # Replace the shebang lines in all python scripts, and handle the activate
+  # scripts too. This is a functional replacement of the 237 line
+ # virtualenv_tools.py script that doesn't work in python3 without serious
+ # patching, minus the parts we don't need (modifying pyc files, etc).
+ for binfile in `ls bin/`; do
+ if ! file --mime bin/$binfile |grep -q binary; then
+ # Not a binary file
+ if [[ "$binfile" =~ ^activate(.csh|.fish|)$ ]]; then
+ # these 'activate' scripts need special treatment
+ sed -i "s/VIRTUAL_ENV=\".*\"/VIRTUAL_ENV=\"\/usr\/share\/$python\/dist\/$PYTHON_PKG\"/" bin/$binfile
+ sed -i "s/VIRTUAL_ENV \".*\"/VIRTUAL_ENV \"\/usr\/share\/$python\/dist\/$PYTHON_PKG\"/" bin/$binfile
+ else
+ if grep -q -E '^#!.*/bin/python\d?' bin/$binfile; then
+ # Replace shebang line
+ sed -i "1 s/^.*$/#!\/usr\/share\/$python\/dist\/$PYTHON_PKG\/bin\/python/" bin/$binfile
+ fi
+ fi
+ fi
+ done
+
+ cd - >$STDOUT_IF_DEBUG
+
+ find build -iname '*.pyc' -exec rm {} \;
+ find build -iname '*.pyo' -exec rm {} \;
+
+ # Finally, generate the package
+ echo "Creating package..."
+
+ declare -a COMMAND_ARR=("fpm" "-s" "dir" "-t" "$FORMAT")
+
+ if [[ "$MAINTAINER" != "" ]]; then
+ COMMAND_ARR+=('--maintainer' "$MAINTAINER")
+ fi
+
+ if [[ "$VENDOR" != "" ]]; then
+ COMMAND_ARR+=('--vendor' "$VENDOR")
+ fi
+
+ COMMAND_ARR+=('--url' 'https://arvados.org')
+
+ # Get description
+ DESCRIPTION=`grep '\sdescription' $WORKSPACE/$PKG_DIR/setup.py|cut -f2 -d=|sed -e "s/[',\\"]//g"`
+ COMMAND_ARR+=('--description' "$DESCRIPTION")
+
+ # Get license string
+ LICENSE_STRING=`grep license $WORKSPACE/$PKG_DIR/setup.py|cut -f2 -d=|sed -e "s/[',\\"]//g"`
+ COMMAND_ARR+=('--license' "$LICENSE_STRING")
+
+ # 12271 - As FPM-generated packages don't include scripts by default, the
+ # packages cleanup on upgrade depends on files being listed on the %files
+ # section in the generated SPEC files. To remove DIRECTORIES, they need to
+  # be listed in that section too, so we need to add this parameter to properly
+ # remove lingering dirs. But this only works for python2: if used on
+ # python33, it includes dirs like /opt/rh/python33 that belong to
+ # other packages.
+ if [[ "$FORMAT" == "rpm" ]] && [[ "$python" == "python2.7" ]]; then
+ COMMAND_ARR+=('--rpm-auto-add-directories')
+ fi
+
+ if [[ "$PKG" == "arvados-python-client" ]]; then
+ if [[ "$python" == "python2.7" ]]; then
+ COMMAND_ARR+=('--conflicts' "$PYTHON3_PKG_PREFIX-$PKG")
+ else
+ COMMAND_ARR+=('--conflicts' "$PYTHON2_PKG_PREFIX-$PKG")
+ fi
+ fi
+
+ if [[ "$DEBUG" != "0" ]]; then
+ COMMAND_ARR+=('--verbose' '--log' 'info')
+ fi
+
+ COMMAND_ARR+=('-v' "$PYTHON_VERSION")
+ COMMAND_ARR+=('--iteration' "$ARVADOS_BUILDING_ITERATION")
+ COMMAND_ARR+=('-n' "$PYTHON_PKG")
+ COMMAND_ARR+=('-C' "build")
+
+ if [[ -e "$WORKSPACE/$PKG_DIR/$PKG.service" ]]; then
+ COMMAND_ARR+=('--after-install' "${WORKSPACE}/build/go-python-package-scripts/postinst")
+ COMMAND_ARR+=('--before-remove' "${WORKSPACE}/build/go-python-package-scripts/prerm")
+ fi
+
+ if [[ "$python" == "python2.7" ]]; then
+ COMMAND_ARR+=('--depends' "$PYTHON2_PACKAGE")
+ else
+ COMMAND_ARR+=('--depends' "$PYTHON3_PACKAGE")
+ fi
+
+ # avoid warning
+ COMMAND_ARR+=('--deb-no-default-config-files')
+
+ # Append --depends X and other arguments specified by fpm-info.sh in
+ # the package source dir. These are added last so they can override
+ # the arguments added by this script.
+ declare -a fpm_args=()
+ declare -a fpm_depends=()
+
+ fpminfo="$WORKSPACE/$PKG_DIR/fpm-info.sh"
+ if [[ -e "$fpminfo" ]]; then
+ echo "Loading fpm overrides from $fpminfo"
+ if ! source "$fpminfo"; then
+ echo "Error, unable to source $WORKSPACE/$PKG_DIR/fpm-info.sh for $PKG"
+ exit 1
+ fi
+ fi
+
+ for i in "${fpm_depends[@]}"; do
+ COMMAND_ARR+=('--depends' "$i")
+ done
+
+ COMMAND_ARR+=("${fpm_args[@]}")
+
+ # Make sure to install all our package binaries in /usr/bin.
+ # We have to walk $WORKSPACE/$PKG_DIR/bin rather than
+ # $WORKSPACE/build/usr/share/$python/dist/$PYTHON_PKG/bin/ to get the list
+ # because the latter also includes all the python binaries for the virtualenv.
+ # We have to take the copies of our binaries from the latter directory, though,
+ # because those are the ones we rewrote the shebang line of, above.
+ if [[ -e "$WORKSPACE/$PKG_DIR/bin" ]]; then
+ for binary in `ls $WORKSPACE/$PKG_DIR/bin`; do
+ COMMAND_ARR+=("usr/share/$python/dist/$PYTHON_PKG/bin/$binary=/usr/bin/")
+ done
+ fi
+
+ # the libpam module should place this file in the historically correct place
+ # so as not to break backwards compatibility
+ if [[ -e "$WORKSPACE/$PKG_DIR/dist/build/usr/share/python2.7/dist/libpam-arvados/lib/security/libpam_arvados.py" ]]; then
+ COMMAND_ARR+=("usr/share/$python/dist/$PYTHON_PKG/data/lib/security/libpam_arvados.py=/usr/data/lib/security/")
+ fi
+
+  # the python-arvados-cwl-runner package comes with cwltool; expose that version
+ if [[ -e "$WORKSPACE/$PKG_DIR/dist/build/usr/share/python2.7/dist/python-arvados-cwl-runner/bin/cwltool" ]]; then
+ COMMAND_ARR+=("usr/share/python2.7/dist/python-arvados-cwl-runner/bin/cwltool=/usr/bin/")
+ fi
+
+ COMMAND_ARR+=(".")
+
+ FPM_RESULTS=$("${COMMAND_ARR[@]}")
+ FPM_EXIT_CODE=$?
+
+ # if something went wrong and debug is off, print out the fpm command that errored
+ if ! fpm_verify $FPM_EXIT_CODE $FPM_RESULTS && [[ "$STDOUT_IF_DEBUG" == "/dev/null" ]]; then
+ echo "fpm returned an error executing the command:"
+ echo
+ echo -e "\n${COMMAND_ARR[@]}\n"
+ else
+ echo `ls *$FORMAT`
+ mv $WORKSPACE/$PKG_DIR/dist/*$FORMAT $WORKSPACE/packages/$TARGET/
+ fi
+ echo
+}
+
# Build packages for everything
fpm_build () {
# The package source. Depending on the source type, this can be a
# The name of the package to build.
PACKAGE_NAME=$1
shift
- # Optional: the vendor of the package. Should be "Curoverse, Inc." for
- # packages of our own software. Passed to fpm --vendor.
- VENDOR=$1
- shift
- # The type of source package. Passed to fpm -s. Default "python".
- PACKAGE_TYPE=${1:-python}
+ # The type of source package. Passed to fpm -s. Default "dir".
+ PACKAGE_TYPE=${1:-dir}
shift
# Optional: the package version number. Passed to fpm -v.
VERSION=$1
fi
local default_iteration_value="$(default_iteration "$PACKAGE" "$VERSION" "$PACKAGE_TYPE")"
- local python=""
- case "$PACKAGE_TYPE" in
- python)
- # All Arvados Python2 packages depend on Python 2.7.
- # Make sure we build with that for consistency.
- python=python2.7
- set -- "$@" --python-bin python2.7 \
- "${PYTHON_FPM_INSTALLER[@]}" \
- --python-package-name-prefix "$PYTHON2_PKG_PREFIX" \
- --prefix "$PYTHON2_PREFIX" \
- --python-install-lib "$PYTHON2_INSTALL_LIB" \
- --python-install-data . \
- --exclude "${PYTHON2_INSTALL_LIB#/}/tests" \
- --depends "$PYTHON2_PACKAGE"
- ;;
- python3)
- # fpm does not actually support a python3 package type. Instead
- # we recognize it as a convenience shortcut to add several
- # necessary arguments to fpm's command line later, after we're
- # done handling positional arguments.
- PACKAGE_TYPE=python
- python=python3
- set -- "$@" --python-bin python3 \
- "${PYTHON3_FPM_INSTALLER[@]}" \
- --python-package-name-prefix "$PYTHON3_PKG_PREFIX" \
- --prefix "$PYTHON3_PREFIX" \
- --python-install-lib "$PYTHON3_INSTALL_LIB" \
- --python-install-data . \
- --exclude "${PYTHON3_INSTALL_LIB#/}/tests" \
- --depends "$PYTHON3_PACKAGE"
- ;;
- esac
-
- declare -a COMMAND_ARR=("fpm" "--maintainer=Ward Vandewege <ward@curoverse.com>" "-s" "$PACKAGE_TYPE" "-t" "$FORMAT")
+ declare -a COMMAND_ARR=("fpm" "-s" "$PACKAGE_TYPE" "-t" "$FORMAT")
if [ python = "$PACKAGE_TYPE" ] && [ deb = "$FORMAT" ]; then
# Dependencies are built from setup.py. Since setup.py will never
# refer to Debian package iterations, it doesn't make sense to
# 12271 - As FPM-generated packages don't include scripts by default, the
# packages cleanup on upgrade depends on files being listed on the %files
# section in the generated SPEC files. To remove DIRECTORIES, they need to
- # be listed in that sectiontoo, so we need to add this parameter to properly
+ # be listed in that section too, so we need to add this parameter to properly
# remove lingering dirs. But this only works for python2: if used on
# python33, it includes dirs like /opt/rh/python33 that belong to
# other packages.
COMMAND_ARR+=('--rpm-auto-add-directories')
fi
- if [[ "${DEBUG:-0}" != "0" ]]; then
+ if [[ "$DEBUG" != "0" ]]; then
COMMAND_ARR+=('--verbose' '--log' 'info')
fi
COMMAND_ARR+=('-n' "$PACKAGE_NAME")
fi
+ if [[ "$MAINTAINER" != "" ]]; then
+ COMMAND_ARR+=('--maintainer' "$MAINTAINER")
+ fi
+
if [[ "$VENDOR" != "" ]]; then
COMMAND_ARR+=('--vendor' "$VENDOR")
fi
COMMAND_ARR+=(--iteration "$default_iteration_value")
fi
- if [[ python = "$PACKAGE_TYPE" ]] && [[ -e "${PACKAGE}/${PACKAGE_NAME}.service" ]]
- then
- COMMAND_ARR+=(
- --after-install "${WORKSPACE}/build/go-python-package-scripts/postinst"
- --before-remove "${WORKSPACE}/build/go-python-package-scripts/prerm"
- )
- fi
-
# Append --depends X and other arguments specified by fpm-info.sh in
# the package source dir. These are added last so they can override
# the arguments added by this script.
declare -a fpm_exclude=()
declare -a fpm_dirs=(
# source dir part of 'dir' package ("/source=/dest" => "/source"):
- "${PACKAGE%%=/*}"
- # backports ("llfuse>=1.0" => "backports/python-llfuse")
- "${WORKSPACE}/backports/${PACKAGE_TYPE}-${PACKAGE%%[<=>]*}")
- if [[ -n "$PACKAGE_NAME" ]]; then
- fpm_dirs+=("${WORKSPACE}/backports/${PACKAGE_NAME}")
- fi
+ "${PACKAGE%%=/*}")
for pkgdir in "${fpm_dirs[@]}"; do
fpminfo="$pkgdir/fpm-info.sh"
if [[ -e "$fpminfo" ]]; then
}
export PERLINSTALLBASE
-export PERLLIB="$PERLINSTALLBASE/lib/perl5:${PERLLIB:+$PERLLIB}"
+export PERL5LIB="$PERLINSTALLBASE/lib/perl5${PERL5LIB:+:$PERL5LIB}"
export R_LIBS
sdk/python
sdk/python:py3
sdk/cwl
+ sdk/cwl:py3
services/dockercleaner:py3
services/fuse
services/nodemanager
TODO: extract this information based on git commit messages and generate changelogs / release notes automatically.
{% endcomment %}
+h3. current master branch
+
+h4. Python packaging change
+
+As part of story "#9945":https://dev.arvados.org/issues/9945, the distribution packaging (deb/rpm) of our Python packages has changed. These packages now include a built-in virtualenv to reduce dependencies on system packages. We have also stopped packaging and publishing backports for all the Python dependencies of our packages, as they are no longer needed.
+
+One practical consequence of this change is that the use of the Arvados Python SDK (aka "import arvados") will require a tweak if the SDK was installed from a distribution package. It now requires the loading of the virtualenv environment from our packages. The "Install documentation for the Arvados Python SDK":/sdk/python/sdk-python.html reflects this change. This does not affect the use of the command line tools (e.g. arv-get, etc.).
+
+Python scripts that import the Arvados SDK from a distribution package will need to be tweaked to load the correct Python environment.
+
+This can be done by activating the virtualenv outside of the script:
+
+<notextile>
+<pre>~$ <code class="userinput">source /usr/share/python2.7/dist/python-arvados-python-client/bin/activate</code>
+(python-arvados-python-client) ~$ <code class="userinput">path-to-the-python-script</code>
+</pre>
+</notextile>
+
+Alternatively, by updating the shebang line at the start of the script to:
+
+<notextile>
+<pre>
+#!/usr/share/python2.7/dist/python-arvados-python-client/bin/python
+</pre>
+</notextile>
+
+h4. python-arvados-cwl-runner deb/rpm package now conflicts with python-cwltool deb/rpm package
+
+As part of story "#9945":https://dev.arvados.org/issues/9945, the distribution packaging (deb/rpm) of our Python packages has changed. The python-arvados-cwl-runner package now includes a version of cwltool. If present, the python-cwltool and cwltool distribution packages will need to be uninstalled before the python-arvados-cwl-runner deb or rpm package can be installed.
+
+h4. Centos7 Python 3 dependency upgraded to rh-python35
+
+As part of story "#9945":https://dev.arvados.org/issues/9945, the Python 3 dependency for Centos7 Arvados packages was upgraded from SCL python33 to rh-python35.
+
+h4. Centos7 package for libpam-arvados depends on the python-pam package, which is available from EPEL
+
+As part of story "#9945":https://dev.arvados.org/issues/9945, it was discovered that the Centos7 package for libpam-arvados was missing a dependency on the python-pam package, which is available from the EPEL repository. The dependency has been added to the libpam-arvados package. This means that going forward, the EPEL repository will need to be enabled to install libpam-arvados on Centos7.
+
h3. v1.3.0 (2018-12-05)
This release includes several database migrations, which will be executed automatically as part of the API server upgrade. On large Arvados installations, these migrations will take a while. We've seen the upgrade take 30 minutes or more on installations with a lot of collections.
The Python SDK provides access from Python to the Arvados API and Keep. It also includes a number of command line tools for using and administering Arvados and Keep, and some conveniences for use in Crunch scripts; see "Crunch utility libraries":crunch-utility-libraries.html for details.
-h3. Installation
+h2. Installation
If you are logged in to an Arvados VM, the Python SDK should be installed.
-To use the Python SDK elsewhere, you can install from a distribution package, PyPI, or source.
+To use the Python SDK elsewhere, you can install from PyPI or a distribution package.
{% include 'notebox_begin' %}
The Python SDK requires Python 2.7.
{% include 'notebox_end' %}
-h4. Option 1: Install from distribution packages
+h3. Option 1: Install with pip
-First, "add the appropriate package repository for your distribution":{{ site.baseurl }}/install/install-manual-prerequisites.html#repos.
+This installation method is recommended to make the SDK available for use in your own Python programs. It can coexist with the system-wide installation method from a distribution package (option 2, below).
+
+Run @pip-2.7 install arvados-python-client@ in an appropriate installation environment, such as a virtualenv.
+
+If your version of @pip@ is 1.4 or newer, the @pip install@ command might give an error: "Could not find a version that satisfies the requirement arvados-python-client". If this happens, try @pip-2.7 install --pre arvados-python-client@.
+
+h3. Option 2: Install from a distribution package
-{% assign rh_version = "6" %}
-{% include 'note_python_sc' %}
+This installation method is recommended to make the CLI tools available system-wide. It can coexist with the installation method described in option 1, above.
+
+First, "add the appropriate package repository for your distribution":{{ site.baseurl }}/install/install-manual-prerequisites.html#repos.
On Red Hat-based systems:
</code></pre>
</notextile>
-h4. Option 2: Install with pip
+h3. Test installation
-Run @pip-2.7 install arvados-python-client@ in an appropriate installation environment, such as a virtualenv.
-
-If your version of @pip@ is 1.4 or newer, the @pip install@ command might give an error: "Could not find a version that satisfies the requirement arvados-python-client". If this happens, try @pip-2.7 install --pre arvados-python-client@.
-
-h4. Option 3: Install from source
+If the SDK is installed and your @ARVADOS_API_HOST@ and @ARVADOS_API_TOKEN@ environment variables are set up correctly (see "api-tokens":{{site.baseurl}}/user/reference/api-tokens.html for details), @import arvados@ should produce no errors.
-Install the @python-setuptools@ package from your distribution. Then run the following:
+If you installed with pip (option 1, above):
<notextile>
-<pre><code>~$ <span class="userinput">git clone https://github.com/curoverse/arvados.git</span>
-~$ <span class="userinput">cd arvados/sdk/python</span>
-~/arvados/sdk/python$ <span class="userinput">python2.7 setup.py install</span>
-</code></pre>
+<pre>~$ <code class="userinput">python</code>
+Python 2.7.4 (default, Sep 26 2013, 03:20:26)
+[GCC 4.7.3] on linux2
+Type "help", "copyright", "credits" or "license" for more information.
+>>> <code class="userinput">import arvados</code>
+>>> <code class="userinput">arvados.api('v1')</code>
+<apiclient.discovery.Resource object at 0x233bb50>
+</pre>
</notextile>
-You may optionally run the final installation command in a virtualenv, or with the @--user@ option.
+If you installed from a distribution package (option 2): the package includes a virtualenv, which means the correct Python environment needs to be loaded before the Arvados SDK can be imported. This can be done by activating the virtualenv first:
-h4. Test installation
+<notextile>
+<pre>~$ <code class="userinput">source /usr/share/python2.7/dist/python-arvados-python-client/bin/activate</code>
+(python-arvados-python-client) ~$ <code class="userinput">python</code>
+Python 2.7.4 (default, Sep 26 2013, 03:20:26)
+[GCC 4.7.3] on linux2
+Type "help", "copyright", "credits" or "license" for more information.
+>>> <code class="userinput">import arvados</code>
+>>> <code class="userinput">arvados.api('v1')</code>
+<apiclient.discovery.Resource object at 0x233bb50>
+</pre>
+</notextile>
-If the SDK is installed and your @ARVADOS_API_HOST@ and @ARVADOS_API_TOKEN@ environment variables are set up correctly (see "api-tokens":{{site.baseurl}}/user/reference/api-tokens.html for details), @import arvados@ should produce no errors:
+Alternatively, by using the Python executable from the virtualenv directly:
<notextile>
-<pre>~$ <code class="userinput">python2.7</code>
+<pre>~$ <code class="userinput">/usr/share/python2.7/dist/python-arvados-python-client/bin/python</code>
Python 2.7.4 (default, Sep 26 2013, 03:20:26)
[GCC 4.7.3] on linux2
Type "help", "copyright", "credits" or "license" for more information.
return (undef, undef); # More than one file in the Collection.
} else {
$filename = (split(/:/, $filedata, 3))[2];
+ $filename =~ s/\\([0-3][0-7][0-7])/chr(oct($1))/ge;
}
}
}
# Implement cwl-runner interface for submitting and running work on Arvados, using
# either the Crunch jobs API or Crunch containers API.
+from future.utils import viewitems
+from builtins import str
+
import argparse
import logging
import os
def arg_parser(): # type: () -> argparse.ArgumentParser
parser = argparse.ArgumentParser(description='Arvados executor for Common Workflow Language')
- parser.add_argument("--basedir", type=str,
+ parser.add_argument("--basedir",
help="Base directory used to resolve relative references in the input, default to directory of input object file or current directory (if inputs piped/provided on command line).")
- parser.add_argument("--outdir", type=str, default=os.path.abspath('.'),
+ parser.add_argument("--outdir", default=os.path.abspath('.'),
help="Output directory, default current directory")
parser.add_argument("--eval-timeout",
default=True, dest="enable_reuse",
help="Disable job or container reuse")
- parser.add_argument("--project-uuid", type=str, metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
- parser.add_argument("--output-name", type=str, help="Name to use for collection that stores the final output.", default=None)
- parser.add_argument("--output-tags", type=str, help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
+ parser.add_argument("--project-uuid", metavar="UUID", help="Project that will own the workflow jobs, if not provided, will go to home project.")
+ parser.add_argument("--output-name", help="Name to use for collection that stores the final output.", default=None)
+ parser.add_argument("--output-tags", help="Tags for the final output collection separated by commas, e.g., '--output-tags tag0,tag1,tag2'.", default=None)
parser.add_argument("--ignore-docker-for-reuse", action="store_true",
help="Ignore Docker image version when deciding whether to reuse past jobs.",
default=False)
exgroup.add_argument("--create-template", action="store_true", help="(Deprecated) synonym for --create-workflow.",
dest="create_workflow")
exgroup.add_argument("--create-workflow", action="store_true", help="Create an Arvados workflow (if using the 'containers' API) or pipeline template (if using the 'jobs' API). See --api.")
- exgroup.add_argument("--update-workflow", type=str, metavar="UUID", help="Update an existing Arvados workflow or pipeline template with the given UUID.")
+ exgroup.add_argument("--update-workflow", metavar="UUID", help="Update an existing Arvados workflow or pipeline template with the given UUID.")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--wait", action="store_true", help="After submitting workflow runner job, wait for completion.",
exgroup.add_argument("--no-log-timestamps", action="store_false", help="No timestamp on logging lines",
default=True, dest="log_timestamps")
- parser.add_argument("--api", type=str,
+ parser.add_argument("--api",
default=None, dest="work_api",
choices=("jobs", "containers"),
help="Select work submission API. Default is 'jobs' if that API is available, otherwise 'containers'.")
help="RAM (in MiB) required for the workflow runner job (default 1024)",
default=None)
- parser.add_argument("--submit-runner-image", type=str,
+ parser.add_argument("--submit-runner-image",
help="Docker image for workflow runner job, default arvados/jobs:%s" % __version__,
default=None)
default=False)
exgroup = parser.add_mutually_exclusive_group()
- exgroup.add_argument("--submit-request-uuid", type=str,
+ exgroup.add_argument("--submit-request-uuid",
default=None,
help="Update and commit to supplied container request instead of creating a new one (containers API only).",
metavar="UUID")
- exgroup.add_argument("--submit-runner-cluster", type=str,
+ exgroup.add_argument("--submit-runner-cluster",
help="Submit workflow runner to a remote cluster (containers API only)",
default=None,
metavar="CLUSTER_ID")
default=None,
help="Collection cache size (in MiB, default 256).")
- parser.add_argument("--name", type=str,
+ parser.add_argument("--name",
help="Name to use for workflow execution instance.",
default=None)
parser.add_argument("--enable-dev", action="store_true",
help="Enable loading and running development versions "
"of CWL spec.", default=False)
- parser.add_argument('--storage-classes', default="default", type=str,
+ parser.add_argument('--storage-classes', default="default",
help="Specify comma separated list of storage classes to be used when saving workflow output to Keep.")
parser.add_argument("--intermediate-output-ttl", type=int, metavar="N",
default=False, dest="trash_intermediate",
help="Do not trash intermediate outputs (default).")
- parser.add_argument("workflow", type=str, default=None, help="The workflow to execute")
+ parser.add_argument("workflow", default=None, help="The workflow to execute")
parser.add_argument("job_order", nargs=argparse.REMAINDER, help="The input object to the workflow.")
return parser
])
def exit_signal_handler(sigcode, frame):
- logger.error("Caught signal {}, exiting.".format(sigcode))
+ logger.error(str(u"Caught signal {}, exiting.").format(sigcode))
sys.exit(-sigcode)
def main(args, stdout, stderr, api_client=None, keep_client=None,
arvargs = parser.parse_args(args)
if len(arvargs.storage_classes.strip().split(',')) > 1:
- logger.error("Multiple storage classes are not supported currently.")
+ logger.error(str(u"Multiple storage classes are not supported currently."))
return 1
arvargs.use_container = True
else:
want_api = None
if want_api and arvargs.work_api and want_api != arvargs.work_api:
- logger.error('--update-workflow arg {!r} uses {!r} API, but --api={!r} specified'.format(
+ logger.error(str(u'--update-workflow arg {!r} uses {!r} API, but --api={!r} specified').format(
arvargs.update_workflow, want_api, arvargs.work_api))
return 1
arvargs.work_api = want_api
add_arv_hints()
- for key, val in cwltool.argparser.get_default_args().items():
+ for key, val in viewitems(cwltool.argparser.get_default_args()):
if not hasattr(arvargs, key):
setattr(arvargs, key, val)
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+
import logging
import json
import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
import time
import datetime
import ciso8601
generatemapper = NoFollowPathMapper(self.generatefiles["listing"], "", "",
separateDirs=False)
- sorteditems = sorted(generatemapper.items(), None, key=lambda n: n[1].target)
+ sorteditems = sorted(generatemapper.items(), key=lambda n: n[1].target)
logger.debug("generatemapper is %s", sorteditems)
}
else:
with vwd.open(p.target, "w") as n:
- n.write(p.resolved.encode("utf-8"))
+ n.write(p.resolved)
def keepemptydirs(p):
if isinstance(p, arvados.collection.RichCollectionBase):
#
# SPDX-License-Identifier: Apache-2.0
+from past.builtins import basestring
+from builtins import object
+from future.utils import viewitems
+
import logging
import re
import copy
api_client=self.arvrunner.api,
keep_client=self.arvrunner.keep_client,
num_retries=self.arvrunner.num_retries)
- log = logc.open(logc.keys()[0])
+ log = logc.open(list(logc.keys())[0])
dirs = {
"tmpdir": "/tmpdir",
"outdir": "/outdir",
find_or_create=self.enable_reuse
).execute(num_retries=self.arvrunner.num_retries)
- for k,v in job_spec["script_parameters"].items():
+ for k,v in viewitems(job_spec["script_parameters"]):
if v is False or v is None or isinstance(v, dict):
job_spec["script_parameters"][k] = {"value": v}
runtimeContext.submit_runner_cluster not in arvrunner.api._rootDesc["remoteHosts"] and
runtimeContext.submit_runner_cluster != arvrunner.api._rootDesc["uuidPrefix"]):
raise WorkflowException("Unknown or invalid cluster id '%s' known remote clusters are %s" % (runtimeContext.submit_runner_cluster,
- ", ".join(arvrunner.api._rootDesc["remoteHosts"].keys())))
+ ", ".join(list(arvrunner.api._rootDesc["remoteHosts"].keys()))))
def set_cluster_target(tool, arvrunner, builder, runtimeContext):
cluster_target_req = None
for field in ("hints", "requirements"):
#
# SPDX-License-Identifier: Apache-2.0
+from past.builtins import basestring
+from future.utils import viewitems
+
import os
import json
import copy
runtimeContext = runtimeContext.copy()
runtimeContext.toplevel = True # Preserve behavior for #13365
- builder = make_builder({shortname(k): v for k,v in joborder.items()}, self.hints, self.requirements, runtimeContext)
+ builder = make_builder({shortname(k): v for k,v in viewitems(joborder)}, self.hints, self.requirements, runtimeContext)
runtimeContext = set_cluster_target(self.tool, self.arvrunner, builder, runtimeContext)
return super(ArvadosWorkflowStep, self).job(joborder, output_callback, runtimeContext)
# tool. When the workflow completes, record the output object in an output
# collection for this runner job.
+from past.builtins import basestring
+from future.utils import viewitems
+
import arvados
import arvados_cwl
import arvados.collection
if "location" in v:
v["location"] = keeppath(v["location"])
- for k,v in job_order_object.items():
+ for k,v in viewitems(job_order_object):
if isinstance(v, basestring) and arvados.util.keep_locator_pattern.match(v):
job_order_object[k] = {
"class": "File",
#
# SPDX-License-Identifier: Apache-2.0
+from future.utils import viewvalues
+
import re
from cwltool.errors import WorkflowException
from collections import deque
containersapi = ("crunch-run.txt" in logcollection)
mergelogs = {}
- for log in logcollection.keys():
+ for log in list(logcollection):
if not containersapi or log in ("crunch-run.txt", "stdout.txt", "stderr.txt"):
logname = log[:-4]
logt = deque([], maxlen)
logt.append(l)
if containersapi:
- keys = mergelogs.keys()
+ keys = list(mergelogs)
loglines = []
while True:
earliest = None
loglines.append("%s %s %s" % (ts, earliest, msg))
loglines = loglines[-maxlen:]
else:
- loglines = mergelogs.values()[0]
+ loglines = mergelogs[list(mergelogs)[0]]
logtxt = "\n ".join(l.strip() for l in loglines)
logfunc("%s\n\n %s", header, logtxt)
#
# SPDX-License-Identifier: Apache-2.0
+from __future__ import division
+from builtins import next
+from builtins import object
+from builtins import str
+from future.utils import viewvalues
+
import argparse
import logging
import os
raise Exception("Unsupported API '%s', expected one of %s" % (arvargs.work_api, expected_api))
if self.work_api == "jobs":
- logger.warn("""
+ logger.warning("""
*******************************
Using the deprecated 'jobs' API.
# if running inside a container
if arvados_cwl.util.get_current_container(self.api, self.num_retries, logger):
root_logger = logging.getLogger('')
+
+ # Remove existing RuntimeStatusLoggingHandlers if they exist
+ handlers = [h for h in root_logger.handlers if not isinstance(h, RuntimeStatusLoggingHandler)]
+ root_logger.handlers = handlers
+
handler = RuntimeStatusLoggingHandler(self.runtime_status_update)
root_logger.addHandler(handler)
if self.stop_polling.is_set():
break
with self.workflow_eval_lock:
- keys = list(self.processes.keys())
+ keys = list(self.processes)
if not keys:
remain_wait = self.poll_interval
continue
try:
proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
except Exception as e:
- logger.warn("Error checking states on API server: %s", e)
+ logger.warning("Error checking states on API server: %s", e)
remain_wait = self.poll_interval
continue
try:
self.api.collections().delete(uuid=i).execute(num_retries=self.num_retries)
except:
- logger.warn("Failed to delete intermediate output: %s", sys.exc_info()[1], exc_info=(sys.exc_info()[1] if self.debug else False))
+ logger.warning("Failed to delete intermediate output: %s", sys.exc_info()[1], exc_info=(sys.exc_info()[1] if self.debug else False))
if sys.exc_info()[0] is KeyboardInterrupt or sys.exc_info()[0] is SystemExit:
break
"Option 'dockerOutputDirectory' must be an absolute path.")
if obj.get("class") == "http://commonwl.org/cwltool#Secrets" and self.work_api != "containers":
raise SourceLine(obj, "class", UnsupportedRequirement).makeError("Secrets not supported with --api=jobs")
- for v in obj.itervalues():
+ for v in viewvalues(obj):
self.check_features(v)
elif isinstance(obj, list):
for i,v in enumerate(obj):
logger.error("Creating CollectionReader for '%s' '%s': %s", k, v, e)
raise
except IOError as e:
- logger.warn("While preparing output collection: %s", e)
+ logger.warning("While preparing output collection: %s", e)
def rewrite(fileobj):
fileobj["location"] = generatemapper.mapper(fileobj["location"]).target
adjustFileObjs(outputObj, rewrite)
with final.open("cwl.output.json", "w") as f:
- json.dump(outputObj, f, sort_keys=True, indent=4, separators=(',',': '))
+ res = str(json.dumps(outputObj, sort_keys=True, indent=4, separators=(',',': '), ensure_ascii=False))
+ f.write(res)
final.save_new(name=name, owner_uuid=self.project_uuid, storage_classes=storage_classes, ensure_unique_name=True)
visited.add(m.group(1))
estimated_size[0] += int(m.group(2))
visit_class(job_order, ("File", "Directory"), estimate_collection_cache)
- runtimeContext.collection_cache_size = max(((estimated_size[0]*192) / (1024*1024))+1, 256)
+ runtimeContext.collection_cache_size = max(((estimated_size[0]*192) // (1024*1024))+1, 256)
self.collection_cache.set_cap(runtimeContext.collection_cache_size*1024*1024)
logger.info("Using collection cache size %s MiB", runtimeContext.collection_cache_size)
runtimeContext)
if runtimeContext.submit and not runtimeContext.wait:
- runnerjob = jobiter.next()
+ runnerjob = next(jobiter)
runnerjob.run(runtimeContext)
return (runnerjob.uuid, "success")
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
+from builtins import str
+from future.utils import viewvalues
+
import fnmatch
import os
import errno
-import urlparse
+import urllib.parse
import re
import logging
import threading
def cap_cache(self, required):
# ordered dict iterates from oldest to newest
- for pdh, v in self.collections.items():
+ for pdh, v in list(self.collections.items()):
available = self.cap - self.total
if available >= required or len(self.collections) < self.min_entries:
return
p = sp[0]
if p.startswith("keep:") and arvados.util.keep_locator_pattern.match(p[5:]):
pdh = p[5:]
- return (self.collection_cache.get(pdh), urlparse.unquote(sp[1]) if len(sp) == 2 else None)
+ return (self.collection_cache.get(pdh), urllib.parse.unquote(sp[1]) if len(sp) == 2 else None)
else:
return (None, path)
raise IOError(errno.ENOENT, "Directory '%s' in '%s' not found" % (rest, collection.portable_data_hash()))
if not isinstance(dir, arvados.collection.RichCollectionBase):
raise IOError(errno.ENOENT, "Path '%s' in '%s' is not a Directory" % (rest, collection.portable_data_hash()))
- return [abspath(l, fn) for l in dir.keys()]
+ return [abspath(l, fn) for l in list(dir.keys())]
else:
return super(CollectionFsAccess, self).listdir(fn)
if not url:
return base_url
- urlsp = urlparse.urlsplit(url)
+ urlsp = urllib.parse.urlsplit(url)
if urlsp.scheme or not base_url:
return url
- basesp = urlparse.urlsplit(base_url)
+ basesp = urllib.parse.urlsplit(base_url)
if basesp.scheme in ("keep", "arvwf"):
if not basesp.path:
raise IOError(errno.EINVAL, "Invalid Keep locator", base_url)
baseparts.pop()
path = "/".join([pdh] + baseparts + urlparts)
- return urlparse.urlunsplit((basesp.scheme, "", path, "", urlsp.fragment))
+ return urllib.parse.urlunsplit((basesp.scheme, "", path, "", urlsp.fragment))
return super(CollectionFetcher, self).urljoin(base_url, url)
def collectionResolver(api_client, document_loader, uri, num_retries=4):
if uri.startswith("keep:") or uri.startswith("arvwf:"):
- return uri
+ return str(uri)
if workflow_uuid_pattern.match(uri):
- return "arvwf:%s#main" % (uri)
+ return u"arvwf:%s#main" % (uri)
if pipeline_template_uuid_pattern.match(uri):
pt = api_client.pipeline_templates().get(uuid=uri).execute(num_retries=num_retries)
- return "keep:" + pt["components"].values()[0]["script_parameters"]["cwl:tool"]
+ return u"keep:" + viewvalues(pt["components"])[0]["script_parameters"]["cwl:tool"]
p = uri.split("/")
if arvados.util.keep_locator_pattern.match(p[0]):
- return "keep:%s" % (uri)
+ return u"keep:%s" % (uri)
if arvados.util.collection_uuid_pattern.match(p[0]):
- return "keep:%s%s" % (api_client.collections().
+ return u"keep:%s%s" % (api_client.collections().
get(uuid=p[0]).execute()["portable_data_hash"],
uri[len(p[0]):])
#
# SPDX-License-Identifier: Apache-2.0
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+
import requests
import email.utils
import time
import re
import arvados
import arvados.collection
-import urlparse
+import urllib.parse
import logging
import calendar
if fresh_cache(url, properties, now):
# Do nothing
cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
- return "keep:%s/%s" % (item["portable_data_hash"], cr.keys()[0])
+ return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])
if not changed(url, properties, now):
# ETag didn't change, same content, just update headers
api.collections().update(uuid=item["uuid"], body={"collection":{"properties": properties}}).execute()
cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
- return "keep:%s/%s" % (item["portable_data_hash"], cr.keys()[0])
+ return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])
properties = {}
req = requests.get(url, stream=True, allow_redirects=True)
else:
name = grp.group(4)
else:
- name = urlparse.urlparse(url).path.split("/")[-1]
+ name = urllib.parse.urlparse(url).path.split("/")[-1]
count = 0
start = time.time()
f.write(chunk)
loopnow = time.time()
if (loopnow - checkpoint) > 20:
- bps = (float(count)/float(loopnow - start))
+ bps = count / (loopnow - start)
if cl is not None:
logger.info("%2.1f%% complete, %3.2f MiB/s, %1.0f seconds left",
- float(count * 100) / float(cl),
- bps/(1024*1024),
- (cl-count)/bps)
+ ((count * 100) / cl),
+ (bps // (1024*1024)),
+ ((cl-count) // bps))
else:
- logger.info("%d downloaded, %3.2f MiB/s", count, bps/(1024*1024))
+ logger.info("%d downloaded, %3.2f MiB/s", count, (bps / (1024*1024)))
checkpoint = loopnow
c.save_new(name="Downloaded from %s" % url, owner_uuid=project_uuid, ensure_unique_name=True)
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from past.builtins import basestring
+from future.utils import viewitems
+
import re
import logging
import uuid
import os
-import urllib
+import urllib.request, urllib.parse, urllib.error
import arvados_cwl.util
import arvados.commands.run
src = src[:src.index("#")]
if isinstance(src, basestring) and ArvPathMapper.pdh_dirpath.match(src):
- self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.unquote(src[5:]), srcobj["class"], True)
+ self._pathmap[src] = MapperEnt(src, self.collection_pattern % urllib.parse.unquote(src[5:]), srcobj["class"], True)
debug = logger.isEnabledFor(logging.DEBUG)
if isinstance(st, arvados.commands.run.UploadFile):
uploadfiles.add((src, ab, st))
elif isinstance(st, arvados.commands.run.ArvFile):
- self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.unquote(st.fn[5:]), "File", True)
+ self._pathmap[src] = MapperEnt(st.fn, self.collection_pattern % urllib.parse.unquote(st.fn[5:]), "File", True)
else:
raise WorkflowException("Input file path '%s' is invalid" % st)
elif src.startswith("_:"):
remap.append((obj["location"], path + "/" + obj["basename"]))
elif obj["location"].startswith("_:") and "contents" in obj:
with c.open(path + "/" + obj["basename"], "w") as f:
- f.write(obj["contents"].encode("utf-8"))
+ f.write(obj["contents"])
remap.append((obj["location"], path + "/" + obj["basename"]))
else:
raise SourceLine(obj, "location", WorkflowException).makeError("Don't know what to do with '%s'" % obj["location"])
packed=False)
for src, ab, st in uploadfiles:
- self._pathmap[src] = MapperEnt(urllib.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
+ self._pathmap[src] = MapperEnt(urllib.parse.quote(st.fn, "/:+@"), self.collection_pattern % st.fn[5:],
"Directory" if os.path.isdir(ab) else "File", True)
for srcobj in referenced_files:
ab, "File", True)
if srcobj.get("secondaryFiles"):
ab = self.collection_pattern % c.portable_data_hash()
- self._pathmap["_:" + unicode(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
+ self._pathmap["_:" + str(uuid.uuid4())] = MapperEnt("keep:"+c.portable_data_hash(), ab, "Directory", True)
if remap:
for loc, sub in remap:
# with any secondary files.
self.visitlisting(referenced_files, self.stagedir, basedir)
- for path, (ab, tgt, type, staged) in self._pathmap.items():
+ for path, (ab, tgt, type, staged) in viewitems(self._pathmap):
if type in ("File", "Directory") and ab.startswith("keep:"):
self._pathmap[path] = MapperEnt("$(task.keep)/%s" % ab[5:], tgt, type, staged)
#
# SPDX-License-Identifier: Apache-2.0
+from builtins import object
+
import time
import uuid
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from future.utils import viewvalues, viewitems
+
import os
-import urlparse
+import urllib.parse
from functools import partial
import logging
import json
import subprocess32 as subprocess
from collections import namedtuple
-from StringIO import StringIO
+from io import StringIO
from schema_salad.sourceline import SourceLine, cmap
if "default" in d:
op(d)
else:
- for i in d.itervalues():
+ for i in viewvalues(d):
find_defaults(i, op)
def setSecondary(t, fileobj, discovered):
loaded = set()
def loadref(b, u):
joined = document_loader.fetcher.urljoin(b, u)
- defrg, _ = urlparse.urldefrag(joined)
+ defrg, _ = urllib.parse.urldefrag(joined)
if defrg not in loaded:
loaded.add(defrg)
# Use fetch_text to get raw file (before preprocessing).
visit_class(workflowobj, ("CommandLineTool", "Workflow"), discover_default_secondary_files)
- for d in list(discovered.keys()):
+ for d in list(discovered):
# Only interested in discovered secondaryFiles which are local
# files that need to be uploaded.
if d.startswith("file:"):
packed = pack(tool.doc_loader, tool.doc_loader.fetch(tool.tool["id"]),
tool.tool["id"], tool.metadata, rewrite_out=rewrites)
- rewrite_to_orig = {v: k for k,v in rewrites.items()}
+ rewrite_to_orig = {v: k for k,v in viewitems(rewrites)}
def visit(v, cur_id):
if isinstance(v, dict):
if "cwl.output.json" in outc:
with outc.open("cwl.output.json", "rb") as f:
if f.size() > 0:
- outputs = json.load(f)
+ outputs = json.loads(f.read().decode())
def keepify(fileobj):
path = fileobj["location"]
if not path.startswith("keep:"):
#
# SPDX-License-Identifier: Apache-2.0
-import Queue
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from builtins import object
+
+import queue
import threading
import logging
class TaskQueue(object):
def __init__(self, lock, thread_count):
self.thread_count = thread_count
- self.task_queue = Queue.Queue(maxsize=self.thread_count)
+ self.task_queue = queue.Queue(maxsize=self.thread_count)
self.task_queue_threads = []
self.lock = lock
self.in_flight = 0
self.error = None
- for r in xrange(0, self.thread_count):
+ for r in range(0, self.thread_count):
t = threading.Thread(target=self.task_queue_func)
self.task_queue_threads.append(t)
t.start()
return
self.task_queue.put(task, block=True, timeout=3)
return
- except Queue.Full:
+ except queue.Full:
pass
finally:
unlock.acquire()
# Drain queue
while not self.task_queue.empty():
self.task_queue.get(True, .1)
- except Queue.Empty:
+ except queue.Empty:
pass
def join(self):
current_container = api.containers().current().execute(num_retries=num_retries)
except ApiError as e:
# Status code 404 just means we're not running in a container.
- if e.resp.status != 404 and logger:
- logger.info("Getting current container: %s", e)
+ if e.resp.status != 404:
+ if logger:
+ logger.info("Getting current container: %s", e)
+ raise e
+
return current_container
#
# SPDX-License-Identifier: Apache-2.0
+from builtins import str
+from builtins import next
+
import subprocess
import time
import os
# SPDX-License-Identifier: Apache-2.0
case "$TARGET" in
- centos*)
- fpm_depends+=(glibc)
- ;;
debian8)
- fpm_depends+=(libc6 libgmp10)
+ fpm_depends+=(libgnutls-deb0-28 libcurl3-gnutls)
;;
debian* | ubuntu*)
- fpm_depends+=(libc6)
+ fpm_depends+=(libcurl3-gnutls libpython2.7)
;;
esac
+
+fpm_args+=(--conflicts=python-cwltool --conflicts=cwltool)
#
# SPDX-License-Identifier: Apache-2.0
+from builtins import str
+from builtins import next
+
from setuptools.command.egg_info import egg_info
import subprocess
import time
data_files=[
('share/doc/arvados-cwl-runner', ['LICENSE-2.0.txt', 'README.rst']),
],
+ classifiers=[
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ ],
test_suite='tests',
tests_require=[
'mock>=1.0',
leave_running=0
config=dev
tag="latest"
+pythoncmd=python
+suite=conformance
+runapi=containers
while test -n "$1" ; do
arg="$1"
tag=$2
shift ; shift
;;
+ --build)
+ build=1
+ shift
+ ;;
+ --pythoncmd)
+ pythoncmd=$2
+ shift ; shift
+ ;;
+ --suite)
+ suite=$2
+ shift ; shift
+ ;;
+ --api)
+ runapi=$2
+ shift ; shift
+ ;;
-h|--help)
- echo "$0 [--no-reset-container] [--leave-running] [--config dev|localdemo] [--tag docker_tag]"
+      echo "$0 [--no-reset-container] [--leave-running] [--config dev|localdemo] [--tag docker_tag] [--build] [--pythoncmd python(2|3)] [--suite (integration|conformance)] [--api (containers|jobs)]"
exit
;;
*)
. /usr/local/lib/arvbox/common.sh
+export PYCMD=$pythoncmd
+
if test $config = dev ; then
cd /usr/src/arvados/sdk/cwl
- python setup.py sdist
+ \$PYCMD setup.py sdist
pip_install \$(ls -r dist/arvados-cwl-runner-*.tar.gz | head -n1)
fi
-pip install cwltest
+set -x
+
+if [ "\$PYCMD" = "python3" ]; then
+ pip3 install cwltest
+else
+ pip install cwltest
+fi
mkdir -p /tmp/cwltest
cd /tmp/cwltest
export ARVADOS_API_TOKEN=\$(cat /var/lib/arvados/superuser_token)
-if test "$tag" = "latest" ; then
+if test -n "$build" ; then
+ /usr/src/arvados/build/build-dev-docker-jobs-image.sh
+elif test "$tag" = "latest" ; then
arv-keepdocker --pull arvados/jobs $tag
else
jobsimg=\$(curl https://versions.arvados.org/v1/commit/$tag | python -c "import json; import sys; sys.stdout.write(json.load(sys.stdin)['Versions']['Docker']['arvados/jobs'])")
chmod +x /tmp/cwltest/arv-cwl-containers
env
-exec ./run_test.sh RUNNER=/tmp/cwltest/arv-cwl-containers EXTRA=--compute-checksum $@
+if [[ "$suite" = "conformance" ]] ; then
+ exec ./run_test.sh RUNNER=/tmp/cwltest/arv-cwl-${runapi} EXTRA=--compute-checksum $@
+elif [[ "$suite" = "integration" ]] ; then
+ cd /usr/src/arvados/sdk/cwl/tests
+ exec ./arvados-tests.sh $@
+fi
EOF
CODE=$?
${
var samples = {};
var pattern = /^(.+)(_S[0-9]{1,3}_)(.+)$/;
+ inputs.dir.listing = inputs.dir.listing.sort(function(a, b) { return a.basename.localeCompare(b.basename); });
for (var i = 0; i < inputs.dir.listing.length; i++) {
var file = inputs.dir.listing[i];
var groups = file.basename.match(pattern);
"listing": samples[sampleid]});
});
return {"out": dirs};
- }
\ No newline at end of file
+ }
"size": 32
},
{
- "checksum": "sha1$83483b9c65d99967aecc794c14f9f4743314d186",
- "location": "sample2_S01_R3_001.fastq.txt",
+ "checksum": "sha1$5f3b4df1b0f7fdced751fc6079778600ad9fdb45",
+ "location": "sample2_S01_R1_001.fastq.txt",
"class": "File",
"size": 32
}
#
# SPDX-License-Identifier: Apache-2.0
+from __future__ import print_function
import arvados
import json
#
# SPDX-License-Identifier: Apache-2.0
+from __future__ import print_function
import arvados
import json
#
# SPDX-License-Identifier: Apache-2.0
-print "Hello world"
+from __future__ import print_function
+print("Hello world")
#
# SPDX-License-Identifier: Apache-2.0
+from builtins import object
+
import difflib
import json
import re
#
# SPDX-License-Identifier: Apache-2.0
+from builtins import str
+from builtins import object
+
import arvados_cwl
import arvados_cwl.context
import arvados_cwl.util
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+
import copy
-import cStringIO
+import io
import functools
import hashlib
import json
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import next
+
import functools
import json
import logging
import os
import unittest
import copy
-import StringIO
+import io
import arvados
import arvados_cwl
# sharing link on the job
runner.api.links().create.side_effect = ApiError(
mock.MagicMock(return_value={'status': 403}),
- 'Permission denied')
+ bytes(b'Permission denied'))
j.run(runtimeContext)
else:
assert not runner.api.links().create.called
runner.num_retries = 0
runner.ignore_docker_for_reuse = False
- reader().open.return_value = StringIO.StringIO(
- """2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
+ reader().keys.return_value = "log.txt"
+ reader().open.return_value = io.StringIO(
+ str(u"""2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.outdir)=/tmp/crunch-job-task-work/compute3.1/outdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
- """)
+ """))
api.collections().list().execute.side_effect = ({"items": []},
{"items": [{"manifest_text": "XYZ"}]},
{"items": []},
runner.project_uuid = "zzzzz-8i9sb-zzzzzzzzzzzzzzz"
runner.num_retries = 0
- reader().open.return_value = StringIO.StringIO(
- """2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
+ reader().keys.return_value = "log.txt"
+ reader().open.return_value = io.StringIO(
+ str(u"""2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.tmpdir)=/tmp/crunch-job-task-work/compute3.1/tmpdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.outdir)=/tmp/crunch-job-task-work/compute3.1/outdir
2016-11-02_23:12:18 c97qk-8i9sb-cryqw2blvzy4yaj 13358 0 stderr 2016/11/02 23:12:18 crunchrunner: $(task.keep)=/keep
- """)
+ """))
api.collections().list().execute.side_effect = (
{"items": [{"uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz2"}]},
arvtool.formatgraph = None
it = arvtool.job({}, mock.MagicMock(), runtimeContext)
- it.next().run(runtimeContext)
- it.next().run(runtimeContext)
+ next(it).run(runtimeContext)
+ next(it).run(runtimeContext)
with open("tests/wf/scatter2_subwf.cwl") as f:
subwf = StripYAMLComments(f.read())
mockc.open().__enter__().write.assert_has_calls([mock.call(subwf)])
mockc.open().__enter__().write.assert_has_calls([mock.call(
-'''{
+bytes(b'''{
"fileblub": {
"basename": "token.txt",
"class": "File",
"size": 0
},
"sleeptime": 5
-}''')])
+}'''))])
# The test passes no builder.resources
# Hence the default resources will apply: {'cores': 1, 'ram': 1024, 'outdirSize': 1024, 'tmpdirSize': 1024}
arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, loadingContext)
arvtool.formatgraph = None
it = arvtool.job({}, mock.MagicMock(), runtimeContext)
- it.next().run(runtimeContext)
- it.next().run(runtimeContext)
+
+ next(it).run(runtimeContext)
+ next(it).run(runtimeContext)
with open("tests/wf/echo-subwf.cwl") as f:
subwf = StripYAMLComments(f.read())
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+
import functools
import json
import logging
import mock
import os
-import StringIO
+import io
import unittest
import arvados
final_uuid = final.manifest_locator()
num_retries = runner.num_retries
- cwlout = StringIO.StringIO()
+ cwlout = io.StringIO()
openmock = mock.MagicMock()
final.open.return_value = openmock
openmock.__enter__.return_value = cwlout
#
# SPDX-License-Identifier: Apache-2.0
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
+from builtins import str
+from future.utils import viewvalues
+
import copy
-import cStringIO
+import io
import functools
import hashlib
import json
import sys
import unittest
+from io import BytesIO, StringIO
+
import arvados
import arvados.collection
import arvados_cwl
@mock.patch("arvados.keep.KeepClient")
@mock.patch("arvados.events.subscribe")
def wrapped(self, events, keep_client1, keep_client2, keepdocker, *args, **kwargs):
- class Stubs:
+ class Stubs(object):
pass
stubs = Stubs()
stubs.events = events
stubs.fake_user_uuid = "zzzzz-tpzed-zzzzzzzzzzzzzzz"
stubs.fake_container_uuid = "zzzzz-dz642-zzzzzzzzzzzzzzz"
+ if sys.version_info[0] < 3:
+ stubs.capture_stdout = BytesIO()
+ else:
+ stubs.capture_stdout = StringIO()
+
stubs.api = mock.MagicMock()
stubs.api._rootDesc = get_rootDesc()
return self.exe
def collection_createstub(created_collections, body, ensure_unique_name=None):
- mt = body["manifest_text"]
+ mt = body["manifest_text"].encode('utf-8')
uuid = "zzzzz-4zz18-zzzzzzzzzzzzzx%d" % len(created_collections)
pdh = "%s+%i" % (hashlib.md5(mt).hexdigest(), len(mt))
created_collections[uuid] = {
"uuid": uuid,
"portable_data_hash": pdh,
- "manifest_text": mt
+ "manifest_text": mt.decode('utf-8')
}
return CollectionExecute(created_collections[uuid])
def collection_getstub(created_collections, uuid):
- for v in created_collections.itervalues():
+ for v in viewvalues(created_collections):
if uuid in (v["uuid"], v["portable_data_hash"]):
return CollectionExecute(v)
return '999999999999999999999999999999d4+99'
arvdock.side_effect = get_image
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=jobs", "--debug",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.collections().create.assert_has_calls([
mock.call(body=JsonDiffMatcher({
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@mock.patch("time.sleep")
@stubs
def test_submit_no_reuse(self, stubs, tm):
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=jobs", "--debug", "--disable-reuse",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
expect_pipeline["components"]["cwl-runner"]["script_parameters"]["arv:enable_reuse"] = {"value": False}
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_error_when_multiple_storage_classes_specified(self, stubs):
@mock.patch("time.sleep")
@stubs
def test_submit_on_error(self, stubs, tm):
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=jobs", "--debug", "--on-error=stop",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
expect_pipeline["components"]["cwl-runner"]["script_parameters"]["arv:on_error"] = "stop"
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@mock.patch("time.sleep")
@stubs
def test_submit_runner_ram(self, stubs, tm):
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--debug", "--submit-runner-ram=2048",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
expect_pipeline["components"]["cwl-runner"]["runtime_constraints"]["min_ram_mb_per_node"] = 2048
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@mock.patch("time.sleep")
@stubs
def test_submit_invalid_runner_ram(self, stubs, tm):
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--debug", "--submit-runner-ram=-2048",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 1)
@mock.patch("time.sleep")
def test_submit_output_name(self, stubs, tm):
output_name = "test_output_name"
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--debug", "--output-name", output_name,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
expect_pipeline["components"]["cwl-runner"]["script_parameters"]["arv:output_name"] = output_name
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@mock.patch("time.sleep")
@stubs
def test_submit_pipeline_name(self, stubs, tm):
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--debug", "--name=hello job 123",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
@mock.patch("time.sleep")
def test_submit_output_tags(self, stubs, tm):
output_tags = "tag0,tag1,tag2"
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--debug", "--output-tags", output_tags,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
@mock.patch("time.sleep")
body=JsonDiffMatcher(expect_pipeline))
@stubs
- def test_submit_container(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ def test_submit_container(self, stubs):
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
stubs.api.collections().create.assert_has_calls([
mock.call(body=JsonDiffMatcher({
expect_container = copy.deepcopy(stubs.expect_container_spec)
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_no_reuse(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--disable-reuse",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--disable-reuse",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = [
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
- stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(stubs.capture_stdout.getvalue(),
+ stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_reuse_disabled_by_workflow(self, stubs):
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"tests/wf/submit_wf_no_reuse.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 0)
expect_container = copy.deepcopy(stubs.expect_container_spec)
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
@stubs
def test_submit_container_on_error(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--on-error=stop",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
-
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--on-error=stop",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_output_name(self, stubs):
output_name = "test_output_name"
-
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--output-name", output_name,
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--output-name", output_name,
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
- def test_submit_storage_classes(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--debug", "--submit", "--no-wait", "--api=containers", "--storage-classes=foo",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ def test_submit_storage_classes(self, stubs):
+ exited = arvados_cwl.main(
+ ["--debug", "--submit", "--no-wait", "--api=containers", "--storage-classes=foo",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@mock.patch("arvados_cwl.task_queue.TaskQueue")
@mock.patch("arvados_cwl.arvworkflow.ArvadosWorkflow.job")
return []
job.side_effect = set_final_output
- try:
- exited = arvados_cwl.main(
- ["--debug", "--local", "--storage-classes=foo",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- sys.stdin, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--debug", "--local", "--storage-classes=foo",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ sys.stdin, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
make_output.assert_called_with(u'Output of submit_wf.cwl', ['foo'], '', 'zzzzz-4zz18-zzzzzzzzzzzzzzzz')
+ self.assertEqual(exited, 0)
@mock.patch("arvados_cwl.task_queue.TaskQueue")
@mock.patch("arvados_cwl.arvworkflow.ArvadosWorkflow.job")
return []
job.side_effect = set_final_output
- try:
- exited = arvados_cwl.main(
- ["--debug", "--local",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- sys.stdin, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--debug", "--local",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ sys.stdin, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
make_output.assert_called_with(u'Output of submit_wf.cwl', ['default'], '', 'zzzzz-4zz18-zzzzzzzzzzzzzzzz')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_output_ttl(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--intermediate-output-ttl", "3600",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--intermediate-output-ttl", "3600",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_trash_intermediate(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--trash-intermediate",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--trash-intermediate",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_output_tags(self, stubs):
output_tags = "tag0,tag1,tag2"
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--output-tags", output_tags,
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--output-tags", output_tags,
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_runner_ram(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-ram=2048",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-ram=2048",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["runtime_constraints"]["ram"] = (2048+256)*1024*1024
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@mock.patch("arvados.collection.CollectionReader")
@mock.patch("time.sleep")
@stubs
def test_submit_file_keepref(self, stubs, tm, collectionReader):
- capture_stdout = cStringIO.StringIO()
collectionReader().find.return_value = arvados.arvfile.ArvadosFile(mock.MagicMock(), "blorp.txt")
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"tests/wf/submit_keepref_wf.cwl"],
- capture_stdout, sys.stderr, api_client=stubs.api)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
-
@mock.patch("arvados.collection.CollectionReader")
@mock.patch("time.sleep")
@stubs
def test_submit_keepref(self, stubs, tm, reader):
- capture_stdout = cStringIO.StringIO()
-
with open("tests/wf/expect_arvworkflow.cwl") as f:
reader().open().__enter__().read.return_value = f.read()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"keep:99999999999999999999999999999994+99/expect_arvworkflow.cwl#main", "-x", "XxX"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_container = {
'priority': 500,
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@mock.patch("arvados.collection.CollectionReader")
@mock.patch("time.sleep")
@stubs
def test_submit_jobs_keepref(self, stubs, tm, reader):
- capture_stdout = cStringIO.StringIO()
-
with open("tests/wf/expect_arvworkflow.cwl") as f:
reader().open().__enter__().read.return_value = f.read()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=jobs", "--debug",
"keep:99999999999999999999999999999994+99/expect_arvworkflow.cwl#main", "-x", "XxX"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
expect_pipeline["components"]["cwl-runner"]["script_parameters"]["x"] = "XxX"
expect_pipeline["name"] = "expect_arvworkflow.cwl#main"
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
+ self.assertEqual(exited, 0)
@mock.patch("time.sleep")
@stubs
def test_submit_arvworkflow(self, stubs, tm):
- capture_stdout = cStringIO.StringIO()
-
with open("tests/wf/expect_arvworkflow.cwl") as f:
stubs.api.workflows().get().execute.return_value = {"definition": f.read(), "name": "a test workflow"}
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"962eh-7fd4e-gkbzl62qqtfig37", "-x", "XxX"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_container = {
'priority': 500,
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_name(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--name=hello container 123",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--name=hello container 123",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["name"] = "hello container 123"
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_submit_missing_input(self, stubs):
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 0)
- capture_stdout = cStringIO.StringIO()
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"tests/wf/submit_wf.cwl", "tests/submit_test_job_missing.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 1)
-
@stubs
def test_submit_container_project(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--project-uuid="+project_uuid,
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--project-uuid="+project_uuid,
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["owner_uuid"] = project_uuid
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_eval_timeout(self, stubs):
- project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--eval-timeout=60",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--eval-timeout=60",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_collection_cache(self, stubs):
- project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--collection-cache-size=500",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--collection-cache-size=500",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_thread_count(self, stubs):
- project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--thread-count=20",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--thread-count=20",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_submit_job_runner_image(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=jobs", "--debug", "--submit-runner-image=arvados/jobs:123",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=jobs", "--debug", "--submit-runner-image=arvados/jobs:123",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
stubs.expect_pipeline_instance["components"]["cwl-runner"]["runtime_constraints"]["docker_image"] = "999999999999999999999999999999d5+99"
expect_pipeline = copy.deepcopy(stubs.expect_pipeline_instance)
stubs.api.pipeline_instances().create.assert_called_with(
body=JsonDiffMatcher(expect_pipeline))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_runner_image(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-image=arvados/jobs:123",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-image=arvados/jobs:123",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
stubs.expect_container_spec["container_image"] = "999999999999999999999999999999d5+99"
expect_container = copy.deepcopy(stubs.expect_container_spec)
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_priority(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--priority=669",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--priority=669",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
stubs.expect_container_spec["priority"] = 669
expect_container = copy.deepcopy(stubs.expect_container_spec)
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_submit_wf_runner_resources(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug",
- "tests/wf/submit_wf_runner_resources.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/submit_wf_runner_resources.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container["runtime_constraints"] = {
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
def tearDown(self):
arvados_cwl.arvdocker.arv_docker_clear_cache()
@stubs
def test_submit_secrets(self, stubs):
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug",
- "tests/wf/secret_wf.cwl", "tests/secret_test_job.yml"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
-
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug",
+ "tests/wf/secret_wf.cwl", "tests/secret_test_job.yml"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = {
"command": [
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_request_uuid(self, stubs):
"state": "Queued"
}
- capture_stdout = cStringIO.StringIO()
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-request-uuid=zzzzz-xvhdp-yyyyyyyyyyyyyyy",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-request-uuid=zzzzz-xvhdp-yyyyyyyyyyyyyyy",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
stubs.api.container_requests().update.assert_called_with(
uuid="zzzzz-xvhdp-yyyyyyyyyyyyyyy", body=JsonDiffMatcher(stubs.expect_container_spec), cluster_id="zzzzz")
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_submit_container_cluster_id(self, stubs):
- capture_stdout = cStringIO.StringIO()
stubs.api._rootDesc["remoteHosts"]["zbbbb"] = "123"
- try:
- exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-cluster=zbbbb",
- "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- self.assertEqual(exited, 0)
- except:
- logging.exception("")
+
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-cluster=zbbbb",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
expect_container = copy.deepcopy(stubs.expect_container_spec)
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container), cluster_id="zbbbb")
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_submit_validate_cluster_id(self, stubs):
- capture_stdout = cStringIO.StringIO()
stubs.api._rootDesc["remoteHosts"]["zbbbb"] = "123"
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-cluster=zcccc",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 1)
def test_create(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--create-workflow", "--debug",
"--api=jobs",
"--project-uuid", project_uuid,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.pipeline_instances().create.refute_called()
stubs.api.jobs().create.refute_called()
stubs.api.pipeline_templates().create.assert_called_with(
body=JsonDiffMatcher(expect_template), ensure_unique_name=True)
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_template_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_create_name(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--create-workflow", "--debug",
"--project-uuid", project_uuid,
"--api=jobs",
"--name", "testing 123",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.pipeline_instances().create.refute_called()
stubs.api.jobs().create.refute_called()
stubs.api.pipeline_templates().create.assert_called_with(
body=JsonDiffMatcher(expect_template), ensure_unique_name=True)
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_pipeline_template_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_update_name(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--update-workflow", self.existing_template_uuid,
"--debug",
"--api=jobs",
"--name", "testing 123",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.pipeline_instances().create.refute_called()
stubs.api.jobs().create.refute_called()
stubs.api.pipeline_templates().update.assert_called_with(
body=JsonDiffMatcher(expect_template), uuid=self.existing_template_uuid)
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
self.existing_template_uuid + '\n')
-
+ self.assertEqual(exited, 0)
+
class TestCreateWorkflow(unittest.TestCase):
existing_workflow_uuid = "zzzzz-7fd4e-validworkfloyml"
def test_create(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--create-workflow", "--debug",
"--api=containers",
"--project-uuid", project_uuid,
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.pipeline_templates().create.refute_called()
stubs.api.container_requests().create.refute_called()
stubs.api.workflows().create.assert_called_with(
body=JsonDiffMatcher(body))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_workflow_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_create_name(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--create-workflow", "--debug",
"--api=containers",
"--project-uuid", project_uuid,
"--name", "testing 123",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.pipeline_templates().create.refute_called()
stubs.api.container_requests().create.refute_called()
stubs.api.workflows().create.assert_called_with(
body=JsonDiffMatcher(body))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_workflow_uuid + '\n')
+ self.assertEqual(exited, 0)
@stubs
def test_incompatible_api(self, stubs):
- capture_stderr = cStringIO.StringIO()
- logging.getLogger('arvados.cwl-runner').addHandler(
- logging.StreamHandler(capture_stderr))
+ capture_stderr = io.StringIO()
+ acr_logger = logging.getLogger('arvados.cwl-runner')
+ stderr_logger = logging.StreamHandler(capture_stderr)
+ acr_logger.addHandler(stderr_logger)
exited = arvados_cwl.main(
["--update-workflow", self.existing_workflow_uuid,
self.assertRegexpMatches(
capture_stderr.getvalue(),
"--update-workflow arg '{}' uses 'containers' API, but --api='jobs' specified".format(self.existing_workflow_uuid))
+ acr_logger.removeHandler(stderr_logger)
@stubs
def test_update(self, stubs):
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--update-workflow", self.existing_workflow_uuid,
"--debug",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
body = {
"workflow": {
stubs.api.workflows().update.assert_called_with(
uuid=self.existing_workflow_uuid,
body=JsonDiffMatcher(body))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
self.existing_workflow_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_update_name(self, stubs):
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--update-workflow", self.existing_workflow_uuid,
"--debug", "--name", "testing 123",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
body = {
"workflow": {
stubs.api.workflows().update.assert_called_with(
uuid=self.existing_workflow_uuid,
body=JsonDiffMatcher(body))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
self.existing_workflow_uuid + '\n')
-
+ self.assertEqual(exited, 0)
@stubs
def test_create_collection_per_tool(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
- capture_stdout = cStringIO.StringIO()
-
exited = arvados_cwl.main(
["--create-workflow", "--debug",
"--api=containers",
"--project-uuid", project_uuid,
"tests/collection_per_tool/collection_per_tool.cwl"],
- capture_stdout, sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
toolfile = "tests/collection_per_tool/collection_per_tool_packed.cwl"
expect_workflow = StripYAMLComments(open(toolfile).read())
stubs.api.workflows().create.assert_called_with(
body=JsonDiffMatcher(body))
- self.assertEqual(capture_stdout.getvalue(),
+ self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_workflow_uuid + '\n')
+ self.assertEqual(exited, 0)
class TestTemplateInputs(unittest.TestCase):
expect_template = {
exited = arvados_cwl.main(
["--create-template",
"tests/wf/inputs_test.cwl", "tests/order/empty_order.json"],
- cStringIO.StringIO(), sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.api.pipeline_templates().create.assert_called_with(
body=JsonDiffMatcher(self.expect_template), ensure_unique_name=True)
+ self.assertEqual(exited, 0)
+
@stubs
def test_inputs(self, stubs):
exited = arvados_cwl.main(
["--create-template",
"tests/wf/inputs_test.cwl", "tests/order/inputs_test_order.json"],
- cStringIO.StringIO(), sys.stderr, api_client=stubs.api)
- self.assertEqual(exited, 0)
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api)
expect_template = copy.deepcopy(self.expect_template)
params = expect_template[
stubs.api.pipeline_templates().create.assert_called_with(
body=JsonDiffMatcher(expect_template), ensure_unique_name=True)
+ self.assertEqual(exited, 0)
cf = CollectionFetcher({}, None)
- self.assertEquals("keep:99999999999999999999999999999991+99/hw.py",
+ self.assertEqual("keep:99999999999999999999999999999991+99/hw.py",
cf.urljoin("keep:99999999999999999999999999999991+99", "hw.py"))
- self.assertEquals("keep:99999999999999999999999999999991+99/hw.py",
+ self.assertEqual("keep:99999999999999999999999999999991+99/hw.py",
cf.urljoin("keep:99999999999999999999999999999991+99/", "hw.py"))
- self.assertEquals("keep:99999999999999999999999999999991+99/hw.py#main",
+ self.assertEqual("keep:99999999999999999999999999999991+99/hw.py#main",
cf.urljoin("keep:99999999999999999999999999999991+99", "hw.py#main"))
- self.assertEquals("keep:99999999999999999999999999999991+99/hw.py#main",
+ self.assertEqual("keep:99999999999999999999999999999991+99/hw.py#main",
cf.urljoin("keep:99999999999999999999999999999991+99/hw.py", "#main"))
- self.assertEquals("keep:99999999999999999999999999999991+99/dir/hw.py#main",
+ self.assertEqual("keep:99999999999999999999999999999991+99/dir/hw.py#main",
cf.urljoin("keep:99999999999999999999999999999991+99/dir/hw.py", "#main"))
- self.assertEquals("keep:99999999999999999999999999999991+99/dir/wh.py",
+ self.assertEqual("keep:99999999999999999999999999999991+99/dir/wh.py",
cf.urljoin("keep:99999999999999999999999999999991+99/dir/hw.py", "wh.py"))
- self.assertEquals("keep:99999999999999999999999999999991+99/wh.py",
+ self.assertEqual("keep:99999999999999999999999999999991+99/wh.py",
cf.urljoin("keep:99999999999999999999999999999991+99/dir/hw.py", "/wh.py"))
- self.assertEquals("keep:99999999999999999999999999999991+99/wh.py#main",
+ self.assertEqual("keep:99999999999999999999999999999991+99/wh.py#main",
cf.urljoin("keep:99999999999999999999999999999991+99/dir/hw.py", "/wh.py#main"))
- self.assertEquals("keep:99999999999999999999999999999991+99/wh.py",
+ self.assertEqual("keep:99999999999999999999999999999991+99/wh.py",
cf.urljoin("keep:99999999999999999999999999999991+99/hw.py#main", "wh.py"))
- self.assertEquals("keep:99999999999999999999999999999992+99",
+ self.assertEqual("keep:99999999999999999999999999999992+99",
cf.urljoin("keep:99999999999999999999999999999991+99", "keep:99999999999999999999999999999992+99"))
- self.assertEquals("keep:99999999999999999999999999999991+99/dir/wh.py",
+ self.assertEqual("keep:99999999999999999999999999999991+99/dir/wh.py",
cf.urljoin("keep:99999999999999999999999999999991+99/dir/", "wh.py"))
def test_resolver(self):
#
# SPDX-License-Identifier: Apache-2.0
+from builtins import bytes
+
import unittest
import mock
import datetime
def test_get_current_container_error(self):
api = mock.MagicMock()
- api.containers().current().execute.side_effect = ApiError(httplib2.Response({"status": 300}), "")
+ api.containers().current().execute.side_effect = ApiError(httplib2.Response({"status": 300}), bytes(b""))
+ logger = mock.MagicMock()
+
+ with self.assertRaises(ApiError):
+ get_current_container(api, num_retries=0, logger=logger)
+
+ def test_get_current_container_404_error(self):
+ api = mock.MagicMock()
+ api.containers().current().execute.side_effect = ApiError(httplib2.Response({"status": 404}), bytes(b""))
logger = mock.MagicMock()
- self.assertRaises(ApiError, get_current_container(api, num_retries=0, logger=logger))
+ current_container = get_current_container(api, num_retries=0, logger=logger)
+ self.assertEqual(current_container, None)
\ No newline at end of file
#
# SPDX-License-Identifier: Apache-2.0
+from __future__ import print_function
+from __future__ import division
+
import arvados
import sys
import os
if "JOB_UUID" in os.environ:
requested = arvados.api().jobs().get(uuid=os.environ["JOB_UUID"]).execute()["runtime_constraints"]["min_ram_mb_per_node"]
else:
- requested = arvados.api().containers().current().execute()["runtime_constraints"]["ram"]/(1024*1024)
+ requested = arvados.api().containers().current().execute()["runtime_constraints"]["ram"] // (1024*1024)
print("Requested %d expected %d" % (requested, int(sys.argv[1])))
ENV DEBIAN_FRONTEND noninteractive
-RUN apt-get update -q && apt-get install -qy git python-pip python-virtualenv python-dev libcurl4-gnutls-dev libgnutls28-dev nodejs python-pyasn1-modules
+ARG pythoncmd=python
-RUN pip install -U setuptools six requests
+RUN apt-get update -q && apt-get install -qy --no-install-recommends \
+ git ${pythoncmd}-pip ${pythoncmd}-virtualenv ${pythoncmd}-dev libcurl4-gnutls-dev \
+ libgnutls28-dev nodejs ${pythoncmd}-pyasn1-modules build-essential
+
+RUN if [ "$pythoncmd" = "python3" ]; then \
+ pip3 install -U setuptools six requests ; \
+ else \
+ pip install -U setuptools six requests ; \
+ fi
ARG sdk
ARG runner
ADD cwl/cwltool_dist/$cwltool /tmp/
ADD cwl/dist/$runner /tmp/
-RUN cd /tmp/arvados-python-client-* && python setup.py install
-RUN if test -d /tmp/schema-salad-* ; then cd /tmp/schema-salad-* && python setup.py install ; fi
-RUN if test -d /tmp/cwltool-* ; then cd /tmp/cwltool-* && python setup.py install ; fi
-RUN cd /tmp/arvados-cwl-runner-* && python setup.py install
+RUN cd /tmp/arvados-python-client-* && $pythoncmd setup.py install
+RUN if test -d /tmp/schema-salad-* ; then cd /tmp/schema-salad-* && $pythoncmd setup.py install ; fi
+RUN if test -d /tmp/cwltool-* ; then cd /tmp/cwltool-* && $pythoncmd setup.py install ; fi
+RUN cd /tmp/arvados-cwl-runner-* && $pythoncmd setup.py install
# Install dependencies and set up system.
RUN /usr/sbin/adduser --disabled-password \
case "$TARGET" in
debian* | ubuntu*)
- fpm_depends+=('libpam-python')
+ fpm_depends+=('libpam-python' 'libcurl3-gnutls')
+ ;;
+ centos*)
+ fpm_depends+=('python-pam')
;;
*)
echo >&2 "ERROR: $PACKAGE: pam_python.so dependency unavailable in $TARGET."
#
# SPDX-License-Identifier: Apache-2.0
+import sys
+sys.path.append('/usr/share/python2.7/dist/libpam-arvados/lib/python2.7/site-packages')
from arvados_pam import *
next(list_output) # Ignore the header line
for line in list_output:
words = line.split()
+ words = [word.decode() for word in words]
size_index = len(words) - 2
repo, tag, imageid = words[:3]
ctime = ' '.join(words[3:size_index])
else:
json_filename = raw_image_hash + '/json'
json_file = image_tar.extractfile(image_tar.getmember(json_filename))
- image_metadata = json.load(json_file)
+ image_metadata = json.loads(json_file.read().decode())
json_file.close()
image_tar.close()
link_base = {'head_uuid': coll_uuid, 'properties': {}}
# SPDX-License-Identifier: Apache-2.0
case "$TARGET" in
- ubuntu1204)
- fpm_depends+=('libfuse2 = 2.9.2-5')
+ debian* | ubuntu*)
+ fpm_depends+=(libcurl3-gnutls)
;;
esac
'ws4py >=0.4.2',
'subprocess32 >=3.5.1',
],
+ classifiers=[
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 3',
+ ],
test_suite='tests',
tests_require=['pbr<1.7.0', 'mock>=1.0', 'PyYAML'],
zip_safe=False
--- /dev/null
+.. Copyright (C) The Arvados Authors. All rights reserved.
+..
+.. SPDX-License-Identifier: Apache-2.0
+
+Arvados Docker Cleaner.
# This unwieldy ExecStart command detects at runtime whether
# arvados-docker-cleaner is installed with the Python 3.3 Software
# Collection, and if so, invokes it with the "scl" wrapper.
-ExecStart=/bin/sh -c 'if [ -e /opt/rh/python33/root/bin/arvados-docker-cleaner ]; then exec scl enable python33 arvados-docker-cleaner; else exec arvados-docker-cleaner; fi'
+ExecStart=/bin/sh -c 'if [ -e /opt/rh/rh-python35/root/bin/arvados-docker-cleaner ]; then exec scl enable rh-python35 arvados-docker-cleaner; else exec arvados-docker-cleaner; fi'
# systemd<=219 (centos:7, debian:8, ubuntu:trusty) obeys StartLimitInterval in the [Service] section
StartLimitInterval=0
--- /dev/null
+#!/usr/bin/env python
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+from __future__ import absolute_import, print_function
+
+from arvados_docker.cleaner import main
+main()
# SPDX-License-Identifier: AGPL-3.0
fpm_depends+=(fuse)
+
+case "$TARGET" in
+ centos*)
+ fpm_depends+=(fuse-libs)
+ ;;
+ debian* | ubuntu*)
+ fpm_depends+=(libcurl3-gnutls libpython2.7)
+ ;;
+esac
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+case "$TARGET" in
+ debian* | ubuntu*)
+ fpm_depends+=(libcurl3-gnutls libpython2.7)
+ ;;
+esac
;;
sh*)
- exec docker exec -ti -e LINES=$(tput lines) -e COLUMNS=$(tput cols) -e TERM=$TERM -e GEM_HOME=/var/lib/gems $ARVBOX_CONTAINER /bin/bash
+ exec docker exec -ti \
+ -e LINES=$(tput lines) \
+ -e COLUMNS=$(tput cols) \
+ -e TERM=$TERM \
+ -e GEM_HOME=/var/lib/gems \
+ $ARVBOX_CONTAINER /bin/bash
;;
pipe)
apt-transport-https ca-certificates slurm-wlm \
linkchecker python3-virtualenv python-virtualenv xvfb iceweasel \
libgnutls28-dev python3-dev vim cadaver cython gnupg dirmngr \
- libsecret-1-dev r-base r-cran-testthat libxml2-dev pandoc && \
+ libsecret-1-dev r-base r-cran-testthat libxml2-dev pandoc \
+ python3-setuptools python3-pip && \
apt-get clean
ENV RUBYVERSION_MINOR 2.3
fi
}
+PYCMD=""
pip_install() {
pushd /var/lib/pip
for p in $(ls http*.tar.gz) $(ls http*.tar.bz2) $(ls http*.whl) $(ls http*.zip) ; do
done
popd
- if ! pip install --no-index --find-links /var/lib/pip $1 ; then
- pip install $1
+ if [ "$PYCMD" = "python3" ]; then
+ if ! pip3 install --no-index --find-links /var/lib/pip $1 ; then
+ pip3 install $1
+ fi
+ else
+ if ! pip install --no-index --find-links /var/lib/pip $1 ; then
+ pip install $1
+ fi
fi
}
fi
if test ! -s /var/lib/arvados/server-cert-${localip}.pem ; then
+
+ if [[ $localip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
+ san=IP:$localip
+ else
+ san=DNS:$localip
+ fi
+
# req signing request sub-command
# -new new certificate request
# -nodes "no des" don't encrypt key
-reqexts x509_ext \
-extensions x509_ext \
-config <(cat /etc/ssl/openssl.cnf \
- <(printf "\n[x509_ext]\nkeyUsage=critical,digitalSignature,keyEncipherment\nsubjectAltName=DNS:localhost,IP:$localip")) \
+ <(printf "\n[x509_ext]\nkeyUsage=critical,digitalSignature,keyEncipherment\nsubjectAltName=DNS:localhost,$san")) \
-out /var/lib/arvados/server-cert-${localip}.csr \
-keyout /var/lib/arvados/server-cert-${localip}.key \
-days 365
-out /var/lib/arvados/server-cert-${localip}.pem \
-set_serial $RANDOM$RANDOM \
-extfile <(cat /etc/ssl/openssl.cnf \
- <(printf "\n[x509_ext]\nkeyUsage=critical,digitalSignature,keyEncipherment\nsubjectAltName=DNS:localhost,IP:$localip")) \
+ <(printf "\n[x509_ext]\nkeyUsage=critical,digitalSignature,keyEncipherment\nsubjectAltName=DNS:localhost,$san")) \
-extensions x509_ext
chown arvbox:arvbox /var/lib/arvados/server-cert-${localip}.*
cp /var/lib/arvados/root-cert.pem /usr/local/share/ca-certificates/arvados-testing-cert.crt
update-ca-certificates
-sv stop certificate
\ No newline at end of file
+sv stop certificate
--- /dev/null
+.. Copyright (C) The Arvados Authors. All rights reserved.
+..
+.. SPDX-License-Identifier: Apache-2.0
+
+Arvados Crunchstat Summary.
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: AGPL-3.0
+
+case "$TARGET" in
+ debian* | ubuntu*)
+ fpm_depends+=(libcurl3-gnutls)
+ ;;
+esac
setup(name='crunchstat_summary',
version=version,
- description='read crunch log files and summarize resource usage',
+ description='Arvados crunchstat-summary reads crunch log files and summarizes resource usage',
author='Arvados',
author_email='info@arvados.org',
url="https://arvados.org",