doc/sdk/python/arvados
doc/sdk/R/arvados
doc/sdk/java-v2/javadoc
-sdk/perl/MYMETA.*
-sdk/perl/Makefile
-sdk/perl/blib
-sdk/perl/pm_to_blib
*/vendor
*/*/vendor
sdk/java/target
SHELL ["/bin/bash", "-c"]
# Install dependencies.
-RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git perl-ExtUtils-MakeMaker libattr-devel nss-devel libcurl-devel which tar unzip scl-utils centos-release-scl postgresql-devel fuse-devel xz-libs git wget pam-devel
+RUN yum -q -y install make automake gcc gcc-c++ libyaml-devel patch readline-devel zlib-devel libffi-devel openssl-devel bzip2 libtool bison sqlite-devel rpm-build git libattr-devel nss-devel libcurl-devel which tar unzip scl-utils centos-release-scl postgresql-devel fuse-devel xz-libs git wget pam-devel
# Install RVM
ADD generated/mpapis.asc /tmp/
keep-rsync
keep-block-check
keep-web
- libarvados-perl
libpam-arvados-go
python3-cwltest
python3-arvados-fuse
# Required due to CVE-2022-24765
git config --global --add safe.directory /arvados
-# Perl packages
-debug_echo -e "\nPerl packages\n"
-
-handle_libarvados_perl
-
# Ruby gems
debug_echo -e "\nRuby gems\n"
)
}
-# Usage: handle_libarvados_perl
-handle_libarvados_perl () {
- if [[ -n "$ONLY_BUILD" ]] && [[ "$ONLY_BUILD" != "libarvados-perl" ]] ; then
- debug_echo -e "Skipping build of libarvados-perl package."
- return 0
- fi
- # The perl sdk subdirectory is so old that it has no tag in its history,
- # which causes version_at_commit.sh to fail. Just rebuild it every time.
- cd "$WORKSPACE"
- libarvados_perl_version="$(version_from_git)"
- cd "$WORKSPACE/sdk/perl"
-
- cd $WORKSPACE/packages/$TARGET
- test_package_presence libarvados-perl "$libarvados_perl_version"
-
- if [[ "$?" == "0" ]]; then
- cd "$WORKSPACE/sdk/perl"
-
- if [[ -e Makefile ]]; then
- make realclean >"$STDOUT_IF_DEBUG"
- fi
- find -maxdepth 1 \( -name 'MANIFEST*' -or -name "libarvados-perl*.$FORMAT" \) \
- -delete
- rm -rf install
-
- perl Makefile.PL INSTALL_BASE=install >"$STDOUT_IF_DEBUG" && \
- make install INSTALLDIRS=perl >"$STDOUT_IF_DEBUG" && \
- fpm_build "$WORKSPACE/sdk/perl" install/lib/=/usr/share libarvados-perl \
- dir "$libarvados_perl_version" install/man/=/usr/share/man \
- "$WORKSPACE/apache-2.0.txt=/usr/share/doc/libarvados-perl/apache-2.0.txt" && \
- mv --no-clobber libarvados-perl*.$FORMAT "$WORKSPACE/packages/$TARGET/"
- fi
-}
-
# Build python packages with a virtualenv built-in
# Usage: fpm_build_virtualenv arvados-python-client sdk/python [deb|rpm] [amd64|arm64]
fpm_build_virtualenv () {
VENV3DIR=
PYTHONPATH=
GEMHOME=
-PERLINSTALLBASE=
R_LIBS=
export LANG=en_US.UTF-8
echo -n 'nginx: '
PATH="$PATH:/sbin:/usr/sbin:/usr/local/sbin" nginx -v \
|| fatal "No nginx. Try: apt-get install nginx"
- echo -n 'perl: '
- perl -v | grep version \
- || fatal "No perl. Try: apt-get install perl"
- for mod in ExtUtils::MakeMaker JSON LWP Net::SSL; do
- echo -n "perl $mod: "
- perl -e "use $mod; print \"\$$mod::VERSION\\n\"" \
- || fatal "No $mod. Try: apt-get install perl-modules libcrypt-ssleay-perl libjson-perl libwww-perl"
- done
echo -n 'gitolite: '
which gitolite \
|| fatal "No gitolite. Try: apt-get install gitolite3"
fi
# Set up temporary install dirs (unless existing dirs were supplied)
- for tmpdir in VENV3DIR GOPATH GEMHOME PERLINSTALLBASE R_LIBS
+ for tmpdir in VENV3DIR GOPATH GEMHOME R_LIBS
do
if [[ -z "${!tmpdir}" ]]; then
eval "$tmpdir"="$temp/$tmpdir"
rm -vf "${WORKSPACE}/tmp/*.log"
- export PERLINSTALLBASE
- export PERL5LIB="$PERLINSTALLBASE/lib/perl5${PERL5LIB:+:$PERL5LIB}"
-
export R_LIBS
export GOPATH
fi
}
-install_sdk/perl() {
- cd "$WORKSPACE/sdk/perl" \
- && perl Makefile.PL INSTALL_BASE="$PERLINSTALLBASE" \
- && make install INSTALLDIRS=perl
-}
-
install_sdk/cli() {
install_gem arvados-cli sdk/cli
}
do_install env
do_install cmd/arvados-server go
do_install sdk/cli
- do_install sdk/perl
do_install sdk/python pip "${VENV3DIR}/bin/"
do_install sdk/ruby
do_install services/api
do_install doc
do_install sdk/ruby
do_install sdk/R
- do_install sdk/perl
do_install sdk/cli
do_install services/login-sync
for p in "${pythonstuff[@]}"
- sdk/java-v2/index.html.textile.liquid
- sdk/java-v2/example.html.textile.liquid
- sdk/java-v2/javadoc.html.textile.liquid
- - Perl:
- - sdk/perl/index.html.textile.liquid
- - sdk/perl/example.html.textile.liquid
api:
- Concepts:
- api/index.html.textile.liquid
To check for services that have not restarted since the configuration file was updated, run the @arvados-server check@ command on each system node.
+To test functionality and check for common problems, run the @arvados-client sudo diagnostics@ command on a system node.
+
h2(#upgrading). Upgrading Arvados
Upgrading Arvados typically involves the following steps:
# Run @arvados-server config-check@ to detect configuration errors or deprecated entries.
# Verify that the Arvados services were restarted as part of the package upgrades.
# Run @arvados-server check@ to detect services that did not restart properly.
+# Run @arvados-client sudo diagnostics@ to test functionality.
On the dispatch node, start monitoring the arvados-dispatch-cloud logs:
<notextile>
-<pre><code>~$ <span class="userinput">sudo journalctl -o cat -fu arvados-dispatch-cloud.service</span>
+<pre><code># <span class="userinput">journalctl -o cat -fu arvados-dispatch-cloud.service</span>
</code></pre>
</notextile>
-"Make sure to install the arvados/jobs image.":../install-jobs-image.html
-
-Submit a simple container request:
+In another terminal window, use the diagnostics tool to run a simple container.
<notextile>
-<pre><code>shell:~$ <span class="userinput">arv container_request create --container-request '{
- "name": "test",
- "state": "Committed",
- "priority": 1,
- "container_image": "arvados/jobs:latest",
- "command": ["echo", "Hello, Crunch!"],
- "output_path": "/out",
- "mounts": {
- "/out": {
- "kind": "tmp",
- "capacity": 1000
- }
- },
- "runtime_constraints": {
- "vcpus": 1,
- "ram": 1048576
- }
-}'</span>
+<pre><code># <span class="userinput">arvados-client sudo diagnostics</span>
+INFO 5: running health check (same as `arvados-server check`)
+INFO 10: getting discovery document from https://zzzzz.arvadosapi.com/discovery/v1/apis/arvados/v1/rest
+...
+INFO 160: running a container
+INFO ... container request submitted, waiting up to 10m for container to run
</code></pre>
</notextile>
-This command should return a record with a @container_uuid@ field. Once @arvados-dispatch-cloud@ polls the API server for new containers to run, you should see it dispatch that same container.
+After performing a number of other quick tests, this will submit a new container request and wait for it to finish.
+
+While the diagnostics tool is waiting, the @arvados-dispatch-cloud@ logs will show details about creating a cloud instance, waiting for it to be ready, and scheduling the new container on it.
-The @arvados-dispatch-cloud@ API provides a list of queued and running jobs and cloud instances. Use your @ManagementToken@ to test the dispatcher's endpoint. For example, when one container is running:
+You can also use the "arvados-dispatch-cloud API":{{site.baseurl}}/api/dispatch.html to get a list of queued and running jobs and cloud instances. Use your @ManagementToken@ to test the dispatcher's endpoint. For example, when one container is running:
<notextile>
<pre><code>~$ <span class="userinput">curl -sH "Authorization: Bearer $token" http://localhost:9006/arvados/v1/dispatch/containers</span>
A similar request can be made to the @http://localhost:9006/arvados/v1/dispatch/instances@ endpoint.
-When the container finishes, the dispatcher will log it.
-
After the container finishes, you can get the container record by UUID *from a shell server* to see its results:
<notextile>
{% include 'start_service' %}
{% include 'restart_api' %}
+
+h2(#confirm-working). Confirm working installation
+
+On the dispatch node, start monitoring the arvados-dispatch-lsf logs:
+
+<notextile>
+<pre><code># <span class="userinput">journalctl -o cat -fu arvados-dispatch-lsf.service</span>
+</code></pre>
+</notextile>
+
+In another terminal window, use the diagnostics tool to run a simple container.
+
+<notextile>
+<pre><code># <span class="userinput">arvados-client sudo diagnostics</span>
+INFO 5: running health check (same as `arvados-server check`)
+INFO 10: getting discovery document from https://zzzzz.arvadosapi.com/discovery/v1/apis/arvados/v1/rest
+...
+INFO 160: running a container
+INFO ... container request submitted, waiting up to 10m for container to run
+</code></pre>
+</notextile>
+
+After performing a number of other quick tests, this will submit a new container request and wait for it to finish.
+
+While the diagnostics tool is waiting, the @arvados-dispatch-lsf@ logs will show details about submitting an LSF job to run the container.
On the dispatch node, start monitoring the crunch-dispatch-slurm logs:
<notextile>
-<pre><code>~$ <span class="userinput">sudo journalctl -o cat -fu crunch-dispatch-slurm.service</span>
+<pre><code># <span class="userinput">journalctl -o cat -fu crunch-dispatch-slurm.service</span>
</code></pre>
</notextile>
-Submit a simple container request:
+In another terminal window, use the diagnostics tool to run a simple container.
<notextile>
-<pre><code>shell:~$ <span class="userinput">arv container_request create --container-request '{
- "name": "test",
- "state": "Committed",
- "priority": 1,
- "container_image": "arvados/jobs:latest",
- "command": ["echo", "Hello, Crunch!"],
- "output_path": "/out",
- "mounts": {
- "/out": {
- "kind": "tmp",
- "capacity": 1000
- }
- },
- "runtime_constraints": {
- "vcpus": 1,
- "ram": 8388608
- }
-}'</span>
+<pre><code># <span class="userinput">arvados-client sudo diagnostics</span>
+INFO 5: running health check (same as `arvados-server check`)
+INFO 10: getting discovery document from https://zzzzz.arvadosapi.com/discovery/v1/apis/arvados/v1/rest
+...
+INFO 160: running a container
+INFO ... container request submitted, waiting up to 10m for container to run
</code></pre>
</notextile>
-This command should return a record with a @container_uuid@ field. Once @crunch-dispatch-slurm@ polls the API server for new containers to run, you should see it dispatch that same container. It will log messages like:
+Once @crunch-dispatch-slurm@ polls the API server for new containers to run, you should see it dispatch the new container. It will log messages like:
<notextile>
<pre><code>2016/08/05 13:52:54 Monitoring container zzzzz-dz642-hdp2vpu9nq14tx0 started
* "R SDK":{{site.baseurl}}/sdk/R/index.html
* "Ruby SDK":{{site.baseurl}}/sdk/ruby/index.html
* "Java SDK v2":{{site.baseurl}}/sdk/java-v2/index.html
-* "Perl SDK":{{site.baseurl}}/sdk/perl/index.html
Many Arvados Workbench pages, under the *Advanced* tab, provide examples of API and SDK use for accessing the current resource .
+++ /dev/null
----
-layout: default
-navsection: sdk
-navmenu: Perl
-title: "Examples"
-...
-{% comment %}
-Copyright (C) The Arvados Authors. All rights reserved.
-
-SPDX-License-Identifier: CC-BY-SA-3.0
-{% endcomment %}
-
-h2. Initialize SDK
-
-Set up an API client user agent:
-
-{% codeblock as perl %}
-use Arvados;
-my $arv = Arvados->new('apiVersion' => 'v1');
-{% endcodeblock %}
-
-The SDK retrieves the list of API methods from the server at run time. Therefore, the set of available methods is determined by the server version rather than the SDK version.
-
-h2. create
-
-Create an object:
-
-{% codeblock as perl %}
-my $test_link = $arv->{'links'}->{'create'}->execute('link' => { 'link_class' => 'test', 'name' => 'test' });
-{% endcodeblock %}
-
-h2. delete
-
-{% codeblock as perl %}
-my $some_user = $arv->{'collections'}->{'get'}->execute('uuid' => $collection_uuid);
-{% endcodeblock %}
-
-h2. get
-
-Retrieve an object by ID:
-
-{% codeblock as perl %}
-my $some_user = $arv->{'users'}->{'get'}->execute('uuid' => $current_user_uuid);
-{% endcodeblock %}
-
-Get the UUID of an object that was retrieved using the SDK:
-
-{% codeblock as perl %}
-my $current_user_uuid = $current_user->{'uuid'}
-{% endcodeblock %}
-
-h2. list
-
-Get a list of objects:
-
-{% codeblock as perl %}
-my $repos = $arv->{'repositories'}->{'list'}->execute;
-print ("UUID of first repo returned is ", $repos->{'items'}->[0], "\n");
-{% endcodeblock %}
-
-h2. update
-
-Update an object:
-
-{% codeblock as perl %}
-my $test_link = $arv->{'links'}->{'update'}->execute(
- 'uuid' => $test_link->{'uuid'},
- 'link' => { 'properties' => { 'foo' => 'bar' } });
-{% endcodeblock %}
-
-h2. Get current user
-
-Get the User object for the current user:
-
-{% codeblock as perl %}
-my $current_user = $arv->{'users'}->{'current'}->execute;
-{% endcodeblock %}
+++ /dev/null
----
-layout: default
-navsection: sdk
-navmenu: Perl
-title: "Installation"
-...
-{% comment %}
-Copyright (C) The Arvados Authors. All rights reserved.
-
-SPDX-License-Identifier: CC-BY-SA-3.0
-{% endcomment %}
-
-The Perl SDK provides a generic set of wrappers so you can make API calls easily.
-
-This is a legacy SDK. It is no longer used or maintained regularly.
-
-h3. Installation
-
-h4. Option 1: Install from distribution packages
-
-First, "add the appropriate package repository for your distribution":{{ site.baseurl }}/install/install-manual-prerequisites.html#repos.
-
-On Debian-based systems:
-
-<notextile>
-<pre><code>~$ <span class="userinput">sudo apt-get install libjson-perl libio-socket-ssl-perl libwww-perl libipc-system-simple-perl libarvados-perl</code>
-</code></pre>
-</notextile>
-
-On Red Hat-based systems:
-
-<notextile>
-<pre><code>~$ <span class="userinput">sudo yum install perl-ExtUtils-MakeMaker perl-JSON perl-IO-Socket-SSL perl-Crypt-SSLeay perl-WWW-Curl libarvados-perl</code>
-</code></pre>
-</notextile>
-
-h4. Option 2: Install from source
-
-First, install dependencies from your distribution. Refer to the package lists above, but don't install @libarvados-perl@.
-
-Then run the following:
-
-<notextile>
-<pre><code>~$ <span class="userinput">git clone https://github.com/arvados/arvados.git</span>
-~$ <span class="userinput">cd arvados/sdk/perl</span>
-~$ <span class="userinput">perl Makefile.PL</span>
-~$ <span class="userinput">sudo make install</span>
-</code></pre>
-</notextile>
-
-h3. Test installation
-
-If the SDK is installed, @perl -MArvados -e ''@ should produce no errors.
-
-If your @ARVADOS_API_HOST@ and @ARVADOS_API_TOKEN@ environment variables are set up correctly (see "api-tokens":{{site.baseurl}}/user/reference/api-tokens.html for details), the following test script should work:
-
-<notextile>
-<pre>~$ <code class="userinput">perl <<'EOF'
-use Arvados;
-my $arv = Arvados->new('apiVersion' => 'v1');
-my $me = $arv->{'users'}->{'current'}->execute;
-print ("arvados.v1.users.current.full_name = '", $me->{'full_name'}, "'\n");
-EOF</code>
-arvados.v1.users.current.full_name = 'Your Name'
-</pre>
-</notextile>
# Extra RAM to reserve on the node, in addition to
# the amount specified in the container's RuntimeConstraints
- ReserveExtraRAM: 256MiB
+ ReserveExtraRAM: 550MiB
# Minimum time between two attempts to run the same container
MinRetryPeriod: 0s
needRAM := ctr.RuntimeConstraints.RAM + ctr.RuntimeConstraints.KeepCacheRAM
needRAM += int64(cc.Containers.ReserveExtraRAM)
- needRAM += int64(cc.Containers.LocalKeepBlobBuffersPerVCPU * needVCPUs * (1 << 26))
+ if cc.Containers.LocalKeepBlobBuffersPerVCPU > 0 {
+ // + 200 MiB for keepstore process + 10% for GOGC=10
+ needRAM += 220 << 20
+ // + 64 MiB for each blob buffer + 10% for GOGC=10
+ needRAM += int64(cc.Containers.LocalKeepBlobBuffersPerVCPU * needVCPUs * (1 << 26) * 11 / 10)
+ }
needRAM = (needRAM * 100) / int64(100-discountConfiguredRAMPercent)
ok := false
"costly": {Price: 4.4, RAM: 4000000000, VCPUs: 8, Scratch: 2 * GiB, Name: "costly"},
},
} {
- best, err := ChooseInstanceType(&arvados.Cluster{InstanceTypes: menu, Containers: arvados.ContainersConfig{ReserveExtraRAM: 268435456}}, &arvados.Container{
+ best, err := ChooseInstanceType(&arvados.Cluster{InstanceTypes: menu, Containers: arvados.ContainersConfig{
+ LocalKeepBlobBuffersPerVCPU: 1,
+ ReserveExtraRAM: 268435456,
+ }}, &arvados.Container{
Mounts: map[string]arvados.Mount{
"/tmp": {Kind: "tmp", Capacity: 2 * int64(GiB)},
},
}
}
-func (*NodeSizeSuite) TestChoosePreemptable(c *check.C) {
+func (*NodeSizeSuite) TestChooseWithBlobBuffersOverhead(c *check.C) {
+ menu := map[string]arvados.InstanceType{
+ "nearly": {Price: 2.2, RAM: 4000000000, VCPUs: 4, Scratch: 2 * GiB, Name: "small"},
+ "best": {Price: 3.3, RAM: 8000000000, VCPUs: 4, Scratch: 2 * GiB, Name: "best"},
+ "costly": {Price: 4.4, RAM: 12000000000, VCPUs: 8, Scratch: 2 * GiB, Name: "costly"},
+ }
+ best, err := ChooseInstanceType(&arvados.Cluster{InstanceTypes: menu, Containers: arvados.ContainersConfig{
+ LocalKeepBlobBuffersPerVCPU: 16, // 1 GiB per vcpu => 2 GiB
+ ReserveExtraRAM: 268435456,
+ }}, &arvados.Container{
+ Mounts: map[string]arvados.Mount{
+ "/tmp": {Kind: "tmp", Capacity: 2 * int64(GiB)},
+ },
+ RuntimeConstraints: arvados.RuntimeConstraints{
+ VCPUs: 2,
+ RAM: 987654321,
+ KeepCacheRAM: 123456789,
+ },
+ })
+ c.Check(err, check.IsNil)
+ c.Check(best.Name, check.Equals, "best")
+}
+
+func (*NodeSizeSuite) TestChoosePreemptible(c *check.C) {
menu := map[string]arvados.InstanceType{
"costly": {Price: 4.4, RAM: 4000000000, VCPUs: 8, Scratch: 2 * GiB, Preemptible: true, Name: "costly"},
"almost best": {Price: 2.2, RAM: 2000000000, VCPUs: 4, Scratch: 2 * GiB, Name: "almost best"},
"default-jre-headless",
"gettext",
"libattr1-dev",
- "libcrypt-ssleay-perl",
"libfuse-dev",
"libgbm1", // cypress / workbench2 tests
"libgnutls28-dev",
- "libjson-perl",
"libpam-dev",
"libpcre3-dev",
"libpq-dev",
"libreadline-dev",
"libssl-dev",
- "libwww-perl",
"libxml2-dev",
"libxslt1-dev",
"linkchecker",
}
switch {
case osv.Debian && osv.Major >= 11:
- pkgs = append(pkgs, "g++", "libcurl4", "libcurl4-openssl-dev", "perl-modules-5.32")
+ pkgs = append(pkgs, "g++", "libcurl4", "libcurl4-openssl-dev")
case osv.Debian && osv.Major >= 10:
- pkgs = append(pkgs, "g++", "libcurl4", "libcurl4-openssl-dev", "perl-modules")
+ pkgs = append(pkgs, "g++", "libcurl4", "libcurl4-openssl-dev")
case osv.Debian || osv.Ubuntu:
- pkgs = append(pkgs, "g++", "libcurl3", "libcurl3-openssl-dev", "perl-modules")
+ pkgs = append(pkgs, "g++", "libcurl3", "libcurl3-openssl-dev")
case osv.Centos:
pkgs = append(pkgs, "gcc", "gcc-c++", "libcurl-devel", "postgresql-devel")
}
c.Assert(err, check.IsNil)
cluster, err := cfg.GetCluster("")
c.Assert(err, check.IsNil)
+ cluster.Containers.ReserveExtraRAM = 256 << 20
cluster.Containers.CloudVMs.PollInterval = arvados.Duration(time.Second / 4)
cluster.Containers.MinRetryPeriod = arvados.Duration(time.Second / 4)
cluster.InstanceTypes = arvados.InstanceTypeMap{
from .perf import Perf
from ._version import __version__
from .executor import ArvCwlExecutor
+from .fsaccess import workflow_uuid_pattern
# These aren't used directly in this file but
# other code expects to import them from here
action="store_false", default=True,
help=argparse.SUPPRESS)
+ parser.add_argument("--disable-git", dest="git_info",
+ action="store_false", default=True,
+ help=argparse.SUPPRESS)
+
parser.add_argument("--disable-color", dest="enable_color",
action="store_false", default=True,
help=argparse.SUPPRESS)
parser.add_argument("--http-timeout", type=int,
default=5*60, dest="http_timeout", help="API request timeout in seconds. Default is 300 seconds (5 minutes).")
+ parser.add_argument("--defer-downloads", action="store_true", default=False,
+ help="When submitting a workflow, defer downloading HTTP URLs to workflow launch instead of downloading to Keep before submit.")
+
+ parser.add_argument("--varying-url-params", type=str, default="",
+ help="A comma separated list of URL query parameters that should be ignored when storing HTTP URLs in Keep.")
+
+ parser.add_argument("--prefer-cached-downloads", action="store_true", default=False,
+ help="If a HTTP URL is found in Keep, skip upstream URL freshness check (will not notice if the upstream has changed, but also not error if upstream is unavailable).")
+
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("--enable-preemptible", dest="enable_preemptible", default=None, action="store_true", help="Use preemptible instances. Control individual steps with arv:UsePreemptible hint.")
exgroup.add_argument("--disable-preemptible", dest="enable_preemptible", default=None, action="store_false", help="Don't use preemptible instances.")
# unit tests.
stdout = None
+ if arvargs.submit and (arvargs.workflow.startswith("arvwf:") or workflow_uuid_pattern.match(arvargs.workflow)):
+ executor.loadingContext.do_validate = False
+ executor.fast_submit = True
+
return cwltool.main.main(args=arvargs,
stdout=stdout,
stderr=stderr,
"kind": "collection",
"portable_data_hash": "%s" % workflowcollection
}
+ elif self.embedded_tool.tool.get("id", "").startswith("arvwf:"):
+ workflowpath = "/var/lib/cwl/workflow.json#main"
+ record = self.arvrunner.api.workflows().get(uuid=self.embedded_tool.tool["id"][6:33]).execute(num_retries=self.arvrunner.num_retries)
+ packed = yaml.safe_load(record["definition"])
+ container_req["mounts"]["/var/lib/cwl/workflow.json"] = {
+ "kind": "json",
+ "content": packed
+ }
+ container_req["properties"]["template_uuid"] = self.embedded_tool.tool["id"][6:33]
else:
packed = packed_workflow(self.arvrunner, self.embedded_tool, self.merged_map, runtimeContext, git_info)
workflowpath = "/var/lib/cwl/workflow.json#main"
"kind": "json",
"content": packed
}
- if self.embedded_tool.tool.get("id", "").startswith("arvwf:"):
- container_req["properties"]["template_uuid"] = self.embedded_tool.tool["id"][6:33]
container_req["properties"].update({k.replace("http://arvados.org/cwl#", "arv:"): v for k, v in git_info.items()})
if runtimeContext.enable_preemptible is False:
command.append("--disable-preemptible")
+ if runtimeContext.varying_url_params:
+ command.append("--varying-url-params="+runtimeContext.varying_url_params)
+
+ if runtimeContext.prefer_cached_downloads:
+ command.append("--prefer-cached-downloads")
+
command.extend([workflowpath, "/var/lib/cwl/cwl.input.json"])
container_req["command"] = command
from schema_salad.sourceline import SourceLine, cmap
import schema_salad.ref_resolver
+import arvados.collection
+
from cwltool.pack import pack
from cwltool.load_tool import fetch_document, resolve_and_validate_document
from cwltool.process import shortname
max_res_pars = ("coresMin", "coresMax", "ramMin", "ramMax", "tmpdirMin", "tmpdirMax")
sum_res_pars = ("outdirMin", "outdirMax")
+def make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool):
+ col = arvados.collection.Collection(api_client=arvRunner.api,
+ keep_client=arvRunner.keep_client)
+
+ with col.open("workflow.json", "wt") as f:
+ json.dump(packed, f, sort_keys=True, indent=4, separators=(',',': '))
+
+ pdh = col.portable_data_hash()
+
+ toolname = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
+ if git_info and git_info.get("http://arvados.org/cwl#gitDescribe"):
+ toolname = "%s (%s)" % (toolname, git_info.get("http://arvados.org/cwl#gitDescribe"))
+
+ existing = arvRunner.api.collections().list(filters=[["portable_data_hash", "=", pdh], ["owner_uuid", "=", project_uuid]]).execute(num_retries=arvRunner.num_retries)
+ if len(existing["items"]) == 0:
+ col.save_new(name=toolname, owner_uuid=project_uuid, ensure_unique_name=True)
+
+ # now construct the wrapper
+
+ step = {
+ "id": "#main/" + toolname,
+ "in": [],
+ "out": [],
+ "run": "keep:%s/workflow.json#main" % pdh,
+ "label": name
+ }
+
+ newinputs = []
+ for i in main["inputs"]:
+ inp = {}
+ # Make sure to only copy known fields that are meaningful at
+ # the workflow level. In practice this ensures that if we're
+ # wrapping a CommandLineTool we don't grab inputBinding.
+ # Right now also excludes extension fields, which is fine,
+ # Arvados doesn't currently look for any extension fields on
+ # input parameters.
+ for f in ("type", "label", "secondaryFiles", "streamable",
+ "doc", "id", "format", "loadContents",
+ "loadListing", "default"):
+ if f in i:
+ inp[f] = i[f]
+ newinputs.append(inp)
+
+ wrapper = {
+ "class": "Workflow",
+ "id": "#main",
+ "inputs": newinputs,
+ "outputs": [],
+ "steps": [step]
+ }
+
+ for i in main["inputs"]:
+ step["in"].append({
+ "id": "#main/step/%s" % shortname(i["id"]),
+ "source": i["id"]
+ })
+
+ for i in main["outputs"]:
+ step["out"].append({"id": "#main/step/%s" % shortname(i["id"])})
+ wrapper["outputs"].append({"outputSource": "#main/step/%s" % shortname(i["id"]),
+ "type": i["type"],
+ "id": i["id"]})
+
+ wrapper["requirements"] = [{"class": "SubworkflowFeatureRequirement"}]
+
+ if main.get("requirements"):
+ wrapper["requirements"].extend(main["requirements"])
+ if main.get("hints"):
+ wrapper["hints"] = main["hints"]
+
+ doc = {"cwlVersion": "v1.2", "$graph": [wrapper]}
+
+ if git_info:
+ for g in git_info:
+ doc[g] = git_info[g]
+
+ return json.dumps(doc, sort_keys=True, indent=4, separators=(',',': '))
+
def upload_workflow(arvRunner, tool, job_order, project_uuid,
runtimeContext, uuid=None,
submit_runner_ram=0, name=None, merged_map=None,
main["hints"] = hints
+ wrapper = make_wrapper_workflow(arvRunner, main, packed, project_uuid, name, git_info, tool)
+
body = {
"workflow": {
"name": name,
"description": tool.tool.get("doc", ""),
- "definition":json.dumps(packed, sort_keys=True, indent=4, separators=(',',': '))
+ "definition": wrapper
}}
if project_uuid:
body["workflow"]["owner_uuid"] = project_uuid
**argv
): # type: (...) -> None
- super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
- self.tool["class"] = "WorkflowStep"
+ if arvrunner.fast_submit:
+ self.tool = toolpath_object
+ self.tool["inputs"] = []
+ self.tool["outputs"] = []
+ else:
+ super(ArvadosWorkflowStep, self).__init__(toolpath_object, pos, loadingContext, *argc, **argv)
+ self.tool["class"] = "WorkflowStep"
self.arvrunner = arvrunner
def job(self, joborder, output_callback, runtimeContext):
self.match_local_docker = False
self.enable_preemptible = None
self.copy_deps = None
+ self.defer_downloads = False
+ self.varying_url_params = ""
+ self.prefer_cached_downloads = False
super(ArvRuntimeContext, self).__init__(kwargs)
kind = 'error'
elif record.levelno >= logging.WARNING:
kind = 'warning'
+ if kind == 'warning' and record.name == "salad":
+ # Don't send validation warnings to runtime status,
+ # they're noisy and unhelpful.
+ return
if kind is not None and self.updatingRuntimeStatus is not True:
self.updatingRuntimeStatus = True
try:
arvargs.output_tags = None
arvargs.thread_count = 1
arvargs.collection_cache_size = None
+ arvargs.git_info = True
+ arvargs.submit = False
+ arvargs.defer_downloads = False
self.api = api_client
self.processes = {}
self.fs_access = None
self.secret_store = None
self.stdout = stdout
+ self.fast_submit = False
+ self.git_info = arvargs.git_info
if keep_client is not None:
self.keep_client = keep_client
self.toplevel_runtimeContext.make_fs_access = partial(CollectionFsAccess,
collection_cache=self.collection_cache)
+ self.defer_downloads = arvargs.submit and arvargs.defer_downloads
+
validate_cluster_target(self, self.toplevel_runtimeContext)
page = keys[:pageSize]
try:
proc_states = table.list(filters=[["uuid", "in", page]]).execute(num_retries=self.num_retries)
- except Exception:
- logger.exception("Error checking states on API server: %s")
+ except Exception as e:
+ logger.exception("Error checking states on API server: %s", e)
remain_wait = self.poll_interval
continue
def arv_executor(self, updated_tool, job_order, runtimeContext, logger=None):
self.debug = runtimeContext.debug
- git_info = self.get_git_info(updated_tool)
+ git_info = self.get_git_info(updated_tool) if self.git_info else {}
if git_info:
logger.info("Git provenance")
for g in git_info:
controller = self.api.config()["Services"]["Controller"]["ExternalURL"]
logger.info("Using cluster %s (%s)", self.api.config()["ClusterID"], workbench2 or workbench1 or controller)
- updated_tool.visit(self.check_features)
+ if not self.fast_submit:
+ updated_tool.visit(self.check_features)
self.pipeline = None
self.fs_access = runtimeContext.make_fs_access(runtimeContext.basedir)
loadingContext = self.loadingContext.copy()
loadingContext.do_validate = False
loadingContext.disable_js_validation = True
- if submitting:
+ if submitting and not self.fast_submit:
loadingContext.do_update = False
# Document may have been auto-updated. Reload the original
# document with updating disabled because we want to
# Upload direct dependencies of workflow steps, get back mapping of files to keep references.
# Also uploads docker images.
- logger.info("Uploading workflow dependencies")
- with Perf(metrics, "upload_workflow_deps"):
- merged_map = upload_workflow_deps(self, tool, runtimeContext)
+ if not self.fast_submit:
+ logger.info("Uploading workflow dependencies")
+ with Perf(metrics, "upload_workflow_deps"):
+ merged_map = upload_workflow_deps(self, tool, runtimeContext)
+ else:
+ merged_map = {}
# Recreate process object (ArvadosWorkflow or
# ArvadosCommandTool) because tool document may have been
try:
if url.startswith("http://arvados.org/cwl"):
return True
- if url.startswith("keep:"):
- return self.fsaccess.exists(url)
- if url.startswith("arvwf:"):
- if self.fetch_text(url):
+ urld, _ = urllib.parse.urldefrag(url)
+ if urld.startswith("keep:"):
+ return self.fsaccess.exists(urld)
+ if urld.startswith("arvwf:"):
+ if self.fetch_text(urld):
return True
except arvados.errors.NotFoundError:
return False
properties[url]["Date"] = my_formatdate(now)
-def changed(url, properties, now):
+def changed(url, clean_url, properties, now):
req = requests.head(url, allow_redirects=True)
- remember_headers(url, properties, req.headers, now)
if req.status_code != 200:
- raise Exception("Got status %s" % req.status_code)
+ # Sometimes endpoints are misconfigured and will deny HEAD but
+ # allow GET, so instead of failing here we fall back to a
+ # conditional GET using If-None-Match.
+ return True
- pr = properties[url]
- if "ETag" in pr and "ETag" in req.headers:
- if pr["ETag"] == req.headers["ETag"]:
- return False
+ etag = properties[url].get("ETag")
+
+ if url in properties:
+ del properties[url]
+ remember_headers(clean_url, properties, req.headers, now)
+
+ if "ETag" in req.headers and etag == req.headers["ETag"]:
+ # Didn't change
+ return False
return True
-def http_to_keep(api, project_uuid, url, utcnow=datetime.datetime.utcnow):
- r = api.collections().list(filters=[["properties", "exists", url]]).execute()
+def etag_quote(etag):
+ # if it already has leading and trailing quotes, do nothing
+ if etag[0] == '"' and etag[-1] == '"':
+ return etag
+ else:
+ # Add quotes.
+ return '"' + etag + '"'
+
+
+def http_to_keep(api, project_uuid, url, utcnow=datetime.datetime.utcnow, varying_url_params="", prefer_cached_downloads=False):
+ varying_params = [s.strip() for s in varying_url_params.split(",")]
+
+ parsed = urllib.parse.urlparse(url)
+ query = [q for q in urllib.parse.parse_qsl(parsed.query)
+ if q[0] not in varying_params]
+
+ clean_url = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, parsed.path, parsed.params,
+ urllib.parse.urlencode(query, safe="/"), parsed.fragment))
+
+ r1 = api.collections().list(filters=[["properties", "exists", url]]).execute()
+
+ if clean_url == url:
+ items = r1["items"]
+ else:
+ r2 = api.collections().list(filters=[["properties", "exists", clean_url]]).execute()
+ items = r1["items"] + r2["items"]
now = utcnow()
- for item in r["items"]:
+ etags = {}
+
+ for item in items:
properties = item["properties"]
- if fresh_cache(url, properties, now):
- # Do nothing
+
+ if clean_url in properties:
+ cache_url = clean_url
+ elif url in properties:
+ cache_url = url
+ else:
+ return False
+
+ if prefer_cached_downloads or fresh_cache(cache_url, properties, now):
+ # HTTP caching rules say we should use the cache
cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])
- if not changed(url, properties, now):
+ if not changed(cache_url, clean_url, properties, now):
# ETag didn't change, same content, just update headers
api.collections().update(uuid=item["uuid"], body={"collection":{"properties": properties}}).execute()
cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])
+ if "ETag" in properties[cache_url] and len(properties[cache_url]["ETag"]) > 2:
+ etags[properties[cache_url]["ETag"]] = item
+
+ logger.debug("Found ETags %s", etags)
+
properties = {}
- req = requests.get(url, stream=True, allow_redirects=True)
+ headers = {}
+ if etags:
+ headers['If-None-Match'] = ', '.join([etag_quote(k) for k,v in etags.items()])
+ logger.debug("Sending GET request with headers %s", headers)
+ req = requests.get(url, stream=True, allow_redirects=True, headers=headers)
- if req.status_code != 200:
+ if req.status_code not in (200, 304):
raise Exception("Failed to download '%s' got status %s " % (url, req.status_code))
- remember_headers(url, properties, req.headers, now)
+ remember_headers(clean_url, properties, req.headers, now)
+
+ if req.status_code == 304 and "ETag" in req.headers and req.headers["ETag"] in etags:
+ item = etags[req.headers["ETag"]]
+ item["properties"].update(properties)
+ api.collections().update(uuid=item["uuid"], body={"collection":{"properties": item["properties"]}}).execute()
+ cr = arvados.collection.CollectionReader(item["portable_data_hash"], api_client=api)
+ return "keep:%s/%s" % (item["portable_data_hash"], list(cr.keys())[0])
- if "Content-Length" in properties[url]:
- cl = int(properties[url]["Content-Length"])
+ if "Content-Length" in properties[clean_url]:
+ cl = int(properties[clean_url]["Content-Length"])
logger.info("Downloading %s (%s bytes)", url, cl)
else:
cl = None
else:
name = grp.group(4)
else:
- name = urllib.parse.urlparse(url).path.split("/")[-1]
+ name = parsed.path.split("/")[-1]
count = 0
start = time.time()
logger.info("%d downloaded, %3.2f MiB/s", count, (bps / (1024*1024)))
checkpoint = loopnow
+ logger.info("Download complete")
+
+ collectionname = "Downloaded from %s" % urllib.parse.quote(clean_url, safe='')
+
+ # Maximum collection name length, minus room for the timestamp
+ # that ensure_unique_name may append.
+ max_name_len = 254 - 28
+
+ if len(collectionname) > max_name_len:
+ over = len(collectionname) - max_name_len
+ split = int(max_name_len/2)
+ collectionname = collectionname[0:split] + "…" + collectionname[split+over:]
- collectionname = "Downloaded from %s" % urllib.parse.quote(url, safe='')
c.save_new(name=collectionname, owner_uuid=project_uuid, ensure_unique_name=True)
api.collections().update(uuid=c.manifest_locator(), body={"collection":{"properties": properties}}).execute()
raise WorkflowException("Directory literal '%s' is missing `listing`" % src)
elif src.startswith("http:") or src.startswith("https:"):
try:
- keepref = http_to_keep(self.arvrunner.api, self.arvrunner.project_uuid, src)
- logger.info("%s is %s", src, keepref)
- self._pathmap[src] = MapperEnt(keepref, keepref, srcobj["class"], True)
+ if self.arvrunner.defer_downloads:
+ # Pass through unchanged; the file will be downloaded later.
+ self._pathmap[src] = MapperEnt(src, src, srcobj["class"], True)
+ else:
+ keepref = http_to_keep(self.arvrunner.api, self.arvrunner.project_uuid, src,
+ varying_url_params=self.arvrunner.toplevel_runtimeContext.varying_url_params,
+ prefer_cached_downloads=self.arvrunner.toplevel_runtimeContext.prefer_cached_downloads)
+ logger.info("%s is %s", src, keepref)
+ self._pathmap[src] = MapperEnt(keepref, keepref, srcobj["class"], True)
except Exception as e:
logger.warning(str(e))
else:
if loc.startswith("_:"):
return True
+ if self.arvrunner.defer_downloads and (loc.startswith("http:") or loc.startswith("https:")):
+ return False
+
i = loc.rfind("/")
if i > -1:
loc_prefix = loc[:i+1]
'cwltool==3.1.20220907141119',
'schema-salad==8.3.20220913105718',
'arvados-python-client{}'.format(pysdk_dep),
- 'setuptools',
'ciso8601 >= 2.0.0',
'networkx < 2.6',
'msgpack==1.0.3',
- 'importlib-metadata<5'
+ 'importlib-metadata<5',
+ 'setuptools>=40.3.0'
],
data_files=[
('share/doc/arvados-cwl-runner', ['LICENSE-2.0.txt', 'README.rst']),
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+{
+ "$graph": [
+ {
+ "class": "Workflow",
+ "hints": [
+ {
+ "acrContainerImage": "999999999999999999999999999999d3+99",
+ "class": "http://arvados.org/cwl#WorkflowRunnerResources"
+ }
+ ],
+ "id": "#main",
+ "inputs": [],
+ "outputs": [],
+ "requirements": [
+ {
+ "class": "SubworkflowFeatureRequirement"
+ }
+ ],
+ "steps": [
+ {
+ "id": "#main/collection_per_tool.cwl",
+ "in": [],
+ "label": "collection_per_tool.cwl",
+ "out": [],
+ "run": "keep:92045991f69a417f2f26660db67911ef+61/workflow.json#main"
+ }
+ ]
+ }
+ ],
+ "cwlVersion": "v1.2"
+}
r = arvados_cwl.http.http_to_keep(api, None, "http://example.com/file1.txt", utcnow=utcnow)
self.assertEqual(r, "keep:99999999999999999999999999999998+99/file1.txt")
- getmock.assert_called_with("http://example.com/file1.txt", stream=True, allow_redirects=True)
+ getmock.assert_called_with("http://example.com/file1.txt", stream=True, allow_redirects=True, headers={})
cm.open.assert_called_with("file1.txt", "wb")
cm.save_new.assert_called_with(name="Downloaded from http%3A%2F%2Fexample.com%2Ffile1.txt",
r = arvados_cwl.http.http_to_keep(api, None, "http://example.com/file1.txt", utcnow=utcnow)
self.assertEqual(r, "keep:99999999999999999999999999999997+99/file1.txt")
- getmock.assert_called_with("http://example.com/file1.txt", stream=True, allow_redirects=True)
+ getmock.assert_called_with("http://example.com/file1.txt", stream=True, allow_redirects=True, headers={})
cm.open.assert_called_with("file1.txt", "wb")
cm.save_new.assert_called_with(name="Downloaded from http%3A%2F%2Fexample.com%2Ffile1.txt",
'http://example.com/file1.txt': {
'Date': 'Tue, 15 May 2018 00:00:00 GMT',
'Expires': 'Tue, 16 May 2018 00:00:00 GMT',
- 'ETag': '123456'
+ 'ETag': '"123456"'
}
}
}]
req.headers = {
'Date': 'Tue, 17 May 2018 00:00:00 GMT',
'Expires': 'Tue, 19 May 2018 00:00:00 GMT',
- 'ETag': '123456'
+ 'ETag': '"123456"'
}
headmock.return_value = req
body={"collection":{"properties": {'http://example.com/file1.txt': {
'Date': 'Tue, 17 May 2018 00:00:00 GMT',
'Expires': 'Tue, 19 May 2018 00:00:00 GMT',
- 'ETag': '123456'
+ 'ETag': '"123456"'
}}}})
])
r = arvados_cwl.http.http_to_keep(api, None, "http://example.com/download?fn=/file1.txt", utcnow=utcnow)
self.assertEqual(r, "keep:99999999999999999999999999999998+99/file1.txt")
- getmock.assert_called_with("http://example.com/download?fn=/file1.txt", stream=True, allow_redirects=True)
+ getmock.assert_called_with("http://example.com/download?fn=/file1.txt", stream=True, allow_redirects=True, headers={})
cm.open.assert_called_with("file1.txt", "wb")
cm.save_new.assert_called_with(name="Downloaded from http%3A%2F%2Fexample.com%2Fdownload%3Ffn%3D%2Ffile1.txt",
mock.call(uuid=cm.manifest_locator(),
body={"collection":{"properties": {"http://example.com/download?fn=/file1.txt": {'Date': 'Tue, 15 May 2018 00:00:00 GMT'}}}})
])
+
+ @mock.patch("requests.get")
+ @mock.patch("requests.head")
+ @mock.patch("arvados.collection.CollectionReader")
+ def test_http_etag_if_none_match(self, collectionmock, headmock, getmock):
+ api = mock.MagicMock()
+
+ api.collections().list().execute.return_value = {
+ "items": [{
+ "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz3",
+ "portable_data_hash": "99999999999999999999999999999998+99",
+ "properties": {
+ 'http://example.com/file1.txt': {
+ 'Date': 'Tue, 15 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 16 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }
+ }
+ }]
+ }
+
+ cm = mock.MagicMock()
+ cm.manifest_locator.return_value = "zzzzz-4zz18-zzzzzzzzzzzzzz3"
+ cm.portable_data_hash.return_value = "99999999999999999999999999999998+99"
+ cm.keys.return_value = ["file1.txt"]
+ collectionmock.return_value = cm
+
+ # Head request fails, will try a conditional GET instead
+ req = mock.MagicMock()
+ req.status_code = 403
+ req.headers = {
+ }
+ headmock.return_value = req
+
+ utcnow = mock.MagicMock()
+ utcnow.return_value = datetime.datetime(2018, 5, 17)
+
+ req = mock.MagicMock()
+ req.status_code = 304
+ req.headers = {
+ 'Date': 'Tue, 17 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 19 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }
+ getmock.return_value = req
+
+ r = arvados_cwl.http.http_to_keep(api, None, "http://example.com/file1.txt", utcnow=utcnow)
+ self.assertEqual(r, "keep:99999999999999999999999999999998+99/file1.txt")
+
+ getmock.assert_called_with("http://example.com/file1.txt", stream=True, allow_redirects=True, headers={"If-None-Match": '"123456"'})
+ cm.open.assert_not_called()
+
+ api.collections().update.assert_has_calls([
+ mock.call(uuid=cm.manifest_locator(),
+ body={"collection":{"properties": {'http://example.com/file1.txt': {
+ 'Date': 'Tue, 17 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 19 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }}}})
+ ])
+
+
+ @mock.patch("requests.get")
+ @mock.patch("requests.head")
+ @mock.patch("arvados.collection.CollectionReader")
+ def test_http_prefer_cached_downloads(self, collectionmock, headmock, getmock):
+ api = mock.MagicMock()
+
+ api.collections().list().execute.return_value = {
+ "items": [{
+ "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz3",
+ "portable_data_hash": "99999999999999999999999999999998+99",
+ "properties": {
+ 'http://example.com/file1.txt': {
+ 'Date': 'Tue, 15 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 16 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }
+ }
+ }]
+ }
+
+ cm = mock.MagicMock()
+ cm.manifest_locator.return_value = "zzzzz-4zz18-zzzzzzzzzzzzzz3"
+ cm.portable_data_hash.return_value = "99999999999999999999999999999998+99"
+ cm.keys.return_value = ["file1.txt"]
+ collectionmock.return_value = cm
+
+ utcnow = mock.MagicMock()
+ utcnow.return_value = datetime.datetime(2018, 5, 17)
+
+ r = arvados_cwl.http.http_to_keep(api, None, "http://example.com/file1.txt", utcnow=utcnow, prefer_cached_downloads=True)
+ self.assertEqual(r, "keep:99999999999999999999999999999998+99/file1.txt")
+
+ headmock.assert_not_called()
+ getmock.assert_not_called()
+ cm.open.assert_not_called()
+ api.collections().update.assert_not_called()
+
+ @mock.patch("requests.get")
+ @mock.patch("requests.head")
+ @mock.patch("arvados.collection.CollectionReader")
+ def test_http_varying_url_params(self, collectionmock, headmock, getmock):
+ for prurl in ("http://example.com/file1.txt", "http://example.com/file1.txt?KeyId=123&Signature=456&Expires=789"):
+ api = mock.MagicMock()
+
+ api.collections().list().execute.return_value = {
+ "items": [{
+ "uuid": "zzzzz-4zz18-zzzzzzzzzzzzzz3",
+ "portable_data_hash": "99999999999999999999999999999998+99",
+ "properties": {
+ prurl: {
+ 'Date': 'Tue, 15 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 16 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }
+ }
+ }]
+ }
+
+ cm = mock.MagicMock()
+ cm.manifest_locator.return_value = "zzzzz-4zz18-zzzzzzzzzzzzzz3"
+ cm.portable_data_hash.return_value = "99999999999999999999999999999998+99"
+ cm.keys.return_value = ["file1.txt"]
+ collectionmock.return_value = cm
+
+ req = mock.MagicMock()
+ req.status_code = 200
+ req.headers = {
+ 'Date': 'Tue, 17 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 19 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }
+ headmock.return_value = req
+
+ utcnow = mock.MagicMock()
+ utcnow.return_value = datetime.datetime(2018, 5, 17)
+
+ r = arvados_cwl.http.http_to_keep(api, None, "http://example.com/file1.txt?KeyId=123&Signature=456&Expires=789",
+ utcnow=utcnow, varying_url_params="KeyId,Signature,Expires")
+ self.assertEqual(r, "keep:99999999999999999999999999999998+99/file1.txt")
+
+ getmock.assert_not_called()
+ cm.open.assert_not_called()
+
+ api.collections().update.assert_has_calls([
+ mock.call(uuid=cm.manifest_locator(),
+ body={"collection":{"properties": {'http://example.com/file1.txt': {
+ 'Date': 'Tue, 17 May 2018 00:00:00 GMT',
+ 'Expires': 'Tue, 19 May 2018 00:00:00 GMT',
+ 'ETag': '"123456"'
+ }}}})
+ ])
gitinfo_workflow["$graph"][0]["id"] = "file://%s/tests/wf/%s" % (cwd, wfpath)
mocktool = mock.NonCallableMock(tool=gitinfo_workflow["$graph"][0], metadata=gitinfo_workflow)
- git_info = arvados_cwl.executor.ArvCwlExecutor.get_git_info(mocktool)
- expect_packed_workflow.update(git_info)
+ stubs.git_info = arvados_cwl.executor.ArvCwlExecutor.get_git_info(mocktool)
+ expect_packed_workflow.update(stubs.git_info)
- git_props = {"arv:"+k.split("#", 1)[1]: v for k,v in git_info.items()}
+ stubs.git_props = {"arv:"+k.split("#", 1)[1]: v for k,v in stubs.git_info.items()}
if wfname == wfpath:
- container_name = "%s (%s)" % (wfpath, git_props["arv:gitDescribe"])
+ container_name = "%s (%s)" % (wfpath, stubs.git_props["arv:gitDescribe"])
else:
container_name = wfname
'ram': (1024+256)*1024*1024
},
'use_existing': False,
- 'properties': git_props,
+ 'properties': stubs.git_props,
'secret_mounts': {}
}
root_logger.handlers = handlers
@mock.patch("time.sleep")
- @stubs
+ @stubs()
def test_submit_invalid_runner_ram(self, stubs, tm):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--debug", "--submit-runner-ram=-2048",
self.assertEqual(exited, 1)
- @stubs
+ @stubs()
def test_submit_container(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
'manifest_text':
'. 979af1245a12a1fed634d4222473bfdc+16 0:16:blorp.txt\n',
'replication_desired': None,
- 'name': 'submit_wf.cwl input (169f39d466a5438ac4a90e779bf750c7+53)',
+ 'name': 'submit_wf.cwl ('+ stubs.git_props["arv:gitDescribe"] +') input (169f39d466a5438ac4a90e779bf750c7+53)',
}), ensure_unique_name=False),
mock.call(body=JsonDiffMatcher({
'manifest_text':
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_tool(self, stubs):
# test for issue #16139
exited = arvados_cwl.main(
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_no_reuse(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--disable-reuse",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--disable-reuse', "--collection-cache-size=256",
- "--output-name=Output from workflow submit_wf.cwl",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["use_existing"] = False
stubs.expect_container_request_uuid + '\n')
- @stubs
+ @stubs()
def test_submit_container_on_error(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--on-error=stop",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
- "--output-name=Output from workflow submit_wf.cwl",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
'--debug', '--on-error=stop',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_output_name(self, stubs):
output_name = "test_output_name"
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_storage_classes(self, stubs):
exited = arvados_cwl.main(
["--debug", "--submit", "--no-wait", "--api=containers", "--storage-classes=foo",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
- '--output-name=Output from workflow submit_wf.cwl',
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
"--debug",
"--storage-classes=foo", '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_multiple_storage_classes(self, stubs):
exited = arvados_cwl.main(
["--debug", "--submit", "--no-wait", "--api=containers", "--storage-classes=foo,bar", "--intermediate-storage-classes=baz",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
- "--output-name=Output from workflow submit_wf.cwl",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
"--debug",
"--storage-classes=foo,bar", "--intermediate-storage-classes=baz", '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
@mock.patch("cwltool.task_queue.TaskQueue")
@mock.patch("arvados_cwl.arvworkflow.ArvadosWorkflow.job")
@mock.patch("arvados_cwl.executor.ArvCwlExecutor.make_output_collection")
- @stubs
+ @stubs()
def test_storage_classes_correctly_propagate_to_make_output_collection(self, stubs, make_output, job, tq):
final_output_c = arvados.collection.Collection()
make_output.return_value = ({},final_output_c)
job.side_effect = set_final_output
exited = arvados_cwl.main(
- ["--debug", "--local", "--storage-classes=foo",
+ ["--debug", "--local", "--storage-classes=foo", "--disable-git",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- make_output.assert_called_with(u'Output of submit_wf.cwl', ['foo'], '', {}, {"out": "zzzzz"})
+ make_output.assert_called_with(u'Output from workflow submit_wf.cwl', ['foo'], '', {}, {"out": "zzzzz"})
self.assertEqual(exited, 0)
@mock.patch("cwltool.task_queue.TaskQueue")
@mock.patch("arvados_cwl.arvworkflow.ArvadosWorkflow.job")
@mock.patch("arvados_cwl.executor.ArvCwlExecutor.make_output_collection")
- @stubs
+ @stubs()
def test_default_storage_classes_correctly_propagate_to_make_output_collection(self, stubs, make_output, job, tq):
final_output_c = arvados.collection.Collection()
make_output.return_value = ({},final_output_c)
job.side_effect = set_final_output
exited = arvados_cwl.main(
- ["--debug", "--local",
+ ["--debug", "--local", "--disable-git",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- make_output.assert_called_with(u'Output of submit_wf.cwl', ['default'], '', {}, {"out": "zzzzz"})
+ make_output.assert_called_with(u'Output from workflow submit_wf.cwl', ['default'], '', {}, {"out": "zzzzz"})
self.assertEqual(exited, 0)
@mock.patch("cwltool.task_queue.TaskQueue")
@mock.patch("arvados_cwl.arvworkflow.ArvadosWorkflow.job")
@mock.patch("arvados_cwl.executor.ArvCwlExecutor.make_output_collection")
- @stubs
+ @stubs()
def test_storage_class_hint_to_make_output_collection(self, stubs, make_output, job, tq):
final_output_c = arvados.collection.Collection()
make_output.return_value = ({},final_output_c)
job.side_effect = set_final_output
exited = arvados_cwl.main(
- ["--debug", "--local",
+ ["--debug", "--local", "--disable-git",
"tests/wf/submit_storage_class_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
- make_output.assert_called_with(u'Output of submit_storage_class_wf.cwl', ['foo', 'bar'], '', {}, {"out": "zzzzz"})
+ make_output.assert_called_with(u'Output from workflow submit_storage_class_wf.cwl', ['foo', 'bar'], '', {}, {"out": "zzzzz"})
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_output_ttl(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--intermediate-output-ttl", "3600",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
- "--output-name=Output from workflow submit_wf.cwl", '--debug',
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ '--debug',
'--on-error=continue',
"--intermediate-output-ttl=3600",
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_trash_intermediate(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--trash-intermediate",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
'--debug', '--on-error=continue',
"--trash-intermediate",
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_output_tags(self, stubs):
output_tags = "tag0,tag1,tag2"
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
- "--output-name=Output from workflow submit_wf.cwl",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
"--output-tags="+output_tags, '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_runner_ram(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-ram=2048",
@mock.patch("arvados.collection.CollectionReader")
@mock.patch("time.sleep")
- @stubs
+ @stubs()
def test_submit_file_keepref(self, stubs, tm, collectionReader):
collectionReader().exists.return_value = True
collectionReader().find.return_value = arvados.arvfile.ArvadosFile(mock.MagicMock(), "blorp.txt")
@mock.patch("arvados.collection.CollectionReader")
@mock.patch("time.sleep")
- @stubs
+ @stubs()
def test_submit_keepref(self, stubs, tm, reader):
with open("tests/wf/expect_arvworkflow.cwl") as f:
reader().open().__enter__().read.return_value = f.read()
self.assertEqual(exited, 0)
@mock.patch("time.sleep")
- @stubs
+ @stubs()
def test_submit_arvworkflow(self, stubs, tm):
with open("tests/wf/expect_arvworkflow.cwl") as f:
stubs.api.workflows().get().execute.return_value = {"definition": f.read(), "name": "a test workflow"}
exited = arvados_cwl.main(
- ["--submit", "--no-wait", "--api=containers", "--debug",
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--disable-git",
"962eh-7fd4e-gkbzl62qqtfig37", "-x", "XxX"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api)
'requirements': [
{
'dockerPull': 'debian:buster-slim',
- 'class': 'DockerRequirement',
- "http://arvados.org/cwl#dockerCollectionPDH": "999999999999999999999999999999d4+99"
+ 'class': 'DockerRequirement'
}
],
'id': '#submit_tool.cwl',
'command': ['arvados-cwl-runner', '--local', '--api=containers',
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
- '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
+ '--enable-reuse', "--collection-cache-size=256",
+ "--output-name=Output from workflow a test workflow",
+ '--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json'],
+ 'output_name': 'Output from workflow a test workflow',
'cwd': '/var/spool/cwl',
'runtime_constraints': {
'API': True,
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_missing_input(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 1)
- @stubs
+ @stubs()
def test_submit_container_project(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.groups().get().execute.return_value = {"group_class": "project"}
'--no-log-timestamps', '--disable-validate', '--disable-color',
"--eval-timeout=20", "--thread-count=0",
'--enable-reuse', "--collection-cache-size=256",
- "--output-name=Output from workflow submit_wf.cwl", '--debug',
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ '--debug',
'--on-error=continue',
'--project-uuid='+project_uuid,
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_eval_timeout(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--eval-timeout=60",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=60.0', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_collection_cache(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--collection-cache-size=500",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=0',
'--enable-reuse', "--collection-cache-size=500",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
expect_container["runtime_constraints"]["ram"] = (1024+500)*1024*1024
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_thread_count(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--thread-count=20",
'--no-log-timestamps', '--disable-validate', '--disable-color',
'--eval-timeout=20', '--thread-count=20',
'--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
'--debug', '--on-error=continue',
'/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_runner_image(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--submit-runner-image=arvados/jobs:123",
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_priority(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--priority=669",
arvados_cwl.runner.arvados_jobs_image(arvrunner, "arvados/jobs:"+arvados_cwl.__version__, arvrunner.runtimeContext))
- @stubs
+ @stubs()
def test_submit_secrets(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug",
"tests/wf/secret_wf.cwl", "tests/secret_test_job.yml"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+ stubs.git_props["arv:gitPath"] = "sdk/cwl/tests/wf/secret_wf.cwl"
+ stubs.git_info["http://arvados.org/cwl#gitPath"] = "sdk/cwl/tests/wf/secret_wf.cwl"
+
expect_container = {
"command": [
"arvados-cwl-runner",
'--thread-count=0',
"--enable-reuse",
"--collection-cache-size=256",
- '--output-name=Output from workflow secret_wf.cwl'
- '--debug',
+ '--output-name=Output from workflow secret_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ "--debug",
"--on-error=continue",
"/var/lib/cwl/workflow.json#main",
"/var/lib/cwl/cwl.input.json"
"path": "/var/spool/cwl/cwl.output.json"
}
},
- "name": "secret_wf.cwl",
- "output_name": "Output from workflow secret_wf.cwl",
+ "name": "secret_wf.cwl (%s)" % stubs.git_props["arv:gitDescribe"],
+ "output_name": "Output from workflow secret_wf.cwl (%s)" % stubs.git_props["arv:gitDescribe"],
"output_path": "/var/spool/cwl",
"priority": 500,
- "properties": {},
+ "properties": stubs.git_props,
"runtime_constraints": {
"API": True,
"ram": 1342177280,
"use_existing": False
}
+ expect_container["mounts"]["/var/lib/cwl/workflow.json"]["content"].update(stubs.git_info)
+
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
self.assertEqual(stubs.capture_stdout.getvalue(),
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_request_uuid(self, stubs):
stubs.api._rootDesc["remoteHosts"]["zzzzz"] = "123"
stubs.expect_container_request_uuid = "zzzzz-xvhdp-yyyyyyyyyyyyyyy"
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_container_cluster_id(self, stubs):
stubs.api._rootDesc["remoteHosts"]["zbbbb"] = "123"
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_validate_cluster_id(self, stubs):
stubs.api._rootDesc["remoteHosts"]["zbbbb"] = "123"
exited = arvados_cwl.main(
stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
self.assertEqual(exited, 1)
- @stubs
+ @stubs()
def test_submit_validate_project_uuid(self, stubs):
# Fails with bad cluster prefix
exited = arvados_cwl.main(
@mock.patch("arvados.collection.CollectionReader")
- @stubs
+ @stubs()
def test_submit_uuid_inputs(self, stubs, collectionReader):
collectionReader().exists.return_value = True
collectionReader().find.return_value = arvados.arvfile.ArvadosFile(mock.MagicMock(), "file1.txt")
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_mismatched_uuid_inputs(self, stubs):
def list_side_effect(**kwargs):
m = mock.MagicMock()
cwltool_logger.removeHandler(stderr_logger)
@mock.patch("arvados.collection.CollectionReader")
- @stubs
+ @stubs()
def test_submit_unknown_uuid_inputs(self, stubs, collectionReader):
collectionReader().find.return_value = arvados.arvfile.ArvadosFile(mock.MagicMock(), "file1.txt")
capture_stderr = StringIO()
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_enable_preemptible(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--enable-preemptible",
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container['command'] = ['arvados-cwl-runner', '--local', '--api=containers',
- '--no-log-timestamps', '--disable-validate', '--disable-color',
- '--eval-timeout=20', '--thread-count=0',
- '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
+ '--no-log-timestamps', '--disable-validate', '--disable-color',
+ '--eval-timeout=20', '--thread-count=0',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ '--debug', '--on-error=continue',
'--enable-preemptible',
- '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
stubs.expect_container_request_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_submit_disable_preemptible(self, stubs):
exited = arvados_cwl.main(
["--submit", "--no-wait", "--api=containers", "--debug", "--disable-preemptible",
expect_container = copy.deepcopy(stubs.expect_container_spec)
expect_container['command'] = ['arvados-cwl-runner', '--local', '--api=containers',
- '--no-log-timestamps', '--disable-validate', '--disable-color',
- '--eval-timeout=20', '--thread-count=0',
- '--enable-reuse', "--collection-cache-size=256", '--debug', '--on-error=continue',
+ '--no-log-timestamps', '--disable-validate', '--disable-color',
+ '--eval-timeout=20', '--thread-count=0',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ '--debug', '--on-error=continue',
'--disable-preemptible',
- '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+
+ stubs.api.container_requests().create.assert_called_with(
+ body=JsonDiffMatcher(expect_container))
+ self.assertEqual(stubs.capture_stdout.getvalue(),
+ stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
+
+ @stubs()
+ def test_submit_container_prefer_cached_downloads(self, stubs):
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--prefer-cached-downloads",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+
+ expect_container = copy.deepcopy(stubs.expect_container_spec)
+ expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
+ '--no-log-timestamps', '--disable-validate', '--disable-color',
+ '--eval-timeout=20', '--thread-count=0',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ '--debug', "--on-error=continue", '--prefer-cached-downloads',
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
+
+ stubs.api.container_requests().create.assert_called_with(
+ body=JsonDiffMatcher(expect_container))
+ self.assertEqual(stubs.capture_stdout.getvalue(),
+ stubs.expect_container_request_uuid + '\n')
+ self.assertEqual(exited, 0)
+
+ @stubs()
+ def test_submit_container_varying_url_params(self, stubs):
+ exited = arvados_cwl.main(
+ ["--submit", "--no-wait", "--api=containers", "--debug", "--varying-url-params", "KeyId,Signature",
+ "tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
+ stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+
+ expect_container = copy.deepcopy(stubs.expect_container_spec)
+ expect_container["command"] = ['arvados-cwl-runner', '--local', '--api=containers',
+ '--no-log-timestamps', '--disable-validate', '--disable-color',
+ '--eval-timeout=20', '--thread-count=0',
+ '--enable-reuse', "--collection-cache-size=256",
+ '--output-name=Output from workflow submit_wf.cwl (%s)' % stubs.git_props["arv:gitDescribe"],
+ '--debug', "--on-error=continue", "--varying-url-params=KeyId,Signature",
+ '/var/lib/cwl/workflow.json#main', '/var/lib/cwl/cwl.input.json']
stubs.api.container_requests().create.assert_called_with(
body=JsonDiffMatcher(expect_container))
class TestCreateWorkflow(unittest.TestCase):
existing_workflow_uuid = "zzzzz-7fd4e-validworkfloyml"
expect_workflow = StripYAMLComments(
- open("tests/wf/expect_upload_packed.cwl").read().rstrip())
+ open("tests/wf/expect_upload_wrapper.cwl").read().rstrip())
+ expect_workflow_altname = StripYAMLComments(
+ open("tests/wf/expect_upload_wrapper_altname.cwl").read().rstrip())
def setUp(self):
cwltool.process._names = set()
handlers = [h for h in root_logger.handlers if not isinstance(h, arvados_cwl.executor.RuntimeStatusLoggingHandler)]
root_logger.handlers = handlers
- @stubs
+ @stubs()
def test_create(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.groups().get().execute.return_value = {"group_class": "project"}
["--create-workflow", "--debug",
"--api=containers",
"--project-uuid", project_uuid,
+ "--disable-git",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api)
stubs.expect_workflow_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_create_name(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.groups().get().execute.return_value = {"group_class": "project"}
"--api=containers",
"--project-uuid", project_uuid,
"--name", "testing 123",
+ "--disable-git",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api)
"owner_uuid": project_uuid,
"name": "testing 123",
"description": "",
- "definition": self.expect_workflow,
+ "definition": self.expect_workflow_altname,
}
}
stubs.api.workflows().create.assert_called_with(
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_update(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.workflows().get().execute.return_value = {"owner_uuid": project_uuid}
exited = arvados_cwl.main(
["--update-workflow", self.existing_workflow_uuid,
"--debug",
+ "--disable-git",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api)
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_update_name(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.workflows().get().execute.return_value = {"owner_uuid": project_uuid}
exited = arvados_cwl.main(
["--update-workflow", self.existing_workflow_uuid,
"--debug", "--name", "testing 123",
+ "--disable-git",
"tests/wf/submit_wf.cwl", "tests/submit_test_job.json"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api)
"workflow": {
"name": "testing 123",
"description": "",
- "definition": self.expect_workflow,
+ "definition": self.expect_workflow_altname,
"owner_uuid": project_uuid
}
}
self.existing_workflow_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_create_collection_per_tool(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.groups().get().execute.return_value = {"group_class": "project"}
["--create-workflow", "--debug",
"--api=containers",
"--project-uuid", project_uuid,
+ "--disable-git",
"tests/collection_per_tool/collection_per_tool.cwl"],
stubs.capture_stdout, sys.stderr, api_client=stubs.api)
- toolfile = "tests/collection_per_tool/collection_per_tool_packed.cwl"
+ toolfile = "tests/collection_per_tool/collection_per_tool_wrapper.cwl"
expect_workflow = StripYAMLComments(open(toolfile).read().rstrip())
body = {
stubs.expect_workflow_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_create_with_imports(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.groups().get().execute.return_value = {"group_class": "project"}
stubs.expect_workflow_uuid + '\n')
self.assertEqual(exited, 0)
- @stubs
+ @stubs()
def test_create_with_no_input(self, stubs):
project_uuid = 'zzzzz-j7d0g-zzzzzzzzzzzzzzz'
stubs.api.groups().get().execute.return_value = {"group_class": "project"}
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+{
+ "$graph": [
+ {
+ "class": "Workflow",
+ "hints": [
+ {
+ "acrContainerImage": "999999999999999999999999999999d3+99",
+ "class": "http://arvados.org/cwl#WorkflowRunnerResources"
+ }
+ ],
+ "id": "#main",
+ "inputs": [
+ {
+ "default": {
+ "basename": "blorp.txt",
+ "class": "File",
+ "location": "keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt",
+ "nameext": ".txt",
+ "nameroot": "blorp",
+ "size": 16
+ },
+ "id": "#main/x",
+ "type": "File"
+ },
+ {
+ "default": {
+ "basename": "99999999999999999999999999999998+99",
+ "class": "Directory",
+ "location": "keep:99999999999999999999999999999998+99"
+ },
+ "id": "#main/y",
+ "type": "Directory"
+ },
+ {
+ "default": {
+ "basename": "anonymous",
+ "class": "Directory",
+ "listing": [
+ {
+ "basename": "renamed.txt",
+ "class": "File",
+ "location": "keep:99999999999999999999999999999998+99/file1.txt",
+ "nameext": ".txt",
+ "nameroot": "renamed",
+ "size": 0
+ }
+ ],
+ "location": "_:df80736f-f14d-4b10-b2e3-03aa27f034b2"
+ },
+ "id": "#main/z",
+ "type": "Directory"
+ }
+ ],
+ "outputs": [],
+ "requirements": [
+ {
+ "class": "SubworkflowFeatureRequirement"
+ }
+ ],
+ "steps": [
+ {
+ "id": "#main/submit_wf.cwl",
+ "in": [
+ {
+ "id": "#main/step/x",
+ "source": "#main/x"
+ },
+ {
+ "id": "#main/step/y",
+ "source": "#main/y"
+ },
+ {
+ "id": "#main/step/z",
+ "source": "#main/z"
+ }
+ ],
+ "label": "submit_wf.cwl",
+ "out": [],
+ "run": "keep:f1c2b0c514a5fb9b2a8b5b38a31bab66+61/workflow.json#main"
+ }
+ ]
+ }
+ ],
+ "cwlVersion": "v1.2"
+}
--- /dev/null
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+{
+ "$graph": [
+ {
+ "class": "Workflow",
+ "hints": [
+ {
+ "acrContainerImage": "999999999999999999999999999999d3+99",
+ "class": "http://arvados.org/cwl#WorkflowRunnerResources"
+ }
+ ],
+ "id": "#main",
+ "inputs": [
+ {
+ "default": {
+ "basename": "blorp.txt",
+ "class": "File",
+ "location": "keep:169f39d466a5438ac4a90e779bf750c7+53/blorp.txt",
+ "nameext": ".txt",
+ "nameroot": "blorp",
+ "size": 16
+ },
+ "id": "#main/x",
+ "type": "File"
+ },
+ {
+ "default": {
+ "basename": "99999999999999999999999999999998+99",
+ "class": "Directory",
+ "location": "keep:99999999999999999999999999999998+99"
+ },
+ "id": "#main/y",
+ "type": "Directory"
+ },
+ {
+ "default": {
+ "basename": "anonymous",
+ "class": "Directory",
+ "listing": [
+ {
+ "basename": "renamed.txt",
+ "class": "File",
+ "location": "keep:99999999999999999999999999999998+99/file1.txt",
+ "nameext": ".txt",
+ "nameroot": "renamed",
+ "size": 0
+ }
+ ],
+ "location": "_:df80736f-f14d-4b10-b2e3-03aa27f034b2"
+ },
+ "id": "#main/z",
+ "type": "Directory"
+ }
+ ],
+ "outputs": [],
+ "requirements": [
+ {
+ "class": "SubworkflowFeatureRequirement"
+ }
+ ],
+ "steps": [
+ {
+ "id": "#main/submit_wf.cwl",
+ "in": [
+ {
+ "id": "#main/step/x",
+ "source": "#main/x"
+ },
+ {
+ "id": "#main/step/y",
+ "source": "#main/y"
+ },
+ {
+ "id": "#main/step/z",
+ "source": "#main/z"
+ }
+ ],
+ "label": "testing 123",
+ "out": [],
+ "run": "keep:f1c2b0c514a5fb9b2a8b5b38a31bab66+61/workflow.json#main"
+ }
+ ]
+ }
+ ],
+ "cwlVersion": "v1.2"
+}
+++ /dev/null
-#! /usr/bin/perl
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-use strict;
-
-use ExtUtils::MakeMaker;
-
-WriteMakefile(
- NAME => 'Arvados',
- VERSION_FROM => 'lib/Arvados.pm',
- PREREQ_PM => {
- 'JSON' => 0,
- 'LWP' => 0,
- 'Net::SSL' => 0,
- },
-);
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-=head1 NAME
-
-Arvados -- client library for Arvados services
-
-=head1 SYNOPSIS
-
- use Arvados;
- $arv = Arvados->new(apiHost => 'arvados.local');
-
- my $instances = $arv->{'pipeline_instances'}->{'list'}->execute();
- print "UUID is ", $instances->{'items'}->[0]->{'uuid'}, "\n";
-
- $uuid = 'eiv0u-arx5y-2c5ovx43zw90gvh';
- $instance = $arv->{'pipeline_instances'}->{'get'}->execute('uuid' => $uuid);
- print "ETag is ", $instance->{'etag'}, "\n";
-
- $instance->{'active'} = 1;
- $instance->{'name'} = '';
- $instance->save();
- print "ETag is ", $instance->{'etag'}, "\n";
-
-=head1 METHODS
-
-=head2 new()
-
- my $whc = Arvados->new( %OPTIONS );
-
-Set up a client and retrieve the schema from the server.
-
-=head3 Options
-
-=over
-
-=item apiHost
-
-Hostname of API discovery service. Default: C<ARVADOS_API_HOST>
-environment variable, or C<arvados>
-
-=item apiProtocolScheme
-
-Protocol scheme. Default: C<ARVADOS_API_PROTOCOL_SCHEME> environment
-variable, or C<https>
-
-=item authToken
-
-Authorization token. Default: C<ARVADOS_API_TOKEN> environment variable
-
-=item apiService
-
-Default C<arvados>
-
-=item apiVersion
-
-Default C<v1>
-
-=back
-
-=cut
-
-package Arvados;
-
-use Net::SSL (); # From Crypt-SSLeay
-BEGIN {
- $Net::HTTPS::SSL_SOCKET_CLASS = "Net::SSL"; # Force use of Net::SSL
-}
-
-use JSON;
-use Carp;
-use Arvados::ResourceAccessor;
-use Arvados::ResourceMethod;
-use Arvados::ResourceProxy;
-use Arvados::ResourceProxyList;
-use Arvados::Request;
-use Data::Dumper;
-
-$Arvados::VERSION = 0.1;
-
-sub new
-{
- my $class = shift;
- my %self = @_;
- my $self = \%self;
- bless ($self, $class);
- return $self->build(@_);
-}
-
-sub build
-{
- my $self = shift;
-
- $config = load_config_file("$ENV{HOME}/.config/arvados/settings.conf");
-
- $self->{'authToken'} ||=
- $ENV{ARVADOS_API_TOKEN} || $config->{ARVADOS_API_TOKEN};
-
- $self->{'apiHost'} ||=
- $ENV{ARVADOS_API_HOST} || $config->{ARVADOS_API_HOST};
-
- $self->{'noVerifyHostname'} ||=
- $ENV{ARVADOS_API_HOST_INSECURE};
-
- $self->{'apiProtocolScheme'} ||=
- $ENV{ARVADOS_API_PROTOCOL_SCHEME} ||
- $config->{ARVADOS_API_PROTOCOL_SCHEME};
-
- $self->{'ua'} = new Arvados::Request;
-
- my $host = $self->{'apiHost'} || 'arvados';
- my $service = $self->{'apiService'} || 'arvados';
- my $version = $self->{'apiVersion'} || 'v1';
- my $scheme = $self->{'apiProtocolScheme'} || 'https';
- my $uri = "$scheme://$host/discovery/v1/apis/$service/$version/rest";
- my $r = $self->new_request;
- $r->set_uri($uri);
- $r->set_method("GET");
- $r->process_request();
- my $data, $headers;
- my ($status_number, $status_phrase) = $r->get_status();
- $data = $r->get_body() if $status_number == 200;
- $headers = $r->get_headers();
- if ($data) {
- my $doc = $self->{'discoveryDocument'} = JSON::decode_json($data);
- print STDERR Dumper $doc if $ENV{'DEBUG_ARVADOS_API_DISCOVERY'};
- my $k, $v;
- while (($k, $v) = each %{$doc->{'resources'}}) {
- $self->{$k} = Arvados::ResourceAccessor->new($self, $k);
- }
- } else {
- croak "No discovery doc at $uri - $status_number $status_phrase";
- }
- $self;
-}
-
-sub new_request
-{
- my $self = shift;
- local $ENV{'PERL_LWP_SSL_VERIFY_HOSTNAME'};
- if ($self->{'noVerifyHostname'} || ($host =~ /\.local$/)) {
- $ENV{'PERL_LWP_SSL_VERIFY_HOSTNAME'} = 0;
- }
- Arvados::Request->new();
-}
-
-sub load_config_file ($)
-{
- my $config_file = shift;
- my %config;
-
- if (open (CONF, $config_file)) {
- while (<CONF>) {
- next if /^\s*#/ || /^\s*$/; # skip comments and blank lines
- chomp;
- my ($key, $val) = split /\s*=\s*/, $_, 2;
- $config{$key} = $val;
- }
- }
- close CONF;
- return \%config;
-}
-
-1;
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-package Arvados::Request;
-use Data::Dumper;
-use LWP::UserAgent;
-use URI::Escape;
-use Encode;
-use strict;
-@Arvados::HTTP::ISA = qw(LWP::UserAgent);
-
-sub new
-{
- my $class = shift;
- my $self = {};
- bless ($self, $class);
- return $self->_init(@_);
-}
-
-sub _init
-{
- my $self = shift;
- $self->{'ua'} = new LWP::UserAgent(@_);
- $self->{'ua'}->agent ("libarvados-perl/".$Arvados::VERSION);
- $self;
-}
-
-sub set_uri
-{
- my $self = shift;
- $self->{'uri'} = shift;
-}
-
-sub process_request
-{
- my $self = shift;
- my %req;
- my %content;
- my $method = $self->{'method'};
- if ($method eq 'GET' || $method eq 'HEAD') {
- $content{'_method'} = $method;
- $method = 'POST';
- }
- $req{$method} = $self->{'uri'};
- $self->{'req'} = new HTTP::Request (%req);
- $self->{'req'}->header('Authorization' => ('OAuth2 ' . $self->{'authToken'})) if $self->{'authToken'};
- $self->{'req'}->header('Accept' => 'application/json');
-
- # allow_nonref lets us encode JSON::true and JSON::false, see #12078
- my $json = JSON->new->allow_nonref;
- my ($p, $v);
- while (($p, $v) = each %{$self->{'queryParams'}}) {
- $content{$p} = (ref($v) eq "") ? $v : $json->encode($v);
- }
- my $content;
- while (($p, $v) = each %content) {
- $content .= '&' unless $content eq '';
- $content .= uri_escape($p);
- $content .= '=';
- $content .= uri_escape($v);
- }
- $self->{'req'}->content_type("application/x-www-form-urlencoded; charset='utf8'");
- $self->{'req'}->content(Encode::encode('utf8', $content));
- $self->{'res'} = $self->{'ua'}->request ($self->{'req'});
-}
-
-sub get_status
-{
- my $self = shift;
- return ($self->{'res'}->code(),
- $self->{'res'}->message());
-}
-
-sub get_body
-{
- my $self = shift;
- return $self->{'res'}->content;
-}
-
-sub set_method
-{
- my $self = shift;
- $self->{'method'} = shift;
-}
-
-sub set_query_params
-{
- my $self = shift;
- $self->{'queryParams'} = shift;
-}
-
-sub set_auth_token
-{
- my $self = shift;
- $self->{'authToken'} = shift;
-}
-
-sub get_headers
-{
- ""
-}
-
-1;
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-package Arvados::ResourceAccessor;
-use Carp;
-use Data::Dumper;
-
-sub new
-{
- my $class = shift;
- my $self = {};
- bless ($self, $class);
-
- $self->{'api'} = shift;
- $self->{'resourcesName'} = shift;
- $self->{'methods'} = $self->{'api'}->{'discoveryDocument'}->{'resources'}->{$self->{'resourcesName'}}->{'methods'};
- my $method_name, $method;
- while (($method_name, $method) = each %{$self->{'methods'}}) {
- $self->{$method_name} = Arvados::ResourceMethod->new($self, $method);
- }
- $self;
-}
-
-1;
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-package Arvados::ResourceMethod;
-use Carp;
-use Data::Dumper;
-
-sub new
-{
- my $class = shift;
- my $self = {};
- bless ($self, $class);
- return $self->_init(@_);
-}
-
-sub _init
-{
- my $self = shift;
- $self->{'resourceAccessor'} = shift;
- $self->{'method'} = shift;
- return $self;
-}
-
-sub execute
-{
- my $self = shift;
- my $method = $self->{'method'};
-
- my $path = $method->{'path'};
-
- my %body_params;
- my %given_params = @_;
- my %extra_params = %given_params;
- my %method_params = %{$method->{'parameters'}};
- if ($method->{'request'}->{'properties'}) {
- while (my ($prop_name, $prop_value) =
- each %{$method->{'request'}->{'properties'}}) {
- if (ref($prop_value) eq 'HASH' && $prop_value->{'$ref'}) {
- $method_params{$prop_name} = { 'type' => 'object' };
- }
- }
- }
- while (my ($param_name, $param) = each %method_params) {
- delete $extra_params{$param_name};
- if ($param->{'required'} && !exists $given_params{$param_name}) {
- croak("Required parameter not supplied: $param_name");
- }
- elsif ($param->{'location'} eq 'path') {
- $path =~ s/{\Q$param_name\E}/$given_params{$param_name}/eg;
- }
- elsif (!exists $given_params{$param_name}) {
- ;
- }
- elsif ($param->{'type'} eq 'object') {
- my %param_value;
- my ($p, $v);
- if (exists $param->{'properties'}) {
- while (my ($property_name, $property) =
- each %{$param->{'properties'}}) {
- # if the discovery doc specifies object structure,
- # convert to true/false depending on supplied type
- if (!exists $given_params{$param_name}->{$property_name}) {
- ;
- }
- elsif (!defined $given_params{$param_name}->{$property_name}) {
- $param_value{$property_name} = JSON::null;
- }
- elsif ($property->{'type'} eq 'boolean') {
- $param_value{$property_name} = $given_params{$param_name}->{$property_name} ? JSON::true : JSON::false;
- }
- else {
- $param_value{$property_name} = $given_params{$param_name}->{$property_name};
- }
- }
- }
- else {
- while (my ($property_name, $property) =
- each %{$given_params{$param_name}}) {
- if (ref $property eq '' || $property eq undef) {
- $param_value{$property_name} = $property;
- }
- elsif (ref $property eq 'HASH') {
- $param_value{$property_name} = {};
- while (my ($k, $v) = each %$property) {
- $param_value{$property_name}->{$k} = $v;
- }
- }
- }
- }
- $body_params{$param_name} = \%param_value;
- } elsif ($param->{'type'} eq 'boolean') {
- $body_params{$param_name} = $given_params{$param_name} ? JSON::true : JSON::false;
- } else {
- $body_params{$param_name} = $given_params{$param_name};
- }
- }
- if (%extra_params) {
- croak("Unsupported parameter(s) passed to API call /$path: \"" . join('", "', keys %extra_params) . '"');
- }
- my $r = $self->{'resourceAccessor'}->{'api'}->new_request;
- my $base_uri = $self->{'resourceAccessor'}->{'api'}->{'discoveryDocument'}->{'baseUrl'};
- $base_uri =~ s:/$::;
- $r->set_uri($base_uri . "/" . $path);
- $r->set_method($method->{'httpMethod'});
- $r->set_auth_token($self->{'resourceAccessor'}->{'api'}->{'authToken'});
- $r->set_query_params(\%body_params) if %body_params;
- $r->process_request();
- my $data, $headers;
- my ($status_number, $status_phrase) = $r->get_status();
- if ($status_number != 200) {
- croak("API call /$path failed: $status_number $status_phrase\n". $r->get_body());
- }
- $data = $r->get_body();
- $headers = $r->get_headers();
- my $result = JSON::decode_json($data);
- if ($method->{'response'}->{'$ref'} =~ /List$/) {
- Arvados::ResourceProxyList->new($result, $self->{'resourceAccessor'});
- } else {
- Arvados::ResourceProxy->new($result, $self->{'resourceAccessor'});
- }
-}
-
-1;
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-package Arvados::ResourceProxy;
-
-sub new
-{
- my $class = shift;
- my $self = shift;
- $self->{'resourceAccessor'} = shift;
- bless ($self, $class);
- $self;
-}
-
-sub save
-{
- my $self = shift;
- $response = $self->{'resourceAccessor'}->{'update'}->execute('uuid' => $self->{'uuid'}, $self->resource_parameter_name() => $self);
- foreach my $param (keys %$self) {
- if (exists $response->{$param}) {
- $self->{$param} = $response->{$param};
- }
- }
- $self;
-}
-
-sub update_attributes
-{
- my $self = shift;
- my %updates = @_;
- $response = $self->{'resourceAccessor'}->{'update'}->execute('uuid' => $self->{'uuid'}, $self->resource_parameter_name() => \%updates);
- foreach my $param (keys %updates) {
- if (exists $response->{$param}) {
- $self->{$param} = $response->{$param};
- }
- }
- $self;
-}
-
-sub reload
-{
- my $self = shift;
- $response = $self->{'resourceAccessor'}->{'get'}->execute('uuid' => $self->{'uuid'});
- foreach my $param (keys %$self) {
- if (exists $response->{$param}) {
- $self->{$param} = $response->{$param};
- }
- }
- $self;
-}
-
-sub resource_parameter_name
-{
- my $self = shift;
- my $pname = $self->{'resourceAccessor'}->{'resourcesName'};
- $pname =~ s/s$//; # XXX not a very good singularize()
- $pname;
-}
-
-1;
+++ /dev/null
-# Copyright (C) The Arvados Authors. All rights reserved.
-#
-# SPDX-License-Identifier: Apache-2.0
-
-package Arvados::ResourceProxyList;
-
-sub new
-{
- my $class = shift;
- my $self = {};
- bless ($self, $class);
- $self->_init(@_);
-}
-
-sub _init
-{
- my $self = shift;
- $self->{'serverResponse'} = shift;
- $self->{'resourceAccessor'} = shift;
- $self->{'items'} = [ map { Arvados::ResourceProxy->new($_, $self->{'resourceAccessor'}) } @{$self->{'serverResponse'}->{'items'}} ];
- $self;
-}
-
-1;
'google-auth<2',
'httplib2 >=0.9.2, <0.20.2',
'pycurl >=7.19.5.1, <7.45.0',
- 'ruamel.yaml >=0.15.54, <0.17.11',
+ 'ruamel.yaml >=0.15.54, <0.17.22',
'setuptools',
'ws4py >=0.4.2',
- 'protobuf<4.0.0dev'
+ 'protobuf<4.0.0dev',
+ 'pyparsing<3',
+ 'setuptools>=40.3.0',
],
classifiers=[
'Programming Language :: Python :: 3',