Merge branch '16141-gosdk-missing-fields'
author     Lucas Di Pentima <lucas@di-pentima.com.ar>
           Tue, 11 Feb 2020 17:57:54 +0000 (14:57 -0300)
committer  Lucas Di Pentima <lucas@di-pentima.com.ar>
           Tue, 11 Feb 2020 17:57:54 +0000 (14:57 -0300)
Closes #16141

Arvados-DCO-1.1-Signed-off-by: Lucas Di Pentima <lucas@di-pentima.com.ar>

16 files changed:
build/package-build-dockerfiles/centos7/Dockerfile
build/package-build-dockerfiles/debian10/Dockerfile
build/package-build-dockerfiles/debian9/Dockerfile
build/package-build-dockerfiles/ubuntu1604/Dockerfile
build/package-build-dockerfiles/ubuntu1804/Dockerfile
build/run-library.sh
doc/_includes/_navbar_top.liquid
doc/admin/metrics.html.textile.liquid
sdk/cwl/arvados_cwl/executor.py
sdk/cwl/arvados_cwl/runner.py
sdk/cwl/setup.py
sdk/cwl/tests/test_submit.py
sdk/cwl/tests/tool/blub.txt.cat [new file with mode: 0644]
sdk/cwl/tests/tool/tool_with_sf.cwl [new file with mode: 0644]
sdk/cwl/tests/tool/tool_with_sf.yml [new file with mode: 0644]
sdk/python/setup.py

diff --git a/build/package-build-dockerfiles/centos7/Dockerfile b/build/package-build-dockerfiles/centos7/Dockerfile
index 3a61e64cf4fafeedc64da0826848ac4a09402ce6..9c2660387abfcd40e9ca5372fc8c7afeee0f9783 100644 (file)
@@ -50,7 +50,7 @@ RUN rpm -ivh epel-release-latest-7.noarch.rpm
 RUN git clone --depth 1 git://git.arvados.org/arvados.git /tmp/arvados && cd /tmp/arvados/services/api && /usr/local/rvm/bin/rvm-exec default bundle && cd /tmp/arvados/apps/workbench && /usr/local/rvm/bin/rvm-exec default bundle
 
 # The version of setuptools that comes with CentOS is way too old
-RUN pip install --upgrade setuptools
+RUN pip install --upgrade 'setuptools<45'
 
 ENV WORKSPACE /arvados
 CMD ["scl", "enable", "rh-python36", "/usr/local/rvm/bin/rvm-exec default bash /jenkins/run-build-packages.sh --target centos7"]
diff --git a/build/package-build-dockerfiles/debian10/Dockerfile b/build/package-build-dockerfiles/debian10/Dockerfile
index 20527dd45cb6eac5d1461142283a258be8d83c9b..ff86262d38597aa99af8714ec5b94f3d11dfdbc6 100644 (file)
@@ -12,7 +12,7 @@ ENV DEBIAN_FRONTEND noninteractive
 RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools python3-pip libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip python3-venv python3-dev
 
 # Install virtualenv
-RUN /usr/bin/pip install virtualenv
+RUN /usr/bin/pip install 'virtualenv<20'
 
 # Install RVM
 ADD generated/mpapis.asc /tmp/
diff --git a/build/package-build-dockerfiles/debian9/Dockerfile b/build/package-build-dockerfiles/debian9/Dockerfile
index 2aaccf6aa7c68a605709d29891e4db0273bc15ef..257523a72feb58e8d28e78688ccd7d04859e777a 100644 (file)
@@ -12,7 +12,7 @@ ENV DEBIAN_FRONTEND noninteractive
 RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools python3-pip libcurl4-gnutls-dev curl git procps libattr1-dev libfuse-dev libgnutls28-dev libpq-dev python-pip unzip python3-venv python3-dev
 
 # Install virtualenv
-RUN /usr/bin/pip install virtualenv
+RUN /usr/bin/pip install 'virtualenv<20'
 
 # Install RVM
 ADD generated/mpapis.asc /tmp/
diff --git a/build/package-build-dockerfiles/ubuntu1604/Dockerfile b/build/package-build-dockerfiles/ubuntu1604/Dockerfile
index 23fcca07dfbf925e6a878e1066fe2d01a988d038..e046ae1690f81525836df5d4469a511c05f92583 100644 (file)
@@ -11,7 +11,7 @@ ENV DEBIAN_FRONTEND noninteractive
 RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-setuptools python3-pip libcurl4-gnutls-dev libgnutls-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata python3-venv python3-dev
 
 # Install virtualenv
-RUN /usr/bin/pip install virtualenv
+RUN /usr/bin/pip install 'virtualenv<20'
 
 # Install RVM
 ADD generated/mpapis.asc /tmp/
diff --git a/build/package-build-dockerfiles/ubuntu1804/Dockerfile b/build/package-build-dockerfiles/ubuntu1804/Dockerfile
index 06c21ca3fa41664acde3c9724e21a098fc34d7b3..c652fe1c0826e9c4d5124c2fddebe07e1946ebef 100644 (file)
@@ -11,7 +11,7 @@ ENV DEBIAN_FRONTEND noninteractive
 RUN /usr/bin/apt-get update && /usr/bin/apt-get install -q -y python2.7-dev python3 python-setuptools python3-pip libcurl4-gnutls-dev libgnutls28-dev curl git libattr1-dev libfuse-dev libpq-dev python-pip unzip tzdata python3-venv python3-dev
 
 # Install virtualenv
-RUN /usr/bin/pip install virtualenv
+RUN /usr/bin/pip install 'virtualenv<20'
 
 # Install RVM
 ADD generated/mpapis.asc /tmp/
diff --git a/build/run-library.sh b/build/run-library.sh
index 5b1dde46a9f3fec05cee114641564a2fdbacc486..ac5dc718be1c6e36e86e743bf0805ab11c891da7 100755 (executable)
@@ -475,9 +475,9 @@ fpm_build_virtualenv () {
   rm -rf dist/*
 
   # Get the latest setuptools
-  if ! $pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U setuptools; then
+  if ! $pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U 'setuptools<45'; then
     echo "Error, unable to upgrade setuptools with"
-    echo "  $pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U setuptools"
+    echo "  $pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U 'setuptools<45'"
     exit 1
   fi
   # filter a useless warning (when building the cwltest package) from the stderr output
@@ -532,9 +532,9 @@ fpm_build_virtualenv () {
   fi
   echo "pip version:        `build/usr/share/$python/dist/$PYTHON_PKG/bin/$pip --version`"
 
-  if ! build/usr/share/$python/dist/$PYTHON_PKG/bin/$pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U setuptools; then
+  if ! build/usr/share/$python/dist/$PYTHON_PKG/bin/$pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U 'setuptools<45'; then
     echo "Error, unable to upgrade setuptools with"
-    echo "  build/usr/share/$python/dist/$PYTHON_PKG/bin/$pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U setuptools"
+    echo "  build/usr/share/$python/dist/$PYTHON_PKG/bin/$pip install $DASHQ_UNLESS_DEBUG $CACHE_FLAG -U 'setuptools<45'"
     exit 1
   fi
   echo "setuptools version: `build/usr/share/$python/dist/$PYTHON_PKG/bin/$python -c 'import setuptools; print(setuptools.__version__)'`"
diff --git a/doc/_includes/_navbar_top.liquid b/doc/_includes/_navbar_top.liquid
index c69a2efbec1eb5f9a76b7fc8aaa97fc411a93553..38c8e0a1db9573fa4726c23148bdc9a83af19a6c 100644 (file)
@@ -57,10 +57,10 @@ SPDX-License-Identifier: CC-BY-SA-3.0
 
    <script>
      function dismissAnnotateNotify() {
-        window.sessionStorage.setItem("dismiss-annotate-notify", "true");
+        window.localStorage.setItem("dismiss-annotate-notify", "true");
          $('#annotate-notify').attr('style', "display: none;");
      }
-     if (window.sessionStorage.getItem("dismiss-annotate-notify") === "true") {
+     if (window.localStorage.getItem("dismiss-annotate-notify") === "true") {
         dismissAnnotateNotify();
      } else {
          $('#annotate-notify').attr('style', "display: inline-block;");
diff --git a/doc/admin/metrics.html.textile.liquid b/doc/admin/metrics.html.textile.liquid
index 893eac1c8325c2033c7d36b398541f1cccedfb0f..9616d4add43a44105d78fbf5ff6f4ae9b8e1c3cd 100644 (file)
@@ -10,172 +10,48 @@ Copyright (C) The Arvados Authors. All rights reserved.
 SPDX-License-Identifier: CC-BY-SA-3.0
 {% endcomment %}
 
-Some Arvados services publish Prometheus/OpenMetrics-compatible metrics at @/metrics@, and some provide additional runtime status at @/status.json@.  Metrics can help you understand how components perform under load, find performance bottlenecks, and detect and diagnose problems.
+Some Arvados services publish Prometheus/OpenMetrics-compatible metrics at @/metrics@. Metrics can help you understand how components perform under load, find performance bottlenecks, and detect and diagnose problems.
 
 To access metrics endpoints, services must be configured with a "management token":management-token.html. When accessing a metrics endpoint, prefix the management token with @"Bearer "@ and supply it in the @Authorization@ request header.
 
-<pre>curl -sfH "Authorization: Bearer your_management_token_goes_here" "https://0.0.0.0:25107/status.json"
+<pre>curl -sfH "Authorization: Bearer your_management_token_goes_here" "https://0.0.0.0:25107/metrics"
 </pre>
 
-h2. Keep-web
+The plain text export format includes "help" messages with a description of each reported metric.
 
-Keep-web exports metrics at @/metrics@ -- e.g., @https://collections.zzzzz.arvadosapi.com/metrics@.
+When configuring Prometheus, use a @bearer_token@ or @bearer_token_file@ option to authenticate requests.
 
-table(table table-bordered table-condensed).
-|_. Name|_. Type|_. Description|
-|request_duration_seconds|summary|elapsed time between receiving a request and sending the last byte of the response body (segmented by HTTP request method and response status code)|
-|time_to_status_seconds|summary|elapsed time between receiving a request and sending the HTTP response status code (segmented by HTTP request method and response status code)|
-
-Metrics in the @arvados_keepweb_collectioncache@ namespace report keep-web's internal cache of Arvados collection metadata.
-
-table(table table-bordered table-condensed).
-|_. Name|_. Type|_. Description|
-|arvados_keepweb_collectioncache_requests|counter|cache lookups|
-|arvados_keepweb_collectioncache_api_calls|counter|outgoing API calls|
-|arvados_keepweb_collectioncache_permission_hits|counter|collection-to-permission cache hits|
-|arvados_keepweb_collectioncache_pdh_hits|counter|UUID-to-PDH cache hits|
-|arvados_keepweb_collectioncache_hits|counter|PDH-to-manifest cache hits|
-|arvados_keepweb_collectioncache_cached_manifests|gauge|number of collections in the cache|
-|arvados_keepweb_collectioncache_cached_manifest_bytes|gauge|memory consumed by cached collection manifests|
-
-h2. Keepstore
-
-Keepstore exports metrics at @/status.json@ -- e.g., @http://keep0.zzzzz.arvadosapi.com:25107/status.json@.
-
-h3. Root
-
-table(table table-bordered table-condensed).
-|_. Attribute|_. Type|_. Description|
-|Volumes|         array of "volumeStatusEnt":#volumeStatusEnt ||
-|BufferPool|      "PoolStatus":#PoolStatus ||
-|PullQueue|       "WorkQueueStatus":#WorkQueueStatus ||
-|TrashQueue|      "WorkQueueStatus":#WorkQueueStatus ||
-|RequestsCurrent| int ||
-|RequestsMax|     int ||
-|Version|         string ||
-
-h3(#volumeStatusEnt). volumeStatusEnt
-
-table(table table-bordered table-condensed).
-|_. Attribute|_. Type|_. Description|
-|Label|         string||
-|Status|        "VolumeStatus":#VolumeStatus ||
-|VolumeStats|   "ioStats":#ioStats ||
-
-h3(#VolumeStatus). VolumeStatus
-
-table(table table-bordered table-condensed).
-|_. Attribute|_. Type|_. Description|
-|MountPoint| string||
-|DeviceNum|  uint64||
-|BytesFree|  uint64||
-|BytesUsed|  uint64||
-
-h3(#ioStats). ioStats
-
-table(table table-bordered table-condensed).
-|_. Attribute|_. Type|_. Description|
-|Errors|     uint64||
-|Ops|        uint64||
-|CompareOps| uint64||
-|GetOps|     uint64||
-|PutOps|     uint64||
-|TouchOps|   uint64||
-|InBytes|    uint64||
-|OutBytes|   uint64||
-
-h3(#PoolStatus). PoolStatus
-
-table(table table-bordered table-condensed).
-|_. Attribute|_. Type|_. Description|
-|BytesAllocatedCumulative|      uint64||
-|BuffersMax|   int||
-|BuffersInUse| int||
-
-h3(#WorkQueueStatus). WorkQueueStatus
-
-table(table table-bordered table-condensed).
-|_. Attribute|_. Type|_. Description|
-|InProgress| int||
-|Queued|     int||
-
-h3. Example response
-
-<pre>
-{
-  "Volumes": [
-    {
-      "Label": "[UnixVolume /var/lib/arvados/keep0]",
-      "Status": {
-        "MountPoint": "/var/lib/arvados/keep0",
-        "DeviceNum": 65029,
-        "BytesFree": 222532972544,
-        "BytesUsed": 435456679936
-      },
-      "InternalStats": {
-        "Errors": 0,
-        "InBytes": 1111,
-        "OutBytes": 0,
-        "OpenOps": 1,
-        "StatOps": 4,
-        "FlockOps": 0,
-        "UtimesOps": 0,
-        "CreateOps": 0,
-        "RenameOps": 0,
-        "UnlinkOps": 0,
-        "ReaddirOps": 0
-      }
-    }
-  ],
-  "BufferPool": {
-    "BytesAllocatedCumulative": 67108864,
-    "BuffersMax": 20,
-    "BuffersInUse": 0
-  },
-  "PullQueue": {
-    "InProgress": 0,
-    "Queued": 0
-  },
-  "TrashQueue": {
-    "InProgress": 0,
-    "Queued": 0
-  },
-  "RequestsCurrent": 1,
-  "RequestsMax": 40,
-  "Version": "dev"
-}
+<pre>scrape_configs:
+  - job_name: keepstore
+    bearer_token: your_management_token_goes_here
+    static_configs:
+    - targets:
+      - "keep0.ClusterID.example.com:25107"
 </pre>
 
-h2. Keep-balance
-
-Keep-balance exports metrics at @/metrics@ -- e.g., @http://keep.zzzzz.arvadosapi.com:9005/metrics@.
-
-table(table table-bordered table-condensed).
-|_. Name|_. Type|_. Description|
-|arvados_keep_total_{replicas,blocks,bytes}|gauge|stored data (stored in backend volumes, whether referenced or not)|
-|arvados_keep_garbage_{replicas,blocks,bytes}|gauge|garbage data (unreferenced, and old enough to trash)|
-|arvados_keep_transient_{replicas,blocks,bytes}|gauge|transient data (unreferenced, but too new to trash)|
-|arvados_keep_overreplicated_{replicas,blocks,bytes}|gauge|overreplicated data (more replicas exist than are needed)|
-|arvados_keep_underreplicated_{replicas,blocks,bytes}|gauge|underreplicated data (fewer replicas exist than are needed)|
-|arvados_keep_lost_{replicas,blocks,bytes}|gauge|lost data (referenced by collections, but not found on any backend volume)|
-|arvados_keep_dedup_block_ratio|gauge|deduplication ratio (block references in collections &divide; distinct blocks referenced)|
-|arvados_keep_dedup_byte_ratio|gauge|deduplication ratio (block references in collections &divide; distinct blocks referenced, weighted by block size)|
-|arvados_keepbalance_get_state_seconds|summary|time to get all collections and keepstore volume indexes for one iteration|
-|arvados_keepbalance_changeset_compute_seconds|summary|time to compute changesets for one iteration|
-|arvados_keepbalance_send_pull_list_seconds|summary|time to send pull lists to all keepstore servers for one iteration|
-|arvados_keepbalance_send_trash_list_seconds|summary|time to send trash lists to all keepstore servers for one iteration|
-|arvados_keepbalance_sweep_seconds|summary|time to complete one iteration|
-
-Each @arvados_keep_@ storage state statistic above is presented as a set of three metrics:
-
-table(table table-bordered table-condensed).
-|*_blocks|distinct block hashes|
-|*_bytes|bytes stored on backend volumes|
-|*_replicas|objects/files stored on backend volumes|
+table(table table-bordered table-condensed table-hover).
+|_. Component|_. Metrics endpoint|
+|arvados-api-server||
+|arvados-controller|✓|
+|arvados-dispatch-cloud|✓|
+|arvados-git-httpd||
+|arvados-node-manager||
+|arvados-ws||
+|composer||
+|keepproxy||
+|keepstore|✓|
+|keep-balance|✓|
+|keep-web|✓|
+|sso-provider||
+|workbench1||
+|workbench2||
 
 h2. Node manager
 
-The node manager status end point provides a snapshot of internal status at the time of the most recent wishlist update.
+The node manager does not export Prometheus-style metrics, but its @/status.json@ endpoint provides a snapshot of internal status at the time of the most recent wishlist update.
+
+<pre>curl -sfH "Authorization: Bearer your_management_token_goes_here" "http://0.0.0.0:8989/status.json"
+</pre>
 
 table(table table-bordered table-condensed).
 |_. Attribute|_. Type|_. Description|
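The rewritten metrics page keeps only the curl example shown above; for reference, a rough Python equivalent of the same request is sketched below. This is not part of the commit -- the host/port and the ARVADOS_MANAGEMENT_TOKEN environment variable are placeholders, while the Bearer header format comes from the page itself.

    import os
    import urllib.request

    # Placeholder host/port and token variable -- substitute real values.
    url = "https://keep0.ClusterID.example.com:25107/metrics"
    token = os.environ["ARVADOS_MANAGEMENT_TOKEN"]

    req = urllib.request.Request(url, headers={"Authorization": "Bearer " + token})
    with urllib.request.urlopen(req) as resp:
        print(resp.read().decode())  # Prometheus text format, including HELP lines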
diff --git a/sdk/cwl/arvados_cwl/executor.py b/sdk/cwl/arvados_cwl/executor.py
index 406ebfd2da064df383105b8e0a7c8f4e7b19a529..99d4c4e9a10a883abc54ce0fe1cbc476af7c7692 100644 (file)
@@ -521,10 +521,10 @@ The 'jobs' API is no longer supported.
             for req in job_reqs:
                 tool.requirements.append(req)
 
-    def arv_executor(self, tool, job_order, runtimeContext, logger=None):
+    def arv_executor(self, updated_tool, job_order, runtimeContext, logger=None):
         self.debug = runtimeContext.debug
 
-        tool.visit(self.check_features)
+        updated_tool.visit(self.check_features)
 
         self.project_uuid = runtimeContext.project_uuid
         self.pipeline = None
@@ -545,16 +545,20 @@ The 'jobs' API is no longer supported.
             raise Exception("--submit-request-uuid requires containers API, but using '{}' api".format(self.work_api))
 
         if not runtimeContext.name:
-            runtimeContext.name = self.name = tool.tool.get("label") or tool.metadata.get("label") or os.path.basename(tool.tool["id"])
+            runtimeContext.name = self.name = updated_tool.tool.get("label") or updated_tool.metadata.get("label") or os.path.basename(updated_tool.tool["id"])
 
         # Upload local file references in the job order.
         job_order = upload_job_order(self, "%s input" % runtimeContext.name,
-                                     tool, job_order)
+                                     updated_tool, job_order)
+
+        # The last clause means: if the tool is a CommandLineTool, we
+        # are going to wait for the result, and always_submit_runner
+        # is false, then we don't submit a separate runner process.
 
         submitting = (runtimeContext.update_workflow or
                       runtimeContext.create_workflow or
                       (runtimeContext.submit and not
-                       (tool.tool["class"] == "CommandLineTool" and
+                       (updated_tool.tool["class"] == "CommandLineTool" and
                         runtimeContext.wait and
                         not runtimeContext.always_submit_runner)))
 
@@ -564,8 +568,11 @@ The 'jobs' API is no longer supported.
         if submitting:
             # Document may have been auto-updated. Reload the original
             # document with updating disabled because we want to
-            # submit the original document, not the auto-updated one.
-            tool = load_tool(tool.tool["id"], loadingContext)
+            # submit the document with its original CWL version, not
+            # the auto-updated one.
+            tool = load_tool(updated_tool.tool["id"], loadingContext)
+        else:
+            tool = updated_tool
 
         # Upload direct dependencies of workflow steps, get back mapping of files to keep references.
         # Also uploads docker images.
@@ -632,22 +639,23 @@ The 'jobs' API is no longer supported.
         if runtimeContext.submit:
             # Submit a runner job to run the workflow for us.
             if self.work_api == "containers":
-                if tool.tool["class"] == "CommandLineTool" and runtimeContext.wait and (not runtimeContext.always_submit_runner):
-                    runtimeContext.runnerjob = tool.tool["id"]
+                if submitting:
+                    tool = RunnerContainer(self, updated_tool,
+                                           tool, loadingContext, runtimeContext.enable_reuse,
+                                           self.output_name,
+                                           self.output_tags,
+                                           submit_runner_ram=runtimeContext.submit_runner_ram,
+                                           name=runtimeContext.name,
+                                           on_error=runtimeContext.on_error,
+                                           submit_runner_image=runtimeContext.submit_runner_image,
+                                           intermediate_output_ttl=runtimeContext.intermediate_output_ttl,
+                                           merged_map=merged_map,
+                                           priority=runtimeContext.priority,
+                                           secret_store=self.secret_store,
+                                           collection_cache_size=runtimeContext.collection_cache_size,
+                                           collection_cache_is_default=self.should_estimate_cache_size)
                 else:
-                    tool = RunnerContainer(self, tool, loadingContext, runtimeContext.enable_reuse,
-                                                self.output_name,
-                                                self.output_tags,
-                                                submit_runner_ram=runtimeContext.submit_runner_ram,
-                                                name=runtimeContext.name,
-                                                on_error=runtimeContext.on_error,
-                                                submit_runner_image=runtimeContext.submit_runner_image,
-                                                intermediate_output_ttl=runtimeContext.intermediate_output_ttl,
-                                                merged_map=merged_map,
-                                                priority=runtimeContext.priority,
-                                                secret_store=self.secret_store,
-                                                collection_cache_size=runtimeContext.collection_cache_size,
-                                                collection_cache_is_default=self.should_estimate_cache_size)
+                    runtimeContext.runnerjob = tool.tool["id"]
 
         if runtimeContext.cwl_runner_job is not None:
             self.uuid = runtimeContext.cwl_runner_job.get('uuid')
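Net effect of the executor changes: a separate runner process is only submitted when registering a workflow, or when the submitted document is not a plain CommandLineTool that the client will wait for. A condensed restatement of that predicate follows; it is illustrative only, not code from this commit, with names mirroring the runtimeContext attributes used above.

    # Condensed restatement of the "submitting" expression in arv_executor().
    def needs_runner(runtimeContext, tool_class):
        run_directly = (tool_class == "CommandLineTool"
                        and runtimeContext.wait
                        and not runtimeContext.always_submit_runner)
        return (runtimeContext.update_workflow
                or runtimeContext.create_workflow
                or (runtimeContext.submit and not run_directly))

When this predicate is false but --submit was given, the else branch simply records the tool id in runtimeContext.runnerjob and the CommandLineTool is dispatched directly, without a wrapping RunnerContainer.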
diff --git a/sdk/cwl/arvados_cwl/runner.py b/sdk/cwl/arvados_cwl/runner.py
index 19a6dd98b332c6dbc8363e989104a075cf90f587..2239e0f9df952b9ab75a7e9a96ab46953ab29f94 100644 (file)
@@ -578,7 +578,8 @@ class Runner(Process):
     """Base class for runner processes, which submit an instance of
     arvados-cwl-runner and wait for the final result."""
 
-    def __init__(self, runner, tool, loadingContext, enable_reuse,
+    def __init__(self, runner, updated_tool,
+                 tool, loadingContext, enable_reuse,
                  output_name, output_tags, submit_runner_ram=0,
                  name=None, on_error=None, submit_runner_image=None,
                  intermediate_output_ttl=0, merged_map=None,
@@ -587,10 +588,9 @@ class Runner(Process):
                  collection_cache_is_default=True):
 
         loadingContext = loadingContext.copy()
-        loadingContext.metadata = loadingContext.metadata.copy()
-        loadingContext.metadata["cwlVersion"] = INTERNAL_VERSION
+        loadingContext.metadata = updated_tool.metadata.copy()
 
-        super(Runner, self).__init__(tool.tool, loadingContext)
+        super(Runner, self).__init__(updated_tool.tool, loadingContext)
 
         self.arvrunner = runner
         self.embedded_tool = tool
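Together with the executor change, Runner subclasses now receive both documents: updated_tool (possibly auto-updated by cwltool, supplying the metadata and CWL version for the runner Process) and tool (the originally loaded document, kept as embedded_tool and actually submitted). A minimal sketch of the resulting call pattern is below; the import paths and variable names are assumptions, not taken from this commit.

    # Sketch only: import locations assumed from the arvados_cwl/cwltool layout.
    from arvados_cwl.arvcontainer import RunnerContainer
    from cwltool.load_tool import load_tool

    def build_runner(arvrunner, uri, loadingContext, runtimeContext, merged_map):
        # The (possibly auto-updated) document drives the runner Process;
        # the reloaded original becomes embedded_tool and is what gets submitted.
        updated_tool = load_tool(uri, loadingContext)
        original = load_tool(updated_tool.tool["id"], loadingContext)
        return RunnerContainer(arvrunner, updated_tool, original, loadingContext,
                               runtimeContext.enable_reuse,
                               arvrunner.output_name, arvrunner.output_tags,
                               merged_map=merged_map)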
diff --git a/sdk/cwl/setup.py b/sdk/cwl/setup.py
index aa68933c6abc17f2d19860c2cd838a3026546257..62ceab2fa11c1895983cd518fb2d4370a49da986 100644 (file)
@@ -60,7 +60,7 @@ setup(name='arvados-cwl-runner',
       ],
       test_suite='tests',
       tests_require=[
-          'mock>=1.0',
+          'mock>=1.0,<4',
           'subprocess32>=3.5.1',
       ],
       zip_safe=True
diff --git a/sdk/cwl/tests/test_submit.py b/sdk/cwl/tests/test_submit.py
index 927e43ad76c1fe1e547fd91dc026282726c82a1c..397ae142253d26b41d11aee03586fbc3e352bb31 100644 (file)
@@ -388,6 +388,19 @@ class TestSubmit(unittest.TestCase):
                          stubs.expect_container_request_uuid + '\n')
         self.assertEqual(exited, 0)
 
+
+    @stubs
+    def test_submit_container_tool(self, stubs):
+        # test for issue #16139
+        exited = arvados_cwl.main(
+            ["--submit", "--no-wait", "--api=containers", "--debug",
+                "tests/tool/tool_with_sf.cwl", "tests/tool/tool_with_sf.yml"],
+            stubs.capture_stdout, sys.stderr, api_client=stubs.api, keep_client=stubs.keep_client)
+
+        self.assertEqual(stubs.capture_stdout.getvalue(),
+                         stubs.expect_container_request_uuid + '\n')
+        self.assertEqual(exited, 0)
+
     @stubs
     def test_submit_container_no_reuse(self, stubs):
         exited = arvados_cwl.main(
diff --git a/sdk/cwl/tests/tool/blub.txt.cat b/sdk/cwl/tests/tool/blub.txt.cat
new file mode 100644 (file)
index 0000000..d7c4221
--- /dev/null
@@ -0,0 +1 @@
+clipper clupper
diff --git a/sdk/cwl/tests/tool/tool_with_sf.cwl b/sdk/cwl/tests/tool/tool_with_sf.cwl
new file mode 100644 (file)
index 0000000..0beb7ad
--- /dev/null
@@ -0,0 +1,24 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+# Test case for arvados-cwl-runner
+#
+# Used to test whether scanning a tool file for dependencies (e.g. default
+# value blub.txt) and uploading to Keep works as intended.
+
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+  - class: DockerRequirement
+    dockerPull: debian:8
+inputs:
+  - id: x
+    type: File
+    secondaryFiles:
+      - .cat
+    inputBinding:
+      valueFrom: $(self.path).cat
+      position: 1
+outputs: []
+baseCommand: cat
diff --git a/sdk/cwl/tests/tool/tool_with_sf.yml b/sdk/cwl/tests/tool/tool_with_sf.yml
new file mode 100644 (file)
index 0000000..3f79d57
--- /dev/null
@@ -0,0 +1,3 @@
+x:
+  class: File
+  location: blub.txt
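The tool_with_sf.cwl / tool_with_sf.yml fixtures above are driven by test_submit_container_tool. Outside the stubbed test suite, the same submission could be exercised roughly as follows -- a sketch only, which assumes a configured Arvados cluster with credentials in the environment and is run from sdk/cwl so the relative paths resolve.

    import sys
    import arvados_cwl

    # Same argument list as test_submit_container_tool, but without the test
    # stubs, so a real API host and token must be configured.
    exit_code = arvados_cwl.main(
        ["--submit", "--no-wait", "--api=containers", "--debug",
         "tests/tool/tool_with_sf.cwl", "tests/tool/tool_with_sf.yml"],
        sys.stdout, sys.stderr)
    sys.exit(exit_code)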
diff --git a/sdk/python/setup.py b/sdk/python/setup.py
index 87977c2187e014017a2d76c4314ee3b1eede6b9c..ff68e2a7fd8888d10d56b96e13a5a9bf2690a177 100644 (file)
@@ -64,6 +64,6 @@ setup(name='arvados-python-client',
           'Programming Language :: Python :: 3',
       ],
       test_suite='tests',
-      tests_require=['pbr<1.7.0', 'mock>=1.0', 'PyYAML'],
+      tests_require=['pbr<1.7.0', 'mock>=1.0,<4', 'PyYAML'],
       zip_safe=False
       )