Merge branch 'master' into 14075-uploadfiles
author Peter Amstutz <pamstutz@veritasgenetics.com>
Fri, 24 Aug 2018 19:28:16 +0000 (15:28 -0400)
committer Peter Amstutz <pamstutz@veritasgenetics.com>
Fri, 24 Aug 2018 19:28:16 +0000 (15:28 -0400)
Arvados-DCO-1.1-Signed-off-by: Peter Amstutz <pamstutz@veritasgenetics.com>

sdk/cwl/setup.py
sdk/cwl/tests/test_container.py
sdk/cwl/tests/test_job.py
sdk/cwl/tests/test_submit.py
sdk/python/arvados/commands/run.py

diff --git a/sdk/cwl/setup.py b/sdk/cwl/setup.py
index e452ce26440da32cdf3732cdc8f43f271600b432..2b7b31b9f3f4b4070cbd14d986ffe87259989200 100644
@@ -36,7 +36,9 @@ setup(name='arvados-cwl-runner',
           'cwltool==1.0.20180806194258',
           'schema-salad==2.7.20180719125426',
           'typing >= 3.6.4',
-          'ruamel.yaml >=0.13.11, <0.16',
+          # Need to limit ruamel.yaml version to 0.15.26 because of bug
+          # https://bitbucket.org/ruamel/yaml/issues/227/regression-parsing-flow-mapping
+          'ruamel.yaml >=0.13.11, <= 0.15.26',
           'arvados-python-client>=1.1.4.20180607143841',
           'setuptools',
           'ciso8601 >=1.0.6, <2.0.0',
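As a quick local check (a minimal sketch, assuming setuptools' pkg_resources is available in the environment), the new upper bound can be verified against an existing install before running arvados-cwl-runner:

    import pkg_resources
    # Raises VersionConflict if the installed ruamel.yaml falls outside the
    # pinned range, or DistributionNotFound if it is not installed at all.
    pkg_resources.require("ruamel.yaml >=0.13.11, <= 0.15.26")
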
diff --git a/sdk/cwl/tests/test_container.py b/sdk/cwl/tests/test_container.py
index 3f8a32816ddccdad01c78eedfdce1ed0b2be5e64..69f3ae046e31ca3e02aa52be7967e43da2394d00 100644
@@ -5,6 +5,7 @@
 import arvados_cwl
 import arvados_cwl.context
 from arvados_cwl.arvdocker import arv_docker_clear_cache
+import arvados.config
 import logging
 import mock
 import unittest
@@ -21,6 +22,32 @@ if not os.getenv('ARVADOS_DEBUG'):
     logging.getLogger('arvados.cwl-runner').setLevel(logging.WARN)
     logging.getLogger('arvados.arv-run').setLevel(logging.WARN)
 
+class CollectionMock(object):
+    def __init__(self, vwdmock, *args, **kwargs):
+        self.vwdmock = vwdmock
+        self.count = 0
+
+    def open(self, *args, **kwargs):
+        self.count += 1
+        return self.vwdmock.open(*args, **kwargs)
+
+    def copy(self, *args, **kwargs):
+        self.count += 1
+        self.vwdmock.copy(*args, **kwargs)
+
+    def save_new(self, *args, **kwargs):
+        pass
+
+    def __len__(self):
+        return self.count
+
+    def portable_data_hash(self):
+        if self.count == 0:
+            return arvados.config.EMPTY_BLOCK_LOCATOR
+        else:
+            return "99999999999999999999999999999996+99"
+
+
 class TestContainer(unittest.TestCase):
 
     def helper(self, runner, enable_reuse=True):
@@ -231,8 +258,7 @@ class TestContainer(unittest.TestCase):
         runner.fs_access.get_collection.side_effect = get_collection_mock
 
         vwdmock = mock.MagicMock()
-        collection_mock.return_value = vwdmock
-        vwdmock.portable_data_hash.return_value = "99999999999999999999999999999996+99"
+        collection_mock.side_effect = lambda *args, **kwargs: CollectionMock(vwdmock, *args, **kwargs)
 
         tool = cmap({
             "inputs": [],
diff --git a/sdk/cwl/tests/test_job.py b/sdk/cwl/tests/test_job.py
index 4473b88ca0d785dbb2eaff961bf64fd21c25c280..20efe1513981585b3c699f73d0dbba6994f7c682 100644
@@ -19,6 +19,7 @@ from schema_salad.ref_resolver import Loader
 from schema_salad.sourceline import cmap
 from .mock_discovery import get_rootDesc
 from .matcher import JsonDiffMatcher, StripYAMLComments
+from .test_container import CollectionMock
 
 if not os.getenv('ARVADOS_DEBUG'):
     logging.getLogger('arvados.cwl-runner').setLevel(logging.WARN)
@@ -388,7 +389,8 @@ class TestWorkflow(unittest.TestCase):
         tool, metadata = loadingContext.loader.resolve_ref("tests/wf/scatter2.cwl")
         metadata["cwlVersion"] = tool["cwlVersion"]
 
-        mockcollection().portable_data_hash.return_value = "99999999999999999999999999999999+118"
+        mockc = mock.MagicMock()
+        mockcollection.side_effect = lambda *args, **kwargs: CollectionMock(mockc, *args, **kwargs)
         mockcollectionreader().find.return_value = arvados.arvfile.ArvadosFile(mock.MagicMock(), "token.txt")
 
         arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, loadingContext)
@@ -412,8 +414,8 @@ class TestWorkflow(unittest.TestCase):
                         'HOME': '$(task.outdir)',
                         'TMPDIR': '$(task.tmpdir)'},
                                'task.vwd': {
-                                   'workflow.cwl': '$(task.keep)/99999999999999999999999999999999+118/workflow.cwl',
-                                   'cwl.input.yml': '$(task.keep)/99999999999999999999999999999999+118/cwl.input.yml'
+                                   'workflow.cwl': '$(task.keep)/99999999999999999999999999999996+99/workflow.cwl',
+                                   'cwl.input.yml': '$(task.keep)/99999999999999999999999999999996+99/cwl.input.yml'
                                },
                     'command': [u'cwltool', u'--no-container', u'--move-outputs', u'--preserve-entire-environment', u'workflow.cwl#main', u'cwl.input.yml'],
                     'task.stdout': 'cwl.output.json'}]},
@@ -430,8 +432,8 @@ class TestWorkflow(unittest.TestCase):
                      ['docker_image_locator', 'in docker', 'arvados/jobs']],
             find_or_create=True)
 
-        mockcollection().open().__enter__().write.assert_has_calls([mock.call(subwf)])
-        mockcollection().open().__enter__().write.assert_has_calls([mock.call(
+        mockc.open().__enter__().write.assert_has_calls([mock.call(subwf)])
+        mockc.open().__enter__().write.assert_has_calls([mock.call(
 '''{
   "fileblub": {
     "basename": "token.txt",
@@ -469,7 +471,7 @@ class TestWorkflow(unittest.TestCase):
         tool, metadata = loadingContext.loader.resolve_ref("tests/wf/echo-wf.cwl")
         metadata["cwlVersion"] = tool["cwlVersion"]
 
-        mockcollection().portable_data_hash.return_value = "99999999999999999999999999999999+118"
+        mockcollection.side_effect = lambda *args, **kwargs: CollectionMock(mock.MagicMock(), *args, **kwargs)
 
         arvtool = arvados_cwl.ArvadosWorkflow(runner, tool, loadingContext)
         arvtool.formatgraph = None
@@ -491,8 +493,8 @@ class TestWorkflow(unittest.TestCase):
                         'HOME': '$(task.outdir)',
                         'TMPDIR': '$(task.tmpdir)'},
                                'task.vwd': {
-                                   'workflow.cwl': '$(task.keep)/99999999999999999999999999999999+118/workflow.cwl',
-                                   'cwl.input.yml': '$(task.keep)/99999999999999999999999999999999+118/cwl.input.yml'
+                                   'workflow.cwl': '$(task.keep)/99999999999999999999999999999996+99/workflow.cwl',
+                                   'cwl.input.yml': '$(task.keep)/99999999999999999999999999999996+99/cwl.input.yml'
                                },
                     'command': [u'cwltool', u'--no-container', u'--move-outputs', u'--preserve-entire-environment', u'workflow.cwl#main', u'cwl.input.yml'],
                     'task.stdout': 'cwl.output.json'}]},
diff --git a/sdk/cwl/tests/test_submit.py b/sdk/cwl/tests/test_submit.py
index d980db575dd8d6e3db1ac3dbb0f7709cb14a894e..8875b7d954d916e332175b5dbdd67103833719b1 100644
@@ -289,14 +289,14 @@ class TestSubmit(unittest.TestCase):
                 'manifest_text':
                 '. 5bcc9fe8f8d5992e6cf418dc7ce4dbb3+16 0:16:blub.txt\n',
                 'replication_desired': None,
-                'name': 'submit_tool.cwl dependencies',
-            }), ensure_unique_name=True),
+                'name': 'submit_tool.cwl dependencies (5d373e7629203ce39e7c22af98a0f881+52)',
+            }), ensure_unique_name=False),
             mock.call(body=JsonDiffMatcher({
                 'manifest_text':
                 '. 979af1245a12a1fed634d4222473bfdc+16 0:16:blorp.txt\n',
                 'replication_desired': None,
-                'name': 'submit_wf.cwl input',
-            }), ensure_unique_name=True),
+                'name': 'submit_wf.cwl input (169f39d466a5438ac4a90e779bf750c7+53)',
+            }), ensure_unique_name=False),
             mock.call(body=JsonDiffMatcher({
                 'manifest_text':
                 '. 61df2ed9ee3eb7dd9b799e5ca35305fa+1217 0:1217:workflow.cwl\n',
@@ -485,14 +485,14 @@ class TestSubmit(unittest.TestCase):
                 'manifest_text':
                 '. 5bcc9fe8f8d5992e6cf418dc7ce4dbb3+16 0:16:blub.txt\n',
                 'replication_desired': None,
-                'name': 'submit_tool.cwl dependencies',
-            }), ensure_unique_name=True),
+                'name': 'submit_tool.cwl dependencies (5d373e7629203ce39e7c22af98a0f881+52)',
+            }), ensure_unique_name=False),
             mock.call(body=JsonDiffMatcher({
                 'manifest_text':
                 '. 979af1245a12a1fed634d4222473bfdc+16 0:16:blorp.txt\n',
                 'replication_desired': None,
-                'name': 'submit_wf.cwl input',
-            }), ensure_unique_name=True)])
+                'name': 'submit_wf.cwl input (169f39d466a5438ac4a90e779bf750c7+53)',
+            }), ensure_unique_name=False)])
 
         expect_container = copy.deepcopy(stubs.expect_container_spec)
         stubs.api.container_requests().create.assert_called_with(
diff --git a/sdk/python/arvados/commands/run.py b/sdk/python/arvados/commands/run.py
index c4748fa995759ef0cc934b699a14523f8a3181f8..96f5bdd44a12ae42c25fbe64f68b342cb0356fcf 100644
@@ -1,6 +1,19 @@
 # Copyright (C) The Arvados Authors. All rights reserved.
+# Copyright (C) 2018 Genome Research Ltd.
 #
 # SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
 from __future__ import print_function
 from __future__ import absolute_import
@@ -22,6 +35,7 @@ import sys
 import errno
 import arvados.commands._util as arv_cmd
 import arvados.collection
+import arvados.config as config
 
 from arvados._version import __version__
 
@@ -207,22 +221,48 @@ def uploadfiles(files, api, dry_run=False, num_retries=0,
                     for src in iterfiles:
                         write_file(collection, pathprefix, os.path.join(root, src), not packed)
 
-        filters=[["portable_data_hash", "=", collection.portable_data_hash()]]
-        if name:
-            filters.append(["name", "like", name+"%"])
-        if project:
-            filters.append(["owner_uuid", "=", project])
-
-        exists = api.collections().list(filters=filters, limit=1).execute(num_retries=num_retries)
-
-        if exists["items"]:
-            item = exists["items"][0]
-            pdh = item["portable_data_hash"]
-            logger.info("Using collection %s (%s)", pdh, item["uuid"])
-        elif len(collection) > 0:
-            collection.save_new(name=name, owner_uuid=project, ensure_unique_name=True)
+        pdh = None
+        if len(collection) > 0:
+            # non-empty collection
+            filters = [["portable_data_hash", "=", collection.portable_data_hash()]]
+            name_pdh = "%s (%s)" % (name, collection.portable_data_hash())
+            if name:
+                filters.append(["name", "=", name_pdh])
+            if project:
+                filters.append(["owner_uuid", "=", project])
+
+            # Do the list/create in a loop with up to 2 tries: because we use `ensure_unique_name=False`,
+            # there is a potential race with other workflows that may create the collection between
+            # when we list it (and find it does not exist) and when we attempt to create it.
+            tries = 2
+            while pdh is None and tries > 0:
+                exists = api.collections().list(filters=filters, limit=1).execute(num_retries=num_retries)
+
+                if exists["items"]:
+                    item = exists["items"][0]
+                    pdh = item["portable_data_hash"]
+                    logger.info("Using collection %s (%s)", pdh, item["uuid"])
+                else:
+                    try:
+                        collection.save_new(name=name_pdh, owner_uuid=project, ensure_unique_name=False)
+                        pdh = collection.portable_data_hash()
+                        logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
+                    except arvados.errors.ApiError as ae:
+                        tries -= 1
+            if pdh is None:
+                # Something weird is going on here, probably a collection
+                # with a conflicting name but the wrong PDH.  We won't be
+                # able to reuse it, but we still need to save our
+                # collection, so save it with a unique name.
+                logger.info("Name conflict on '%s', existing collection has an unexpected portable data hash", name_pdh)
+                collection.save_new(name=name_pdh, owner_uuid=project, ensure_unique_name=True)
+                pdh = collection.portable_data_hash()
+                logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
+        else:
+            # empty collection
             pdh = collection.portable_data_hash()
-            logger.info("Uploaded to %s (%s)", pdh, collection.manifest_locator())
+            assert (pdh == config.EMPTY_BLOCK_LOCATOR), "Empty collection portable_data_hash did not have expected locator, was %s" % pdh
+            logger.info("Using empty collection %s", pdh)
 
     for c in files:
         c.keepref = "%s/%s" % (pdh, c.fn)
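Taken together, the run.py hunk above replaces the old "list by name prefix, else save with ensure_unique_name=True" logic with a deterministic-name, reuse-or-create pattern. A minimal standalone sketch of that pattern follows; it is illustrative only: the helper name find_or_save_collection is not part of the SDK, and it assumes `collection` is an already-populated arvados.collection.Collection, `api` is an API client, and `name` is non-empty, as in uploadfiles().

    import arvados
    import arvados.errors

    def find_or_save_collection(api, collection, name, project, num_retries=0):
        # Deterministic name "<name> (<PDH>)" so identical content always maps
        # to the same collection name within the project.
        name_pdh = "%s (%s)" % (name, collection.portable_data_hash())
        filters = [["portable_data_hash", "=", collection.portable_data_hash()],
                   ["name", "=", name_pdh]]
        if project:
            filters.append(["owner_uuid", "=", project])

        # With ensure_unique_name=False another workflow may create the same
        # collection between our list() and our save_new(), so try twice.
        for _ in range(2):
            exists = api.collections().list(filters=filters, limit=1).execute(num_retries=num_retries)
            if exists["items"]:
                return exists["items"][0]["portable_data_hash"]
            try:
                collection.save_new(name=name_pdh, owner_uuid=project,
                                    ensure_unique_name=False)
                return collection.portable_data_hash()
            except arvados.errors.ApiError:
                pass  # name conflict; loop around and re-check the listing

        # Conflicting name with an unexpected PDH: fall back to a uniquely
        # named save, as the patch does.
        collection.save_new(name=name_pdh, owner_uuid=project, ensure_unique_name=True)
        return collection.portable_data_hash()

The deterministic name combined with ensure_unique_name=False is what lets concurrent workflow submissions converge on a single shared collection instead of accumulating "name (2)", "name (3)", ... duplicates, which is also why the test_submit.py expectations above change from ensure_unique_name=True to ensure_unique_name=False with the PDH appended to the collection name.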