key = NodeAuthSSHKey(ssh_file.read())
return 'auth', key
- def search_for(self, term, list_method, key=attrgetter('id'), **kwargs):
+ def search_for_now(self, term, list_method, key=attrgetter('id'), **kwargs):
"""Return one matching item from a list of cloud objects.
Raises ValueError if the number of matching objects is not exactly 1.
value search for a `term` match on each item. Returns the
object's 'id' attribute by default.
"""
+ # Uncached variant: always calls the cloud API via the named list method.
+ items = getattr(self.real, list_method)(**kwargs)
+ results = [item for item in items if key(item) == term]
+ count = len(results)
+ if count != 1:
+ raise ValueError("{} returned {} results for {!r}".format(
+ list_method, count, term))
+ return results[0]
+
+ def search_for(self, term, list_method, key=attrgetter('id'), **kwargs):
+ """Return one cached matching item from a list of cloud objects.
+
+ See search_for_now() for details of arguments and exceptions.
+ This method caches results, so it's good to find static cloud objects
+ like node sizes, regions, etc.
+ """
cache_key = (list_method, term)
if cache_key not in self.SEARCH_CACHE:
- items = getattr(self.real, list_method)(**kwargs)
- results = [item for item in items
- if key(item) == term]
- count = len(results)
- if count != 1:
- raise ValueError("{} returned {} results for '{}'".format(
- list_method, count, term))
- self.SEARCH_CACHE[cache_key] = results[0]
+ self.SEARCH_CACHE[cache_key] = self.search_for_now(
+ term, list_method, key, **kwargs)
+ # NOTE(review): the cache key is (list_method, term) only — differing
+ # `key`/kwargs for the same pair will reuse the first cached result.
return self.SEARCH_CACHE[cache_key]
def list_nodes(self, **kwargs):
l.update(kwargs)
return self.real.list_nodes(**l)
+ def create_cloud_name(self, arvados_node):
+ """Return a cloud node name for the given Arvados node record.
+
+ Subclasses must override this method. It should return a string
+ that can be used as the name for a newly-created cloud node,
+ based on identifying information in the Arvados node record.
+
+ Arguments:
+ * arvados_node: This Arvados node record to seed the new cloud node.
+ """
+ # Abstract hook; the message names the class so tracebacks identify
+ # which override is missing.
+ raise NotImplementedError("BaseComputeNodeDriver.create_cloud_name")
+
def arvados_create_kwargs(self, size, arvados_node):
"""Return dynamic keyword arguments for create_node.
kwargs.update(self.arvados_create_kwargs(size, arvados_node))
kwargs['size'] = size
return self.real.create_node(**kwargs)
- except self.CLOUD_ERRORS:
+ except self.CLOUD_ERRORS as create_error:
# Workaround for bug #6702: sometimes the create node request
# succeeds but times out and raises an exception instead of
# returning a result. If this happens, we get stuck in a retry
# loop forever because subsequent create_node attempts will fail
# due to node name collision. So check if the node we intended to
# create shows up in the cloud node list and return it if found.
- node = self.search_for(kwargs['name'], 'list_nodes', self._name_key)
- if node:
- return node
- else:
- # something else went wrong, re-raise the exception
- raise
+ try:
+ return self.search_for_now(kwargs['name'], 'list_nodes', self._name_key)
+ except ValueError:
+ # Node never appeared in list_nodes, so the create really
+ # failed — surface the original cloud error, not the lookup's.
+ raise create_error
def post_create_node(self, cloud_node):
# ComputeNodeSetupActor calls this method after the cloud node is
auth_kwargs, list_kwargs, create_kwargs,
driver_class)
+ def create_cloud_name(self, arvados_node):
+ # UUIDs look like '<cluster>-<type>-<node>'; split('-', 2) yields those
+ # three parts, producing a name of the form 'compute-<node>-<cluster>'.
+ uuid_parts = arvados_node['uuid'].split('-', 2)
+ return 'compute-{parts[2]}-{parts[0]}'.format(parts=uuid_parts)
+
def arvados_create_kwargs(self, size, arvados_node):
- cluster_id, _, node_id = arvados_node['uuid'].split('-')
- name = 'compute-{}-{}'.format(node_id, cluster_id)
tags = {
'booted_at': time.strftime(ARVADOS_TIMEFMT, time.gmtime()),
'arv-ping-url': self._make_ping_url(arvados_node)
}
tags.update(self.tags)
+ name = self.create_cloud_name(arvados_node)
customdata = """#!/bin/sh
mkdir -p /var/tmp/arv-node-data/meta-data
echo %s > /var/tmp/arv-node-data/arv-ping-url
import threading
import time
+import libcloud.common.types as cloud_types
import mock
import pykka
self.assertTrue(self.driver_mock.called)
self.assertIs(driver.real, driver_mock2)
+ def test_create_can_find_node_after_timeout(self):
+ # Bug #6702 workaround: create_node raises a timeout even though the
+ # node was created; the driver should find it via list_nodes by name
+ # and return it instead of propagating the error.
+ driver = self.new_driver()
+ arv_node = arvados_node_mock()
+ cloud_node = cloud_node_mock()
+ cloud_node.name = driver.create_cloud_name(arv_node)
+ create_method = self.driver_mock().create_node
+ create_method.side_effect = cloud_types.LibcloudError("fake timeout")
+ list_method = self.driver_mock().list_nodes
+ list_method.return_value = [cloud_node]
+ actual = driver.create_node(MockSize(1), arv_node)
+ self.assertIs(cloud_node, actual)
+
+
+ def test_create_can_raise_exception_after_timeout(self):
+ # When the node never shows up in list_nodes after a create timeout,
+ # the original LibcloudError (not a lookup ValueError) must propagate.
+ driver = self.new_driver()
+ arv_node = arvados_node_mock()
+ create_method = self.driver_mock().create_node
+ create_method.side_effect = cloud_types.LibcloudError("fake timeout")
+ list_method = self.driver_mock().list_nodes
+ list_method.return_value = []
+ with self.assertRaises(cloud_types.LibcloudError) as exc_test:
+ driver.create_node(MockSize(1), arv_node)
+ self.assertIs(create_method.side_effect, exc_test.exception)
+
+
class RemotePollLoopActorTestMixin(ActorTestMixin):
def build_monitor(self, *args, **kwargs):
self.timer = mock.MagicMock(name='timer_mock')