Merge branch '17989-pysdk-timeout' into main. Refs #17989
author    Lucas Di Pentima <lucas.dipentima@curii.com>
Wed, 8 Sep 2021 13:55:22 +0000 (10:55 -0300)
committer Lucas Di Pentima <lucas.dipentima@curii.com>
Wed, 8 Sep 2021 13:55:22 +0000 (10:55 -0300)
Arvados-DCO-1.1-Signed-off-by: Lucas Di Pentima <lucas.dipentima@curii.com>

sdk/python/arvados/api.py
sdk/python/tests/test_api.py
tools/test-collection-create/test-collection-create.py

sdk/python/arvados/api.py
index 4fe3999f2c3098391f387f51fe96cc628bf1a790..55b1e22b78a9add1588d46f24047d5a9a8aa7c1c 100644 (file)
@@ -211,7 +211,8 @@ def api(version=None, cache=True, host=None, token=None, insecure=False,
         pass
     elif not host and not token:
         return api_from_config(
-            version=version, cache=cache, request_id=request_id, **kwargs)
+            version=version, cache=cache, timeout=timeout,
+            request_id=request_id, **kwargs)
     else:
         # Caller provided one but not the other
         if not host:
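
In practice, the one-line change above means a caller-supplied timeout now survives the
configuration-based code path instead of falling back to the default. A minimal sketch of
the affected call, assuming ARVADOS_API_HOST/ARVADOS_API_TOKEN come from the environment
or settings file rather than being passed explicitly:

    import arvados

    # Before this fix, a timeout passed here was ignored whenever host/token were
    # read from the configuration (the api_from_config() branch); it is now forwarded.
    client = arvados.api('v1', timeout=30)
    # Equivalent call through the config helper, shown for comparison.
    client = arvados.api_from_config(version='v1', timeout=30)
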
sdk/python/tests/test_api.py
index 60183e06a352259530534bedf56da1bbba5c3443..9b944f6c53e7ec0abc4d323d83d7ba0aece43741 100644 (file)
@@ -107,6 +107,12 @@ class ArvadosApiTest(run_test_server.TestCaseWithServers):
         self.assertEqual(api._http.timeout, 10,
             "Default timeout value should be 10")
 
+    # Checks for bug #17989
+    def test_custom_request_timeout(self):
+        api = arvados.api('v1', timeout=1234)
+        self.assertEqual(api._http.timeout, 1234,
+            "Requested timeout value was 1234")
+
     def test_ordered_json_model(self):
         mock_responses = {
             'arvados.humans.get': (
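
For reference, a short illustration of what the new assertion inspects. A sketch, assuming
a reachable cluster configured via the usual environment variables, and assuming that the
client's _http attribute is the SDK's underlying httplib2 transport object:

    import arvados

    client = arvados.api('v1', timeout=1234)
    # _http is the underlying HTTP transport (an httplib2.Http in the current Python
    # SDK -- an assumption here); its timeout attribute is the per-request socket
    # timeout that the new keyword controls.
    print(client._http.timeout)   # expected: 1234, mirroring test_custom_request_timeout
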
tools/test-collection-create/test-collection-create.py
index 9a02745694e6dd3898b543ec4623c29c7a01e7d2..c8eae240d29b04cae64a1e8a51fc8d5955993826 100644 (file)
@@ -16,6 +16,8 @@ import arvados.collection
 logger = logging.getLogger('arvados.test_collection_create')
 logger.setLevel(logging.INFO)
 
+max_manifest_size = 127*1024*1024
+
 opts = argparse.ArgumentParser(add_help=False)
 opts.add_argument('--min-files', type=int, default=30000, help="""
 Minimum number of files on each directory. Default: 30000.
@@ -381,7 +383,7 @@ def create_substreams(depth, base_stream_name, max_filesize, data_loc, args, cur
     current_size += len(current_stream)
     streams = [current_stream]
 
-    if current_size >= (128 * 1024 * 1024):
+    if current_size >= max_manifest_size:
         logger.debug("Maximum manifest size reached -- finishing early at {}".format(base_stream_name))
     elif depth == 0:
         logger.debug("Finished stream {}".format(base_stream_name))
@@ -391,7 +393,7 @@ def create_substreams(depth, base_stream_name, max_filesize, data_loc, args, cur
             substreams = create_substreams(depth-1, stream_name, max_filesize,
                 data_loc, args, current_size)
             current_size += sum([len(x) for x in substreams])
-            if current_size >= (128 * 1024 * 1024):
+            if current_size >= max_manifest_size:
                 break
             streams.extend(substreams)
     return streams
@@ -421,7 +423,7 @@ def main(arguments=None):
         '.', max_filesize, data_loc, args)
     manifest = ''
     for s in streams:
-        if len(manifest)+len(s) > (1024*1024*128)-2:
+        if len(manifest)+len(s) > max_manifest_size:
             logger.info("Skipping stream {} to avoid making a manifest bigger than 128MiB".format(s.split(' ')[0]))
             break
         manifest += s + '\n'
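
Note that replacing the inline 128 MiB figures with max_manifest_size also tightens the
threshold slightly, from 128 MiB (and 128 MiB minus 2 bytes in main()) to 127 MiB, leaving
about 1 MiB of headroom under the 128 MiB limit mentioned in the log message. The
arithmetic, for illustration only (API_MANIFEST_LIMIT is a name used here, not one from
the source):

    API_MANIFEST_LIMIT = 128 * 1024 * 1024   # 134217728 bytes, the limit in the log message
    max_manifest_size = 127 * 1024 * 1024    # 133169152 bytes, the new early-exit threshold
    print(API_MANIFEST_LIMIT - max_manifest_size)   # 1048576 bytes (1 MiB) of headroom
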