sdk/python/arvados/_pycurlhelper.py
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0

import collections
import logging
import math
import socket

import pycurl

# Module-level logger used by the unexpected-header error in _headerfunction.
_logger = logging.getLogger(__name__)


class PyCurlHelper:
    # Default Keep server connection timeout:  2 seconds
    # Default Keep server read timeout:       256 seconds
    # Default Keep server bandwidth minimum:  32768 bytes per second
    # Default Keep proxy connection timeout:  20 seconds
    # Default Keep proxy read timeout:        256 seconds
    # Default Keep proxy bandwidth minimum:   32768 bytes per second
    DEFAULT_TIMEOUT = (2, 256, 32768)
    DEFAULT_PROXY_TIMEOUT = (20, 256, 32768)

    def __init__(self, title_case_headers=False):
        self._socket = None
        self.title_case_headers = title_case_headers

    def _socket_open(self, *args, **kwargs):
        # pycurl >= 7.21.5 invokes the open-socket callback as
        # (purpose, address); older versions pass
        # (family, socktype, protocol[, address]).  Dispatch on the argument
        # count so both signatures are supported.
        if len(args) + len(kwargs) == 2:
            return self._socket_open_pycurl_7_21_5(*args, **kwargs)
        else:
            return self._socket_open_pycurl_7_19_3(*args, **kwargs)

    def _socket_open_pycurl_7_19_3(self, family, socktype, protocol, address=None):
        # Adapt the old callback signature to the newer one.
        return self._socket_open_pycurl_7_21_5(
            purpose=None,
            address=collections.namedtuple(
                'Address', ['family', 'socktype', 'protocol', 'addr'],
            )(family, socktype, protocol, address))

    def _socket_open_pycurl_7_21_5(self, purpose, address):
        """Open a TCP socket with keepalive enabled.

        pycurl does not expose CURLOPT_TCP_KEEPALIVE, so the keepalive
        options are set directly on the socket here.
        """
        s = socket.socket(address.family, address.socktype, address.protocol)
        s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        # macOS does not define TCP_KEEPIDLE; setting it there raises an
        # "invalid protocol" error, so only set it when the platform has it.
        if hasattr(socket, 'TCP_KEEPIDLE'):
            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 75)
        s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 75)
        self._socket = s
        return s

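    # The callbacks above are meant to be handed to libcurl through pycurl's
    # OPENSOCKETFUNCTION option.  A minimal sketch, assuming a caller owns a
    # pycurl.Curl handle named `curl` (the name is illustrative, not part of
    # this module):
    #
    #     curl.setopt(pycurl.OPENSOCKETFUNCTION, self._socket_open)
    #
    # so every connection libcurl opens gets these keepalive settings applied.
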
    def _setcurltimeouts(self, curl, timeouts, ignore_bandwidth=False):
        # `timeouts` may be a single number (used as both connect and transfer
        # timeout), a (connect, transfer) pair, or a
        # (connect, transfer, bandwidth) triple as in DEFAULT_TIMEOUT.
        if not timeouts:
            return
        elif isinstance(timeouts, tuple):
            if len(timeouts) == 2:
                conn_t, xfer_t = timeouts
                bandwidth_bps = self.DEFAULT_TIMEOUT[2]
            else:
                conn_t, xfer_t, bandwidth_bps = timeouts
        else:
            conn_t, xfer_t = (timeouts, timeouts)
            bandwidth_bps = self.DEFAULT_TIMEOUT[2]
        curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(conn_t*1000))
        if not ignore_bandwidth:
            # Abort the transfer if it runs slower than bandwidth_bps bytes
            # per second for xfer_t seconds.
            curl.setopt(pycurl.LOW_SPEED_TIME, int(math.ceil(xfer_t)))
            curl.setopt(pycurl.LOW_SPEED_LIMIT, int(math.ceil(bandwidth_bps)))

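    # A minimal usage sketch, assuming `curl` is a pycurl.Curl handle owned by
    # the caller (this module never creates one itself):
    #
    #     self._setcurltimeouts(curl, self.DEFAULT_TIMEOUT)        # Keep server
    #     self._setcurltimeouts(curl, self.DEFAULT_PROXY_TIMEOUT)  # Keep proxy
    #     self._setcurltimeouts(curl, 300, ignore_bandwidth=True)  # connect timeout only
    #
    # With ignore_bandwidth=True only the connect timeout is applied; no
    # low-speed abort is installed.
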
    def _headerfunction(self, header_line):
        # pycurl HEADERFUNCTION callback: accumulate response headers into
        # self._headers, which the caller is expected to reset to an empty
        # dict before each request.
        if isinstance(header_line, bytes):
            header_line = header_line.decode('iso-8859-1')
        if ':' in header_line:
            name, value = header_line.split(':', 1)
            if self.title_case_headers:
                name = name.strip().title()
            else:
                name = name.strip().lower()
            value = value.strip()
        elif self._headers:
            # A line with no colon, arriving after at least one header has
            # been seen, is treated as a folded continuation of the previous
            # header.
            name = self._lastheadername
            value = self._headers[name] + ' ' + header_line.strip()
        elif header_line.startswith('HTTP/'):
            name = 'x-status-line'
            value = header_line
        else:
            _logger.error("Unexpected header line: %s", header_line)
            return
        self._lastheadername = name
        self._headers[name] = value
        # Returning None implies all bytes were written
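

# A minimal, hedged usage sketch (not part of the original module): it shows
# how the helper's callbacks and timeout logic could be wired onto a
# pycurl.Curl handle.  The URL and local variable names are illustrative
# assumptions; real callers in the Arvados SDK subclass PyCurlHelper instead.
def _example_request(url='https://keep.example.org/status.json'):
    import io

    helper = PyCurlHelper()
    helper._headers = {}                 # reset before every request
    body = io.BytesIO()

    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, body.write)
    curl.setopt(pycurl.HEADERFUNCTION, helper._headerfunction)
    curl.setopt(pycurl.OPENSOCKETFUNCTION, helper._socket_open)
    helper._setcurltimeouts(curl, PyCurlHelper.DEFAULT_TIMEOUT)

    curl.perform()
    status = curl.getinfo(pycurl.RESPONSE_CODE)
    curl.close()
    return status, helper._headers, body.getvalue()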