Development of an internal social media platform with personalised dashboards for students

adapters.py 21KB

# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import os.path
import socket

from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
from pip._vendor.urllib3.response import HTTPResponse
from pip._vendor.urllib3.util import parse_url
from pip._vendor.urllib3.util import Timeout as TimeoutSauce
from pip._vendor.urllib3.util.retry import Retry
from pip._vendor.urllib3.exceptions import ClosedPoolError
from pip._vendor.urllib3.exceptions import ConnectTimeoutError
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
from pip._vendor.urllib3.exceptions import MaxRetryError
from pip._vendor.urllib3.exceptions import NewConnectionError
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
from pip._vendor.urllib3.exceptions import ProtocolError
from pip._vendor.urllib3.exceptions import ReadTimeoutError
from pip._vendor.urllib3.exceptions import SSLError as _SSLError
from pip._vendor.urllib3.exceptions import ResponseError

from .models import Response
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
                    get_encoding_from_headers, prepend_scheme_if_needed,
                    get_auth_from_url, urldefragauth, select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError, InvalidSchema, InvalidProxyURL)
from .auth import _basic_auth_str

try:
    from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")
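
# Illustrative sketch (comment only): SOCKS proxying only works when the optional
# PySocks dependency is installed (e.g. ``pip install requests[socks]`` in a normal,
# non-vendored install). The proxy URL scheme selects the manager above; ``socks5h``
# additionally resolves DNS through the proxy. The host and port are hypothetical.
#
#     import requests
#     proxies = {
#         'http': 'socks5h://127.0.0.1:9050',
#         'https': 'socks5h://127.0.0.1:9050',
#     }
#     requests.get('https://example.org', proxies=proxies)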

DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None


class BaseAdapter(object):
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError


class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
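
    # Illustrative sketch (comment only, assuming a normal ``requests`` install rather
    # than this vendored copy): as the class docstring notes, ``max_retries`` also
    # accepts a urllib3 ``Retry`` policy for finer control. The backoff and status
    # codes below are hypothetical.
    #
    #     import requests
    #     from urllib3.util.retry import Retry
    #
    #     retries = Retry(total=5, backoff_factor=0.3,
    #                     status_forcelist=[502, 503, 504])
    #     session = requests.Session()
    #     session.mount('https://', requests.adapters.HTTPAdapter(max_retries=retries))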

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)
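
    # Illustrative sketch (comment only): the two methods above exist so an adapter
    # survives pickling; live pool objects are dropped and rebuilt from the saved
    # configuration.
    #
    #     import pickle
    #     adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20)
    #     restored = pickle.loads(pickle.dumps(adapter))
    #     # ``restored`` holds a fresh PoolManager with the same pool settings.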

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)
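
    # Illustrative sketch (comment only): subclasses typically hook in here to forward
    # extra ``pool_kwargs`` to urllib3, for example a custom ``ssl.SSLContext``. The
    # subclass name is hypothetical.
    #
    #     import ssl
    #
    #     class CustomContextAdapter(HTTPAdapter):
    #         def init_poolmanager(self, *args, **kwargs):
    #             kwargs['ssl_context'] = ssl.create_default_context()
    #             super(CustomContextAdapter, self).init_poolmanager(*args, **kwargs)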

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager

    def cert_verify(self, conn, url, verify, cert):
        """Verify an SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {0}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {0}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {0}".format(conn.key_file))
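
    # Illustrative sketch (comment only): how ``verify`` and ``cert`` reach this method
    # from an ordinary call. The bundle and certificate paths are hypothetical.
    #
    #     import requests
    #     requests.get('https://internal.example.org/api',
    #                  verify='/etc/ssl/certs/corp-ca.pem',                    # custom CA bundle
    #                  cert=('/etc/ssl/client.crt', '/etc/ssl/client.key'))    # client cert pair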

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL("Please check proxy URL. It is malformed"
                                      " and could be missing the host.")
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()
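
    # Illustrative sketch (comment only): ``Session.close()`` calls this for every
    # mounted adapter, so using the session as a context manager is the usual way to
    # release pooled connections.
    #
    #     import requests
    #     with requests.Session() as session:
    #         session.get('https://example.org')
    #     # adapters (and their connection pools) are closed on exit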

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through an HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')

        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass
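
    # Illustrative sketch (comment only): a subclass that stamps every outgoing request
    # with an extra header. The adapter name and header value are hypothetical.
    #
    #     class TracingAdapter(HTTPAdapter):
    #         def add_headers(self, request, **kwargs):
    #             request.headers['X-Trace-Id'] = 'generated-elsewhere'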

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers
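
    # Illustrative sketch (comment only): credentials embedded in the proxy URL end up
    # here as a Proxy-Authorization header. The host and credentials are hypothetical.
    #
    #     import requests
    #     proxies = {'http': 'http://user:secret@proxy.example.org:3128'}
    #     requests.get('http://example.org', proxies=proxies)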

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)

        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
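
    # Illustrative sketch (comment only): the timeout handling above accepts a single
    # float or a (connect, read) tuple; both are normalised to a urllib3 ``Timeout``.
    # The values below are hypothetical.
    #
    #     import requests
    #     session = requests.Session()
    #     session.get('https://example.org', timeout=(3.05, 27))   # connect, read
    #     session.get('https://example.org', timeout=10)           # both set to 10 seconds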