Development of an internal social media platform with personalised dashboards for students

connectionpool.py 35KB

from __future__ import absolute_import
import errno
import logging
import sys
import warnings

from socket import error as SocketError, timeout as SocketTimeout
import socket

from .exceptions import (
    ClosedPoolError,
    ProtocolError,
    EmptyPoolError,
    HeaderParsingError,
    HostChangedError,
    LocationValueError,
    MaxRetryError,
    ProxyError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
    InsecureRequestWarning,
    NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .packages.six.moves import queue
from .connection import (
    port_by_scheme,
    DummyConnection,
    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
    HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url, NORMALIZABLE_SCHEMES
from .util.queue import LifoQueue


xrange = six.moves.xrange

log = logging.getLogger(__name__)

_Default = object()


# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _ipv6_host(host, self.scheme)
        self._proxy_host = host.lower()
        self.port = port

    def __str__(self):
        return '%s(host=%r, port=%r)' % (type(self).__name__,
                                         self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        pass


# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])


class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    scheme = 'http'
    ConnectionCls = HTTPConnection
    ResponseCls = HTTPResponse

    def __init__(self, host, port=None, strict=False,
                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
                 headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 **conn_kw):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        self.strict = strict

        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we
            # cannot replace the list.
            self.conn_kw.setdefault('socket_options', [])

    def _new_conn(self):
        """
        Return a fresh :class:`HTTPConnection`.
        """
        self.num_connections += 1
        log.debug("Starting new HTTP connection (%d): %s:%s",
                  self.num_connections, self.host, self.port or "80")

        conn = self.ConnectionCls(host=self.host, port=self.port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)
        return conn

    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.")

        except queue.Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.debug("Resetting dropped connection: %s", self.host)
            conn.close()
            if getattr(conn, 'auto_open', 1) == 0:
                # This is a proxied connection that has been mutated by
                # httplib._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None

        return conn or self._new_conn()

    def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except queue.Full:
            # This should never happen if self.block == True
            log.warning(
                "Connection pool is full, discarding connection: %s",
                self.host)

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        pass

    def _prepare_proxy(self, conn):
        # Nothing to do for HTTP connections.
        pass

    def _get_timeout(self, timeout):
        """ Helper that always returns a :class:`urllib3.util.Timeout` """
        if timeout is _Default:
            return self.timeout.clone()

        if isinstance(timeout, Timeout):
            return timeout.clone()
        else:
            # User passed us an int/float. This is for backwards compatibility,
            # can be removed later
            return Timeout.from_float(timeout)

    def _raise_timeout(self, err, url, timeout_value):
        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""

        if isinstance(err, SocketTimeout):
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # See the above comment about EAGAIN in Python 3. In Python 2 we have
        # to specifically catch it and throw the timeout error
        if hasattr(err, 'errno') and err.errno in _blocking_errnos:
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

        # Catch possible read timeouts thrown as SSL errors. If not the
        # case, rethrow the original. We need to do this because of:
        # http://bugs.python.org/issue10272
        if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout

        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older, Python 3
                try:
                    httplib_response = conn.getresponse()
                except Exception as e:
                    # Remove the TypeError from the exception chain in Python 3;
                    # otherwise it looks like a programming error was the cause.
                    six.raise_from(e, None)
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port,
                  method, url, http_version, httplib_response.status,
                  httplib_response.length)

        try:
            assert_header_parsing(httplib_response.msg)
        except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
            log.warning(
                'Failed to parse headers (url=%s): %s',
                self._absolute_url(url), hpe, exc_info=True)

        return httplib_response

    def _absolute_url(self, path):
        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        if self.pool is None:
            return
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except queue.Empty:
            pass  # Done.

    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        """
        if url.startswith('/'):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)

        host = _ipv6_host(host, self.scheme)

        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, chunked=False,
                body_pos=None, **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.

        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/shazow/urllib3/issues/651>
        release_this_conn = release_conn

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout

            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers,
                                                  chunked=chunked)

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Pass method to Response for length checking
            response_kw['request_method'] = method

            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(httplib_response,
                                                     pool=self,
                                                     connection=response_conn,
                                                     retries=retries,
                                                     **response_kw)

            # Everything went great!
            clean_exit = True

        except queue.Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")

        except (TimeoutError, HTTPException, SocketError, ProtocolError,
                BaseSSLError, SSLError, CertificateError) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError('Cannot connect to proxy.', e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError('Connection aborted.', e)

            retries = retries.increment(method, url, error=e, _pool=self,
                                        _stacktrace=sys.exc_info()[2])
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                conn = conn and conn.close()
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning("Retrying (%r) after connection "
                        "broken by '%r': %s", retries, err, url)
            return self.urlopen(method, url, body, headers, retries,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, body_pos=body_pos,
                                **response_kw)
        def drain_and_release_conn(response):
            try:
                # discard any remaining response body, the connection will be
                # released back to the pool once the entire response is read
                response.read()
            except (TimeoutError, HTTPException, SocketError, ProtocolError,
                    BaseSSLError, SSLError) as e:
                pass

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                method = 'GET'

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    # Drain and release the connection for this response, since
                    # we're not returning it to be released manually.
                    drain_and_release_conn(response)
                    raise
                return response

            # drain and return the connection to the pool before recursing
            drain_and_release_conn(response)

            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method, redirect_location, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn, body_pos=body_pos,
                **response_kw)

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.getheader('Retry-After'))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    # Drain and release the connection for this response, since
                    # we're not returning it to be released manually.
                    drain_and_release_conn(response)
                    raise
                return response

            # drain and return the connection to the pool before recursing
            drain_and_release_conn(response)

            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method, url, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn,
                body_pos=body_pos, **response_kw)

        return response
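

# Illustrative usage sketch (editorial addition, not part of upstream urllib3):
# a minimal example of how HTTPConnectionPool is typically driven, assuming a
# reachable host; the hostname and path below are placeholders.
def _example_http_pool_usage():
    # One pool per host; maxsize controls how many sockets are kept around,
    # block=True caps concurrent connections at maxsize, and a Retry object
    # configures how transient failures are retried.
    pool = HTTPConnectionPool('httpbin.org', port=80, maxsize=2, block=True,
                              retries=Retry(total=3, backoff_factor=0.5))
    # urlopen() is the low-level entry point shown above; request() (from
    # RequestMethods) is the usual convenience wrapper around it.
    response = pool.urlopen('GET', '/get', preload_content=True)
    return response.status, response.data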

class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = 'https'
    ConnectionCls = HTTPSConnection

    def __init__(self, host, port=None,
                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
                 block=False, headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None, **conn_kw):

        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                    block, headers, retries, _proxy, _proxy_headers,
                                    **conn_kw)

        if ca_certs and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(key_file=self.key_file,
                          cert_file=self.cert_file,
                          cert_reqs=self.cert_reqs,
                          ca_certs=self.ca_certs,
                          ca_cert_dir=self.ca_cert_dir,
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version
        return conn

    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        """
        # Python 2.7+
        try:
            set_tunnel = conn.set_tunnel
        except AttributeError:  # Platform-specific: Python 2.6
            set_tunnel = conn._set_tunnel

        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
            set_tunnel(self._proxy_host, self.port)
        else:
            set_tunnel(self._proxy_host, self.port, self.proxy_headers)

        conn.connect()

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.debug("Starting new HTTPS connection (%d): %s:%s",
                  self.num_connections, self.host, self.port or "443")

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")

        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)

        return self._prepare_conn(conn)

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
            conn.connect()

        if not conn.is_verified:
            warnings.warn((
                'Unverified HTTPS request is being made. '
                'Adding certificate verification is strongly advised. See: '
                'https://urllib3.readthedocs.io/en/latest/advanced-usage.html'
                '#ssl-warnings'),
                InsecureRequestWarning)
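

# Illustrative usage sketch (editorial addition, not part of upstream urllib3):
# constructing an HTTPSConnectionPool with certificate verification enabled.
# The hostname and the CA bundle path are placeholders for this example.
def _example_https_pool_usage():
    pool = HTTPSConnectionPool('example.org', port=443,
                               cert_reqs='CERT_REQUIRED',
                               ca_certs='/etc/ssl/certs/ca-certificates.crt')
    # A failed verification surfaces as SSLError; an unverified connection
    # would instead trigger InsecureRequestWarning (see _validate_conn above).
    response = pool.request('GET', '/')
    return response.status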

def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    port = port or port_by_scheme.get(scheme, 80)
    if scheme == 'https':
        return HTTPSConnectionPool(host, port=port, **kw)
    else:
        return HTTPConnectionPool(host, port=port, **kw)

def _ipv6_host(host, scheme):
    """
    Process IPv6 address literals
    """

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    #
    # Also if an IPv6 address literal has a zone identifier, the
    # percent sign might be URIencoded, convert it back into ASCII
    if host.startswith('[') and host.endswith(']'):
        host = host.replace('%25', '%').strip('[]')
    if scheme in NORMALIZABLE_SCHEMES:
        host = host.lower()
    return host
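

# Illustrative sketch (editorial addition, not part of upstream urllib3):
# what _ipv6_host, defined above, does to a bracketed IPv6 literal with a
# percent-encoded zone identifier. Assumes 'http' is among the
# NORMALIZABLE_SCHEMES imported from .util.url.
def _example_ipv6_host_normalisation():
    # Brackets are stripped and '%25' (the encoded '%') becomes a plain '%',
    # so httplib does not double up brackets in the Host header.
    assert _ipv6_host('[fe80::1%25eth0]', 'http') == 'fe80::1%eth0'
    # Non-bracketed hosts are only lower-cased for normalizable schemes.
    assert _ipv6_host('EXAMPLE.org', 'http') == 'example.org'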