Working prototype of the serious game for teaching knowledge about software engineering work models.

adapters.py

  1. """
  2. requests.adapters
  3. ~~~~~~~~~~~~~~~~~
  4. This module contains the transport adapters that Requests uses to define
  5. and maintain connections.
  6. """
  7. import os.path
  8. import socket # noqa: F401
  9. from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
  10. from urllib3.exceptions import HTTPError as _HTTPError
  11. from urllib3.exceptions import InvalidHeader as _InvalidHeader
  12. from urllib3.exceptions import (
  13. LocationValueError,
  14. MaxRetryError,
  15. NewConnectionError,
  16. ProtocolError,
  17. )
  18. from urllib3.exceptions import ProxyError as _ProxyError
  19. from urllib3.exceptions import ReadTimeoutError, ResponseError
  20. from urllib3.exceptions import SSLError as _SSLError
  21. from urllib3.poolmanager import PoolManager, proxy_from_url
  22. from urllib3.util import Timeout as TimeoutSauce
  23. from urllib3.util import parse_url
  24. from urllib3.util.retry import Retry
  25. from .auth import _basic_auth_str
  26. from .compat import basestring, urlparse
  27. from .cookies import extract_cookies_to_jar
  28. from .exceptions import (
  29. ConnectionError,
  30. ConnectTimeout,
  31. InvalidHeader,
  32. InvalidProxyURL,
  33. InvalidSchema,
  34. InvalidURL,
  35. ProxyError,
  36. ReadTimeout,
  37. RetryError,
  38. SSLError,
  39. )
  40. from .models import Response
  41. from .structures import CaseInsensitiveDict
  42. from .utils import (
  43. DEFAULT_CA_BUNDLE_PATH,
  44. extract_zipped_paths,
  45. get_auth_from_url,
  46. get_encoding_from_headers,
  47. prepend_scheme_if_needed,
  48. select_proxy,
  49. urldefragauth,
  50. )
  51. try:
  52. from urllib3.contrib.socks import SOCKSProxyManager
  53. except ImportError:
  54. def SOCKSProxyManager(*args, **kwargs):
  55. raise InvalidSchema("Missing dependencies for SOCKS support.")
  56. DEFAULT_POOLBLOCK = False
  57. DEFAULT_POOLSIZE = 10
  58. DEFAULT_RETRIES = 0
  59. DEFAULT_POOL_TIMEOUT = None
  60. class BaseAdapter:
  61. """The Base Transport Adapter"""
  62. def __init__(self):
  63. super().__init__()
  64. def send(
  65. self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
  66. ):
  67. """Sends PreparedRequest object. Returns Response object.
  68. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
  69. :param stream: (optional) Whether to stream the request content.
  70. :param timeout: (optional) How long to wait for the server to send
  71. data before giving up, as a float, or a :ref:`(connect timeout,
  72. read timeout) <timeouts>` tuple.
  73. :type timeout: float or tuple
  74. :param verify: (optional) Either a boolean, in which case it controls whether we verify
  75. the server's TLS certificate, or a string, in which case it must be a path
  76. to a CA bundle to use
  77. :param cert: (optional) Any user-provided SSL certificate to be trusted.
  78. :param proxies: (optional) The proxies dictionary to apply to the request.
  79. """
  80. raise NotImplementedError
  81. def close(self):
  82. """Cleans up adapter specific items."""
  83. raise NotImplementedError
  84. class HTTPAdapter(BaseAdapter):
  85. """The built-in HTTP Adapter for urllib3.
  86. Provides a general-case interface for Requests sessions to contact HTTP and
  87. HTTPS urls by implementing the Transport Adapter interface. This class will
  88. usually be created by the :class:`Session <Session>` class under the
  89. covers.
  90. :param pool_connections: The number of urllib3 connection pools to cache.
  91. :param pool_maxsize: The maximum number of connections to save in the pool.
  92. :param max_retries: The maximum number of retries each connection
  93. should attempt. Note, this applies only to failed DNS lookups, socket
  94. connections and connection timeouts, never to requests where data has
  95. made it to the server. By default, Requests does not retry failed
  96. connections. If you need granular control over the conditions under
  97. which we retry a request, import urllib3's ``Retry`` class and pass
  98. that instead.
  99. :param pool_block: Whether the connection pool should block for connections.
  100. Usage::
  101. >>> import requests
  102. >>> s = requests.Session()
  103. >>> a = requests.adapters.HTTPAdapter(max_retries=3)
  104. >>> s.mount('http://', a)
  105. """
  106. __attrs__ = [
  107. "max_retries",
  108. "config",
  109. "_pool_connections",
  110. "_pool_maxsize",
  111. "_pool_block",
  112. ]
  113. def __init__(
  114. self,
  115. pool_connections=DEFAULT_POOLSIZE,
  116. pool_maxsize=DEFAULT_POOLSIZE,
  117. max_retries=DEFAULT_RETRIES,
  118. pool_block=DEFAULT_POOLBLOCK,
  119. ):
  120. if max_retries == DEFAULT_RETRIES:
  121. self.max_retries = Retry(0, read=False)
  122. else:
  123. self.max_retries = Retry.from_int(max_retries)
  124. self.config = {}
  125. self.proxy_manager = {}
  126. super().__init__()
  127. self._pool_connections = pool_connections
  128. self._pool_maxsize = pool_maxsize
  129. self._pool_block = pool_block
  130. self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
  131. def __getstate__(self):
  132. return {attr: getattr(self, attr, None) for attr in self.__attrs__}
  133. def __setstate__(self, state):
  134. # Can't handle by adding 'proxy_manager' to self.__attrs__ because
  135. # self.poolmanager uses a lambda function, which isn't pickleable.
  136. self.proxy_manager = {}
  137. self.config = {}
  138. for attr, value in state.items():
  139. setattr(self, attr, value)
  140. self.init_poolmanager(
  141. self._pool_connections, self._pool_maxsize, block=self._pool_block
  142. )
  143. def init_poolmanager(
  144. self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
  145. ):
  146. """Initializes a urllib3 PoolManager.
  147. This method should not be called from user code, and is only
  148. exposed for use when subclassing the
  149. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  150. :param connections: The number of urllib3 connection pools to cache.
  151. :param maxsize: The maximum number of connections to save in the pool.
  152. :param block: Block when no free connections are available.
  153. :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
  154. """
  155. # save these values for pickling
  156. self._pool_connections = connections
  157. self._pool_maxsize = maxsize
  158. self._pool_block = block
  159. self.poolmanager = PoolManager(
  160. num_pools=connections,
  161. maxsize=maxsize,
  162. block=block,
  163. **pool_kwargs,
  164. )
  165. def proxy_manager_for(self, proxy, **proxy_kwargs):
  166. """Return urllib3 ProxyManager for the given proxy.
  167. This method should not be called from user code, and is only
  168. exposed for use when subclassing the
  169. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  170. :param proxy: The proxy to return a urllib3 ProxyManager for.
  171. :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
  172. :returns: ProxyManager
  173. :rtype: urllib3.ProxyManager
  174. """
  175. if proxy in self.proxy_manager:
  176. manager = self.proxy_manager[proxy]
  177. elif proxy.lower().startswith("socks"):
  178. username, password = get_auth_from_url(proxy)
  179. manager = self.proxy_manager[proxy] = SOCKSProxyManager(
  180. proxy,
  181. username=username,
  182. password=password,
  183. num_pools=self._pool_connections,
  184. maxsize=self._pool_maxsize,
  185. block=self._pool_block,
  186. **proxy_kwargs,
  187. )
  188. else:
  189. proxy_headers = self.proxy_headers(proxy)
  190. manager = self.proxy_manager[proxy] = proxy_from_url(
  191. proxy,
  192. proxy_headers=proxy_headers,
  193. num_pools=self._pool_connections,
  194. maxsize=self._pool_maxsize,
  195. block=self._pool_block,
  196. **proxy_kwargs,
  197. )
  198. return manager
  199. def cert_verify(self, conn, url, verify, cert):
  200. """Verify a SSL certificate. This method should not be called from user
  201. code, and is only exposed for use when subclassing the
  202. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  203. :param conn: The urllib3 connection object associated with the cert.
  204. :param url: The requested URL.
  205. :param verify: Either a boolean, in which case it controls whether we verify
  206. the server's TLS certificate, or a string, in which case it must be a path
  207. to a CA bundle to use
  208. :param cert: The SSL certificate to verify.
  209. """
  210. if url.lower().startswith("https") and verify:
  211. cert_loc = None
  212. # Allow self-specified cert location.
  213. if verify is not True:
  214. cert_loc = verify
  215. if not cert_loc:
  216. cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
  217. if not cert_loc or not os.path.exists(cert_loc):
  218. raise OSError(
  219. f"Could not find a suitable TLS CA certificate bundle, "
  220. f"invalid path: {cert_loc}"
  221. )
  222. conn.cert_reqs = "CERT_REQUIRED"
  223. if not os.path.isdir(cert_loc):
  224. conn.ca_certs = cert_loc
  225. else:
  226. conn.ca_cert_dir = cert_loc
  227. else:
  228. conn.cert_reqs = "CERT_NONE"
  229. conn.ca_certs = None
  230. conn.ca_cert_dir = None
  231. if cert:
  232. if not isinstance(cert, basestring):
  233. conn.cert_file = cert[0]
  234. conn.key_file = cert[1]
  235. else:
  236. conn.cert_file = cert
  237. conn.key_file = None
  238. if conn.cert_file and not os.path.exists(conn.cert_file):
  239. raise OSError(
  240. f"Could not find the TLS certificate file, "
  241. f"invalid path: {conn.cert_file}"
  242. )
  243. if conn.key_file and not os.path.exists(conn.key_file):
  244. raise OSError(
  245. f"Could not find the TLS key file, invalid path: {conn.key_file}"
  246. )
  247. def build_response(self, req, resp):
  248. """Builds a :class:`Response <requests.Response>` object from a urllib3
  249. response. This should not be called from user code, and is only exposed
  250. for use when subclassing the
  251. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
  252. :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
  253. :param resp: The urllib3 response object.
  254. :rtype: requests.Response
  255. """
  256. response = Response()
  257. # Fallback to None if there's no status_code, for whatever reason.
  258. response.status_code = getattr(resp, "status", None)
  259. # Make headers case-insensitive.
  260. response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
  261. # Set encoding.
  262. response.encoding = get_encoding_from_headers(response.headers)
  263. response.raw = resp
  264. response.reason = response.raw.reason
  265. if isinstance(req.url, bytes):
  266. response.url = req.url.decode("utf-8")
  267. else:
  268. response.url = req.url
  269. # Add new cookies from the server.
  270. extract_cookies_to_jar(response.cookies, req, resp)
  271. # Give the Response some context.
  272. response.request = req
  273. response.connection = self
  274. return response
  275. def get_connection(self, url, proxies=None):
  276. """Returns a urllib3 connection for the given URL. This should not be
  277. called from user code, and is only exposed for use when subclassing the
  278. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  279. :param url: The URL to connect to.
  280. :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
  281. :rtype: urllib3.ConnectionPool
  282. """
  283. proxy = select_proxy(url, proxies)
  284. if proxy:
  285. proxy = prepend_scheme_if_needed(proxy, "http")
  286. proxy_url = parse_url(proxy)
  287. if not proxy_url.host:
  288. raise InvalidProxyURL(
  289. "Please check proxy URL. It is malformed "
  290. "and could be missing the host."
  291. )
  292. proxy_manager = self.proxy_manager_for(proxy)
  293. conn = proxy_manager.connection_from_url(url)
  294. else:
  295. # Only scheme should be lower case
  296. parsed = urlparse(url)
  297. url = parsed.geturl()
  298. conn = self.poolmanager.connection_from_url(url)
  299. return conn
  300. def close(self):
  301. """Disposes of any internal state.
  302. Currently, this closes the PoolManager and any active ProxyManager,
  303. which closes any pooled connections.
  304. """
  305. self.poolmanager.clear()
  306. for proxy in self.proxy_manager.values():
  307. proxy.clear()
  308. def request_url(self, request, proxies):
  309. """Obtain the url to use when making the final request.
  310. If the message is being sent through a HTTP proxy, the full URL has to
  311. be used. Otherwise, we should only use the path portion of the URL.
  312. This should not be called from user code, and is only exposed for use
  313. when subclassing the
  314. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  315. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
  316. :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
  317. :rtype: str
  318. """
  319. proxy = select_proxy(request.url, proxies)
  320. scheme = urlparse(request.url).scheme
  321. is_proxied_http_request = proxy and scheme != "https"
  322. using_socks_proxy = False
  323. if proxy:
  324. proxy_scheme = urlparse(proxy).scheme.lower()
  325. using_socks_proxy = proxy_scheme.startswith("socks")
  326. url = request.path_url
  327. if is_proxied_http_request and not using_socks_proxy:
  328. url = urldefragauth(request.url)
  329. return url
  330. def add_headers(self, request, **kwargs):
  331. """Add any headers needed by the connection. As of v2.0 this does
  332. nothing by default, but is left for overriding by users that subclass
  333. the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  334. This should not be called from user code, and is only exposed for use
  335. when subclassing the
  336. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  337. :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
  338. :param kwargs: The keyword arguments from the call to send().
  339. """
  340. pass
  341. def proxy_headers(self, proxy):
  342. """Returns a dictionary of the headers to add to any request sent
  343. through a proxy. This works with urllib3 magic to ensure that they are
  344. correctly sent to the proxy, rather than in a tunnelled request if
  345. CONNECT is being used.
  346. This should not be called from user code, and is only exposed for use
  347. when subclassing the
  348. :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
  349. :param proxy: The url of the proxy being used for this request.
  350. :rtype: dict
  351. """
  352. headers = {}
  353. username, password = get_auth_from_url(proxy)
  354. if username:
  355. headers["Proxy-Authorization"] = _basic_auth_str(username, password)
  356. return headers
  357. def send(
  358. self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
  359. ):
  360. """Sends PreparedRequest object. Returns Response object.
  361. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
  362. :param stream: (optional) Whether to stream the request content.
  363. :param timeout: (optional) How long to wait for the server to send
  364. data before giving up, as a float, or a :ref:`(connect timeout,
  365. read timeout) <timeouts>` tuple.
  366. :type timeout: float or tuple or urllib3 Timeout object
  367. :param verify: (optional) Either a boolean, in which case it controls whether
  368. we verify the server's TLS certificate, or a string, in which case it
  369. must be a path to a CA bundle to use
  370. :param cert: (optional) Any user-provided SSL certificate to be trusted.
  371. :param proxies: (optional) The proxies dictionary to apply to the request.
  372. :rtype: requests.Response
  373. """
  374. try:
  375. conn = self.get_connection(request.url, proxies)
  376. except LocationValueError as e:
  377. raise InvalidURL(e, request=request)
  378. self.cert_verify(conn, request.url, verify, cert)
  379. url = self.request_url(request, proxies)
  380. self.add_headers(
  381. request,
  382. stream=stream,
  383. timeout=timeout,
  384. verify=verify,
  385. cert=cert,
  386. proxies=proxies,
  387. )
  388. chunked = not (request.body is None or "Content-Length" in request.headers)
  389. if isinstance(timeout, tuple):
  390. try:
  391. connect, read = timeout
  392. timeout = TimeoutSauce(connect=connect, read=read)
  393. except ValueError:
  394. raise ValueError(
  395. f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
  396. f"or a single float to set both timeouts to the same value."
  397. )
  398. elif isinstance(timeout, TimeoutSauce):
  399. pass
  400. else:
  401. timeout = TimeoutSauce(connect=timeout, read=timeout)
  402. try:
  403. resp = conn.urlopen(
  404. method=request.method,
  405. url=url,
  406. body=request.body,
  407. headers=request.headers,
  408. redirect=False,
  409. assert_same_host=False,
  410. preload_content=False,
  411. decode_content=False,
  412. retries=self.max_retries,
  413. timeout=timeout,
  414. chunked=chunked,
  415. )
  416. except (ProtocolError, OSError) as err:
  417. raise ConnectionError(err, request=request)
  418. except MaxRetryError as e:
  419. if isinstance(e.reason, ConnectTimeoutError):
  420. # TODO: Remove this in 3.0.0: see #2811
  421. if not isinstance(e.reason, NewConnectionError):
  422. raise ConnectTimeout(e, request=request)
  423. if isinstance(e.reason, ResponseError):
  424. raise RetryError(e, request=request)
  425. if isinstance(e.reason, _ProxyError):
  426. raise ProxyError(e, request=request)
  427. if isinstance(e.reason, _SSLError):
  428. # This branch is for urllib3 v1.22 and later.
  429. raise SSLError(e, request=request)
  430. raise ConnectionError(e, request=request)
  431. except ClosedPoolError as e:
  432. raise ConnectionError(e, request=request)
  433. except _ProxyError as e:
  434. raise ProxyError(e)
  435. except (_SSLError, _HTTPError) as e:
  436. if isinstance(e, _SSLError):
  437. # This branch is for urllib3 versions earlier than v1.22
  438. raise SSLError(e, request=request)
  439. elif isinstance(e, ReadTimeoutError):
  440. raise ReadTimeout(e, request=request)
  441. elif isinstance(e, _InvalidHeader):
  442. raise InvalidHeader(e, request=request)
  443. else:
  444. raise
  445. return self.build_response(request, resp)
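
Usage sketch (not part of adapters.py): the docstrings above note that ``max_retries`` can take a urllib3 ``Retry`` instance for granular control, and that ``send()`` accepts either a single float or a ``(connect, read)`` tuple as ``timeout``. The snippet below is a minimal, hedged illustration of both points; the retry settings, pool sizes, and the example.org URL are illustrative assumptions, not values taken from this repository.

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    session = requests.Session()

    # Passing a urllib3 Retry instance instead of a plain integer gives finer
    # control over when a request is retried (values here are examples).
    retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503, 504])
    adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20, max_retries=retries)
    session.mount("https://", adapter)
    session.mount("http://", adapter)

    # timeout may be a single float or a (connect, read) tuple; HTTPAdapter.send
    # converts either form into a urllib3 Timeout object before calling urlopen.
    response = session.get("https://example.org/", timeout=(3.05, 27))
    print(response.status_code)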