Funktionierender Prototyp des Serious Games zur Vermittlung von Wissen zu Software-Engineering-Arbeitsmodellen.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

npipeconn.py 3.4KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103
  1. import queue
  2. import requests.adapters
  3. from docker.transport.basehttpadapter import BaseHTTPAdapter
  4. from .. import constants
  5. from .npipesocket import NpipeSocket
  6. import urllib3
  7. import urllib3.connection
  8. RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
  9. class NpipeHTTPConnection(urllib3.connection.HTTPConnection):
  10. def __init__(self, npipe_path, timeout=60):
  11. super().__init__(
  12. 'localhost', timeout=timeout
  13. )
  14. self.npipe_path = npipe_path
  15. self.timeout = timeout
  16. def connect(self):
  17. sock = NpipeSocket()
  18. sock.settimeout(self.timeout)
  19. sock.connect(self.npipe_path)
  20. self.sock = sock
  21. class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
  22. def __init__(self, npipe_path, timeout=60, maxsize=10):
  23. super().__init__(
  24. 'localhost', timeout=timeout, maxsize=maxsize
  25. )
  26. self.npipe_path = npipe_path
  27. self.timeout = timeout
  28. def _new_conn(self):
  29. return NpipeHTTPConnection(
  30. self.npipe_path, self.timeout
  31. )
  32. # When re-using connections, urllib3 tries to call select() on our
  33. # NpipeSocket instance, causing a crash. To circumvent this, we override
  34. # _get_conn, where that check happens.
  35. def _get_conn(self, timeout):
  36. conn = None
  37. try:
  38. conn = self.pool.get(block=self.block, timeout=timeout)
  39. except AttributeError: # self.pool is None
  40. raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
  41. except queue.Empty:
  42. if self.block:
  43. raise urllib3.exceptions.EmptyPoolError(
  44. self,
  45. "Pool reached maximum size and no more "
  46. "connections are allowed."
  47. )
  48. # Oh well, we'll create a new connection then
  49. return conn or self._new_conn()
  50. class NpipeHTTPAdapter(BaseHTTPAdapter):
  51. __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path',
  52. 'pools',
  53. 'timeout',
  54. 'max_pool_size']
  55. def __init__(self, base_url, timeout=60,
  56. pool_connections=constants.DEFAULT_NUM_POOLS,
  57. max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
  58. self.npipe_path = base_url.replace('npipe://', '')
  59. self.timeout = timeout
  60. self.max_pool_size = max_pool_size
  61. self.pools = RecentlyUsedContainer(
  62. pool_connections, dispose_func=lambda p: p.close()
  63. )
  64. super().__init__()
  65. def get_connection(self, url, proxies=None):
  66. with self.pools.lock:
  67. pool = self.pools.get(url)
  68. if pool:
  69. return pool
  70. pool = NpipeHTTPConnectionPool(
  71. self.npipe_path, self.timeout,
  72. maxsize=self.max_pool_size
  73. )
  74. self.pools[url] = pool
  75. return pool
  76. def request_url(self, request, proxies):
  77. # The select_proxy utility in requests errors out when the provided URL
  78. # doesn't have a hostname, like is the case when using a UNIX socket.
  79. # Since proxies are an irrelevant notion in the case of UNIX sockets
  80. # anyway, we simply return the path URL directly.
  81. # See also: https://github.com/docker/docker-sdk-python/issues/811
  82. return request.path_url