Working prototype of the serious game for conveying knowledge about software engineering work models.

client.py 19KB

import json
import struct
import urllib
from functools import partial

import requests
import requests.exceptions
import websocket

from .. import auth
from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH,
                         DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS,
                         DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM,
                         MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES)
from ..errors import (DockerException, InvalidVersion, TLSParameterError,
                      create_api_error_from_http_exception)
from ..tls import TLSConfig
from ..transport import SSLHTTPAdapter, UnixHTTPAdapter
from ..utils import check_resource, config, update_headers, utils
from ..utils.json_stream import json_stream
from ..utils.proxy import ProxyConfig
from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter
from .build import BuildApiMixin
from .config import ConfigApiMixin
from .container import ContainerApiMixin
from .daemon import DaemonApiMixin
from .exec_api import ExecApiMixin
from .image import ImageApiMixin
from .network import NetworkApiMixin
from .plugin import PluginApiMixin
from .secret import SecretApiMixin
from .service import ServiceApiMixin
from .swarm import SwarmApiMixin
from .volume import VolumeApiMixin

try:
    from ..transport import NpipeHTTPAdapter
except ImportError:
    pass

try:
    from ..transport import SSHHTTPAdapter
except ImportError:
    pass


class APIClient(
        requests.Session,
        BuildApiMixin,
        ConfigApiMixin,
        ContainerApiMixin,
        DaemonApiMixin,
        ExecApiMixin,
        ImageApiMixin,
        NetworkApiMixin,
        PluginApiMixin,
        SecretApiMixin,
        ServiceApiMixin,
        SwarmApiMixin,
        VolumeApiMixin):
    """
    A low-level client for the Docker Engine API.

    Example:

        >>> import docker
        >>> client = docker.APIClient(base_url='unix://var/run/docker.sock')
        >>> client.version()
        {u'ApiVersion': u'1.33',
         u'Arch': u'amd64',
         u'BuildTime': u'2017-11-19T18:46:37.000000000+00:00',
         u'GitCommit': u'f4ffd2511c',
         u'GoVersion': u'go1.9.2',
         u'KernelVersion': u'4.14.3-1-ARCH',
         u'MinAPIVersion': u'1.12',
         u'Os': u'linux',
         u'Version': u'17.10.0-ce'}

    Args:
        base_url (str): URL to the Docker server. For example,
            ``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``.
        version (str): The version of the API to use. Set to ``auto`` to
            automatically detect the server's version. Default: ``1.35``
        timeout (int): Default timeout for API calls, in seconds.
        tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass
            ``True`` to enable it with default options, or pass a
            :py:class:`~docker.tls.TLSConfig` object to use custom
            configuration.
        user_agent (str): Set a custom user agent for requests to the server.
        credstore_env (dict): Override environment variables when calling the
            credential store process.
        use_ssh_client (bool): If set to `True`, an ssh connection is made
            via shelling out to the ssh client. Ensure the ssh client is
            installed and configured on the host.
        max_pool_size (int): The maximum number of connections
            to save in the pool.
    """

    __attrs__ = requests.Session.__attrs__ + ['_auth_configs',
                                              '_general_configs',
                                              '_version',
                                              'base_url',
                                              'timeout']

    def __init__(self, base_url=None, version=None,
                 timeout=DEFAULT_TIMEOUT_SECONDS, tls=False,
                 user_agent=DEFAULT_USER_AGENT, num_pools=None,
                 credstore_env=None, use_ssh_client=False,
                 max_pool_size=DEFAULT_MAX_POOL_SIZE):
        super().__init__()

        if tls and not base_url:
            raise TLSParameterError(
                'If using TLS, the base_url argument must be provided.'
            )

        self.base_url = base_url
        self.timeout = timeout
        self.headers['User-Agent'] = user_agent

        self._general_configs = config.load_general_config()

        proxy_config = self._general_configs.get('proxies', {})
        try:
            proxies = proxy_config[base_url]
        except KeyError:
            proxies = proxy_config.get('default', {})

        self._proxy_configs = ProxyConfig.from_dict(proxies)

        self._auth_configs = auth.load_config(
            config_dict=self._general_configs, credstore_env=credstore_env,
        )
        self.credstore_env = credstore_env

        base_url = utils.parse_host(
            base_url, IS_WINDOWS_PLATFORM, tls=bool(tls)
        )
        # SSH has a different default for num_pools to all other adapters
        num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if \
            base_url.startswith('ssh://') else DEFAULT_NUM_POOLS

        if base_url.startswith('http+unix://'):
            self._custom_adapter = UnixHTTPAdapter(
                base_url, timeout, pool_connections=num_pools,
                max_pool_size=max_pool_size
            )
            self.mount('http+docker://', self._custom_adapter)
            self._unmount('http://', 'https://')
            # host part of URL should be unused, but is resolved by requests
            # module in proxy_bypass_macosx_sysconf()
            self.base_url = 'http+docker://localhost'
        elif base_url.startswith('npipe://'):
            if not IS_WINDOWS_PLATFORM:
                raise DockerException(
                    'The npipe:// protocol is only supported on Windows'
                )
            try:
                self._custom_adapter = NpipeHTTPAdapter(
                    base_url, timeout, pool_connections=num_pools,
                    max_pool_size=max_pool_size
                )
            except NameError:
                raise DockerException(
                    'Install pypiwin32 package to enable npipe:// support'
                )
            self.mount('http+docker://', self._custom_adapter)
            self.base_url = 'http+docker://localnpipe'
        elif base_url.startswith('ssh://'):
            try:
                self._custom_adapter = SSHHTTPAdapter(
                    base_url, timeout, pool_connections=num_pools,
                    max_pool_size=max_pool_size, shell_out=use_ssh_client
                )
            except NameError:
                raise DockerException(
                    'Install paramiko package to enable ssh:// support'
                )
            self.mount('http+docker://ssh', self._custom_adapter)
            self._unmount('http://', 'https://')
            self.base_url = 'http+docker://ssh'
        else:
            # Use SSLAdapter for the ability to specify SSL version
            if isinstance(tls, TLSConfig):
                tls.configure_client(self)
            elif tls:
                self._custom_adapter = SSLHTTPAdapter(
                    pool_connections=num_pools)
                self.mount('https://', self._custom_adapter)
            self.base_url = base_url

        # version detection needs to be after unix adapter mounting
        if version is None or (isinstance(
            version,
            str
        ) and version.lower() == 'auto'):
            self._version = self._retrieve_server_version()
        else:
            self._version = version
        if not isinstance(self._version, str):
            raise DockerException(
                'Version parameter must be a string or None. Found {}'.format(
                    type(version).__name__
                )
            )
        if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
            raise InvalidVersion(
                'API versions below {} are no longer supported by this '
                'library.'.format(MINIMUM_DOCKER_API_VERSION)
            )
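    # Illustrative sketch of how the base_url schemes handled above map onto
    # the mounted transport adapters. The host names, socket paths, and port
    # below are placeholder assumptions for demonstration only.
    #
    #   >>> APIClient(base_url='unix://var/run/docker.sock')        # UnixHTTPAdapter
    #   >>> APIClient(base_url='npipe:////./pipe/docker_engine')    # NpipeHTTPAdapter (Windows only)
    #   >>> APIClient(base_url='ssh://user@remote-host')            # SSHHTTPAdapter
    #   >>> APIClient(base_url='tcp://127.0.0.1:2376', tls=True)    # SSLHTTPAdapter
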
    def _retrieve_server_version(self):
        try:
            return self.version(api_version=False)["ApiVersion"]
        except KeyError:
            raise DockerException(
                'Invalid response from docker daemon: key "ApiVersion"'
                ' is missing.'
            )
        except Exception as e:
            raise DockerException(
                f'Error while fetching server API version: {e}'
            )

    def _set_request_timeout(self, kwargs):
        """Prepare the kwargs for an HTTP request by inserting the timeout
        parameter, if not already present."""
        kwargs.setdefault('timeout', self.timeout)
        return kwargs

    @update_headers
    def _post(self, url, **kwargs):
        return self.post(url, **self._set_request_timeout(kwargs))

    @update_headers
    def _get(self, url, **kwargs):
        return self.get(url, **self._set_request_timeout(kwargs))

    @update_headers
    def _put(self, url, **kwargs):
        return self.put(url, **self._set_request_timeout(kwargs))

    @update_headers
    def _delete(self, url, **kwargs):
        return self.delete(url, **self._set_request_timeout(kwargs))

    def _url(self, pathfmt, *args, **kwargs):
        for arg in args:
            if not isinstance(arg, str):
                raise ValueError(
                    'Expected a string but found {} ({}) '
                    'instead'.format(arg, type(arg))
                )

        quote_f = partial(urllib.parse.quote, safe="/:")
        args = map(quote_f, args)

        if kwargs.get('versioned_api', True):
            return '{}/v{}{}'.format(
                self.base_url, self._version, pathfmt.format(*args)
            )
        else:
            return f'{self.base_url}{pathfmt.format(*args)}'
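    # Illustrative example of the URL construction above, assuming a client on
    # the default Unix socket and API version 1.35; the container ID is a
    # placeholder.
    #
    #   >>> self._url('/containers/{0}/json', 'abc123')
    #   'http+docker://localhost/v1.35/containers/abc123/json'
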
    def _raise_for_status(self, response):
        """Raises stored :class:`APIError`, if one occurred."""
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            raise create_api_error_from_http_exception(e) from e

    def _result(self, response, json=False, binary=False):
        assert not (json and binary)
        self._raise_for_status(response)

        if json:
            return response.json()
        if binary:
            return response.content
        return response.text

    def _post_json(self, url, data, **kwargs):
        # Go <1.1 can't unserialize null to a string
        # so we do this disgusting thing here.
        data2 = {}
        if data is not None and isinstance(data, dict):
            for k, v in iter(data.items()):
                if v is not None:
                    data2[k] = v
        elif data is not None:
            data2 = data

        if 'headers' not in kwargs:
            kwargs['headers'] = {}
        kwargs['headers']['Content-Type'] = 'application/json'

        return self._post(url, data=json.dumps(data2), **kwargs)
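    # Illustrative example of the None-stripping performed above; the keys and
    # values are placeholder assumptions, not a real request payload.
    #
    #   >>> data = {'Image': 'busybox', 'Cmd': None}
    #   >>> # is serialized and sent as '{"Image": "busybox"}'
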
    def _attach_params(self, override=None):
        return override or {
            'stdout': 1,
            'stderr': 1,
            'stream': 1
        }

    @check_resource('container')
    def _attach_websocket(self, container, params=None):
        url = self._url("/containers/{0}/attach/ws", container)
        req = requests.Request("POST", url, params=self._attach_params(params))
        full_url = req.prepare().url
        full_url = full_url.replace("http://", "ws://", 1)
        full_url = full_url.replace("https://", "wss://", 1)
        return self._create_websocket_connection(full_url)

    def _create_websocket_connection(self, url):
        return websocket.create_connection(url)

    def _get_raw_response_socket(self, response):
        self._raise_for_status(response)
        if self.base_url == "http+docker://localnpipe":
            sock = response.raw._fp.fp.raw.sock
        elif self.base_url.startswith('http+docker://ssh'):
            sock = response.raw._fp.fp.channel
        else:
            sock = response.raw._fp.fp.raw
            if self.base_url.startswith("https://"):
                sock = sock._sock
        try:
            # Keep a reference to the response to stop it being garbage
            # collected. If the response is garbage collected, it will
            # close TLS sockets.
            sock._response = response
        except AttributeError:
            # UNIX sockets can't have attributes set on them, but that's
            # fine because we won't be doing TLS over them
            pass

        return sock

    def _stream_helper(self, response, decode=False):
        """Generator for data coming from a chunked-encoded HTTP response."""
        if response.raw._fp.chunked:
            if decode:
                yield from json_stream(self._stream_helper(response, False))
            else:
                reader = response.raw
                while not reader.closed:
                    # this read call will block until we get a chunk
                    data = reader.read(1)
                    if not data:
                        break
                    if reader._fp.chunk_left:
                        data += reader.read(reader._fp.chunk_left)
                    yield data
        else:
            # Response isn't chunked, meaning we probably
            # encountered an error immediately
            yield self._result(response, json=decode)
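    # Illustrative note on the decode flag above: with decode=True the
    # generator yields parsed JSON objects rather than raw chunk bytes, e.g. a
    # pull progress stream would yield dicts such as {'status': 'Downloading',
    # ...} (the keys shown are placeholders).
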
    def _multiplexed_buffer_helper(self, response):
        """A generator of multiplexed data blocks read from a buffered
        response."""
        buf = self._result(response, binary=True)
        buf_length = len(buf)
        walker = 0
        while True:
            if buf_length - walker < STREAM_HEADER_SIZE_BYTES:
                break
            header = buf[walker:walker + STREAM_HEADER_SIZE_BYTES]
            _, length = struct.unpack_from('>BxxxL', header)
            start = walker + STREAM_HEADER_SIZE_BYTES
            end = start + length
            walker = end
            yield buf[start:end]
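    # Illustrative layout of the 8-byte multiplexed stream header unpacked
    # above with '>BxxxL': byte 0 is the stream type (0=stdin, 1=stdout,
    # 2=stderr), bytes 1-3 are padding, bytes 4-7 are the big-endian payload
    # length. For example, a stdout frame carrying 5 bytes:
    #
    #   >>> struct.unpack_from('>BxxxL', b'\x01\x00\x00\x00\x00\x00\x00\x05')
    #   (1, 5)
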
    def _multiplexed_response_stream_helper(self, response):
        """A generator of multiplexed data blocks coming from a response
        stream."""

        # Disable timeout on the underlying socket to prevent
        # Read timed out(s) for long running processes
        socket = self._get_raw_response_socket(response)
        self._disable_socket_timeout(socket)

        while True:
            header = response.raw.read(STREAM_HEADER_SIZE_BYTES)
            if not header:
                break
            _, length = struct.unpack('>BxxxL', header)
            if not length:
                continue
            data = response.raw.read(length)
            if not data:
                break
            yield data

    def _stream_raw_result(self, response, chunk_size=1, decode=True):
        ''' Stream result for TTY-enabled container and raw binary data'''
        self._raise_for_status(response)

        # Disable timeout on the underlying socket to prevent
        # Read timed out(s) for long running processes
        socket = self._get_raw_response_socket(response)
        self._disable_socket_timeout(socket)

        yield from response.iter_content(chunk_size, decode)

    def _read_from_socket(self, response, stream, tty=True, demux=False):
        """Consume all data from the socket, close the response and return the
        data. If stream=True, then a generator is returned instead and the
        caller is responsible for closing the response.
        """
        socket = self._get_raw_response_socket(response)

        gen = frames_iter(socket, tty)

        if demux:
            # The generator will output tuples (stdout, stderr)
            gen = (demux_adaptor(*frame) for frame in gen)
        else:
            # The generator will output strings
            gen = (data for (_, data) in gen)

        if stream:
            return gen
        else:
            try:
                # Wait for all frames, concatenate them, and return the result
                return consume_socket_output(gen, demux=demux)
            finally:
                response.close()
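    # Illustrative shapes of the output above (the byte strings are placeholder
    # assumptions): with demux=True each frame becomes a (stdout, stderr) tuple
    # such as (b'out\n', None) or (None, b'err\n'); with demux=False frames are
    # yielded as plain byte strings.
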
    def _disable_socket_timeout(self, socket):
        """ Depending on the combination of python version and whether we're
        connecting over http or https, we might need to access _sock, which
        may or may not exist; or we may need to just settimeout on socket
        itself, which also may or may not have settimeout on it. To avoid
        missing the correct one, we try both.

        We also do not want to set the timeout if it is already disabled, as
        you run the risk of changing a socket that was non-blocking to
        blocking, for example when using gevent.
        """
        sockets = [socket, getattr(socket, '_sock', None)]

        for s in sockets:
            if not hasattr(s, 'settimeout'):
                continue

            timeout = -1

            if hasattr(s, 'gettimeout'):
                timeout = s.gettimeout()

            # Don't change the timeout if it is already disabled.
            if timeout is None or timeout == 0.0:
                continue

            s.settimeout(None)

    @check_resource('container')
    def _check_is_tty(self, container):
        cont = self.inspect_container(container)
        return cont['Config']['Tty']

    def _get_result(self, container, stream, res):
        return self._get_result_tty(stream, res, self._check_is_tty(container))

    def _get_result_tty(self, stream, res, is_tty):
        # We should also use raw streaming (without keep-alives)
        # if we're dealing with a tty-enabled container.
        if is_tty:
            return self._stream_raw_result(res) if stream else \
                self._result(res, binary=True)

        self._raise_for_status(res)
        sep = b''
        if stream:
            return self._multiplexed_response_stream_helper(res)
        else:
            return sep.join(
                [x for x in self._multiplexed_buffer_helper(res)]
            )

    def _unmount(self, *args):
        for proto in args:
            self.adapters.pop(proto)

    def get_adapter(self, url):
        try:
            return super().get_adapter(url)
        except requests.exceptions.InvalidSchema as e:
            if self._custom_adapter:
                return self._custom_adapter
            else:
                raise e

    @property
    def api_version(self):
        return self._version

    def reload_config(self, dockercfg_path=None):
        """
        Force a reload of the auth configuration

        Args:
            dockercfg_path (str): Use a custom path for the Docker config file
                (default ``$HOME/.docker/config.json`` if present,
                otherwise ``$HOME/.dockercfg``)

        Returns:
            None
        """
        self._auth_configs = auth.load_config(
            dockercfg_path, credstore_env=self.credstore_env
        )
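
# A minimal usage sketch, assuming a local Docker daemon is reachable on the
# default Unix socket; the socket path is an assumption and the printed fields
# mirror the example in the class docstring above.
if __name__ == '__main__':
    client = APIClient(base_url='unix://var/run/docker.sock', version='auto')
    server = client.version()
    print(server.get('ApiVersion'), server.get('Version'))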