Working prototype of the serious game for conveying knowledge about software engineering work models.

build.py 14KB

import json
import logging
import os
import random

from .. import auth
from .. import constants
from .. import errors
from .. import utils

log = logging.getLogger(__name__)


class BuildApiMixin:
    def build(self, path=None, tag=None, quiet=False, fileobj=None,
              nocache=False, rm=False, timeout=None,
              custom_context=False, encoding=None, pull=False,
              forcerm=False, dockerfile=None, container_limits=None,
              decode=False, buildargs=None, gzip=False, shmsize=None,
              labels=None, cache_from=None, target=None, network_mode=None,
              squash=None, extra_hosts=None, platform=None, isolation=None,
              use_config_proxy=True):
        """
        Similar to the ``docker build`` command. Either ``path`` or ``fileobj``
        needs to be set. ``path`` can be a local path (to a directory
        containing a Dockerfile) or a remote URL. ``fileobj`` must be a
        readable file-like object to a Dockerfile.

        If you already have a tar file for the Docker build context (including
        a Dockerfile), pass a readable file-like object to ``fileobj`` and
        also pass ``custom_context=True``. If the stream is also compressed,
        set ``encoding`` to the correct value (e.g. ``gzip``).

        Example:
            >>> from io import BytesIO
            >>> from docker import APIClient
            >>> dockerfile = '''
            ... # Shared Volume
            ... FROM busybox:buildroot-2014.02
            ... VOLUME /data
            ... CMD ["/bin/sh"]
            ... '''
            >>> f = BytesIO(dockerfile.encode('utf-8'))
            >>> cli = APIClient(base_url='tcp://127.0.0.1:2375')
            >>> response = [line for line in cli.build(
            ...     fileobj=f, rm=True, tag='yourname/volume'
            ... )]
            >>> response
            ['{"stream":" ---\\u003e a9eb17255234\\n"}',
             '{"stream":"Step 1 : VOLUME /data\\n"}',
             '{"stream":" ---\\u003e Running in abdc1e6896c6\\n"}',
             '{"stream":" ---\\u003e 713bca62012e\\n"}',
             '{"stream":"Removing intermediate container abdc1e6896c6\\n"}',
             '{"stream":"Step 2 : CMD [\\"/bin/sh\\"]\\n"}',
             '{"stream":" ---\\u003e Running in dba30f2a1a7e\\n"}',
             '{"stream":" ---\\u003e 032b8b2855fc\\n"}',
             '{"stream":"Removing intermediate container dba30f2a1a7e\\n"}',
             '{"stream":"Successfully built 032b8b2855fc\\n"}']

        Args:
            path (str): Path to the directory containing the Dockerfile
            fileobj: A file object to use as the Dockerfile. (Or a file-like
                object)
            tag (str): A tag to add to the final image
            quiet (bool): Whether to return the status
            nocache (bool): Don't use the cache when set to ``True``
            rm (bool): Remove intermediate containers. The ``docker build``
                command now defaults to ``--rm=true``, but we have kept the
                old default of `False` to preserve backward compatibility
            timeout (int): HTTP timeout
            custom_context (bool): Optional if using ``fileobj``
            encoding (str): The encoding for a stream. Set to ``gzip`` for
                compressing
            pull (bool): Downloads any updates to the FROM image in Dockerfiles
            forcerm (bool): Always remove intermediate containers, even after
                unsuccessful builds
            dockerfile (str): path within the build context to the Dockerfile
            gzip (bool): If set to ``True``, gzip compression/encoding is used
            buildargs (dict): A dictionary of build arguments
            container_limits (dict): A dictionary of limits applied to each
                container created by the build process. Valid keys:

                - memory (int): set memory limit for build
                - memswap (int): Total memory (memory + swap), -1 to disable
                    swap
                - cpushares (int): CPU shares (relative weight)
                - cpusetcpus (str): CPUs in which to allow execution, e.g.,
                    ``"0-3"``, ``"0,1"``
            decode (bool): If set to ``True``, the returned stream will be
                decoded into dicts on the fly. Default ``False``
            shmsize (int): Size of `/dev/shm` in bytes. The size must be
                greater than 0. If omitted the system uses 64MB
            labels (dict): A dictionary of labels to set on the image
            cache_from (:py:class:`list`): A list of images used for build
                cache resolution
            target (str): Name of the build-stage to build in a multi-stage
                Dockerfile
            network_mode (str): networking mode for the run commands during
                build
            squash (bool): Squash the resulting image's layers into a
                single layer.
            extra_hosts (dict): Extra hosts to add to /etc/hosts in building
                containers, as a mapping of hostname to IP address.
            platform (str): Platform in the format ``os[/arch[/variant]]``
            isolation (str): Isolation technology used during build.
                Default: `None`.
            use_config_proxy (bool): If ``True``, and if the docker client
                configuration file (``~/.docker/config.json`` by default)
                contains a proxy configuration, the corresponding environment
                variables will be set in the container being built.

        Returns:
            A generator for the build output.

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
            ``TypeError``
                If neither ``path`` nor ``fileobj`` is specified.
        """
        remote = context = None
        headers = {}
        container_limits = container_limits or {}
        buildargs = buildargs or {}
        if path is None and fileobj is None:
            raise TypeError("Either path or fileobj needs to be provided.")
        if gzip and encoding is not None:
            raise errors.DockerException(
                'Can not use custom encoding if gzip is enabled'
            )
        for key in container_limits.keys():
            if key not in constants.CONTAINER_LIMITS_KEYS:
                raise errors.DockerException(
                    f'Invalid container_limits key {key}'
                )

        if custom_context:
            if not fileobj:
                raise TypeError("You must specify fileobj with custom_context")
            context = fileobj
        elif fileobj is not None:
            context = utils.mkbuildcontext(fileobj)
        elif path.startswith(('http://', 'https://',
                              'git://', 'github.com/', 'git@')):
            remote = path
        elif not os.path.isdir(path):
            raise TypeError("You must specify a directory to build in path")
        else:
            dockerignore = os.path.join(path, '.dockerignore')
            exclude = None
            if os.path.exists(dockerignore):
                with open(dockerignore) as f:
                    exclude = list(filter(
                        lambda x: x != '' and x[0] != '#',
                        [line.strip() for line in f.read().splitlines()]
                    ))
            dockerfile = process_dockerfile(dockerfile, path)
            context = utils.tar(
                path, exclude=exclude, dockerfile=dockerfile, gzip=gzip
            )
            encoding = 'gzip' if gzip else encoding

        u = self._url('/build')
        params = {
            't': tag,
            'remote': remote,
            'q': quiet,
            'nocache': nocache,
            'rm': rm,
            'forcerm': forcerm,
            'pull': pull,
            'dockerfile': dockerfile,
        }
        params.update(container_limits)

        if use_config_proxy:
            proxy_args = self._proxy_configs.get_environment()
            for k, v in proxy_args.items():
                buildargs.setdefault(k, v)
        if buildargs:
            params.update({'buildargs': json.dumps(buildargs)})

        if shmsize:
            if utils.version_gte(self._version, '1.22'):
                params.update({'shmsize': shmsize})
            else:
                raise errors.InvalidVersion(
                    'shmsize was only introduced in API version 1.22'
                )

        if labels:
            if utils.version_gte(self._version, '1.23'):
                params.update({'labels': json.dumps(labels)})
            else:
                raise errors.InvalidVersion(
                    'labels was only introduced in API version 1.23'
                )

        if cache_from:
            if utils.version_gte(self._version, '1.25'):
                params.update({'cachefrom': json.dumps(cache_from)})
            else:
                raise errors.InvalidVersion(
                    'cache_from was only introduced in API version 1.25'
                )

        if target:
            if utils.version_gte(self._version, '1.29'):
                params.update({'target': target})
            else:
                raise errors.InvalidVersion(
                    'target was only introduced in API version 1.29'
                )

        if network_mode:
            if utils.version_gte(self._version, '1.25'):
                params.update({'networkmode': network_mode})
            else:
                raise errors.InvalidVersion(
                    'network_mode was only introduced in API version 1.25'
                )

        if squash:
            if utils.version_gte(self._version, '1.25'):
                params.update({'squash': squash})
            else:
                raise errors.InvalidVersion(
                    'squash was only introduced in API version 1.25'
                )

        if extra_hosts is not None:
            if utils.version_lt(self._version, '1.27'):
                raise errors.InvalidVersion(
                    'extra_hosts was only introduced in API version 1.27'
                )

            if isinstance(extra_hosts, dict):
                extra_hosts = utils.format_extra_hosts(extra_hosts)
            params.update({'extrahosts': extra_hosts})

        if platform is not None:
            if utils.version_lt(self._version, '1.32'):
                raise errors.InvalidVersion(
                    'platform was only introduced in API version 1.32'
                )
            params['platform'] = platform

        if isolation is not None:
            if utils.version_lt(self._version, '1.24'):
                raise errors.InvalidVersion(
                    'isolation was only introduced in API version 1.24'
                )
            params['isolation'] = isolation

        if context is not None:
            headers = {'Content-Type': 'application/tar'}
            if encoding:
                headers['Content-Encoding'] = encoding

        self._set_auth_headers(headers)

        response = self._post(
            u,
            data=context,
            params=params,
            headers=headers,
            stream=True,
            timeout=timeout,
        )

        if context is not None and not custom_context:
            context.close()

        return self._stream_helper(response, decode=decode)

    @utils.minimum_version('1.31')
    def prune_builds(self):
        """
        Delete the builder cache

        Returns:
            (dict): A dictionary containing information about the operation's
                    result. The ``SpaceReclaimed`` key indicates the amount of
                    bytes of disk space reclaimed.

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
        """
        url = self._url("/build/prune")
        return self._result(self._post(url), True)

    def _set_auth_headers(self, headers):
        log.debug('Looking for auth config')

        # If we don't have any auth data so far, try reloading the config
        # file one more time in case anything showed up in there.
        if not self._auth_configs or self._auth_configs.is_empty:
            log.debug("No auth config in memory - loading from filesystem")
            self._auth_configs = auth.load_config(
                credstore_env=self.credstore_env
            )

        # Send the full auth configuration (if any exists), since the build
        # could use any (or all) of the registries.
        if self._auth_configs:
            auth_data = self._auth_configs.get_all_credentials()

            # See https://github.com/docker/docker-py/issues/1683
            if (auth.INDEX_URL not in auth_data and
                    auth.INDEX_NAME in auth_data):
                auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})

            log.debug(
                'Sending auth config ({})'.format(
                    ', '.join(repr(k) for k in auth_data.keys())
                )
            )

            if auth_data:
                headers['X-Registry-Config'] = auth.encode_header(
                    auth_data
                )
        else:
            log.debug('No auth config found')

def process_dockerfile(dockerfile, path):
    if not dockerfile:
        return (None, None)

    abs_dockerfile = dockerfile
    if not os.path.isabs(dockerfile):
        abs_dockerfile = os.path.join(path, dockerfile)
        if constants.IS_WINDOWS_PLATFORM and path.startswith(
                constants.WINDOWS_LONGPATH_PREFIX):
            abs_dockerfile = '{}{}'.format(
                constants.WINDOWS_LONGPATH_PREFIX,
                os.path.normpath(
                    abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
                )
            )
    if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
            os.path.relpath(abs_dockerfile, path).startswith('..')):
        # Dockerfile not in context - read data to insert into tar later
        with open(abs_dockerfile) as df:
            return (
                f'.dockerfile.{random.getrandbits(160):x}',
                df.read()
            )

    # Dockerfile is inside the context - return path relative to context root
    if dockerfile == abs_dockerfile:
        # Only calculate relpath if necessary to avoid errors
        # on Windows client -> Linux Docker
        # see https://github.com/docker/compose/issues/5969
        dockerfile = os.path.relpath(abs_dockerfile, path)
    return (dockerfile, None)
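
A minimal usage sketch for the ``build`` method above, kept outside the module itself. It assumes a Docker daemon reachable on the default local Unix socket and an existing build-context directory ``./app`` containing a Dockerfile; the socket path, directory name, and image tag are illustrative placeholders, not part of this repository.

    # Hypothetical usage sketch (not part of build.py):
    # stream a local build and print each status line.
    from docker import APIClient

    # Assumption: the daemon listens on the default local Unix socket.
    cli = APIClient(base_url='unix://var/run/docker.sock')

    # Assumption: './app' is a directory containing a Dockerfile.
    for chunk in cli.build(path='./app', tag='example/app:latest',
                           rm=True, decode=True):
        # With decode=True each chunk is already a dict, e.g. {'stream': '...'}
        if 'stream' in chunk:
            print(chunk['stream'], end='')

Because ``decode=True``, the generator yields dicts rather than raw JSON strings, which matches the ``decode`` parameter documented in the docstring above.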