Development of an internal social media platform with personalised dashboards for students

log.py 8.9KB

# -*- coding: utf-8 -*-
"""
    celery.app.log
    ~~~~~~~~~~~~~~

    The Celery instance's logging section: ``Celery.log``.

    Sets up logging for the worker and other programs,
    redirects stdouts, colors log output, and applies
    logging-related compatibility fixes.

"""
from __future__ import absolute_import

import logging
import os
import sys

from logging.handlers import WatchedFileHandler

from kombu.log import NullHandler
from kombu.utils.encoding import set_default_encoding_file

from celery import signals
from celery._state import get_current_task
from celery.five import class_property, string_t
from celery.utils import isatty, node_format
from celery.utils.log import (
    get_logger, mlevel,
    ColorFormatter, ensure_process_aware_logger,
    LoggingProxy, get_multiprocessing_logger,
    reset_multiprocessing_logger,
)
from celery.utils.term import colored

__all__ = ['TaskFormatter', 'Logging']

MP_LOG = os.environ.get('MP_LOG', False)
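
# TaskFormatter is installed on the 'celery.task' logger by
# Logging.setup_task_loggers below (formatter=TaskFormatter).  It copies the
# current task's id and name onto each record, so task log formats can use
# %(task_id)s and %(task_name)s; outside of a task both fall back to '???'.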
class TaskFormatter(ColorFormatter):

    def format(self, record):
        task = get_current_task()
        if task and task.request:
            record.__dict__.update(task_id=task.request.id,
                                   task_name=task.name)
        else:
            record.__dict__.setdefault('task_name', '???')
            record.__dict__.setdefault('task_id', '???')
        return ColorFormatter.format(self, record)
class Logging(object):
    #: The logging subsystem is only configured once per process.
    #: setup_logging_subsystem sets this flag, and subsequent calls
    #: will do nothing.
    _setup = False

    def __init__(self, app):
        self.app = app
        self.loglevel = mlevel(self.app.conf.CELERYD_LOG_LEVEL)
        self.format = self.app.conf.CELERYD_LOG_FORMAT
        self.task_format = self.app.conf.CELERYD_TASK_LOG_FORMAT
        self.colorize = self.app.conf.CELERYD_LOG_COLOR

    def setup(self, loglevel=None, logfile=None, redirect_stdouts=False,
              redirect_level='WARNING', colorize=None, hostname=None):
        handled = self.setup_logging_subsystem(
            loglevel, logfile, colorize=colorize, hostname=hostname,
        )
        if not handled:
            if redirect_stdouts:
                self.redirect_stdouts(redirect_level)
        os.environ.update(
            CELERY_LOG_LEVEL=str(loglevel) if loglevel else '',
            CELERY_LOG_FILE=str(logfile) if logfile else '',
        )
        return handled

    def redirect_stdouts(self, loglevel=None, name='celery.redirected'):
        self.redirect_stdouts_to_logger(
            get_logger(name), loglevel=loglevel
        )
        os.environ.update(
            CELERY_LOG_REDIRECT='1',
            CELERY_LOG_REDIRECT_LEVEL=str(loglevel or ''),
        )
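
    # If a receiver is connected to the ``setup_logging`` signal, the
    # ``if not receivers`` block below is skipped and logging configuration
    # is left entirely to that receiver (including the task loggers); only
    # the stream-encoding and _MP_FORK_* environment handling still runs.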
    def setup_logging_subsystem(self, loglevel=None, logfile=None, format=None,
                                colorize=None, hostname=None, **kwargs):
        if self.already_setup:
            return
        if logfile and hostname:
            logfile = node_format(logfile, hostname)
        self.already_setup = True
        loglevel = mlevel(loglevel or self.loglevel)
        format = format or self.format
        colorize = self.supports_color(colorize, logfile)
        reset_multiprocessing_logger()
        ensure_process_aware_logger()
        receivers = signals.setup_logging.send(
            sender=None, loglevel=loglevel, logfile=logfile,
            format=format, colorize=colorize,
        )

        if not receivers:
            root = logging.getLogger()

            if self.app.conf.CELERYD_HIJACK_ROOT_LOGGER:
                root.handlers = []
                get_logger('celery').handlers = []
                get_logger('celery.task').handlers = []
                get_logger('celery.redirected').handlers = []

            # Configure root logger
            self._configure_logger(
                root, logfile, loglevel, format, colorize, **kwargs
            )

            # Configure the multiprocessing logger
            self._configure_logger(
                get_multiprocessing_logger(),
                logfile, loglevel if MP_LOG else logging.ERROR,
                format, colorize, **kwargs
            )

            signals.after_setup_logger.send(
                sender=None, logger=root,
                loglevel=loglevel, logfile=logfile,
                format=format, colorize=colorize,
            )

            # then setup the root task logger.
            self.setup_task_loggers(loglevel, logfile, colorize=colorize)

        try:
            stream = logging.getLogger().handlers[0].stream
        except (AttributeError, IndexError):
            pass
        else:
            set_default_encoding_file(stream)

        # This is a hack for multiprocessing's fork+exec, so that
        # logging before Process.run works.
        logfile_name = logfile if isinstance(logfile, string_t) else ''
        os.environ.update(_MP_FORK_LOGLEVEL_=str(loglevel),
                          _MP_FORK_LOGFILE_=logfile_name,
                          _MP_FORK_LOGFORMAT_=format)
        return receivers

    def _configure_logger(self, logger, logfile, loglevel,
                          format, colorize, **kwargs):
        if logger is not None:
            self.setup_handlers(logger, logfile, format,
                                colorize, **kwargs)
            if loglevel:
                logger.setLevel(loglevel)

    def setup_task_loggers(self, loglevel=None, logfile=None, format=None,
                           colorize=None, propagate=False, **kwargs):
        """Set up the task logger.

        If `logfile` is not specified, then `sys.stderr` is used.

        Returns the base task logger object.

        """
        loglevel = mlevel(loglevel or self.loglevel)
        format = format or self.task_format
        colorize = self.supports_color(colorize, logfile)

        logger = self.setup_handlers(
            get_logger('celery.task'),
            logfile, format, colorize,
            formatter=TaskFormatter, **kwargs
        )
        logger.setLevel(loglevel)
        # this is an int for some reason, better not question why.
        logger.propagate = int(propagate)
        signals.after_setup_task_logger.send(
            sender=None, logger=logger,
            loglevel=loglevel, logfile=logfile,
            format=format, colorize=colorize,
        )
        return logger

    def redirect_stdouts_to_logger(self, logger, loglevel=None,
                                   stdout=True, stderr=True):
        """Redirect :class:`sys.stdout` and :class:`sys.stderr` to a
        logging instance.

        :param logger: The :class:`logging.Logger` instance to redirect to.
        :param loglevel: The loglevel redirected messages will be logged as.

        """
        proxy = LoggingProxy(logger, loglevel)
        if stdout:
            sys.stdout = proxy
        if stderr:
            sys.stderr = proxy
        return proxy

    def supports_color(self, colorize=None, logfile=None):
        colorize = self.colorize if colorize is None else colorize
        if self.app.IS_WINDOWS:
            # Windows does not support ANSI color codes.
            return False
        if colorize or colorize is None:
            # Only use color if there is no active log file
            # and stderr is an actual terminal.
            return logfile is None and isatty(sys.stderr)
        return colorize

    def colored(self, logfile=None, enabled=None):
        return colored(enabled=self.supports_color(enabled, logfile))

    def setup_handlers(self, logger, logfile, format, colorize,
                       formatter=ColorFormatter, **kwargs):
        if self._is_configured(logger):
            return logger
        handler = self._detect_handler(logfile)
        handler.setFormatter(formatter(format, use_color=colorize))
        logger.addHandler(handler)
        return logger

    def _detect_handler(self, logfile=None):
        """Create log handler with either a filename, an open stream
        or :const:`None` (stderr)."""
        logfile = sys.__stderr__ if logfile is None else logfile
        if hasattr(logfile, 'write'):
            return logging.StreamHandler(logfile)
        return WatchedFileHandler(logfile)

    def _has_handler(self, logger):
        if logger.handlers:
            return any(not isinstance(h, NullHandler) for h in logger.handlers)

    def _is_configured(self, logger):
        return self._has_handler(logger) and not getattr(
            logger, '_rudimentary_setup', False)

    def setup_logger(self, name='celery', *args, **kwargs):
        """Deprecated: No longer used."""
        self.setup_logging_subsystem(*args, **kwargs)
        return logging.root

    def get_default_logger(self, name='celery', **kwargs):
        return get_logger(name)

    @class_property
    def already_setup(cls):
        return cls._setup

    @already_setup.setter  # noqa
    def already_setup(cls, was_setup):
        cls._setup = was_setup
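
For context, a minimal usage sketch (not part of log.py) of how this module is typically driven. It assumes a Celery 3.x-era application whose ``app.log`` attribute is the ``Logging`` instance defined above; the project name 'proj' is a placeholder.

from celery import Celery, signals

app = Celery('proj')   # 'proj' is a placeholder application name

# Option 1: let Celery configure the root and task loggers itself.
app.log.setup_logging_subsystem(loglevel='INFO')

# Option 2: take over logging completely.  Connecting any receiver to the
# setup_logging signal makes setup_logging_subsystem() skip its own
# configuration (the ``if not receivers`` branch above).
@signals.setup_logging.connect
def configure_logging(**kwargs):
    import logging
    logging.basicConfig(level=logging.INFO)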