Development of an internal social media platform with personalised dashboards for students

builtins.py 14KB

# -*- coding: utf-8 -*-
"""
    celery.app.builtins
    ~~~~~~~~~~~~~~~~~~~

    Built-in tasks that are always available in all
    app instances.  E.g. chord, group and xmap.

"""
from __future__ import absolute_import

from collections import deque

from celery._state import get_current_worker_task, connect_on_app_finalize
from celery.utils import uuid
from celery.utils.log import get_logger

__all__ = []

logger = get_logger(__name__)

@connect_on_app_finalize
def add_backend_cleanup_task(app):
    """The backend cleanup task can be used to clean up the default result
    backend.

    If the configured backend requires periodic cleanup this task is also
    automatically configured to run every day at 4am (requires
    :program:`celery beat` to be running).

    """
    @app.task(name='celery.backend_cleanup',
              shared=False, _force_evaluate=True)
    def backend_cleanup():
        app.backend.cleanup()
    return backend_cleanup
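
# Usage note (not part of the original module): the task registered above can
# also be invoked by hand through the app's task registry; ``app`` here stands
# for your own Celery application instance.
#
#     app.tasks['celery.backend_cleanup'].delay()
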
@connect_on_app_finalize
def add_unlock_chord_task(app):
    """This task is used by result backends without native chord support.

    It joins chords by creating a task chain polling the header for
    completion.

    """
    from celery.canvas import signature
    from celery.exceptions import ChordError
    from celery.result import allow_join_result, result_from_tuple

    default_propagate = app.conf.CELERY_CHORD_PROPAGATES

    @app.task(name='celery.chord_unlock', max_retries=None, shared=False,
              default_retry_delay=1, ignore_result=True, _force_evaluate=True,
              bind=True)
    def unlock_chord(self, group_id, callback, interval=None, propagate=None,
                     max_retries=None, result=None,
                     Result=app.AsyncResult, GroupResult=app.GroupResult,
                     result_from_tuple=result_from_tuple):
        # if propagate is disabled exceptions raised by chord tasks
        # will be sent as part of the result list to the chord callback.
        # Since 3.1 propagate will be enabled by default, and instead
        # the chord callback changes state to FAILURE with the
        # exception set to ChordError.
        propagate = default_propagate if propagate is None else propagate
        if interval is None:
            interval = self.default_retry_delay

        # check if the task group is ready, and if so apply the callback.
        deps = GroupResult(
            group_id,
            [result_from_tuple(r, app=app) for r in result],
            app=app,
        )
        j = deps.join_native if deps.supports_native_join else deps.join

        try:
            ready = deps.ready()
        except Exception as exc:
            raise self.retry(
                exc=exc, countdown=interval, max_retries=max_retries,
            )
        else:
            if not ready:
                raise self.retry(countdown=interval, max_retries=max_retries)

        callback = signature(callback, app=app)
        try:
            with allow_join_result():
                ret = j(timeout=3.0, propagate=propagate)
        except Exception as exc:
            try:
                culprit = next(deps._failed_join_report())
                reason = 'Dependency {0.id} raised {1!r}'.format(
                    culprit, exc,
                )
            except StopIteration:
                reason = repr(exc)
            logger.error('Chord %r raised: %r', group_id, exc, exc_info=1)
            app.backend.chord_error_from_stack(callback,
                                               ChordError(reason))
        else:
            try:
                callback.delay(ret)
            except Exception as exc:
                logger.error('Chord %r raised: %r', group_id, exc, exc_info=1)
                app.backend.chord_error_from_stack(
                    callback,
                    exc=ChordError('Callback error: {0!r}'.format(exc)),
                )
    return unlock_chord
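
# Usage note (not part of the original module): with a result backend that
# lacks native chord support, a chord like the hypothetical sketch below is
# completed by 'celery.chord_unlock' retrying itself (every ``interval``
# seconds, default_retry_delay=1 above) until the header group is ready;
# ``add`` and ``tsum`` stand for assumed project tasks, not Celery built-ins.
#
#     from celery import chord
#     res = chord(add.s(i, i) for i in range(10))(tsum.s())
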
@connect_on_app_finalize
def add_map_task(app):
    from celery.canvas import signature

    @app.task(name='celery.map', shared=False, _force_evaluate=True)
    def xmap(task, it):
        task = signature(task, app=app).type
        return [task(item) for item in it]
    return xmap
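
# Usage note (not part of the original module): 'celery.map' is what a
# ``task.map(...)`` signature resolves to; assuming a single-argument project
# task ``fetch_url``:
#
#     fetch_url.map(['http://a.example', 'http://b.example']).delay()
#
# The whole iterable is consumed inside one worker task, which returns the
# list of individual return values.
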
@connect_on_app_finalize
def add_starmap_task(app):
    from celery.canvas import signature

    @app.task(name='celery.starmap', shared=False, _force_evaluate=True)
    def xstarmap(task, it):
        task = signature(task, app=app).type
        return [task(*item) for item in it]
    return xstarmap
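
# Usage note (not part of the original module): 'celery.starmap' backs
# ``task.starmap(...)``, where each item is unpacked into positional
# arguments; assuming a two-argument project task ``add``:
#
#     add.starmap(zip(range(10), range(10))).delay()
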
@connect_on_app_finalize
def add_chunk_task(app):
    from celery.canvas import chunks as _chunks

    @app.task(name='celery.chunks', shared=False, _force_evaluate=True)
    def chunks(task, it, n):
        return _chunks.apply_chunks(task, it, n)
    return chunks
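
# Usage note (not part of the original module): 'celery.chunks' backs
# ``task.chunks(it, n)``, which splits the iterable into parts of ``n`` items
# and processes each part in its own task; again assuming a project task
# ``add``:
#
#     add.chunks(zip(range(100), range(100)), 10).apply_async()
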
@connect_on_app_finalize
def add_group_task(app):
    _app = app
    from celery.canvas import maybe_signature, signature
    from celery.result import result_from_tuple

    class Group(app.Task):
        app = _app
        name = 'celery.group'
        accept_magic_kwargs = False
        _decorated = True

        def run(self, tasks, result, group_id, partial_args,
                add_to_parent=True):
            app = self.app
            result = result_from_tuple(result, app)
            # any partial args are added to all tasks in the group
            taskit = (signature(task, app=app).clone(partial_args)
                      for i, task in enumerate(tasks))
            if self.request.is_eager or app.conf.CELERY_ALWAYS_EAGER:
                return app.GroupResult(
                    result.id,
                    [stask.apply(group_id=group_id) for stask in taskit],
                )
            with app.producer_or_acquire() as pub:
                [stask.apply_async(group_id=group_id, producer=pub,
                                   add_to_parent=False) for stask in taskit]
            parent = get_current_worker_task()
            if add_to_parent and parent:
                parent.add_trail(result)
            return result

        def prepare(self, options, tasks, args, **kwargs):
            options['group_id'] = group_id = (
                options.setdefault('task_id', uuid()))

            def prepare_member(task):
                task = maybe_signature(task, app=self.app)
                task.options['group_id'] = group_id
                return task, task.freeze()

            try:
                tasks, res = list(zip(
                    *[prepare_member(task) for task in tasks]
                ))
            except ValueError:  # tasks empty
                tasks, res = [], []
            return (tasks, self.app.GroupResult(group_id, res), group_id, args)

        def apply_async(self, partial_args=(), kwargs={}, **options):
            if self.app.conf.CELERY_ALWAYS_EAGER:
                return self.apply(partial_args, kwargs, **options)
            tasks, result, gid, args = self.prepare(
                options, args=partial_args, **kwargs
            )
            super(Group, self).apply_async((
                list(tasks), result.as_tuple(), gid, args), **options
            )
            return result

        def apply(self, args=(), kwargs={}, **options):
            return super(Group, self).apply(
                self.prepare(options, args=args, **kwargs),
                **options).get()
    return Group
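
# Usage note (not part of the original module): the canvas ``group`` primitive
# is delivered through this 'celery.group' task; a minimal sketch, assuming a
# project task ``add``:
#
#     from celery import group
#     res = group(add.s(i, i) for i in range(10)).apply_async()
#     res.get()   # -> [0, 2, 4, ..., 18]
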
@connect_on_app_finalize
def add_chain_task(app):
    from celery.canvas import (
        Signature, chain, chord, group, maybe_signature, maybe_unroll_group,
    )
    _app = app

    class Chain(app.Task):
        app = _app
        name = 'celery.chain'
        accept_magic_kwargs = False
        _decorated = True

        def prepare_steps(self, args, tasks):
            app = self.app
            steps = deque(tasks)
            next_step = prev_task = prev_res = None
            tasks, results = [], []
            i = 0
            while steps:
                # First task get partial args from chain.
                task = maybe_signature(steps.popleft(), app=app)
                task = task.clone() if i else task.clone(args)
                res = task.freeze()
                i += 1

                if isinstance(task, group):
                    task = maybe_unroll_group(task)
                if isinstance(task, chain):
                    # splice the chain
                    steps.extendleft(reversed(task.tasks))
                    continue
                elif isinstance(task, group) and steps and \
                        not isinstance(steps[0], group):
                    # automatically upgrade group(..) | s to chord(group, s)
                    try:
                        next_step = steps.popleft()
                        # for chords we freeze by pretending it's a normal
                        # task instead of a group.
                        res = Signature.freeze(next_step)
                        task = chord(task, body=next_step, task_id=res.task_id)
                    except IndexError:
                        pass  # no callback, so keep as group

                if prev_task:
                    # link previous task to this task.
                    prev_task.link(task)
                    # set the results parent attribute.
                    if not res.parent:
                        res.parent = prev_res

                if not isinstance(prev_task, chord):
                    results.append(res)
                tasks.append(task)
                prev_task, prev_res = task, res
            return tasks, results

        def apply_async(self, args=(), kwargs={}, group_id=None, chord=None,
                        task_id=None, link=None, link_error=None, **options):
            if self.app.conf.CELERY_ALWAYS_EAGER:
                return self.apply(args, kwargs, **options)
            options.pop('publisher', None)
            tasks, results = self.prepare_steps(args, kwargs['tasks'])
            result = results[-1]
            if group_id:
                tasks[-1].set(group_id=group_id)
            if chord:
                tasks[-1].set(chord=chord)
            if task_id:
                tasks[-1].set(task_id=task_id)
                result = tasks[-1].type.AsyncResult(task_id)
            # make sure we can do a link() and link_error() on a chain object.
            if link:
                tasks[-1].set(link=link)
            # and if any task in the chain fails, call the errbacks
            if link_error:
                for task in tasks:
                    task.set(link_error=link_error)
            tasks[0].apply_async(**options)
            return result

        def apply(self, args=(), kwargs={}, signature=maybe_signature,
                  **options):
            app = self.app
            last, fargs = None, args  # fargs passed to first task only
            for task in kwargs['tasks']:
                res = signature(task, app=app).clone(fargs).apply(
                    last and (last.get(), ),
                )
                res.parent, last, fargs = last, res, None
            return last
    return Chain
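
# Usage note (not part of the original module): the 'celery.chain' task runs
# the linked steps produced by prepare_steps() above, each step feeding its
# return value to the next; a sketch with assumed project tasks ``add`` and
# ``mul``:
#
#     from celery import chain
#     res = chain(add.s(2, 2), mul.s(8)).apply_async()
#     res.get()   # -> 32
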
@connect_on_app_finalize
def add_chord_task(app):
    """Every chord is executed in a dedicated task, so that the chord
    can be used as a signature, and this generates the task
    responsible for that."""
    from celery import group
    from celery.canvas import maybe_signature
    _app = app
    default_propagate = app.conf.CELERY_CHORD_PROPAGATES

    class Chord(app.Task):
        app = _app
        name = 'celery.chord'
        accept_magic_kwargs = False
        ignore_result = False
        _decorated = True

        def run(self, header, body, partial_args=(), interval=None,
                countdown=1, max_retries=None, propagate=None,
                eager=False, **kwargs):
            app = self.app
            propagate = default_propagate if propagate is None else propagate
            group_id = uuid()

            # - convert back to group if serialized
            tasks = header.tasks if isinstance(header, group) else header
            header = group([
                maybe_signature(s, app=app).clone() for s in tasks
            ], app=self.app)
            # - eager applies the group inline
            if eager:
                return header.apply(args=partial_args, task_id=group_id)

            body['chord_size'] = len(header.tasks)
            results = header.freeze(group_id=group_id, chord=body).results

            return self.backend.apply_chord(
                header, partial_args, group_id,
                body, interval=interval, countdown=countdown,
                max_retries=max_retries, propagate=propagate, result=results,
            )

        def apply_async(self, args=(), kwargs={}, task_id=None,
                        group_id=None, chord=None, **options):
            app = self.app
            if app.conf.CELERY_ALWAYS_EAGER:
                return self.apply(args, kwargs, **options)
            header = kwargs.pop('header')
            body = kwargs.pop('body')
            header, body = (maybe_signature(header, app=app),
                            maybe_signature(body, app=app))
            # forward certain options to body
            if chord is not None:
                body.options['chord'] = chord
            if group_id is not None:
                body.options['group_id'] = group_id
            [body.link(s) for s in options.pop('link', [])]
            [body.link_error(s) for s in options.pop('link_error', [])]
            body_result = body.freeze(task_id)
            parent = super(Chord, self).apply_async((header, body, args),
                                                    kwargs, **options)
            body_result.parent = parent
            return body_result

        def apply(self, args=(), kwargs={}, propagate=True, **options):
            body = kwargs['body']
            res = super(Chord, self).apply(args, dict(kwargs, eager=True),
                                           **options)
            return maybe_signature(body, app=self.app).apply(
                args=(res.get(propagate=propagate).get(), ))
    return Chord
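
# Usage note (not part of the original module): because every chord goes
# through this dedicated 'celery.chord' task, the chord itself behaves like a
# signature; a sketch with the same assumed ``add`` and ``tsum`` project
# tasks:
#
#     from celery import chord
#     res = chord((add.s(i, i) for i in range(10)), tsum.s()).apply_async()
#     res.get()   # -> 90, i.e. tsum applied to the header results
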