Development of an internal social media platform with personalised dashboards for students

__init__.py 6.4KB

# -*- coding: utf-8 -*-
"""
    celery.backends.database
    ~~~~~~~~~~~~~~~~~~~~~~~~

    SQLAlchemy result store backend.

"""
from __future__ import absolute_import

import logging

from contextlib import contextmanager
from functools import wraps

from celery import states
from celery.backends.base import BaseBackend
from celery.exceptions import ImproperlyConfigured
from celery.five import range
from celery.utils.timeutils import maybe_timedelta

from .models import Task
from .models import TaskSet
from .session import SessionManager

logger = logging.getLogger(__name__)

__all__ = ['DatabaseBackend']


def _sqlalchemy_installed():
    try:
        import sqlalchemy
    except ImportError:
        raise ImproperlyConfigured(
            'The database result backend requires SQLAlchemy to be installed.'
            'See http://pypi.python.org/pypi/SQLAlchemy')
    return sqlalchemy
_sqlalchemy_installed()

from sqlalchemy.exc import DatabaseError, InvalidRequestError  # noqa
from sqlalchemy.orm.exc import StaleDataError                  # noqa


@contextmanager
def session_cleanup(session):
    try:
        yield
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()


def retry(fun):

    @wraps(fun)
    def _inner(*args, **kwargs):
        max_retries = kwargs.pop('max_retries', 3)

        for retries in range(max_retries):
            try:
                return fun(*args, **kwargs)
            except (DatabaseError, InvalidRequestError, StaleDataError):
                logger.warning(
                    "Failed operation %s. Retrying %s more times.",
                    fun.__name__, max_retries - retries - 1,
                    exc_info=True,
                )
                if retries + 1 >= max_retries:
                    raise

    return _inner


class DatabaseBackend(BaseBackend):
    """The database result backend."""

    # ResultSet.iterate should sleep this much between each pool,
    # to not bombard the database with queries.
    subpolling_interval = 0.5

    def __init__(self, dburi=None, expires=None,
                 engine_options=None, url=None, **kwargs):
        # The `url` argument was added later and is used by
        # the app to set backend by url (celery.backends.get_backend_by_url)
        super(DatabaseBackend, self).__init__(**kwargs)
        conf = self.app.conf
        self.expires = maybe_timedelta(self.prepare_expires(expires))
        self.url = url or dburi or conf.CELERY_RESULT_DBURI
        self.engine_options = dict(
            engine_options or {},
            **conf.CELERY_RESULT_ENGINE_OPTIONS or {})
        self.short_lived_sessions = kwargs.get(
            'short_lived_sessions',
            conf.CELERY_RESULT_DB_SHORT_LIVED_SESSIONS,
        )

        tablenames = conf.CELERY_RESULT_DB_TABLENAMES or {}
        Task.__table__.name = tablenames.get('task', 'celery_taskmeta')
        TaskSet.__table__.name = tablenames.get('group', 'celery_tasksetmeta')

        if not self.url:
            raise ImproperlyConfigured(
                'Missing connection string! Do you have '
                'CELERY_RESULT_DBURI set to a real value?')

    def ResultSession(self, session_manager=SessionManager()):
        return session_manager.session_factory(
            dburi=self.url,
            short_lived_sessions=self.short_lived_sessions,
            **self.engine_options
        )

    @retry
    def _store_result(self, task_id, result, status,
                      traceback=None, max_retries=3, **kwargs):
        """Store return value and status of an executed task."""
        session = self.ResultSession()
        with session_cleanup(session):
            task = list(session.query(Task).filter(Task.task_id == task_id))
            task = task and task[0]
            if not task:
                task = Task(task_id)
                session.add(task)
                session.flush()
            task.result = result
            task.status = status
            task.traceback = traceback
            session.commit()
            return result

    @retry
    def _get_task_meta_for(self, task_id):
        """Get task metadata for a task by id."""
        session = self.ResultSession()
        with session_cleanup(session):
            task = list(session.query(Task).filter(Task.task_id == task_id))
            task = task and task[0]
            if not task:
                task = Task(task_id)
                task.status = states.PENDING
                task.result = None
            return self.meta_from_decoded(task.to_dict())

    @retry
    def _save_group(self, group_id, result):
        """Store the result of an executed group."""
        session = self.ResultSession()
        with session_cleanup(session):
            group = TaskSet(group_id, result)
            session.add(group)
            session.flush()
            session.commit()
            return result

    @retry
    def _restore_group(self, group_id):
        """Get metadata for group by id."""
        session = self.ResultSession()
        with session_cleanup(session):
            group = session.query(TaskSet).filter(
                TaskSet.taskset_id == group_id).first()
            if group:
                return group.to_dict()

    @retry
    def _delete_group(self, group_id):
        """Delete metadata for group by id."""
        session = self.ResultSession()
        with session_cleanup(session):
            session.query(TaskSet).filter(
                TaskSet.taskset_id == group_id).delete()
            session.flush()
            session.commit()

    @retry
    def _forget(self, task_id):
        """Forget about result."""
        session = self.ResultSession()
        with session_cleanup(session):
            session.query(Task).filter(Task.task_id == task_id).delete()
            session.commit()

    def cleanup(self):
        """Delete expired metadata."""
        session = self.ResultSession()
        expires = self.expires
        now = self.app.now()
        with session_cleanup(session):
            session.query(Task).filter(
                Task.date_done < (now - expires)).delete()
            session.query(TaskSet).filter(
                TaskSet.date_done < (now - expires)).delete()
            session.commit()

    def __reduce__(self, args=(), kwargs={}):
        kwargs.update(
            dict(dburi=self.url,
                 expires=self.expires,
                 engine_options=self.engine_options))
        return super(DatabaseBackend, self).__reduce__(args, kwargs)
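For context, a minimal usage sketch follows, showing how an application would select this backend through Celery's configuration rather than instantiating DatabaseBackend directly. The app name, broker URL, SQLite file path, custom table names, and the add() task are illustrative assumptions, not part of this module; the 'db+' URL prefix is how a result-backend URL is routed to the SQLAlchemy backend.

# Minimal sketch (assumptions noted above; paths and names are placeholders).
from celery import Celery

app = Celery('proj', broker='amqp://guest@localhost//')

# The 'db+' prefix selects the SQLAlchemy result backend implemented in this
# module; the rest of the URL is passed through as the database connection
# string (self.url above).
app.conf.CELERY_RESULT_BACKEND = 'db+sqlite:///celery_results.sqlite'

# Optional: override the default table names used by the Task and TaskSet
# models ('celery_taskmeta' / 'celery_tasksetmeta').
app.conf.CELERY_RESULT_DB_TABLENAMES = {
    'task': 'myapp_taskmeta',
    'group': 'myapp_groupmeta',
}

@app.task
def add(x, y):
    return x + y

# With a worker running, the worker writes rows via _store_result() and the
# caller reads them back via _get_task_meta_for():
#   result = add.delay(2, 2)
#   result.get()  # -> 4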