Development of an internal social media platform with personalised dashboards for students

graph.py

import warnings
from functools import total_ordering

from django.db.migrations.state import ProjectState
from django.utils.datastructures import OrderedSet

from .exceptions import CircularDependencyError, NodeNotFoundError

RECURSION_DEPTH_WARNING = (
    "Maximum recursion depth exceeded while generating migration graph, "
    "falling back to iterative approach. If you're experiencing performance issues, "
    "consider squashing migrations as described at "
    "https://docs.djangoproject.com/en/dev/topics/migrations/#squashing-migrations."
)


@total_ordering
class Node:
    """
    A single node in the migration graph. Contains direct links to adjacent
    nodes in either direction.
    """
    def __init__(self, key):
        self.key = key
        self.children = set()
        self.parents = set()

    def __eq__(self, other):
        return self.key == other

    def __lt__(self, other):
        return self.key < other

    def __hash__(self):
        return hash(self.key)

    def __getitem__(self, item):
        return self.key[item]

    def __str__(self):
        return str(self.key)

    def __repr__(self):
        return '<%s: (%r, %r)>' % (self.__class__.__name__, self.key[0], self.key[1])

    def add_child(self, child):
        self.children.add(child)

    def add_parent(self, parent):
        self.parents.add(parent)

    # Use manual caching, @cached_property effectively doubles the
    # recursion depth for each recursion.
    def ancestors(self):
        # Use self.key instead of self to speed up the frequent hashing
        # when constructing an OrderedSet.
        if '_ancestors' not in self.__dict__:
            ancestors = []
            for parent in sorted(self.parents, reverse=True):
                ancestors += parent.ancestors()
            ancestors.append(self.key)
            self.__dict__['_ancestors'] = list(OrderedSet(ancestors))
        return self.__dict__['_ancestors']

    # Use manual caching, @cached_property effectively doubles the
    # recursion depth for each recursion.
    def descendants(self):
        # Use self.key instead of self to speed up the frequent hashing
        # when constructing an OrderedSet.
        if '_descendants' not in self.__dict__:
            descendants = []
            for child in sorted(self.children, reverse=True):
                descendants += child.descendants()
            descendants.append(self.key)
            self.__dict__['_descendants'] = list(OrderedSet(descendants))
        return self.__dict__['_descendants']


class DummyNode(Node):
    def __init__(self, key, origin, error_message):
        super().__init__(key)
        self.origin = origin
        self.error_message = error_message

    def promote(self):
        """
        Transition dummy to a normal node and clean off excess attribs.
        Creating a Node object from scratch would be too much of a
        hassle as many dependencies would need to be remapped.
        """
        del self.origin
        del self.error_message
        self.__class__ = Node

    def raise_error(self):
        raise NodeNotFoundError(self.error_message, self.key, origin=self.origin)


class MigrationGraph:
    """
    Represent the digraph of all migrations in a project.

    Each migration is a node, and each dependency is an edge. There are
    no implicit dependencies between numbered migrations - the numbering is
    merely a convention to aid file listing. Every new numbered migration
    has a declared dependency to the previous number, meaning that VCS
    branch merges can be detected and resolved.

    Migration files can be marked as replacing another set of migrations -
    this is to support the "squash" feature. The graph handler isn't responsible
    for these; instead, the code to load them in here should examine the
    migration files and, if the replaced migrations are all either unapplied
    or not present, ignore the replaced ones, load in just the replacing
    migration, and repoint any dependencies that pointed to the replaced
    migrations to point to the replacing one.

    A node should be a tuple: (app_path, migration_name). The tree special-cases
    things within an app - namely, root nodes and leaf nodes ignore dependencies
    to other apps.
    """
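
    # Example (hypothetical names): the key ("students", "0001_initial")
    # identifies the "0001_initial" migration of the "students" app; a
    # dependency of ("students", "0002_profiles") on it means that
    # "0001_initial" must be applied first.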

    def __init__(self):
        self.node_map = {}
        self.nodes = {}
        self.cached = False

    def add_node(self, key, migration):
        # If the key already exists, then it must be a dummy node.
        dummy_node = self.node_map.get(key)
        if dummy_node:
            # Promote DummyNode to Node.
            dummy_node.promote()
        else:
            node = Node(key)
            self.node_map[key] = node
        self.nodes[key] = migration
        self.clear_cache()

    def add_dummy_node(self, key, origin, error_message):
        node = DummyNode(key, origin, error_message)
        self.node_map[key] = node
        self.nodes[key] = None

    def add_dependency(self, migration, child, parent, skip_validation=False):
        """
        This may create dummy nodes if they don't yet exist. If
        `skip_validation=True`, validate_consistency() should be called
        afterwards.
        """
        if child not in self.nodes:
            error_message = (
                "Migration %s dependencies reference nonexistent"
                " child node %r" % (migration, child)
            )
            self.add_dummy_node(child, migration, error_message)
        if parent not in self.nodes:
            error_message = (
                "Migration %s dependencies reference nonexistent"
                " parent node %r" % (migration, parent)
            )
            self.add_dummy_node(parent, migration, error_message)
        self.node_map[child].add_parent(self.node_map[parent])
        self.node_map[parent].add_child(self.node_map[child])
        if not skip_validation:
            self.validate_consistency()
        self.clear_cache()

    def remove_replaced_nodes(self, replacement, replaced):
        """
        Remove each of the `replaced` nodes (when they exist). Any
        dependencies that were referencing them are changed to reference the
        `replacement` node instead.
        """
        # Cast list of replaced keys to set to speed up lookup later.
        replaced = set(replaced)
        try:
            replacement_node = self.node_map[replacement]
        except KeyError as err:
            raise NodeNotFoundError(
                "Unable to find replacement node %r. It was either never added"
                " to the migration graph, or has been removed." % (replacement,),
                replacement
            ) from err
        for replaced_key in replaced:
            self.nodes.pop(replaced_key, None)
            replaced_node = self.node_map.pop(replaced_key, None)
            if replaced_node:
                for child in replaced_node.children:
                    child.parents.remove(replaced_node)
                    # We don't want to create dependencies between the replaced
                    # node and the replacement node as this would lead to
                    # self-referencing on the replacement node at a later iteration.
                    if child.key not in replaced:
                        replacement_node.add_child(child)
                        child.add_parent(replacement_node)
                for parent in replaced_node.parents:
                    parent.children.remove(replaced_node)
                    # Again, to avoid self-referencing.
                    if parent.key not in replaced:
                        replacement_node.add_parent(parent)
                        parent.add_child(replacement_node)
        self.clear_cache()

    def remove_replacement_node(self, replacement, replaced):
        """
        The inverse operation to `remove_replaced_nodes`. Almost. Remove the
        replacement node `replacement` and remap its child nodes to `replaced`
        - the list of nodes it would have replaced. Don't remap its parent
        nodes as they are expected to be correct already.
        """
        self.nodes.pop(replacement, None)
        try:
            replacement_node = self.node_map.pop(replacement)
        except KeyError as err:
            raise NodeNotFoundError(
                "Unable to remove replacement node %r. It was either never added"
                " to the migration graph, or has been removed already." % (replacement,),
                replacement
            ) from err
        replaced_nodes = set()
        replaced_nodes_parents = set()
        for key in replaced:
            replaced_node = self.node_map.get(key)
            if replaced_node:
                replaced_nodes.add(replaced_node)
                replaced_nodes_parents |= replaced_node.parents
        # We're only interested in the latest replaced node, so filter out
        # replaced nodes that are parents of other replaced nodes.
        replaced_nodes -= replaced_nodes_parents
        for child in replacement_node.children:
            child.parents.remove(replacement_node)
            for replaced_node in replaced_nodes:
                replaced_node.add_child(child)
                child.add_parent(replaced_node)
        for parent in replacement_node.parents:
            parent.children.remove(replacement_node)
            # NOTE: There is no need to remap parent dependencies as we can
            # assume the replaced nodes already have the correct ancestry.
        self.clear_cache()

    def validate_consistency(self):
        """Ensure there are no dummy nodes remaining in the graph."""
        [n.raise_error() for n in self.node_map.values() if isinstance(n, DummyNode)]

    def clear_cache(self):
        if self.cached:
            for node in self.nodes:
                self.node_map[node].__dict__.pop('_ancestors', None)
                self.node_map[node].__dict__.pop('_descendants', None)
            self.cached = False

    def forwards_plan(self, target):
        """
        Given a node, return a list of which previous nodes (dependencies) must
        be applied, ending with the node itself. This is the list you would
        follow if applying the migrations to a database.
        """
        if target not in self.nodes:
            raise NodeNotFoundError("Node %r not a valid node" % (target,), target)
        # Use parent.key instead of parent to speed up the frequent hashing in ensure_not_cyclic.
        self.ensure_not_cyclic(target, lambda x: (parent.key for parent in self.node_map[x].parents))
        self.cached = True
        node = self.node_map[target]
        try:
            return node.ancestors()
        except RuntimeError:
            # Fall back to iterative DFS.
            warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
            return self.iterative_dfs(node)

    def backwards_plan(self, target):
        """
        Given a node, return a list of which dependent nodes must be
        unapplied, ending with the node itself. This is the list you
        would follow if removing the migrations from a database.
        """
        if target not in self.nodes:
            raise NodeNotFoundError("Node %r not a valid node" % (target,), target)
        # Use child.key instead of child to speed up the frequent hashing in ensure_not_cyclic.
        self.ensure_not_cyclic(target, lambda x: (child.key for child in self.node_map[x].children))
        self.cached = True
        node = self.node_map[target]
        try:
            return node.descendants()
        except RuntimeError:
            # Fall back to iterative DFS.
            warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
            return self.iterative_dfs(node, forwards=False)

    def iterative_dfs(self, start, forwards=True):
        """Iterative depth-first search for finding dependencies."""
        visited = []
        stack = [start]
        while stack:
            node = stack.pop()
            visited.append(node)
            stack += sorted(node.parents if forwards else node.children)
        # Reversing and deduplicating keeps only each node's final visit, so a
        # node always appears after everything reachable from it (its
        # dependencies when forwards=True, its dependents otherwise).
        return list(OrderedSet(reversed(visited)))

    def root_nodes(self, app=None):
        """
        Return all root nodes - that is, nodes with no dependencies inside
        their app. These are the starting point for an app.
        """
        roots = set()
        for node in self.nodes:
            if all(key[0] != node[0] for key in self.node_map[node].parents) and (not app or app == node[0]):
                roots.add(node)
        return sorted(roots)

    def leaf_nodes(self, app=None):
        """
        Return all leaf nodes - that is, nodes with no dependents in their app.
        These are the "most current" version of an app's schema.

        Having more than one per app is technically an error, but one that
        gets handled further up, in the interactive command - it's usually the
        result of a VCS merge and needs some user input.
        """
        leaves = set()
        for node in self.nodes:
            if all(key[0] != node[0] for key in self.node_map[node].children) and (not app or app == node[0]):
                leaves.add(node)
        return sorted(leaves)

    def ensure_not_cyclic(self, start, get_children):
        # Algo from GvR:
        # http://neopythonic.blogspot.co.uk/2009/01/detecting-cycles-in-directed-graph.html
        todo = set(self.nodes)
        while todo:
            node = todo.pop()
            stack = [node]
            while stack:
                top = stack[-1]
                for node in get_children(top):
                    if node in stack:
                        cycle = stack[stack.index(node):]
                        raise CircularDependencyError(", ".join("%s.%s" % n for n in cycle))
                    if node in todo:
                        stack.append(node)
                        todo.remove(node)
                        break
                else:
                    node = stack.pop()

    def __str__(self):
        return 'Graph: %s nodes, %s edges' % self._nodes_and_edges()

    def __repr__(self):
        nodes, edges = self._nodes_and_edges()
        return '<%s: nodes=%s, edges=%s>' % (self.__class__.__name__, nodes, edges)

    def _nodes_and_edges(self):
        return len(self.nodes), sum(len(node.parents) for node in self.node_map.values())

    def _generate_plan(self, nodes, at_end):
        plan = []
        for node in nodes:
            for migration in self.forwards_plan(node):
                if migration not in plan and (at_end or migration not in nodes):
                    plan.append(migration)
        return plan

    def make_state(self, nodes=None, at_end=True, real_apps=None):
        """
        Given a migration node or nodes, return a complete ProjectState for it.

        If at_end is False, return the state before the migration has run.
        If nodes is not provided, return the overall most current project state.
        """
        if nodes is None:
            nodes = list(self.leaf_nodes())
        if not nodes:
            return ProjectState()
        if not isinstance(nodes[0], tuple):
            nodes = [nodes]
        plan = self._generate_plan(nodes, at_end)
        project_state = ProjectState(real_apps=real_apps)
        for node in plan:
            project_state = self.nodes[node].mutate_state(project_state, preserve=False)
        return project_state

    def __contains__(self, node):
        return node in self.nodes
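
For orientation, here is a minimal usage sketch of the MigrationGraph API defined above. It is an illustration only: the app and migration names are made up, the import path assumes the module is available at its upstream location (django.db.migrations.graph), and None is passed in place of real Migration objects, which is enough for computing plans but not for make_state().

# Minimal sketch, assuming Django is installed and this module is importable
# under its upstream path; the app/migration names below are hypothetical.
from django.db.migrations.graph import MigrationGraph

graph = MigrationGraph()
# Node keys are (app_label, migration_name) tuples; None stands in for the
# Migration object since only plan computation is exercised here.
for key in [("app_a", "0001_initial"), ("app_a", "0002_add_field"), ("app_b", "0001_initial")]:
    graph.add_node(key, None)

# add_dependency(migration, child, parent): `child` must be applied after `parent`.
graph.add_dependency("app_a.0002_add_field", ("app_a", "0002_add_field"), ("app_a", "0001_initial"))
graph.add_dependency("app_b.0001_initial", ("app_b", "0001_initial"), ("app_a", "0002_add_field"))

print(graph.forwards_plan(("app_b", "0001_initial")))
# [('app_a', '0001_initial'), ('app_a', '0002_add_field'), ('app_b', '0001_initial')]
print(graph.backwards_plan(("app_a", "0001_initial")))
# [('app_b', '0001_initial'), ('app_a', '0002_add_field'), ('app_a', '0001_initial')]
print(graph.leaf_nodes("app_a"))
# [('app_a', '0002_add_field')]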