import warnings
from collections import deque
from functools import total_ordering

from django.db.migrations.state import ProjectState
from django.utils.datastructures import OrderedSet

from .exceptions import CircularDependencyError, NodeNotFoundError

RECURSION_DEPTH_WARNING = (
    "Maximum recursion depth exceeded while generating migration graph, "
    "falling back to iterative approach. If you're experiencing performance issues, "
    "consider squashing migrations as described at "
    "https://docs.djangoproject.com/en/dev/topics/migrations/#squashing-migrations."
)


@total_ordering
class Node:
    """
    A single node in the migration graph. Contains direct links to adjacent
    nodes in either direction.
    """
    def __init__(self, key):
        self.key = key
        self.children = set()
        self.parents = set()

    def __eq__(self, other):
        return self.key == other

    def __lt__(self, other):
        return self.key < other

    def __hash__(self):
        return hash(self.key)

    def __getitem__(self, item):
        return self.key[item]

    def __str__(self):
        return str(self.key)

    def __repr__(self):
        return '<%s: (%r, %r)>' % (self.__class__.__name__, self.key[0], self.key[1])

    def add_child(self, child):
        self.children.add(child)

    def add_parent(self, parent):
        self.parents.add(parent)

    # Use manual caching; @cached_property effectively doubles the
    # recursion depth for each recursion.
    def ancestors(self):
        # Use self.key instead of self to speed up the frequent hashing
        # when constructing an OrderedSet.
        if '_ancestors' not in self.__dict__:
            ancestors = deque([self.key])
            for parent in sorted(self.parents):
                ancestors.extendleft(reversed(parent.ancestors()))
            self.__dict__['_ancestors'] = list(OrderedSet(ancestors))
        return self.__dict__['_ancestors']

    # Use manual caching; @cached_property effectively doubles the
    # recursion depth for each recursion.
    def descendants(self):
        # Use self.key instead of self to speed up the frequent hashing
        # when constructing an OrderedSet.
        if '_descendants' not in self.__dict__:
            descendants = deque([self.key])
            for child in sorted(self.children):
                descendants.extendleft(reversed(child.descendants()))
            self.__dict__['_descendants'] = list(OrderedSet(descendants))
        return self.__dict__['_descendants']
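
# A Node compares equal to, and hashes like, its key, so the rest of this
# module can treat plain (app_path, migration_name) tuples and Node objects
# interchangeably. A minimal sketch of that contract (hypothetical key, not
# part of this module):
#
#     node = Node(('app', '0001_initial'))
#     assert node == ('app', '0001_initial')
#     assert node[0] == 'app'
#     assert hash(node) == hash(('app', '0001_initial'))
#
# ancestors() returns keys dependency-first, ending with the node's own key,
# which is exactly the shape forwards_plan() needs.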


class DummyNode(Node):
    def __init__(self, key, origin, error_message):
        super().__init__(key)
        self.origin = origin
        self.error_message = error_message

    def promote(self):
        """
        Transition dummy to a normal node and clean off excess attribs.
        Creating a Node object from scratch would be too much of a
        hassle as many dependencies would need to be remapped.
        """
        del self.origin
        del self.error_message
        self.__class__ = Node

    def raise_error(self):
        raise NodeNotFoundError(self.error_message, self.key, origin=self.origin)


class MigrationGraph:
    """
    Represent the digraph of all migrations in a project.

    Each migration is a node, and each dependency is an edge. There are
    no implicit dependencies between numbered migrations - the numbering is
    merely a convention to aid file listing. Every new numbered migration
    has a declared dependency to the previous number, meaning that VCS
    branch merges can be detected and resolved.

    Migration files can be marked as replacing another set of migrations -
    this is to support the "squash" feature. The graph handler isn't responsible
    for these; instead, the code to load them in here should examine the
    migration files and, if the replaced migrations are all either unapplied
    or not present, it should ignore the replaced ones, load in just the
    replacing migration, and repoint any dependencies that pointed to the
    replaced migrations to point to the replacing one.

    A node should be a tuple: (app_path, migration_name). The graph special-cases
    things within an app - namely, root nodes and leaf nodes ignore dependencies
    to other apps.
    """

    def __init__(self):
        self.node_map = {}
        self.nodes = {}
        self.cached = False
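
    # A minimal construction sketch (hypothetical migration objects and keys,
    # not part of this module) - nodes are added first, then edges:
    #
    #     graph = MigrationGraph()
    #     graph.add_node(('app', '0001_initial'), migration_0001)
    #     graph.add_node(('app', '0002_change'), migration_0002)
    #     graph.add_dependency(
    #         migration_0002, ('app', '0002_change'), ('app', '0001_initial'),
    #     )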

    def add_node(self, key, migration):
        # If the key already exists, then it must be a dummy node.
        dummy_node = self.node_map.get(key)
        if dummy_node:
            # Promote DummyNode to Node.
            dummy_node.promote()
        else:
            node = Node(key)
            self.node_map[key] = node
        self.nodes[key] = migration
        self.clear_cache()

    def add_dummy_node(self, key, origin, error_message):
        node = DummyNode(key, origin, error_message)
        self.node_map[key] = node
        self.nodes[key] = None

    def add_dependency(self, migration, child, parent, skip_validation=False):
        """
        This may create dummy nodes if they don't yet exist. If
        `skip_validation=True`, validate_consistency() should be called
        afterwards.
        """
        if child not in self.nodes:
            error_message = (
                "Migration %s dependencies reference nonexistent"
                " child node %r" % (migration, child)
            )
            self.add_dummy_node(child, migration, error_message)
        if parent not in self.nodes:
            error_message = (
                "Migration %s dependencies reference nonexistent"
                " parent node %r" % (migration, parent)
            )
            self.add_dummy_node(parent, migration, error_message)
        self.node_map[child].add_parent(self.node_map[parent])
        self.node_map[parent].add_child(self.node_map[child])
        if not skip_validation:
            self.validate_consistency()
        self.clear_cache()
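
    # When a dependency references a key that hasn't been added yet, a
    # DummyNode is recorded in its place; validate_consistency() then raises
    # NodeNotFoundError for any dummy never promoted by a later add_node()
    # call. A hedged sketch (illustrative keys and migration object):
    #
    #     graph.add_dependency(
    #         migration_0002, ('app', '0002_change'), ('other_app', '0001_initial'),
    #         skip_validation=True,
    #     )
    #     graph.validate_consistency()  # raises NodeNotFoundError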

    def remove_replaced_nodes(self, replacement, replaced):
        """
        Remove each of the `replaced` nodes (when they exist). Any
        dependencies that were referencing them are changed to reference the
        `replacement` node instead.
        """
        # Cast list of replaced keys to set to speed up lookup later.
        replaced = set(replaced)
        try:
            replacement_node = self.node_map[replacement]
        except KeyError as err:
            raise NodeNotFoundError(
                "Unable to find replacement node %r. It was either never added"
                " to the migration graph, or has been removed." % (replacement,),
                replacement
            ) from err
        for replaced_key in replaced:
            self.nodes.pop(replaced_key, None)
            replaced_node = self.node_map.pop(replaced_key, None)
            if replaced_node:
                for child in replaced_node.children:
                    child.parents.remove(replaced_node)
                    # We don't want to create dependencies between the replaced
                    # node and the replacement node as this would lead to
                    # self-referencing on the replacement node at a later iteration.
                    if child.key not in replaced:
                        replacement_node.add_child(child)
                        child.add_parent(replacement_node)
                for parent in replaced_node.parents:
                    parent.children.remove(replaced_node)
                    # Again, to avoid self-referencing.
                    if parent.key not in replaced:
                        replacement_node.add_parent(parent)
                        parent.add_child(replacement_node)
        self.clear_cache()

    def remove_replacement_node(self, replacement, replaced):
        """
        The inverse operation to `remove_replaced_nodes`. Almost. Remove the
        replacement node `replacement` and remap its child nodes to `replaced`
        - the list of nodes it would have replaced. Don't remap its parent
        nodes as they are expected to be correct already.
        """
        self.nodes.pop(replacement, None)
        try:
            replacement_node = self.node_map.pop(replacement)
        except KeyError as err:
            raise NodeNotFoundError(
                "Unable to remove replacement node %r. It was either never added"
                " to the migration graph, or has been removed already." % (replacement,),
                replacement
            ) from err
        replaced_nodes = set()
        replaced_nodes_parents = set()
        for key in replaced:
            replaced_node = self.node_map.get(key)
            if replaced_node:
                replaced_nodes.add(replaced_node)
                replaced_nodes_parents |= replaced_node.parents
        # We're only interested in the latest replaced node, so filter out
        # replaced nodes that are parents of other replaced nodes.
        replaced_nodes -= replaced_nodes_parents
        for child in replacement_node.children:
            child.parents.remove(replacement_node)
            for replaced_node in replaced_nodes:
                replaced_node.add_child(child)
                child.add_parent(replaced_node)
        for parent in replacement_node.parents:
            parent.children.remove(replacement_node)
            # NOTE: There is no need to remap parent dependencies as we can
            # assume the replaced nodes already have the correct ancestry.
        self.clear_cache()
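
    # The two methods above implement squashing support: loaders call
    # remove_replaced_nodes() to collapse fully-replaced migrations into the
    # squash node, and remove_replacement_node() to back that out when the
    # squash can't be used. A hedged sketch (illustrative keys):
    #
    #     graph.remove_replaced_nodes(
    #         ('app', '0001_squashed_0002'),
    #         [('app', '0001_initial'), ('app', '0002_change')],
    #     )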

    def validate_consistency(self):
        """Ensure there are no dummy nodes remaining in the graph."""
        # Executed purely for its side effect of raising on any remaining
        # dummy node.
        for node in self.node_map.values():
            if isinstance(node, DummyNode):
                node.raise_error()

    def clear_cache(self):
        if self.cached:
            for node in self.nodes:
                self.node_map[node].__dict__.pop('_ancestors', None)
                self.node_map[node].__dict__.pop('_descendants', None)
            self.cached = False

    def forwards_plan(self, target):
        """
        Given a node, return a list of which previous nodes (dependencies) must
        be applied, ending with the node itself. This is the list you would
        follow if applying the migrations to a database.
        """
        if target not in self.nodes:
            raise NodeNotFoundError("Node %r not a valid node" % (target,), target)
        # Use parent.key instead of parent to speed up the frequent hashing in ensure_not_cyclic.
        self.ensure_not_cyclic(target, lambda x: (parent.key for parent in self.node_map[x].parents))
        self.cached = True
        node = self.node_map[target]
        try:
            return node.ancestors()
        except RuntimeError:
            # Fall back to iterative DFS if the recursive walk exceeds the
            # recursion limit.
            warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
            return self.iterative_dfs(node)
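
    # For a linear chain 0001 <- 0002, forwards_plan(('app', '0002_change'))
    # returns dependencies first and the target last - e.g. (hypothetical
    # keys) [('app', '0001_initial'), ('app', '0002_change')].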

    def backwards_plan(self, target):
        """
        Given a node, return a list of which dependent nodes (descendants)
        must be unapplied, ending with the node itself. This is the list you
        would follow if removing the migrations from a database.
        """
        if target not in self.nodes:
            raise NodeNotFoundError("Node %r not a valid node" % (target,), target)
        # Use child.key instead of child to speed up the frequent hashing in ensure_not_cyclic.
        self.ensure_not_cyclic(target, lambda x: (child.key for child in self.node_map[x].children))
        self.cached = True
        node = self.node_map[target]
        try:
            return node.descendants()
        except RuntimeError:
            # Fall back to iterative DFS if the recursive walk exceeds the
            # recursion limit.
            warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
            return self.iterative_dfs(node, forwards=False)

    def iterative_dfs(self, start, forwards=True):
        """Iterative depth-first search for finding dependencies."""
        visited = deque()
        visited.append(start)
        if forwards:
            stack = deque(sorted(start.parents))
        else:
            stack = deque(sorted(start.children))
        while stack:
            node = stack.popleft()
            visited.appendleft(node)
            if forwards:
                children = sorted(node.parents, reverse=True)
            else:
                children = sorted(node.children, reverse=True)
            # Reverse sorting is needed because prepending with deque.extendleft
            # also effectively reverses values.
            stack.extendleft(children)

        return list(OrderedSet(visited))

    def root_nodes(self, app=None):
        """
        Return all root nodes - that is, nodes with no dependencies inside
        their app. These are the starting point for an app.
        """
        roots = set()
        for node in self.nodes:
            if not any(key[0] == node[0] for key in self.node_map[node].parents) and (not app or app == node[0]):
                roots.add(node)
        return sorted(roots)

    def leaf_nodes(self, app=None):
        """
        Return all leaf nodes - that is, nodes with no dependents in their app.
        These are the "most current" version of an app's schema.

        Having more than one per app is technically an error, but one that
        gets handled further up, in the interactive command - it's usually the
        result of a VCS merge and needs some user input.
        """
        leaves = set()
        for node in self.nodes:
            if not any(key[0] == node[0] for key in self.node_map[node].children) and (not app or app == node[0]):
                leaves.add(node)
        return sorted(leaves)
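
    # For the same two-node 'app' chain, root_nodes('app') would be
    # [('app', '0001_initial')] and leaf_nodes('app') would be
    # [('app', '0002_change')] - a sketch of the per-app contract, not output
    # from a real project.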

    def ensure_not_cyclic(self, start, get_children):
        # Algorithm from GvR:
        # http://neopythonic.blogspot.co.uk/2009/01/detecting-cycles-in-directed-graph.html
        todo = set(self.nodes)
        while todo:
            node = todo.pop()
            stack = [node]
            while stack:
                top = stack[-1]
                for node in get_children(top):
                    if node in stack:
                        cycle = stack[stack.index(node):]
                        raise CircularDependencyError(", ".join("%s.%s" % n for n in cycle))
                    if node in todo:
                        stack.append(node)
                        todo.remove(node)
                        break
                else:
                    node = stack.pop()
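
    # A dependency cycle is reported with the offending keys joined into the
    # exception message - a hedged sketch (illustrative keys and migration
    # object; exact ordering depends on traversal):
    #
    #     graph.add_dependency(m1, ('app', '0001_initial'), ('app', '0002_change'))
    #     graph.forwards_plan(('app', '0002_change'))
    #     # CircularDependencyError: app.0001_initial, app.0002_change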

    def __str__(self):
        return 'Graph: %s nodes, %s edges' % self._nodes_and_edges()

    def __repr__(self):
        nodes, edges = self._nodes_and_edges()
        return '<%s: nodes=%s, edges=%s>' % (self.__class__.__name__, nodes, edges)

    def _nodes_and_edges(self):
        return len(self.nodes), sum(len(node.parents) for node in self.node_map.values())

    def make_state(self, nodes=None, at_end=True, real_apps=None):
        """
        Given a migration node or nodes, return a complete ProjectState for it.
        If at_end is False, return the state before the migration has run.
        If nodes is not provided, return the overall most current project state.
        """
        if nodes is None:
            nodes = list(self.leaf_nodes())
        if not nodes:
            return ProjectState()
        if not isinstance(nodes[0], tuple):
            nodes = [nodes]
        plan = []
        for node in nodes:
            for migration in self.forwards_plan(node):
                if migration not in plan:
                    if not at_end and migration in nodes:
                        continue
                    plan.append(migration)
        project_state = ProjectState(real_apps=real_apps)
        for node in plan:
            project_state = self.nodes[node].mutate_state(project_state, preserve=False)
        return project_state
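
    # make_state() reuses forwards_plan() to order the migrations, then folds
    # each one's mutate_state() into a single ProjectState; with at_end=False
    # the requested nodes themselves are skipped, yielding the state just
    # before they would run.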

    def __contains__(self, node):
        return node in self.nodes
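

if __name__ == '__main__':
    # A self-contained demo of the planning API, assuming Django is importable
    # (run as `python -m django.db.migrations.graph`). The migration objects
    # are stand-ins; real graphs are built by the migration loader.
    demo_graph = MigrationGraph()
    demo_graph.add_node(('app', '0001_initial'), object())
    demo_graph.add_node(('app', '0002_change'), object())
    demo_graph.add_dependency(object(), ('app', '0002_change'), ('app', '0001_initial'))
    print(demo_graph)  # Graph: 2 nodes, 1 edges
    print(demo_graph.forwards_plan(('app', '0002_change')))
    # [('app', '0001_initial'), ('app', '0002_change')]
    print(demo_graph.backwards_plan(('app', '0001_initial')))
    # [('app', '0002_change'), ('app', '0001_initial')]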