2014-05-06 01:50:51 +08:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2013-11-29 10:42:24 +08:00
|
|
|
import re
|
2013-12-11 21:23:07 +08:00
|
|
|
import datetime
|
2013-11-29 10:42:24 +08:00
|
|
|
|
2014-09-08 08:26:12 +08:00
|
|
|
from itertools import chain
|
|
|
|
|
2014-06-16 07:01:49 +08:00
|
|
|
from django.utils import six
|
2014-06-17 01:20:05 +08:00
|
|
|
from django.conf import settings
|
2014-10-07 07:53:21 +08:00
|
|
|
from django.db import models
|
2013-06-07 22:28:38 +08:00
|
|
|
from django.db.migrations import operations
|
|
|
|
from django.db.migrations.migration import Migration
|
2013-12-05 00:01:31 +08:00
|
|
|
from django.db.migrations.questioner import MigrationQuestioner
|
2014-06-06 14:03:33 +08:00
|
|
|
from django.db.migrations.optimizer import MigrationOptimizer
|
2014-07-29 01:47:28 +08:00
|
|
|
from django.db.migrations.operations.models import AlterModelOptions
|
2013-06-07 22:28:38 +08:00
|
|
|
|
2014-11-16 03:25:43 +08:00
|
|
|
from .topological_sort import stable_topological_sort
|
|
|
|
|
2013-06-07 22:28:38 +08:00
|
|
|
|
2013-06-07 22:49:48 +08:00
|
|
|
class MigrationAutodetector(object):
|
2013-06-07 22:28:38 +08:00
|
|
|
"""
|
|
|
|
Takes a pair of ProjectStates, and compares them to see what the
|
|
|
|
first would need doing to make it match the second (the second
|
|
|
|
usually being the project's current state).
|
|
|
|
|
|
|
|
Note that this naturally operates on entire projects at a time,
|
|
|
|
as it's likely that changes interact (for example, you can't
|
|
|
|
add a ForeignKey without having a migration to add the table it
|
2013-06-08 00:56:43 +08:00
|
|
|
depends on first). A user interface may offer single-app usage
|
2013-06-07 22:28:38 +08:00
|
|
|
if it wishes, with the caveat that it may not always be possible.
|
|
|
|
"""
|
|
|
|
|
2013-06-19 22:36:22 +08:00
|
|
|
def __init__(self, from_state, to_state, questioner=None):
|
2013-06-07 22:28:38 +08:00
|
|
|
self.from_state = from_state
|
|
|
|
self.to_state = to_state
|
2013-06-19 22:36:22 +08:00
|
|
|
self.questioner = questioner or MigrationQuestioner()
|
2013-06-07 22:28:38 +08:00
|
|
|
|
2014-08-19 21:24:31 +08:00
|
|
|
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
|
2013-08-22 05:25:15 +08:00
|
|
|
"""
|
|
|
|
Main entry point to produce a list of appliable changes.
|
|
|
|
Takes a graph to base names on and an optional set of apps
|
|
|
|
to try and restrict to (restriction is not guaranteed)
|
|
|
|
"""
|
2014-07-10 14:53:16 +08:00
|
|
|
changes = self._detect_changes(convert_apps, graph)
|
2014-08-19 21:24:31 +08:00
|
|
|
changes = self.arrange_for_graph(changes, graph, migration_name)
|
2013-08-22 05:25:15 +08:00
|
|
|
if trim_to_apps:
|
|
|
|
changes = self._trim_to_apps(changes, trim_to_apps)
|
|
|
|
return changes
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
def deep_deconstruct(self, obj):
|
|
|
|
"""
|
|
|
|
Recursive deconstruction for a field and its arguments.
|
|
|
|
Used for full comparison for rename/alter; sometimes a single-level
|
|
|
|
deconstruction will not compare correctly.
|
|
|
|
"""
|
2014-09-06 08:07:42 +08:00
|
|
|
if not hasattr(obj, 'deconstruct') or isinstance(obj, type):
|
2014-06-06 14:03:33 +08:00
|
|
|
return obj
|
|
|
|
deconstructed = obj.deconstruct()
|
|
|
|
if isinstance(obj, models.Field):
|
|
|
|
# we have a field which also returns a name
|
|
|
|
deconstructed = deconstructed[1:]
|
|
|
|
path, args, kwargs = deconstructed
|
|
|
|
return (
|
|
|
|
path,
|
|
|
|
[self.deep_deconstruct(value) for value in args],
|
2014-12-07 05:00:09 +08:00
|
|
|
{
|
|
|
|
key: self.deep_deconstruct(value)
|
2014-06-06 14:03:33 +08:00
|
|
|
for key, value in kwargs.items()
|
2014-12-07 05:00:09 +08:00
|
|
|
},
|
2014-06-06 14:03:33 +08:00
|
|
|
)
|
|
|
|
|
|
|
|
def only_relation_agnostic_fields(self, fields):
|
|
|
|
"""
|
|
|
|
Return a definition of the fields that ignores field names and
|
|
|
|
what related fields actually relate to.
|
|
|
|
Used for detecting renames (as, of course, the related fields
|
|
|
|
change during renames)
|
|
|
|
"""
|
|
|
|
fields_def = []
|
|
|
|
for name, field in fields:
|
|
|
|
deconstruction = self.deep_deconstruct(field)
|
|
|
|
if field.rel and field.rel.to:
|
|
|
|
del deconstruction[2]['to']
|
|
|
|
fields_def.append(deconstruction)
|
|
|
|
return fields_def
|
|
|
|
|
2014-07-10 14:53:16 +08:00
|
|
|
def _detect_changes(self, convert_apps=None, graph=None):
    """
    Returns a dict of migration plans which will achieve the
    change from from_state to to_state. The dict has app labels
    as keys and a list of migrations as values.

    The resulting migrations aren't specially named, but the names
    do matter for dependencies inside the set.

    convert_apps is the list of apps to convert to use migrations
    (i.e. to make initial migrations for, in the usual case)

    graph is an optional argument that, if provided, can help improve
    dependency generation and avoid potential circular dependencies.
    """

    # The first phase is generating all the operations for each app
    # and gathering them into a big per-app list.
    # We'll then go through that list later and order it and split
    # into migrations to resolve dependencies caused by M2Ms and FKs.
    self.generated_operations = {}

    # Prepare some old/new state and model lists, separating
    # proxy models and ignoring unmigrated apps.
    self.old_apps = self.from_state.concrete_apps
    self.new_apps = self.to_state.apps
    self.old_model_keys = []
    self.old_proxy_keys = []
    self.old_unmanaged_keys = []
    self.new_model_keys = []
    self.new_proxy_keys = []
    self.new_unmanaged_keys = []
    # Bucket every old-state model as unmanaged / proxy / concrete,
    # skipping apps that already have real migrations on disk (real_apps).
    for al, mn in sorted(self.from_state.models.keys()):
        model = self.old_apps.get_model(al, mn)
        if not model._meta.managed:
            self.old_unmanaged_keys.append((al, mn))
        elif al not in self.from_state.real_apps:
            if model._meta.proxy:
                self.old_proxy_keys.append((al, mn))
            else:
                self.old_model_keys.append((al, mn))

    # Same bucketing for the new state; apps listed in convert_apps are
    # included even if they already appear in real_apps.
    for al, mn in sorted(self.to_state.models.keys()):
        model = self.new_apps.get_model(al, mn)
        if not model._meta.managed:
            self.new_unmanaged_keys.append((al, mn))
        elif (
            al not in self.from_state.real_apps or
            (convert_apps and al in convert_apps)
        ):
            if model._meta.proxy:
                self.new_proxy_keys.append((al, mn))
            else:
                self.new_model_keys.append((al, mn))

    # Renames have to come first
    self.generate_renamed_models()

    # Prepare field lists, and prepare a list of the fields that used
    # through models in the old state so we can make dependencies
    # from the through model deletion to the field that uses it.
    self.kept_model_keys = set(self.old_model_keys).intersection(self.new_model_keys)
    self.kept_proxy_keys = set(self.old_proxy_keys).intersection(self.new_proxy_keys)
    self.kept_unmanaged_keys = set(self.old_unmanaged_keys).intersection(self.new_unmanaged_keys)
    self.through_users = {}
    self.old_field_keys = set()
    self.new_field_keys = set()
    for app_label, model_name in sorted(self.kept_model_keys):
        # Map back through any model rename so old/new fields line up.
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        self.old_field_keys.update((app_label, model_name, x) for x, y in old_model_state.fields)
        self.new_field_keys.update((app_label, model_name, x) for x, y in new_model_state.fields)

    # Through model map generation
    for app_label, model_name in sorted(self.old_model_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        for field_name, field in old_model_state.fields:
            old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field_by_name(field_name)[0]
            # Only explicit (non-auto-created) through models matter here.
            if (hasattr(old_field, "rel") and getattr(old_field.rel, "through", None)
                    and not old_field.rel.through._meta.auto_created):
                through_key = (
                    old_field.rel.through._meta.app_label,
                    old_field.rel.through._meta.object_name.lower(),
                )
                self.through_users[through_key] = (app_label, old_model_name, field_name)

    # Generate non-rename model operations
    self.generate_deleted_models()
    self.generate_created_models()
    self.generate_deleted_proxies()
    self.generate_created_proxies()
    self.generate_altered_options()
    self.generate_altered_managers()

    # Generate field operations
    self.generate_renamed_fields()
    self.generate_removed_fields()
    self.generate_added_fields()
    self.generate_altered_fields()
    self.generate_altered_unique_together()
    self.generate_altered_index_together()
    self.generate_altered_db_table()
    self.generate_altered_order_with_respect_to()

    # Now, reordering to make things possible. The order we have already
    # isn't bad, but we need to pull a few things around so FKs work nicely
    # inside the same app
    for app_label, ops in sorted(self.generated_operations.items()):

        # construct a dependency graph for intra-app dependencies
        dependency_graph = {op: set() for op in ops}
        for op in ops:
            for dep in op._auto_deps:
                if dep[0] == app_label:
                    for op2 in ops:
                        if self.check_dependency(op2, dep):
                            dependency_graph[op].add(op2)

        # we use a stable sort for deterministic tests & general behavior
        self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph)

    # Now, we need to chop the lists of operations up into migrations with
    # dependencies on each other.
    # We do this by stepping up an app's list of operations until we
    # find one that has an outgoing dependency that isn't in another app's
    # migration yet (hasn't been chopped off its list). We then chop off the
    # operations before it into a migration and move onto the next app.
    # If we loop back around without doing anything, there's a circular
    # dependency (which _should_ be impossible as the operations are all
    # split at this point so they can't depend and be depended on)

    self.migrations = {}
    num_ops = sum(len(x) for x in self.generated_operations.values())
    chop_mode = False
    while num_ops:
        # On every iteration, we step through all the apps and see if there
        # is a completed set of operations.
        # If we find that a subset of the operations are complete we can
        # try to chop it off from the rest and continue, but we only
        # do this if we've already been through the list once before
        # without any chopping and nothing has changed.
        for app_label in sorted(self.generated_operations.keys()):
            chopped = []
            dependencies = set()
            for operation in list(self.generated_operations[app_label]):
                deps_satisfied = True
                operation_dependencies = set()
                for dep in operation._auto_deps:
                    is_swappable_dep = False
                    if dep[0] == "__setting__":
                        # We need to temporarily resolve the swappable dependency to prevent
                        # circular references. While keeping the dependency checks on the
                        # resolved model we still add the swappable dependencies.
                        # See #23322
                        resolved_app_label, resolved_object_name = getattr(settings, dep[1]).split('.')
                        original_dep = dep
                        dep = (resolved_app_label, resolved_object_name.lower(), dep[2], dep[3])
                        is_swappable_dep = True
                    if dep[0] != app_label and dep[0] != "__setting__":
                        # External app dependency. See if it's not yet
                        # satisfied.
                        for other_operation in self.generated_operations.get(dep[0], []):
                            if self.check_dependency(other_operation, dep):
                                deps_satisfied = False
                                break
                        if not deps_satisfied:
                            break
                        else:
                            if is_swappable_dep:
                                operation_dependencies.add((original_dep[0], original_dep[1]))
                            elif dep[0] in self.migrations:
                                operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name))
                            else:
                                # If we can't find the other app, we add a first/last dependency,
                                # but only if we've already been through once and checked everything
                                if chop_mode:
                                    # If the app already exists, we add a dependency on the last migration,
                                    # as we don't know which migration contains the target field.
                                    # If it's not yet migrated or has no migrations, we use __first__
                                    if graph and graph.leaf_nodes(dep[0]):
                                        operation_dependencies.add(graph.leaf_nodes(dep[0])[0])
                                    else:
                                        operation_dependencies.add((dep[0], "__first__"))
                                else:
                                    deps_satisfied = False
                if deps_satisfied:
                    chopped.append(operation)
                    dependencies.update(operation_dependencies)
                    self.generated_operations[app_label] = self.generated_operations[app_label][1:]
                else:
                    break
            # Make a migration! Well, only if there's stuff to put in it
            if dependencies or chopped:
                if not self.generated_operations[app_label] or chop_mode:
                    subclass = type(str("Migration"), (Migration,), {"operations": [], "dependencies": []})
                    instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label)
                    instance.dependencies = list(dependencies)
                    instance.operations = chopped
                    self.migrations.setdefault(app_label, []).append(instance)
                    chop_mode = False
                else:
                    # Not safe to finalize yet; put the chopped operations back.
                    self.generated_operations[app_label] = chopped + self.generated_operations[app_label]
        new_num_ops = sum(len(x) for x in self.generated_operations.values())
        if new_num_ops == num_ops:
            # No progress this pass: first retry in chop mode, then give up.
            if not chop_mode:
                chop_mode = True
            else:
                raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations)
        num_ops = new_num_ops

    # OK, add in internal dependencies among the migrations
    for app_label, migrations in self.migrations.items():
        for m1, m2 in zip(migrations, migrations[1:]):
            m2.dependencies.append((app_label, m1.name))

    # De-dupe dependencies
    for app_label, migrations in self.migrations.items():
        for migration in migrations:
            migration.dependencies = list(set(migration.dependencies))

    # Optimize migrations
    for app_label, migrations in self.migrations.items():
        for migration in migrations:
            migration.operations = MigrationOptimizer().optimize(migration.operations, app_label=app_label)

    return self.migrations
|
|
|
|
|
|
|
|
def check_dependency(self, operation, dependency):
|
|
|
|
"""
|
2014-10-28 07:28:37 +08:00
|
|
|
Returns ``True`` if the given operation depends on the given dependency,
|
|
|
|
``False`` otherwise.
|
2014-06-06 14:03:33 +08:00
|
|
|
"""
|
|
|
|
# Created model
|
|
|
|
if dependency[2] is None and dependency[3] is True:
|
2014-05-02 10:50:56 +08:00
|
|
|
return (
|
2014-06-06 14:03:33 +08:00
|
|
|
isinstance(operation, operations.CreateModel) and
|
|
|
|
operation.name.lower() == dependency[1].lower()
|
2014-05-02 10:50:56 +08:00
|
|
|
)
|
2014-06-06 14:03:33 +08:00
|
|
|
# Created field
|
|
|
|
elif dependency[2] is not None and dependency[3] is True:
|
|
|
|
return (
|
|
|
|
(
|
|
|
|
isinstance(operation, operations.CreateModel) and
|
|
|
|
operation.name.lower() == dependency[1].lower() and
|
|
|
|
any(dependency[2] == x for x, y in operation.fields)
|
|
|
|
) or
|
|
|
|
(
|
|
|
|
isinstance(operation, operations.AddField) and
|
|
|
|
operation.model_name.lower() == dependency[1].lower() and
|
|
|
|
operation.name.lower() == dependency[2].lower()
|
|
|
|
)
|
|
|
|
)
|
|
|
|
# Removed field
|
|
|
|
elif dependency[2] is not None and dependency[3] is False:
|
|
|
|
return (
|
|
|
|
isinstance(operation, operations.RemoveField) and
|
|
|
|
operation.model_name.lower() == dependency[1].lower() and
|
|
|
|
operation.name.lower() == dependency[2].lower()
|
|
|
|
)
|
2014-06-16 07:01:49 +08:00
|
|
|
# Removed model
|
|
|
|
elif dependency[2] is None and dependency[3] is False:
|
|
|
|
return (
|
|
|
|
isinstance(operation, operations.DeleteModel) and
|
|
|
|
operation.name.lower() == dependency[1].lower()
|
|
|
|
)
|
2014-07-27 00:21:53 +08:00
|
|
|
# Field being altered
|
|
|
|
elif dependency[2] is not None and dependency[3] == "alter":
|
|
|
|
return (
|
|
|
|
isinstance(operation, operations.AlterField) and
|
|
|
|
operation.model_name.lower() == dependency[1].lower() and
|
|
|
|
operation.name.lower() == dependency[2].lower()
|
|
|
|
)
|
2014-06-16 05:55:44 +08:00
|
|
|
# order_with_respect_to being unset for a field
|
|
|
|
elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
|
|
|
|
return (
|
|
|
|
isinstance(operation, operations.AlterOrderWithRespectTo) and
|
|
|
|
operation.name.lower() == dependency[1].lower() and
|
|
|
|
(operation.order_with_respect_to or "").lower() != dependency[2].lower()
|
|
|
|
)
|
2014-10-28 07:28:37 +08:00
|
|
|
# Field is removed and part of an index/unique_together
|
|
|
|
elif dependency[2] is not None and dependency[3] == "foo_together_change":
|
2014-11-15 22:43:06 +08:00
|
|
|
return (
|
|
|
|
isinstance(operation, (operations.AlterUniqueTogether,
|
|
|
|
operations.AlterIndexTogether)) and
|
|
|
|
operation.name.lower() == dependency[1].lower()
|
|
|
|
)
|
2014-06-06 14:03:33 +08:00
|
|
|
# Unknown dependency. Raise an error.
|
|
|
|
else:
|
2014-06-16 05:55:44 +08:00
|
|
|
raise ValueError("Can't handle dependency %r" % (dependency, ))
|
2014-05-02 10:50:56 +08:00
|
|
|
|
2014-08-20 10:50:14 +08:00
|
|
|
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
|
2014-06-06 14:03:33 +08:00
|
|
|
# Dependencies are (app_label, model_name, field_name, create/delete as True/False)
|
|
|
|
operation._auto_deps = dependencies or []
|
2014-08-20 10:50:14 +08:00
|
|
|
if beginning:
|
|
|
|
self.generated_operations.setdefault(app_label, []).insert(0, operation)
|
|
|
|
else:
|
|
|
|
self.generated_operations.setdefault(app_label, []).append(operation)
|
2014-06-06 14:03:33 +08:00
|
|
|
|
2014-06-17 01:20:05 +08:00
|
|
|
def swappable_first_key(self, item):
|
|
|
|
"""
|
|
|
|
Sorting key function that places potential swappable models first in
|
|
|
|
lists of created models (only real way to solve #22783)
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
model = self.new_apps.get_model(item[0], item[1])
|
|
|
|
base_names = [base.__name__ for base in model.__bases__]
|
|
|
|
string_version = "%s.%s" % (item[0], item[1])
|
|
|
|
if (
|
|
|
|
model._meta.swappable or
|
|
|
|
"AbstractUser" in base_names or
|
|
|
|
"AbstractBaseUser" in base_names or
|
|
|
|
settings.AUTH_USER_MODEL.lower() == string_version.lower()
|
|
|
|
):
|
|
|
|
return ("___" + item[0], "___" + item[1])
|
|
|
|
except LookupError:
|
|
|
|
pass
|
|
|
|
return item
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
def generate_renamed_models(self):
    """
    Finds any renamed models, and generates the operations for them,
    and removes the old entry from the model lists.
    Must be run before other model-level generation.
    """
    # (app_label, model_name) -> old model name, for later field lookups.
    self.renamed_models = {}
    # "app.OldName" -> "app.NewName", for rewriting relation references.
    self.renamed_models_rel = {}
    added_models = set(self.new_model_keys) - set(self.old_model_keys)
    for app_label, model_name in sorted(added_models):
        model_state = self.to_state.models[app_label, model_name]
        model_fields_def = self.only_relation_agnostic_fields(model_state.fields)

        # Recompute each pass: a confirmed rename mutates old_model_keys below.
        removed_models = set(self.old_model_keys) - set(self.new_model_keys)
        for rem_app_label, rem_model_name in removed_models:
            # Only consider renames within the same app.
            if rem_app_label == app_label:
                rem_model_state = self.from_state.models[rem_app_label, rem_model_name]
                rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields)
                # Identical field definitions (ignoring names/relation
                # targets) suggest this "added" model is really a rename.
                if model_fields_def == rem_model_fields_def:
                    # Let the questioner (interactive or not) confirm.
                    if self.questioner.ask_rename_model(rem_model_state, model_state):
                        self.add_operation(
                            app_label,
                            operations.RenameModel(
                                old_name=rem_model_state.name,
                                new_name=model_state.name,
                            )
                        )
                        self.renamed_models[app_label, model_name] = rem_model_name
                        self.renamed_models_rel['%s.%s' % (rem_model_state.app_label, rem_model_state.name)] = '%s.%s' % (model_state.app_label, model_state.name)
                        # Treat the old key as if it were the new one so the
                        # rest of detection sees a kept (not deleted) model.
                        self.old_model_keys.remove((rem_app_label, rem_model_name))
                        self.old_model_keys.append((app_label, model_name))
                        break
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
def generate_created_models(self):
    """
    Find all new models (both managed and unmanaged) and make create
    operations for them as well as separate operations to create any
    foreign key or M2M relationships (we'll optimize these back in later
    if we can).

    We also defer any model options that refer to collections of fields
    that might be deferred (e.g. unique_together, index_together).
    """
    old_keys = set(self.old_model_keys).union(self.old_unmanaged_keys)
    added_models = set(self.new_model_keys) - old_keys
    added_unmanaged_models = set(self.new_unmanaged_keys) - old_keys
    # Swappable candidates are sorted first within each group (see
    # swappable_first_key); managed models are processed before unmanaged.
    all_added_models = chain(
        sorted(added_models, key=self.swappable_first_key, reverse=True),
        sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True)
    )
    for app_label, model_name in all_added_models:
        model_state = self.to_state.models[app_label, model_name]
        model_opts = self.new_apps.get_model(app_label, model_name)._meta
        # Gather related fields
        related_fields = {}
        primary_key_rel = None
        for field in model_opts.local_fields:
            if field.rel:
                if field.rel.to:
                    if field.primary_key:
                        primary_key_rel = field.rel.to
                    elif not field.rel.parent_link:
                        related_fields[field.name] = field
                # through will be none on M2Ms on swapped-out models;
                # we can treat lack of through as auto_created=True, though.
                if getattr(field.rel, "through", None) and not field.rel.through._meta.auto_created:
                    related_fields[field.name] = field
        for field in model_opts.local_many_to_many:
            if field.rel.to:
                related_fields[field.name] = field
            if getattr(field.rel, "through", None) and not field.rel.through._meta.auto_created:
                related_fields[field.name] = field
        # Are there unique/index_together to defer?
        # NOTE: pop() mutates model_state.options so CreateModel below
        # does not carry these; they are emitted as separate operations.
        unique_together = model_state.options.pop('unique_together', None)
        index_together = model_state.options.pop('index_together', None)
        order_with_respect_to = model_state.options.pop('order_with_respect_to', None)
        # Depend on the deletion of any possible proxy version of us
        dependencies = [
            (app_label, model_name, None, False),
        ]
        # Depend on all bases
        for base in model_state.bases:
            if isinstance(base, six.string_types) and "." in base:
                base_app_label, base_name = base.split(".", 1)
                dependencies.append((base_app_label, base_name, None, True))
        # Depend on the other end of the primary key if it's a relation
        if primary_key_rel:
            dependencies.append((
                primary_key_rel._meta.app_label,
                primary_key_rel._meta.object_name,
                None,
                True
            ))
        # Generate creation operation
        self.add_operation(
            app_label,
            operations.CreateModel(
                name=model_state.name,
                fields=[d for d in model_state.fields if d[0] not in related_fields],
                options=model_state.options,
                bases=model_state.bases,
                managers=model_state.managers,
            ),
            dependencies=dependencies,
            beginning=True,
        )

        # Don't add operations which modify the database for unmanaged models
        if not model_opts.managed:
            continue

        # Generate operations for each related field
        for name, field in sorted(related_fields.items()):
            # Account for FKs to swappable models
            swappable_setting = getattr(field, 'swappable_setting', None)
            if swappable_setting is not None:
                dep_app_label = "__setting__"
                dep_object_name = swappable_setting
            else:
                dep_app_label = field.rel.to._meta.app_label
                dep_object_name = field.rel.to._meta.object_name
            dependencies = [(dep_app_label, dep_object_name, None, True)]
            if getattr(field.rel, "through", None) and not field.rel.through._meta.auto_created:
                dependencies.append((
                    field.rel.through._meta.app_label,
                    field.rel.through._meta.object_name,
                    None,
                    True
                ))
            # Depend on our own model being created
            dependencies.append((app_label, model_name, None, True))
            # Make operation
            self.add_operation(
                app_label,
                operations.AddField(
                    model_name=model_name,
                    name=name,
                    field=field,
                ),
                dependencies=list(set(dependencies)),
            )
        # Generate other opns
        related_dependencies = [
            (app_label, model_name, name, True)
            for name, field in sorted(related_fields.items())
        ]
        related_dependencies.append((app_label, model_name, None, True))
        if unique_together:
            self.add_operation(
                app_label,
                operations.AlterUniqueTogether(
                    name=model_name,
                    unique_together=unique_together,
                ),
                dependencies=related_dependencies
            )
        if index_together:
            self.add_operation(
                app_label,
                operations.AlterIndexTogether(
                    name=model_name,
                    index_together=index_together,
                ),
                dependencies=related_dependencies
            )
        if order_with_respect_to:
            self.add_operation(
                app_label,
                operations.AlterOrderWithRespectTo(
                    name=model_name,
                    order_with_respect_to=order_with_respect_to,
                ),
                dependencies=[
                    (app_label, model_name, order_with_respect_to, True),
                    (app_label, model_name, None, True),
                ]
            )
|
2014-03-26 03:51:15 +08:00
|
|
|
|
2014-09-08 08:26:12 +08:00
|
|
|
def generate_created_proxies(self):
    """
    Makes CreateModel statements for proxy models.
    We use the same statements as that way there's less code duplication,
    but of course for proxy models we can skip all that pointless field
    stuff and just chuck out an operation.
    """
    added = set(self.new_proxy_keys) - set(self.old_proxy_keys)
    for app_label, model_name in sorted(added):
        model_state = self.to_state.models[app_label, model_name]
        # Sanity check: these keys were bucketed as proxies earlier.
        assert model_state.options.get("proxy", False)
        # Depend on the deletion of any possible non-proxy version of us
        dependencies = [
            (app_label, model_name, None, False),
        ]
        # Depend on all bases
        for base in model_state.bases:
            if isinstance(base, six.string_types) and "." in base:
                base_app_label, base_name = base.split(".", 1)
                dependencies.append((base_app_label, base_name, None, True))
        # Generate creation operation (no fields: proxies define none).
        self.add_operation(
            app_label,
            operations.CreateModel(
                name=model_state.name,
                fields=[],
                options=model_state.options,
                bases=model_state.bases,
                managers=model_state.managers,
            ),
            # Depend on the deletion of any possible non-proxy version of us
            dependencies=dependencies,
        )
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
def generate_deleted_models(self):
    """
    Find all deleted models (managed and unmanaged) and make delete
    operations for them, plus separate operations to remove any foreign
    key or M2M relationships (the optimizer may fold these back in later).

    Also brings forward removal of any model options that refer to
    collections of fields - the inverse of generate_created_models().
    """
    remaining = set(self.new_model_keys).union(self.new_unmanaged_keys)
    gone_managed = set(self.old_model_keys) - remaining
    gone_unmanaged = set(self.old_unmanaged_keys) - remaining
    for app_label, model_name in chain(sorted(gone_managed), sorted(gone_unmanaged)):
        model_state = self.from_state.models[app_label, model_name]
        model = self.old_apps.get_model(app_label, model_name)
        if not model._meta.managed:
            # Unmanaged models have no schema, so their fields need no handling.
            continue

        # Collect every relational field the model declares itself.
        related_fields = {}
        for field in model._meta.local_fields:
            if field.rel:
                if field.rel.to:
                    related_fields[field.name] = field
                # through will be None on M2Ms pointing at swapped-out
                # models; treat a missing through as auto_created=True.
                if getattr(field.rel, "through", None) and not field.rel.through._meta.auto_created:
                    related_fields[field.name] = field
        for field in model._meta.local_many_to_many:
            if field.rel.to:
                related_fields[field.name] = field
            if getattr(field.rel, "through", None) and not field.rel.through._meta.auto_created:
                related_fields[field.name] = field

        # Remove collection-of-fields options before the fields themselves.
        unique_together = model_state.options.pop('unique_together', None)
        index_together = model_state.options.pop('index_together', None)
        if unique_together:
            self.add_operation(
                app_label,
                operations.AlterUniqueTogether(
                    name=model_name,
                    unique_together=None,
                )
            )
        if index_together:
            self.add_operation(
                app_label,
                operations.AlterIndexTogether(
                    name=model_name,
                    index_together=None,
                )
            )

        # Then drop each of the model's own relational fields.
        for field_name, field in sorted(related_fields.items()):
            self.add_operation(
                app_label,
                operations.RemoveField(
                    model_name=model_name,
                    name=field_name,
                )
            )

        # Finally delete the model itself. That must wait on the removal or
        # alteration of every incoming field, the removal of its own
        # relational fields, and - for a through model - the field that
        # references it.
        dependencies = []
        for related_object in model._meta.get_all_related_objects():
            dependencies.append((
                related_object.model._meta.app_label,
                related_object.model._meta.object_name,
                related_object.field.name,
                False,
            ))
            dependencies.append((
                related_object.model._meta.app_label,
                related_object.model._meta.object_name,
                related_object.field.name,
                "alter",
            ))
        for related_object in model._meta.get_all_related_many_to_many_objects():
            dependencies.append((
                related_object.model._meta.app_label,
                related_object.model._meta.object_name,
                related_object.field.name,
                False,
            ))
        for field_name in sorted(related_fields):
            dependencies.append((app_label, model_name, field_name, False))
        # If another field names us in its through=, wait on its removal too.
        through_user = self.through_users.get((app_label, model_state.name.lower()), None)
        if through_user:
            dependencies.append((through_user[0], through_user[1], through_user[2], False))
        # Emit the deletion, deduplicating the dependency list.
        self.add_operation(
            app_label,
            operations.DeleteModel(
                name=model_state.name,
            ),
            dependencies=list(set(dependencies)),
        )
2014-09-08 08:26:12 +08:00
|
|
|
def generate_deleted_proxies(self):
    """
    Generates DeleteModel operations for proxy models that have vanished.
    """
    removed_proxies = set(self.old_proxy_keys) - set(self.new_proxy_keys)
    for app_label, model_name in sorted(removed_proxies):
        model_state = self.from_state.models[app_label, model_name]
        assert model_state.options.get("proxy", False)
        self.add_operation(
            app_label,
            operations.DeleteModel(
                name=model_state.name,
            ),
        )
2014-07-30 00:38:08 +08:00
|
|
|
def generate_renamed_fields(self):
    """
    Detects field renames (a removed field whose deconstruction matches an
    added one on the same model), confirms each candidate with the
    questioner, and records confirmed renames in self.renamed_fields.
    """
    self.renamed_fields = {}
    for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        field = self.new_apps.get_model(app_label, model_name)._meta.get_field_by_name(field_name)[0]
        # Compare against every removed field on the same model to see if
        # this "addition" is really a rename.
        field_dec = self.deep_deconstruct(field)
        for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys):
            if rem_app_label != app_label or rem_model_name != model_name:
                continue
            old_field_dec = self.deep_deconstruct(old_model_state.get_field_by_name(rem_field_name))
            if field.rel and field.rel.to and 'to' in old_field_dec[2]:
                # Rewrite the old relation target through any detected
                # model renames so the deconstructions compare equal.
                old_rel_to = old_field_dec[2]['to']
                if old_rel_to in self.renamed_models_rel:
                    old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to]
            if old_field_dec == field_dec:
                if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
                    self.add_operation(
                        app_label,
                        operations.RenameField(
                            model_name=model_name,
                            old_name=rem_field_name,
                            new_name=field_name,
                        )
                    )
                    # Patch the key sets so later stages see this as kept.
                    self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
                    self.old_field_keys.add((app_label, model_name, field_name))
                    self.renamed_fields[app_label, model_name, field_name] = rem_field_name
                    break
|
|
|
|
def generate_added_fields(self):
    """Generates an AddField operation for every newly-appeared field."""
    added = sorted(self.new_field_keys - self.old_field_keys)
    for app_label, model_name, field_name in added:
        self._generate_added_field(app_label, model_name, field_name)
|
|
|
|
def _generate_added_field(self, app_label, model_name, field_name):
    """
    Emits a single AddField operation, wiring up dependencies for
    relational fields and asking the questioner for an interim default
    where one is required.
    """
    field = self.new_apps.get_model(app_label, model_name)._meta.get_field_by_name(field_name)[0]
    # Relational fields must wait for their target (and any explicit
    # through model) to exist first.
    dependencies = []
    if field.rel and field.rel.to:
        # An FK to a swappable model depends on the setting, not the model.
        swappable_setting = getattr(field, 'swappable_setting', None)
        if swappable_setting is not None:
            target = ("__setting__", swappable_setting)
        else:
            target = (field.rel.to._meta.app_label, field.rel.to._meta.object_name)
        dependencies = [target + (None, True)]
        if getattr(field.rel, "through", None) and not field.rel.through._meta.auto_created:
            dependencies.append((
                field.rel.through._meta.app_label,
                field.rel.through._meta.object_name,
                None,
                True,
            ))
    # A NOT NULL addition without a usable default needs an interim default
    # from the questioner (M2Ms and blank-able strings are exempt).
    preserve_default = True
    if (not field.null and not field.has_default() and
            not isinstance(field, models.ManyToManyField) and
            not (field.blank and field.empty_strings_allowed)):
        field = field.clone()
        field.default = self.questioner.ask_not_null_addition(field_name, model_name)
        preserve_default = False
    self.add_operation(
        app_label,
        operations.AddField(
            model_name=model_name,
            name=field_name,
            field=field,
            preserve_default=preserve_default,
        ),
        dependencies=dependencies,
    )
|
|
|
def generate_removed_fields(self):
    """Generates a RemoveField operation for every vanished field."""
    removed = sorted(self.old_field_keys - self.new_field_keys)
    for app_label, model_name, field_name in removed:
        self._generate_removed_field(app_label, model_name, field_name)
|
|
|
def _generate_removed_field(self, app_label, model_name, field_name):
    """Emits a single RemoveField operation for the named field."""
    self.add_operation(
        app_label,
        operations.RemoveField(
            model_name=model_name,
            name=field_name,
        ),
        # The removal may need to run after an order_with_respect_to unset
        # or an index/unique_together change; when no such operation exists
        # these dependencies are safely ignored.
        dependencies=[
            (app_label, model_name, field_name, "order_wrt_unset"),
            (app_label, model_name, field_name, "foo_together_change"),
        ],
    )
|
|
|
|
def generate_altered_fields(self):
    """
    Emits AlterField operations for fields whose definition changed, or a
    Remove+Add pair when the change crosses the many-to-many boundary
    (AlterField cannot express that conversion).
    """
    for app_label, model_name, field_name in sorted(self.old_field_keys.intersection(self.new_field_keys)):
        # Resolve any model/field renames so we compare the right pair.
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name)
        old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field_by_name(old_field_name)[0]
        new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field_by_name(field_name)[0]
        # Model renames on relations are handled by RenameModel, so undo
        # them on the new field before comparing deconstructions.
        if hasattr(new_field, "rel") and getattr(new_field.rel, "to", None):
            rename_key = (
                new_field.rel.to._meta.app_label,
                new_field.rel.to._meta.object_name.lower(),
            )
            if rename_key in self.renamed_models:
                new_field.rel.to = old_field.rel.to
        old_field_dec = self.deep_deconstruct(old_field)
        new_field_dec = self.deep_deconstruct(new_field)
        if old_field_dec == new_field_dec:
            continue
        neither_m2m = (
            not isinstance(old_field, models.ManyToManyField) and
            not isinstance(new_field, models.ManyToManyField)
        )
        if not neither_m2m:
            # One side is an M2M: fall back to dropping and re-adding.
            self._generate_removed_field(app_label, model_name, field_name)
            self._generate_added_field(app_label, model_name, field_name)
            continue
        preserve_default = True
        if (old_field.null and not new_field.null and not new_field.has_default() and
                not isinstance(new_field, models.ManyToManyField)):
            # NULL -> NOT NULL without a default needs an interim default
            # supplied by the questioner.
            field = new_field.clone()
            new_default = self.questioner.ask_not_null_alteration(field_name, model_name)
            if new_default is not models.NOT_PROVIDED:
                field.default = new_default
                preserve_default = False
        else:
            field = new_field
        self.add_operation(
            app_label,
            operations.AlterField(
                model_name=model_name,
                name=field_name,
                field=field,
                preserve_default=preserve_default,
            )
        )
2014-06-25 20:53:09 +08:00
|
|
|
def _generate_altered_foo_together(self, operation):
    """
    Shared driver for unique_together/index_together change detection;
    `operation` (AlterUniqueTogether or AlterIndexTogether) supplies both
    the option name and the operation class to emit.
    """
    option_name = operation.option_name
    for app_label, model_name in sorted(self.kept_model_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        # Map the old value through any detected field renames so both
        # sides compare on current field names.
        old_value = {
            tuple(
                self.renamed_fields.get((app_label, model_name, n), n)
                for n in group
            )
            for group in (old_model_state.options.get(option_name) or [])
        }
        new_value = set(new_model_state.options.get(option_name) or [])
        if old_value != new_value:
            self.add_operation(
                app_label,
                operation(
                    name=model_name,
                    **{option_name: new_value}
                )
            )
|
2014-06-25 20:53:09 +08:00
|
|
|
def generate_altered_unique_together(self):
    """Detects unique_together changes via the shared foo_together driver."""
    self._generate_altered_foo_together(operations.AlterUniqueTogether)
|
2014-06-06 14:03:33 +08:00
|
|
|
def generate_altered_index_together(self):
    """Detects index_together changes via the shared foo_together driver."""
    self._generate_altered_foo_together(operations.AlterIndexTogether)
|
2014-10-14 22:20:24 +08:00
|
|
|
def generate_altered_db_table(self):
    """Emits AlterModelTable wherever a model's db_table option changed."""
    models_to_check = self.kept_model_keys.union(self.kept_proxy_keys).union(self.kept_unmanaged_keys)
    for app_label, model_name in sorted(models_to_check):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_db_table = self.from_state.models[app_label, old_model_name].options.get('db_table')
        new_db_table = self.to_state.models[app_label, model_name].options.get('db_table')
        if old_db_table != new_db_table:
            self.add_operation(
                app_label,
                operations.AlterModelTable(
                    name=model_name,
                    table=new_db_table,
                )
            )
2014-06-16 03:34:02 +08:00
|
|
|
def generate_altered_options(self):
    """
    Works out whether any non-schema-affecting options have changed and
    emits AlterModelOptions to record them in migration state (in case
    Python code in migrations needs them).
    """
    models_to_check = self.kept_model_keys.union(
        self.kept_proxy_keys
    ).union(
        self.kept_unmanaged_keys
    ).union(
        # unmanaged converted to managed
        set(self.old_unmanaged_keys).intersection(self.new_model_keys)
    ).union(
        # managed converted to unmanaged
        set(self.old_model_keys).intersection(self.new_unmanaged_keys)
    )

    for app_label, model_name in sorted(models_to_check):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        # Only compare options AlterModelOptions actually tracks.
        old_options = {
            key: value for key, value in old_model_state.options.items()
            if key in AlterModelOptions.ALTER_OPTION_KEYS
        }
        new_options = {
            key: value for key, value in new_model_state.options.items()
            if key in AlterModelOptions.ALTER_OPTION_KEYS
        }
        if old_options != new_options:
            self.add_operation(
                app_label,
                operations.AlterModelOptions(
                    name=model_name,
                    options=new_options,
                )
            )
2014-06-16 05:55:44 +08:00
|
|
|
def generate_altered_order_with_respect_to(self):
    """Emits AlterOrderWithRespectTo when the option changes on a kept model."""
    for app_label, model_name in sorted(self.kept_model_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_value = self.from_state.models[app_label, old_model_name].options.get("order_with_respect_to", None)
        new_model_state = self.to_state.models[app_label, model_name]
        new_value = new_model_state.options.get("order_with_respect_to", None)
        if old_value == new_value:
            continue
        # When adding the option, run after the referenced field exists
        # (removal ordering is already handled as part of RemoveField).
        dependencies = []
        if new_value:
            dependencies.append((
                app_label,
                model_name,
                new_model_state.options["order_with_respect_to"],
                True,
            ))
        # Actually generate the operation.
        self.add_operation(
            app_label,
            operations.AlterOrderWithRespectTo(
                name=model_name,
                order_with_respect_to=new_value,
            ),
            dependencies=dependencies,
        )
|
2014-12-13 06:19:58 +08:00
|
|
|
def generate_altered_managers(self):
    """Emits AlterModelManagers when a kept model's manager list changed."""
    for app_label, model_name in sorted(self.kept_model_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        if old_model_state.managers != new_model_state.managers:
            self.add_operation(
                app_label,
                operations.AlterModelManagers(
                    name=model_name,
                    managers=new_model_state.managers,
                )
            )
2014-08-19 21:24:31 +08:00
|
|
|
def arrange_for_graph(self, changes, graph, migration_name=None):
    """
    Takes a changes() result and a MigrationGraph, and fixes the names and
    dependencies of the new migrations so each app's chain extends the
    graph from that app's current leaf node.
    """
    leaves = graph.leaf_nodes()
    name_map = {}
    for app_label, migrations in list(changes.items()):
        if not migrations:
            continue
        # Locate this app's current leaf migration, if any.
        app_leaf = next((leaf for leaf in leaves if leaf[0] == app_label), None)
        # No existing migrations: check whether an initial one is wanted.
        if app_leaf is None and not self.questioner.ask_initial(app_label):
            # It isn't - point dependents at __first__ and drop the app.
            for migration in migrations:
                name_map[(app_label, migration.name)] = (app_label, "__first__")
            del changes[app_label]
            continue
        # Work out the next number in the sequence.
        if app_leaf is None:
            next_number = 1
        else:
            next_number = (self.parse_number(app_leaf[1]) or 0) + 1
        # Name each migration and chain the first onto the existing leaf.
        for i, migration in enumerate(migrations):
            if i == 0 and app_leaf:
                migration.dependencies.append(app_leaf)
            if i == 0 and not app_leaf:
                new_name = "0001_%s" % migration_name if migration_name else "0001_initial"
            else:
                new_name = "%04i_%s" % (
                    next_number,
                    migration_name or self.suggest_name(migration.operations)[:100],
                )
            name_map[(app_label, migration.name)] = (app_label, new_name)
            next_number += 1
            migration.name = new_name
    # Rewrite dependencies that point at old auto-generated names.
    for app_label, migrations in changes.items():
        for migration in migrations:
            migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
    return changes
|
2013-08-22 05:25:15 +08:00
|
|
|
def _trim_to_apps(self, changes, app_labels):
    """
    Takes changes from arrange_for_graph and a set of app labels, and
    returns a modified set of changes with as many migrations outside
    app_labels removed as possible. Some other apps' migrations may
    survive because they are (transitive) dependencies.
    """
    # First pass: record which apps each app's migrations depend on.
    app_dependencies = {}
    for app_label, migrations in changes.items():
        for migration in migrations:
            for dep_app_label, name in migration.dependencies:
                app_dependencies.setdefault(app_label, set()).add(dep_app_label)
    # Expand the requested set until it is closed under dependencies.
    required_apps = set(app_labels)
    old_required_apps = None
    while old_required_apps != required_apps:
        old_required_apps = set(required_apps)
        for app_label in list(required_apps):
            required_apps.update(app_dependencies.get(app_label, set()))
    # Drop every app whose migrations aren't needed.
    for app_label in list(changes.keys()):
        if app_label not in required_apps:
            del changes[app_label]
    return changes
|
|
|
|
@classmethod
def suggest_name(cls, ops):
    """
    Given a list of operations, suggests a name for the migration they
    might represent. Names are not guaranteed to be unique, but the
    timestamped fallback is chosen to make VCS conflicts unlikely.
    """
    if len(ops) == 1:
        op = ops[0]
        if isinstance(op, operations.CreateModel):
            return op.name.lower()
        elif isinstance(op, operations.DeleteModel):
            return "delete_%s" % op.name.lower()
        elif isinstance(op, operations.AddField):
            return "%s_%s" % (op.model_name.lower(), op.name.lower())
        elif isinstance(op, operations.RemoveField):
            return "remove_%s_%s" % (op.model_name.lower(), op.name.lower())
    elif len(ops) > 1:
        if all(isinstance(o, operations.CreateModel) for o in ops):
            return "_".join(sorted(o.name.lower() for o in ops))
    # Fallback: timestamp-based name for mixed or unrecognised operations.
    return "auto_%s" % datetime.datetime.now().strftime("%Y%m%d_%H%M")
|
|
|
|
@classmethod
def parse_number(cls, name):
    """
    Given a migration name, tries to extract a number from the beginning
    of it and returns it as an int; returns None when the name has no
    numeric prefix.
    """
    match = re.match(r"^(\d+)_", name)
    if match:
        return int(match.group(1))
    return None