2016-04-18 09:55:15 +08:00
|
|
|
import functools
|
2015-01-28 20:35:27 +08:00
|
|
|
import re
|
2014-09-08 08:26:12 +08:00
|
|
|
from itertools import chain
|
|
|
|
|
2014-06-17 01:20:05 +08:00
|
|
|
from django.conf import settings
|
2014-10-07 07:53:21 +08:00
|
|
|
from django.db import models
|
2013-06-07 22:28:38 +08:00
|
|
|
from django.db.migrations import operations
|
|
|
|
from django.db.migrations.migration import Migration
|
2014-07-29 01:47:28 +08:00
|
|
|
from django.db.migrations.operations.models import AlterModelOptions
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db.migrations.optimizer import MigrationOptimizer
|
|
|
|
from django.db.migrations.questioner import MigrationQuestioner
|
2016-05-11 20:19:19 +08:00
|
|
|
from django.db.migrations.utils import (
|
|
|
|
COMPILED_REGEX_TYPE, RegexObject, get_migration_name_timestamp,
|
|
|
|
)
|
2013-06-07 22:28:38 +08:00
|
|
|
|
2014-11-16 03:25:43 +08:00
|
|
|
from .topological_sort import stable_topological_sort
|
|
|
|
|
2013-06-07 22:28:38 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class MigrationAutodetector:
|
2013-06-07 22:28:38 +08:00
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Take a pair of ProjectStates and compare them to see what the first would
|
|
|
|
need doing to make it match the second (the second usually being the
|
|
|
|
project's current state).
|
2013-06-07 22:28:38 +08:00
|
|
|
|
|
|
|
Note that this naturally operates on entire projects at a time,
|
|
|
|
as it's likely that changes interact (for example, you can't
|
|
|
|
add a ForeignKey without having a migration to add the table it
|
2013-06-08 00:56:43 +08:00
|
|
|
depends on first). A user interface may offer single-app usage
|
2013-06-07 22:28:38 +08:00
|
|
|
if it wishes, with the caveat that it may not always be possible.
|
|
|
|
"""
|
|
|
|
|
2013-06-19 22:36:22 +08:00
|
|
|
def __init__(self, from_state, to_state, questioner=None):
|
2013-06-07 22:28:38 +08:00
|
|
|
self.from_state = from_state
|
|
|
|
self.to_state = to_state
|
2013-06-19 22:36:22 +08:00
|
|
|
self.questioner = questioner or MigrationQuestioner()
|
2015-04-01 04:30:39 +08:00
|
|
|
self.existing_apps = {app for app, model in from_state.models}
|
2013-06-07 22:28:38 +08:00
|
|
|
|
2014-08-19 21:24:31 +08:00
|
|
|
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
|
2013-08-22 05:25:15 +08:00
|
|
|
"""
|
2016-03-12 09:17:01 +08:00
|
|
|
Main entry point to produce a list of applicable changes.
|
2017-01-25 07:04:12 +08:00
|
|
|
Take a graph to base names on and an optional set of apps
|
2013-08-22 05:25:15 +08:00
|
|
|
to try and restrict to (restriction is not guaranteed)
|
|
|
|
"""
|
2014-07-10 14:53:16 +08:00
|
|
|
changes = self._detect_changes(convert_apps, graph)
|
2014-08-19 21:24:31 +08:00
|
|
|
changes = self.arrange_for_graph(changes, graph, migration_name)
|
2013-08-22 05:25:15 +08:00
|
|
|
if trim_to_apps:
|
|
|
|
changes = self._trim_to_apps(changes, trim_to_apps)
|
|
|
|
return changes
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
def deep_deconstruct(self, obj):
|
|
|
|
"""
|
|
|
|
Recursive deconstruction for a field and its arguments.
|
|
|
|
Used for full comparison for rename/alter; sometimes a single-level
|
|
|
|
deconstruction will not compare correctly.
|
|
|
|
"""
|
2015-02-14 09:22:38 +08:00
|
|
|
if isinstance(obj, list):
|
|
|
|
return [self.deep_deconstruct(value) for value in obj]
|
|
|
|
elif isinstance(obj, tuple):
|
|
|
|
return tuple(self.deep_deconstruct(value) for value in obj)
|
|
|
|
elif isinstance(obj, dict):
|
|
|
|
return {
|
2014-12-07 05:00:09 +08:00
|
|
|
key: self.deep_deconstruct(value)
|
2015-02-14 09:22:38 +08:00
|
|
|
for key, value in obj.items()
|
|
|
|
}
|
2016-04-18 09:55:15 +08:00
|
|
|
elif isinstance(obj, functools.partial):
|
|
|
|
return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords))
|
2015-08-22 09:39:33 +08:00
|
|
|
elif isinstance(obj, COMPILED_REGEX_TYPE):
|
|
|
|
return RegexObject(obj)
|
2015-02-14 09:22:38 +08:00
|
|
|
elif isinstance(obj, type):
|
|
|
|
# If this is a type that implements 'deconstruct' as an instance method,
|
|
|
|
# avoid treating this as being deconstructible itself - see #22951
|
|
|
|
return obj
|
|
|
|
elif hasattr(obj, 'deconstruct'):
|
|
|
|
deconstructed = obj.deconstruct()
|
|
|
|
if isinstance(obj, models.Field):
|
|
|
|
# we have a field which also returns a name
|
|
|
|
deconstructed = deconstructed[1:]
|
|
|
|
path, args, kwargs = deconstructed
|
|
|
|
return (
|
|
|
|
path,
|
|
|
|
[self.deep_deconstruct(value) for value in args],
|
|
|
|
{
|
|
|
|
key: self.deep_deconstruct(value)
|
|
|
|
for key, value in kwargs.items()
|
|
|
|
},
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
return obj
|
2014-06-06 14:03:33 +08:00
|
|
|
|
|
|
|
def only_relation_agnostic_fields(self, fields):
|
|
|
|
"""
|
|
|
|
Return a definition of the fields that ignores field names and
|
2017-01-25 07:04:12 +08:00
|
|
|
what related fields actually relate to. Used for detecting renames (as,
|
|
|
|
of course, the related fields change during renames).
|
2014-06-06 14:03:33 +08:00
|
|
|
"""
|
|
|
|
fields_def = []
|
2015-03-29 01:57:51 +08:00
|
|
|
for name, field in sorted(fields):
|
2014-06-06 14:03:33 +08:00
|
|
|
deconstruction = self.deep_deconstruct(field)
|
2015-02-26 22:19:17 +08:00
|
|
|
if field.remote_field and field.remote_field.model:
|
2014-06-06 14:03:33 +08:00
|
|
|
del deconstruction[2]['to']
|
|
|
|
fields_def.append(deconstruction)
|
|
|
|
return fields_def
|
|
|
|
|
2014-07-10 14:53:16 +08:00
|
|
|
    def _detect_changes(self, convert_apps=None, graph=None):
        """
        Return a dict of migration plans which will achieve the
        change from from_state to to_state. The dict has app labels
        as keys and a list of migrations as values.

        The resulting migrations aren't specially named, but the names
        do matter for dependencies inside the set.

        convert_apps is the list of apps to convert to use migrations
        (i.e. to make initial migrations for, in the usual case)

        graph is an optional argument that, if provided, can help improve
        dependency generation and avoid potential circular dependencies.
        """
        # The first phase is generating all the operations for each app
        # and gathering them into a big per-app list.
        # Then go through that list, order it, and split into migrations to
        # resolve dependencies caused by M2Ms and FKs.
        self.generated_operations = {}
        self.altered_indexes = {}
        self.altered_constraints = {}

        # Prepare some old/new state and model lists, separating
        # proxy models and ignoring unmigrated apps.
        self.old_apps = self.from_state.concrete_apps
        self.new_apps = self.to_state.apps
        # Model keys are (app_label, model_name) pairs, bucketed three ways:
        # plain managed models, proxies, and unmanaged models.
        self.old_model_keys = set()
        self.old_proxy_keys = set()
        self.old_unmanaged_keys = set()
        self.new_model_keys = set()
        self.new_proxy_keys = set()
        self.new_unmanaged_keys = set()
        for al, mn in self.from_state.models:
            model = self.old_apps.get_model(al, mn)
            if not model._meta.managed:
                self.old_unmanaged_keys.add((al, mn))
            elif al not in self.from_state.real_apps:
                if model._meta.proxy:
                    self.old_proxy_keys.add((al, mn))
                else:
                    self.old_model_keys.add((al, mn))

        for al, mn in self.to_state.models:
            model = self.new_apps.get_model(al, mn)
            if not model._meta.managed:
                self.new_unmanaged_keys.add((al, mn))
            elif (
                al not in self.from_state.real_apps or
                (convert_apps and al in convert_apps)
            ):
                if model._meta.proxy:
                    self.new_proxy_keys.add((al, mn))
                else:
                    self.new_model_keys.add((al, mn))

        # Renames have to come first
        self.generate_renamed_models()

        # Prepare lists of fields and generate through model map
        self._prepare_field_lists()
        self._generate_through_model_map()

        # Generate non-rename model operations
        self.generate_deleted_models()
        self.generate_created_models()
        self.generate_deleted_proxies()
        self.generate_created_proxies()
        self.generate_altered_options()
        self.generate_altered_managers()

        # Create the altered indexes and store them in self.altered_indexes.
        # This avoids the same computation in generate_removed_indexes()
        # and generate_added_indexes().
        self.create_altered_indexes()
        self.create_altered_constraints()
        # Generate index removal operations before field is removed
        self.generate_removed_constraints()
        self.generate_removed_indexes()
        # Generate field operations
        self.generate_renamed_fields()
        self.generate_removed_fields()
        self.generate_added_fields()
        self.generate_altered_fields()
        self.generate_altered_unique_together()
        self.generate_altered_index_together()
        self.generate_added_indexes()
        self.generate_added_constraints()
        self.generate_altered_db_table()
        self.generate_altered_order_with_respect_to()

        # Second phase: order operations, chop into per-app migrations, and
        # run the optimizer over each migration's operation list.
        self._sort_migrations()
        self._build_migration_list(graph)
        self._optimize_migrations()

        return self.migrations
|
|
|
|
|
|
|
|
def _prepare_field_lists(self):
|
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Prepare field lists and a list of the fields that used through models
|
|
|
|
in the old state so dependencies can be made from the through model
|
|
|
|
deletion to the field that uses it.
|
2015-06-04 21:47:44 +08:00
|
|
|
"""
|
2017-08-23 04:23:02 +08:00
|
|
|
self.kept_model_keys = self.old_model_keys & self.new_model_keys
|
|
|
|
self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys
|
|
|
|
self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys
|
2014-06-06 14:03:33 +08:00
|
|
|
self.through_users = {}
|
2017-08-23 04:23:02 +08:00
|
|
|
self.old_field_keys = {
|
|
|
|
(app_label, model_name, x)
|
|
|
|
for app_label, model_name in self.kept_model_keys
|
|
|
|
for x, y in self.from_state.models[
|
|
|
|
app_label,
|
|
|
|
self.renamed_models.get((app_label, model_name), model_name)
|
|
|
|
].fields
|
|
|
|
}
|
|
|
|
self.new_field_keys = {
|
|
|
|
(app_label, model_name, x)
|
|
|
|
for app_label, model_name in self.kept_model_keys
|
|
|
|
for x, y in self.to_state.models[app_label, model_name].fields
|
|
|
|
}
|
2014-06-16 03:06:48 +08:00
|
|
|
|
2015-06-04 21:47:44 +08:00
|
|
|
def _generate_through_model_map(self):
|
2017-01-25 07:04:12 +08:00
|
|
|
"""Through model map generation."""
|
2014-06-16 03:06:48 +08:00
|
|
|
for app_label, model_name in sorted(self.old_model_keys):
|
|
|
|
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
|
|
|
|
old_model_state = self.from_state.models[app_label, old_model_name]
|
2014-06-06 14:03:33 +08:00
|
|
|
for field_name, field in old_model_state.fields:
|
2015-01-07 08:16:35 +08:00
|
|
|
old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
|
2016-04-04 08:37:32 +08:00
|
|
|
if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
|
|
|
|
not old_field.remote_field.through._meta.auto_created):
|
2014-06-06 14:03:33 +08:00
|
|
|
through_key = (
|
2015-02-26 22:19:17 +08:00
|
|
|
old_field.remote_field.through._meta.app_label,
|
|
|
|
old_field.remote_field.through._meta.model_name,
|
2014-06-06 14:03:33 +08:00
|
|
|
)
|
|
|
|
self.through_users[through_key] = (app_label, old_model_name, field_name)
|
|
|
|
|
2015-06-04 21:47:44 +08:00
|
|
|
    def _build_migration_list(self, graph=None):
        """
        Chop the lists of operations up into migrations with dependencies on
        each other. Do this by going through an app's list of operations until
        one is found that has an outgoing dependency that isn't in another
        app's migration yet (hasn't been chopped off its list). Then chop off
        the operations before it into a migration and move onto the next app.
        If the loops completes without doing anything, there's a circular
        dependency (which _should_ be impossible as the operations are
        all split at this point so they can't depend and be depended on).
        """
        self.migrations = {}
        num_ops = sum(len(x) for x in self.generated_operations.values())
        # chop_mode is enabled after a full pass makes no progress; it permits
        # creating partial migrations and falling back to __first__/leaf-node
        # dependencies for apps we can't resolve precisely.
        chop_mode = False
        while num_ops:
            # On every iteration, we step through all the apps and see if there
            # is a completed set of operations.
            # If we find that a subset of the operations are complete we can
            # try to chop it off from the rest and continue, but we only
            # do this if we've already been through the list once before
            # without any chopping and nothing has changed.
            for app_label in sorted(self.generated_operations):
                chopped = []
                dependencies = set()
                for operation in list(self.generated_operations[app_label]):
                    deps_satisfied = True
                    operation_dependencies = set()
                    # Each dep is (app_label, object_name, field_name, type);
                    # see add_operation().
                    for dep in operation._auto_deps:
                        is_swappable_dep = dep[0] == '__setting__'
                        if is_swappable_dep:
                            # We need to temporarily resolve the swappable dependency to prevent
                            # circular references. While keeping the dependency checks on the
                            # resolved model we still add the swappable dependencies.
                            # See #23322
                            resolved_app_label, resolved_object_name = getattr(settings, dep[1]).split('.')
                            original_dep = dep
                            dep = (resolved_app_label, resolved_object_name.lower(), dep[2], dep[3])
                        if dep[0] != app_label and dep[0] != "__setting__":
                            # External app dependency. See if it's not yet
                            # satisfied.
                            for other_operation in self.generated_operations.get(dep[0], []):
                                if self.check_dependency(other_operation, dep):
                                    deps_satisfied = False
                                    break
                            if not deps_satisfied:
                                break
                        else:
                            if is_swappable_dep:
                                # Record the unresolved swappable reference so the
                                # written migration depends on the setting, not the
                                # currently-resolved model.
                                operation_dependencies.add((original_dep[0], original_dep[1]))
                            elif dep[0] in self.migrations:
                                operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name))
                            else:
                                # If we can't find the other app, we add a first/last dependency,
                                # but only if we've already been through once and checked everything
                                if chop_mode:
                                    # If the app already exists, we add a dependency on the last migration,
                                    # as we don't know which migration contains the target field.
                                    # If it's not yet migrated or has no migrations, we use __first__
                                    if graph and graph.leaf_nodes(dep[0]):
                                        operation_dependencies.add(graph.leaf_nodes(dep[0])[0])
                                    else:
                                        operation_dependencies.add((dep[0], "__first__"))
                                else:
                                    deps_satisfied = False
                    if deps_satisfied:
                        chopped.append(operation)
                        dependencies.update(operation_dependencies)
                        del self.generated_operations[app_label][0]
                    else:
                        break
                # Make a migration! Well, only if there's stuff to put in it
                if dependencies or chopped:
                    if not self.generated_operations[app_label] or chop_mode:
                        subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []})
                        instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label)
                        instance.dependencies = list(dependencies)
                        instance.operations = chopped
                        instance.initial = app_label not in self.existing_apps
                        self.migrations.setdefault(app_label, []).append(instance)
                        chop_mode = False
                    else:
                        # Couldn't finish the app's list; put the chopped-off
                        # prefix back and try again on a later pass.
                        self.generated_operations[app_label] = chopped + self.generated_operations[app_label]
            new_num_ops = sum(len(x) for x in self.generated_operations.values())
            if new_num_ops == num_ops:
                if not chop_mode:
                    chop_mode = True
                else:
                    raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations)
            num_ops = new_num_ops
|
|
|
|
|
2015-06-04 21:47:44 +08:00
|
|
|
def _sort_migrations(self):
|
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Reorder to make things possible. Reordering may be needed so FKs work
|
|
|
|
nicely inside the same app.
|
2015-06-04 21:47:44 +08:00
|
|
|
"""
|
|
|
|
for app_label, ops in sorted(self.generated_operations.items()):
|
|
|
|
# construct a dependency graph for intra-app dependencies
|
|
|
|
dependency_graph = {op: set() for op in ops}
|
|
|
|
for op in ops:
|
|
|
|
for dep in op._auto_deps:
|
|
|
|
if dep[0] == app_label:
|
|
|
|
for op2 in ops:
|
|
|
|
if self.check_dependency(op2, dep):
|
|
|
|
dependency_graph[op].add(op2)
|
|
|
|
|
|
|
|
# we use a stable sort for deterministic tests & general behavior
|
|
|
|
self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph)
|
|
|
|
|
|
|
|
def _optimize_migrations(self):
|
|
|
|
# Add in internal dependencies among the migrations
|
2014-06-06 14:03:33 +08:00
|
|
|
for app_label, migrations in self.migrations.items():
|
|
|
|
for m1, m2 in zip(migrations, migrations[1:]):
|
|
|
|
m2.dependencies.append((app_label, m1.name))
|
|
|
|
|
|
|
|
# De-dupe dependencies
|
2017-12-07 06:17:59 +08:00
|
|
|
for migrations in self.migrations.values():
|
2014-06-06 14:03:33 +08:00
|
|
|
for migration in migrations:
|
|
|
|
migration.dependencies = list(set(migration.dependencies))
|
|
|
|
|
|
|
|
# Optimize migrations
|
|
|
|
for app_label, migrations in self.migrations.items():
|
|
|
|
for migration in migrations:
|
|
|
|
migration.operations = MigrationOptimizer().optimize(migration.operations, app_label=app_label)
|
|
|
|
|
|
|
|
    def check_dependency(self, operation, dependency):
        """
        Return True if the given operation depends on the given dependency,
        False otherwise.

        dependency is a tuple of (app_label, object_name, field_name, type)
        as recorded by add_operation(); type is True/False for create/delete
        or a string for the special cases below.
        """
        # Created model
        if dependency[2] is None and dependency[3] is True:
            return (
                isinstance(operation, operations.CreateModel) and
                operation.name_lower == dependency[1].lower()
            )
        # Created field
        elif dependency[2] is not None and dependency[3] is True:
            # Satisfied either by the model's own CreateModel (field included
            # inline) or by a later AddField.
            return (
                (
                    isinstance(operation, operations.CreateModel) and
                    operation.name_lower == dependency[1].lower() and
                    any(dependency[2] == x for x, y in operation.fields)
                ) or
                (
                    isinstance(operation, operations.AddField) and
                    operation.model_name_lower == dependency[1].lower() and
                    operation.name_lower == dependency[2].lower()
                )
            )
        # Removed field
        elif dependency[2] is not None and dependency[3] is False:
            return (
                isinstance(operation, operations.RemoveField) and
                operation.model_name_lower == dependency[1].lower() and
                operation.name_lower == dependency[2].lower()
            )
        # Removed model
        elif dependency[2] is None and dependency[3] is False:
            return (
                isinstance(operation, operations.DeleteModel) and
                operation.name_lower == dependency[1].lower()
            )
        # Field being altered
        elif dependency[2] is not None and dependency[3] == "alter":
            return (
                isinstance(operation, operations.AlterField) and
                operation.model_name_lower == dependency[1].lower() and
                operation.name_lower == dependency[2].lower()
            )
        # order_with_respect_to being unset for a field
        elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
            # Matches an AlterOrderWithRespectTo that points the ordering at
            # something other than this field (i.e. unsets it for the field).
            return (
                isinstance(operation, operations.AlterOrderWithRespectTo) and
                operation.name_lower == dependency[1].lower() and
                (operation.order_with_respect_to or "").lower() != dependency[2].lower()
            )
        # Field is removed and part of an index/unique_together
        elif dependency[2] is not None and dependency[3] == "foo_together_change":
            return (
                isinstance(operation, (operations.AlterUniqueTogether,
                                       operations.AlterIndexTogether)) and
                operation.name_lower == dependency[1].lower()
            )
        # Unknown dependency. Raise an error.
        else:
            raise ValueError("Can't handle dependency %r" % (dependency,))
|
2014-05-02 10:50:56 +08:00
|
|
|
|
2014-08-20 10:50:14 +08:00
|
|
|
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
|
2014-06-06 14:03:33 +08:00
|
|
|
# Dependencies are (app_label, model_name, field_name, create/delete as True/False)
|
|
|
|
operation._auto_deps = dependencies or []
|
2014-08-20 10:50:14 +08:00
|
|
|
if beginning:
|
|
|
|
self.generated_operations.setdefault(app_label, []).insert(0, operation)
|
|
|
|
else:
|
|
|
|
self.generated_operations.setdefault(app_label, []).append(operation)
|
2014-06-06 14:03:33 +08:00
|
|
|
|
2014-06-17 01:20:05 +08:00
|
|
|
def swappable_first_key(self, item):
|
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Place potential swappable models first in lists of created models (only
|
|
|
|
real way to solve #22783).
|
2014-06-17 01:20:05 +08:00
|
|
|
"""
|
2017-09-07 20:16:21 +08:00
|
|
|
try:
|
2014-06-17 01:20:05 +08:00
|
|
|
model = self.new_apps.get_model(item[0], item[1])
|
|
|
|
base_names = [base.__name__ for base in model.__bases__]
|
|
|
|
string_version = "%s.%s" % (item[0], item[1])
|
|
|
|
if (
|
|
|
|
model._meta.swappable or
|
|
|
|
"AbstractUser" in base_names or
|
|
|
|
"AbstractBaseUser" in base_names or
|
|
|
|
settings.AUTH_USER_MODEL.lower() == string_version.lower()
|
|
|
|
):
|
|
|
|
return ("___" + item[0], "___" + item[1])
|
2017-09-07 20:16:21 +08:00
|
|
|
except LookupError:
|
|
|
|
pass
|
2014-06-17 01:20:05 +08:00
|
|
|
return item
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
    def generate_renamed_models(self):
        """
        Find any renamed models, generate the operations for them, and remove
        the old entry from the model lists. Must be run before other
        model-level generation.
        """
        # (app_label, new_name) -> old_name; consulted by later phases to
        # look up a model's old-state entry.
        self.renamed_models = {}
        # "app_label.OldName" -> "app_label.NewName" for relation fixups.
        self.renamed_models_rel = {}
        added_models = self.new_model_keys - self.old_model_keys
        for app_label, model_name in sorted(added_models):
            model_state = self.to_state.models[app_label, model_name]
            model_fields_def = self.only_relation_agnostic_fields(model_state.fields)

            removed_models = self.old_model_keys - self.new_model_keys
            for rem_app_label, rem_model_name in removed_models:
                # Only consider renames within the same app.
                if rem_app_label == app_label:
                    rem_model_state = self.from_state.models[rem_app_label, rem_model_name]
                    rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields)
                    # A rename candidate: an added model whose relation-agnostic
                    # field definitions exactly match a removed model's.
                    if model_fields_def == rem_model_fields_def:
                        if self.questioner.ask_rename_model(rem_model_state, model_state):
                            model_opts = self.new_apps.get_model(app_label, model_name)._meta
                            dependencies = []
                            for field in model_opts.get_fields():
                                if field.is_relation:
                                    dependencies.extend(self._get_dependencies_for_foreign_key(field))
                            self.add_operation(
                                app_label,
                                operations.RenameModel(
                                    old_name=rem_model_state.name,
                                    new_name=model_state.name,
                                ),
                                dependencies=dependencies,
                            )
                            self.renamed_models[app_label, model_name] = rem_model_name
                            renamed_models_rel_key = '%s.%s' % (rem_model_state.app_label, rem_model_state.name)
                            self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % (
                                model_state.app_label,
                                model_state.name,
                            )
                            # Treat the renamed model as "kept" under its new key.
                            self.old_model_keys.remove((rem_app_label, rem_model_name))
                            self.old_model_keys.add((app_label, model_name))
                            break
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
    def generate_created_models(self):
        """
        Find all new models (both managed and unmanaged) and make create
        operations for them as well as separate operations to create any
        foreign key or M2M relationships (these are optimized later, if
        possible).

        Defer any model options that refer to collections of fields that might
        be deferred (e.g. unique_together, index_together).
        """
        old_keys = self.old_model_keys | self.old_unmanaged_keys
        added_models = self.new_model_keys - old_keys
        added_unmanaged_models = self.new_unmanaged_keys - old_keys
        # Swappable models (e.g. a custom user model) are sorted first so the
        # operations that may be swapped in for come before their dependents.
        all_added_models = chain(
            sorted(added_models, key=self.swappable_first_key, reverse=True),
            sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True)
        )
        for app_label, model_name in all_added_models:
            model_state = self.to_state.models[app_label, model_name]
            model_opts = self.new_apps.get_model(app_label, model_name)._meta
            # Gather related fields
            related_fields = {}
            primary_key_rel = None
            for field in model_opts.local_fields:
                if field.remote_field:
                    if field.remote_field.model:
                        # A relational primary key (parent links aside) is
                        # handled via a dependency, not a separate AddField.
                        if field.primary_key:
                            primary_key_rel = field.remote_field.model
                        elif not field.remote_field.parent_link:
                            related_fields[field.name] = field
                    # through will be none on M2Ms on swapped-out models;
                    # we can treat lack of through as auto_created=True, though.
                    if (getattr(field.remote_field, "through", None) and
                            not field.remote_field.through._meta.auto_created):
                        related_fields[field.name] = field
            for field in model_opts.local_many_to_many:
                if field.remote_field.model:
                    related_fields[field.name] = field
                if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
                    related_fields[field.name] = field
            # Are there indexes/unique|index_together to defer?
            # Popped from options so CreateModel doesn't carry them; they are
            # re-added below as separate operations that can depend on fields.
            indexes = model_state.options.pop('indexes')
            constraints = model_state.options.pop('constraints')
            unique_together = model_state.options.pop('unique_together', None)
            index_together = model_state.options.pop('index_together', None)
            order_with_respect_to = model_state.options.pop('order_with_respect_to', None)
            # Depend on the deletion of any possible proxy version of us
            dependencies = [
                (app_label, model_name, None, False),
            ]
            # Depend on all bases
            for base in model_state.bases:
                if isinstance(base, str) and "." in base:
                    base_app_label, base_name = base.split(".", 1)
                    dependencies.append((base_app_label, base_name, None, True))
            # Depend on the other end of the primary key if it's a relation
            if primary_key_rel:
                dependencies.append((
                    primary_key_rel._meta.app_label,
                    primary_key_rel._meta.object_name,
                    None,
                    True
                ))
            # Generate creation operation
            self.add_operation(
                app_label,
                operations.CreateModel(
                    name=model_state.name,
                    # Relational fields are excluded here and added as
                    # separate AddField operations below.
                    fields=[d for d in model_state.fields if d[0] not in related_fields],
                    options=model_state.options,
                    bases=model_state.bases,
                    managers=model_state.managers,
                ),
                dependencies=dependencies,
                beginning=True,
            )

            # Don't add operations which modify the database for unmanaged models
            if not model_opts.managed:
                continue

            # Generate operations for each related field
            for name, field in sorted(related_fields.items()):
                dependencies = self._get_dependencies_for_foreign_key(field)
                # Depend on our own model being created
                dependencies.append((app_label, model_name, None, True))
                # Make operation
                self.add_operation(
                    app_label,
                    operations.AddField(
                        model_name=model_name,
                        name=name,
                        field=field,
                    ),
                    dependencies=list(set(dependencies)),
                )
            # Generate other opns
            related_dependencies = [
                (app_label, model_name, name, True)
                for name in sorted(related_fields)
            ]
            related_dependencies.append((app_label, model_name, None, True))
            for index in indexes:
                self.add_operation(
                    app_label,
                    operations.AddIndex(
                        model_name=model_name,
                        index=index,
                    ),
                    dependencies=related_dependencies,
                )
            for constraint in constraints:
                self.add_operation(
                    app_label,
                    operations.AddConstraint(
                        model_name=model_name,
                        constraint=constraint,
                    ),
                    dependencies=related_dependencies,
                )
            if unique_together:
                self.add_operation(
                    app_label,
                    operations.AlterUniqueTogether(
                        name=model_name,
                        unique_together=unique_together,
                    ),
                    dependencies=related_dependencies
                )
            if index_together:
                self.add_operation(
                    app_label,
                    operations.AlterIndexTogether(
                        name=model_name,
                        index_together=index_together,
                    ),
                    dependencies=related_dependencies
                )
            if order_with_respect_to:
                self.add_operation(
                    app_label,
                    operations.AlterOrderWithRespectTo(
                        name=model_name,
                        order_with_respect_to=order_with_respect_to,
                    ),
                    dependencies=[
                        (app_label, model_name, order_with_respect_to, True),
                        (app_label, model_name, None, True),
                    ]
                )

            # Fix relationships if the model changed from a proxy model to a
            # concrete model.
            if (app_label, model_name) in self.old_proxy_keys:
                for related_object in model_opts.related_objects:
                    self.add_operation(
                        related_object.related_model._meta.app_label,
                        operations.AlterField(
                            model_name=related_object.related_model._meta.object_name,
                            name=related_object.field.name,
                            field=related_object.field,
                        ),
                        dependencies=[(app_label, model_name, None, True)],
                    )
|
2014-09-08 08:26:12 +08:00
|
|
|
def generate_created_proxies(self):
|
2014-06-16 07:01:49 +08:00
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Make CreateModel statements for proxy models. Use the same statements
|
|
|
|
as that way there's less code duplication, but of course for proxy
|
|
|
|
models it's safe to skip all the pointless field stuff and just chuck
|
|
|
|
out an operation.
|
2014-06-16 07:01:49 +08:00
|
|
|
"""
|
2017-08-23 04:23:02 +08:00
|
|
|
added = self.new_proxy_keys - self.old_proxy_keys
|
2014-08-13 03:49:20 +08:00
|
|
|
for app_label, model_name in sorted(added):
|
2014-06-16 07:01:49 +08:00
|
|
|
model_state = self.to_state.models[app_label, model_name]
|
2015-05-14 02:51:18 +08:00
|
|
|
assert model_state.options.get("proxy")
|
2014-06-16 07:01:49 +08:00
|
|
|
# Depend on the deletion of any possible non-proxy version of us
|
|
|
|
dependencies = [
|
|
|
|
(app_label, model_name, None, False),
|
|
|
|
]
|
|
|
|
# Depend on all bases
|
|
|
|
for base in model_state.bases:
|
2016-12-29 23:27:49 +08:00
|
|
|
if isinstance(base, str) and "." in base:
|
2014-06-16 07:01:49 +08:00
|
|
|
base_app_label, base_name = base.split(".", 1)
|
2014-07-17 00:59:08 +08:00
|
|
|
dependencies.append((base_app_label, base_name, None, True))
|
2014-06-16 07:01:49 +08:00
|
|
|
# Generate creation operation
|
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.CreateModel(
|
|
|
|
name=model_state.name,
|
|
|
|
fields=[],
|
|
|
|
options=model_state.options,
|
|
|
|
bases=model_state.bases,
|
2014-12-13 06:19:58 +08:00
|
|
|
managers=model_state.managers,
|
2014-06-16 07:01:49 +08:00
|
|
|
),
|
|
|
|
# Depend on the deletion of any possible non-proxy version of us
|
2014-06-16 08:45:15 +08:00
|
|
|
dependencies=dependencies,
|
2014-06-16 07:01:49 +08:00
|
|
|
)
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
    def generate_deleted_models(self):
        """
        Find all deleted models (managed and unmanaged) and make delete
        operations for them as well as separate operations to delete any
        foreign key or M2M relationships (these are optimized later, if
        possible).

        Also bring forward removal of any model options that refer to
        collections of fields - the inverse of generate_created_models().
        """
        new_keys = self.new_model_keys | self.new_unmanaged_keys
        deleted_models = self.old_model_keys - new_keys
        deleted_unmanaged_models = self.old_unmanaged_keys - new_keys
        all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models))
        for app_label, model_name in all_deleted_models:
            model_state = self.from_state.models[app_label, model_name]
            model = self.old_apps.get_model(app_label, model_name)
            if not model._meta.managed:
                # Skip here, no need to handle fields for unmanaged models
                continue

            # Gather related fields
            related_fields = {}
            for field in model._meta.local_fields:
                if field.remote_field:
                    if field.remote_field.model:
                        related_fields[field.name] = field
                    # through will be none on M2Ms on swapped-out models;
                    # we can treat lack of through as auto_created=True, though.
                    if (getattr(field.remote_field, "through", None) and
                            not field.remote_field.through._meta.auto_created):
                        related_fields[field.name] = field
            for field in model._meta.local_many_to_many:
                if field.remote_field.model:
                    related_fields[field.name] = field
                if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
                    related_fields[field.name] = field
            # Generate option removal first
            unique_together = model_state.options.pop('unique_together', None)
            index_together = model_state.options.pop('index_together', None)
            if unique_together:
                self.add_operation(
                    app_label,
                    operations.AlterUniqueTogether(
                        name=model_name,
                        unique_together=None,
                    )
                )
            if index_together:
                self.add_operation(
                    app_label,
                    operations.AlterIndexTogether(
                        name=model_name,
                        index_together=None,
                    )
                )
            # Then remove each related field
            for name in sorted(related_fields):
                self.add_operation(
                    app_label,
                    operations.RemoveField(
                        model_name=model_name,
                        name=name,
                    )
                )
            # Finally, remove the model.
            # This depends on both the removal/alteration of all incoming fields
            # and the removal of all its own related fields, and if it's
            # a through model the field that references it.
            dependencies = []
            for related_object in model._meta.related_objects:
                related_object_app_label = related_object.related_model._meta.app_label
                object_name = related_object.related_model._meta.object_name
                field_name = related_object.field.name
                dependencies.append((related_object_app_label, object_name, field_name, False))
                if not related_object.many_to_many:
                    # "alter" dependency: the incoming FK may instead be
                    # altered (e.g. retargeted) rather than removed.
                    dependencies.append((related_object_app_label, object_name, field_name, "alter"))

            for name in sorted(related_fields):
                dependencies.append((app_label, model_name, name, False))
            # We're referenced in another field's through=
            through_user = self.through_users.get((app_label, model_state.name_lower))
            if through_user:
                dependencies.append((through_user[0], through_user[1], through_user[2], False))
            # Finally, make the operation, deduping any dependencies
            self.add_operation(
                app_label,
                operations.DeleteModel(
                    name=model_state.name,
                ),
                dependencies=list(set(dependencies)),
            )
|
2014-09-08 08:26:12 +08:00
|
|
|
def generate_deleted_proxies(self):
|
2017-01-25 07:04:12 +08:00
|
|
|
"""Make DeleteModel options for proxy models."""
|
2017-08-23 04:23:02 +08:00
|
|
|
deleted = self.old_proxy_keys - self.new_proxy_keys
|
2014-08-13 03:49:20 +08:00
|
|
|
for app_label, model_name in sorted(deleted):
|
2014-06-16 07:01:49 +08:00
|
|
|
model_state = self.from_state.models[app_label, model_name]
|
2015-05-14 02:51:18 +08:00
|
|
|
assert model_state.options.get("proxy")
|
2014-06-16 07:01:49 +08:00
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.DeleteModel(
|
|
|
|
name=model_state.name,
|
|
|
|
),
|
|
|
|
)
|
|
|
|
|
2014-07-30 00:38:08 +08:00
|
|
|
    def generate_renamed_fields(self):
        """Work out renamed fields."""
        self.renamed_fields = {}
        for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
            # Account for a model rename already detected for this model.
            old_model_name = self.renamed_models.get((app_label, model_name), model_name)
            old_model_state = self.from_state.models[app_label, old_model_name]
            field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
            # Scan to see if this is actually a rename!
            field_dec = self.deep_deconstruct(field)
            for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys):
                if rem_app_label == app_label and rem_model_name == model_name:
                    old_field = old_model_state.get_field_by_name(rem_field_name)
                    old_field_dec = self.deep_deconstruct(old_field)
                    # Map a renamed relation target so the deconstructed
                    # 'to' values compare equal.
                    if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]:
                        old_rel_to = old_field_dec[2]['to']
                        if old_rel_to in self.renamed_models_rel:
                            old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to]
                    old_field.set_attributes_from_name(rem_field_name)
                    old_db_column = old_field.get_attname_column()[1]
                    if (old_field_dec == field_dec or (
                            # Was the field renamed and db_column equal to the
                            # old field's column added?
                            old_field_dec[0:2] == field_dec[0:2] and
                            dict(old_field_dec[2], db_column=old_db_column) == field_dec[2])):
                        if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
                            self.add_operation(
                                app_label,
                                operations.RenameField(
                                    model_name=model_name,
                                    old_name=rem_field_name,
                                    new_name=field_name,
                                )
                            )
                            # Treat the old key as now present under the new
                            # name so add/remove detection skips this field.
                            self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
                            self.old_field_keys.add((app_label, model_name, field_name))
                            self.renamed_fields[app_label, model_name, field_name] = rem_field_name
                            break
|
|
|
|
def generate_added_fields(self):
|
2017-01-25 07:04:12 +08:00
|
|
|
"""Make AddField operations."""
|
2014-07-30 00:38:08 +08:00
|
|
|
for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
|
2014-12-03 02:25:46 +08:00
|
|
|
self._generate_added_field(app_label, model_name, field_name)
|
|
|
|
|
|
|
|
    def _generate_added_field(self, app_label, model_name, field_name):
        """Emit an AddField operation for one newly added field."""
        field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
        # Fields that are foreignkeys/m2ms depend on stuff
        dependencies = []
        if field.remote_field and field.remote_field.model:
            dependencies.extend(self._get_dependencies_for_foreign_key(field))
        # You can't just add NOT NULL fields with no default or fields
        # which don't allow empty strings as default.
        time_fields = (models.DateField, models.DateTimeField, models.TimeField)
        preserve_default = (
            field.null or field.has_default() or field.many_to_many or
            (field.blank and field.empty_strings_allowed) or
            (isinstance(field, time_fields) and field.auto_now)
        )
        if not preserve_default:
            # Ask the questioner for a one-off default; clone first so the
            # field object in project state isn't mutated.
            field = field.clone()
            if isinstance(field, time_fields) and field.auto_now_add:
                field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
            else:
                field.default = self.questioner.ask_not_null_addition(field_name, model_name)
        self.add_operation(
            app_label,
            operations.AddField(
                model_name=model_name,
                name=field_name,
                field=field,
                preserve_default=preserve_default,
            ),
            dependencies=dependencies,
        )
|
|
|
|
|
def generate_removed_fields(self):
|
2017-01-25 07:04:12 +08:00
|
|
|
"""Make RemoveField operations."""
|
2014-06-06 14:03:33 +08:00
|
|
|
for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys):
|
2014-12-03 02:25:46 +08:00
|
|
|
self._generate_removed_field(app_label, model_name, field_name)
|
|
|
|
|
|
|
|
def _generate_removed_field(self, app_label, model_name, field_name):
|
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.RemoveField(
|
|
|
|
model_name=model_name,
|
|
|
|
name=field_name,
|
|
|
|
),
|
|
|
|
# We might need to depend on the removal of an
|
|
|
|
# order_with_respect_to or index/unique_together operation;
|
|
|
|
# this is safely ignored if there isn't one
|
|
|
|
dependencies=[
|
|
|
|
(app_label, model_name, field_name, "order_wrt_unset"),
|
|
|
|
(app_label, model_name, field_name, "foo_together_change"),
|
|
|
|
],
|
|
|
|
)
|
2014-06-06 14:03:33 +08:00
|
|
|
|
|
|
|
    def generate_altered_fields(self):
        """
        Make AlterField operations, or possibly RemovedField/AddField if alter
        isn't possible.
        """
        for app_label, model_name, field_name in sorted(self.old_field_keys & self.new_field_keys):
            # Did the field change?
            old_model_name = self.renamed_models.get((app_label, model_name), model_name)
            old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name)
            old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(old_field_name)
            new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
            # Implement any model renames on relations; these are handled by RenameModel
            # so we need to exclude them from the comparison
            if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None):
                rename_key = (
                    new_field.remote_field.model._meta.app_label,
                    new_field.remote_field.model._meta.model_name,
                )
                if rename_key in self.renamed_models:
                    new_field.remote_field.model = old_field.remote_field.model
                # Handle ForeignKey which can only have a single to_field.
                remote_field_name = getattr(new_field.remote_field, 'field_name', None)
                if remote_field_name:
                    to_field_rename_key = rename_key + (remote_field_name,)
                    if to_field_rename_key in self.renamed_fields:
                        new_field.remote_field.field_name = old_field.remote_field.field_name
                # Handle ForeignObjects which can have multiple from_fields/to_fields.
                from_fields = getattr(new_field, 'from_fields', None)
                if from_fields:
                    from_rename_key = (app_label, model_name)
                    new_field.from_fields = tuple([
                        self.renamed_fields.get(from_rename_key + (from_field,), from_field)
                        for from_field in from_fields
                    ])
                    new_field.to_fields = tuple([
                        self.renamed_fields.get(rename_key + (to_field,), to_field)
                        for to_field in new_field.to_fields
                    ])
            # Same normalization for a renamed explicit M2M through model.
            if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "through", None):
                rename_key = (
                    new_field.remote_field.through._meta.app_label,
                    new_field.remote_field.through._meta.model_name,
                )
                if rename_key in self.renamed_models:
                    new_field.remote_field.through = old_field.remote_field.through
            old_field_dec = self.deep_deconstruct(old_field)
            new_field_dec = self.deep_deconstruct(new_field)
            if old_field_dec != new_field_dec:
                both_m2m = old_field.many_to_many and new_field.many_to_many
                neither_m2m = not old_field.many_to_many and not new_field.many_to_many
                if both_m2m or neither_m2m:
                    # Either both fields are m2m or neither is
                    preserve_default = True
                    if (old_field.null and not new_field.null and not new_field.has_default() and
                            not new_field.many_to_many):
                        # null -> NOT NULL without a default: ask for a
                        # one-off default to use for existing rows.
                        field = new_field.clone()
                        new_default = self.questioner.ask_not_null_alteration(field_name, model_name)
                        if new_default is not models.NOT_PROVIDED:
                            field.default = new_default
                            preserve_default = False
                    else:
                        field = new_field
                    self.add_operation(
                        app_label,
                        operations.AlterField(
                            model_name=model_name,
                            name=field_name,
                            field=field,
                            preserve_default=preserve_default,
                        )
                    )
                else:
                    # We cannot alter between m2m and concrete fields
                    self._generate_removed_field(app_label, model_name, field_name)
                    self._generate_added_field(app_label, model_name, field_name)
|
|
2016-06-20 23:50:05 +08:00
|
|
|
def create_altered_indexes(self):
|
|
|
|
option_name = operations.AddIndex.option_name
|
|
|
|
for app_label, model_name in sorted(self.kept_model_keys):
|
|
|
|
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
|
|
|
|
old_model_state = self.from_state.models[app_label, old_model_name]
|
|
|
|
new_model_state = self.to_state.models[app_label, model_name]
|
|
|
|
|
|
|
|
old_indexes = old_model_state.options[option_name]
|
|
|
|
new_indexes = new_model_state.options[option_name]
|
|
|
|
add_idx = [idx for idx in new_indexes if idx not in old_indexes]
|
|
|
|
rem_idx = [idx for idx in old_indexes if idx not in new_indexes]
|
|
|
|
|
|
|
|
self.altered_indexes.update({
|
|
|
|
(app_label, model_name): {
|
|
|
|
'added_indexes': add_idx, 'removed_indexes': rem_idx,
|
|
|
|
}
|
|
|
|
})
|
|
|
|
|
|
|
|
def generate_added_indexes(self):
|
|
|
|
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
|
|
|
|
for index in alt_indexes['added_indexes']:
|
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.AddIndex(
|
|
|
|
model_name=model_name,
|
|
|
|
index=index,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
def generate_removed_indexes(self):
|
|
|
|
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
|
|
|
|
for index in alt_indexes['removed_indexes']:
|
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.RemoveIndex(
|
|
|
|
model_name=model_name,
|
|
|
|
name=index.name,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2016-11-05 21:12:12 +08:00
|
|
|
def create_altered_constraints(self):
|
|
|
|
option_name = operations.AddConstraint.option_name
|
|
|
|
for app_label, model_name in sorted(self.kept_model_keys):
|
|
|
|
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
|
|
|
|
old_model_state = self.from_state.models[app_label, old_model_name]
|
|
|
|
new_model_state = self.to_state.models[app_label, model_name]
|
|
|
|
|
|
|
|
old_constraints = old_model_state.options[option_name]
|
|
|
|
new_constraints = new_model_state.options[option_name]
|
|
|
|
add_constraints = [c for c in new_constraints if c not in old_constraints]
|
|
|
|
rem_constraints = [c for c in old_constraints if c not in new_constraints]
|
|
|
|
|
|
|
|
self.altered_constraints.update({
|
|
|
|
(app_label, model_name): {
|
|
|
|
'added_constraints': add_constraints, 'removed_constraints': rem_constraints,
|
|
|
|
}
|
|
|
|
})
|
|
|
|
|
|
|
|
def generate_added_constraints(self):
|
|
|
|
for (app_label, model_name), alt_constraints in self.altered_constraints.items():
|
|
|
|
for constraint in alt_constraints['added_constraints']:
|
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.AddConstraint(
|
|
|
|
model_name=model_name,
|
|
|
|
constraint=constraint,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
def generate_removed_constraints(self):
|
|
|
|
for (app_label, model_name), alt_constraints in self.altered_constraints.items():
|
|
|
|
for constraint in alt_constraints['removed_constraints']:
|
|
|
|
self.add_operation(
|
|
|
|
app_label,
|
|
|
|
operations.RemoveConstraint(
|
|
|
|
model_name=model_name,
|
|
|
|
name=constraint.name,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2016-07-16 22:41:31 +08:00
|
|
|
def _get_dependencies_for_foreign_key(self, field):
|
2015-11-07 22:26:25 +08:00
|
|
|
# Account for FKs to swappable models
|
|
|
|
swappable_setting = getattr(field, 'swappable_setting', None)
|
|
|
|
if swappable_setting is not None:
|
|
|
|
dep_app_label = "__setting__"
|
|
|
|
dep_object_name = swappable_setting
|
|
|
|
else:
|
|
|
|
dep_app_label = field.remote_field.model._meta.app_label
|
|
|
|
dep_object_name = field.remote_field.model._meta.object_name
|
|
|
|
dependencies = [(dep_app_label, dep_object_name, None, True)]
|
|
|
|
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
|
|
|
|
dependencies.append((
|
|
|
|
field.remote_field.through._meta.app_label,
|
|
|
|
field.remote_field.through._meta.object_name,
|
|
|
|
None,
|
|
|
|
True,
|
|
|
|
))
|
|
|
|
return dependencies
|
|
|
|
|
2014-06-25 20:53:09 +08:00
|
|
|
    def _generate_altered_foo_together(self, operation):
        """
        Shared implementation for unique_together/index_together changes:
        emit *operation* (AlterUniqueTogether or AlterIndexTogether) whenever
        the option's value differs between old and new state.
        """
        option_name = operation.option_name
        for app_label, model_name in sorted(self.kept_model_keys):
            old_model_name = self.renamed_models.get((app_label, model_name), model_name)
            old_model_state = self.from_state.models[app_label, old_model_name]
            new_model_state = self.to_state.models[app_label, model_name]

            # We run the old version through the field renames to account for those
            old_value = old_model_state.options.get(option_name)
            old_value = {
                tuple(
                    self.renamed_fields.get((app_label, model_name, n), n)
                    for n in unique
                )
                for unique in old_value
            } if old_value else set()

            new_value = new_model_state.options.get(option_name)
            new_value = set(new_value) if new_value else set()

            if old_value != new_value:
                # Any FK named in the new value must exist before the
                # constraint/index is (re)created.
                dependencies = []
                for foo_togethers in new_value:
                    for field_name in foo_togethers:
                        field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
                        if field.remote_field and field.remote_field.model:
                            dependencies.extend(self._get_dependencies_for_foreign_key(field))

                self.add_operation(
                    app_label,
                    operation(
                        name=model_name,
                        **{option_name: new_value}
                    ),
                    dependencies=dependencies,
                )
|
2014-06-25 20:53:09 +08:00
|
|
|
def generate_altered_unique_together(self):
    # Delegate to the shared "foo_together" detector with the
    # AlterUniqueTogether operation class.
    self._generate_altered_foo_together(operations.AlterUniqueTogether)
|
|
|
|
|
2014-06-06 14:03:33 +08:00
|
|
|
def generate_altered_index_together(self):
    # Delegate to the shared "foo_together" detector with the
    # AlterIndexTogether operation class.
    self._generate_altered_foo_together(operations.AlterIndexTogether)
|
2014-01-15 22:20:47 +08:00
|
|
|
|
2014-10-14 22:20:24 +08:00
|
|
|
def generate_altered_db_table(self):
    """Emit an AlterModelTable operation for each model whose db_table changed."""
    candidates = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys)
    for app_label, model_name in sorted(candidates):
        # Look the model up in the old state under its pre-rename name.
        from_name = self.renamed_models.get((app_label, model_name), model_name)
        old_table = self.from_state.models[app_label, from_name].options.get('db_table')
        new_table = self.to_state.models[app_label, model_name].options.get('db_table')
        if old_table == new_table:
            continue
        self.add_operation(
            app_label,
            operations.AlterModelTable(
                name=model_name,
                table=new_table,
            )
        )
|
|
|
|
|
2014-06-16 03:34:02 +08:00
|
|
|
def generate_altered_options(self):
    """
    Work out if any non-schema-affecting options have changed and make an
    operation to represent them in state changes (in case Python code in
    migrations needs them).
    """
    def alterable_options(options):
        # Keep only the options AlterModelOptions knows how to carry.
        return {
            key: value for key, value in options.items()
            if key in AlterModelOptions.ALTER_OPTION_KEYS
        }

    models_to_check = self.kept_model_keys.union(
        self.kept_proxy_keys,
        self.kept_unmanaged_keys,
        # unmanaged converted to managed
        self.old_unmanaged_keys & self.new_model_keys,
        # managed converted to unmanaged
        self.old_model_keys & self.new_unmanaged_keys,
    )
    for app_label, model_name in sorted(models_to_check):
        from_name = self.renamed_models.get((app_label, model_name), model_name)
        old_options = alterable_options(self.from_state.models[app_label, from_name].options)
        new_options = alterable_options(self.to_state.models[app_label, model_name].options)
        if old_options == new_options:
            continue
        self.add_operation(
            app_label,
            operations.AlterModelOptions(
                name=model_name,
                options=new_options,
            )
        )
|
|
|
|
|
2014-06-16 05:55:44 +08:00
|
|
|
def generate_altered_order_with_respect_to(self):
    """Emit AlterOrderWithRespectTo where the order_with_respect_to option changed."""
    for app_label, model_name in sorted(self.kept_model_keys):
        from_name = self.renamed_models.get((app_label, model_name), model_name)
        old_value = self.from_state.models[app_label, from_name].options.get("order_with_respect_to")
        new_value = self.to_state.models[app_label, model_name].options.get("order_with_respect_to")
        if old_value == new_value:
            continue
        # When adding the option, the operation must run after the field it
        # points at exists (the removal dependency is part of RemoveField).
        dependencies = []
        if new_value:
            dependencies.append((app_label, model_name, new_value, True))
        # Actually generate the operation.
        self.add_operation(
            app_label,
            operations.AlterOrderWithRespectTo(
                name=model_name,
                order_with_respect_to=new_value,
            ),
            dependencies=dependencies,
        )
|
|
|
|
|
2014-12-13 06:19:58 +08:00
|
|
|
def generate_altered_managers(self):
    """Emit an AlterModelManagers operation for each kept model whose managers changed."""
    for app_label, model_name in sorted(self.kept_model_keys):
        from_name = self.renamed_models.get((app_label, model_name), model_name)
        old_managers = self.from_state.models[app_label, from_name].managers
        new_managers = self.to_state.models[app_label, model_name].managers
        if old_managers != new_managers:
            self.add_operation(
                app_label,
                operations.AlterModelManagers(
                    name=model_name,
                    managers=new_managers,
                )
            )
|
|
|
|
|
2014-08-19 21:24:31 +08:00
|
|
|
def arrange_for_graph(self, changes, graph, migration_name=None):
|
2013-06-08 00:56:43 +08:00
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Take a result from changes() and a MigrationGraph, and fix the names
|
|
|
|
and dependencies of the changes so they extend the graph from the leaf
|
|
|
|
nodes for each app.
|
2013-06-08 00:56:43 +08:00
|
|
|
"""
|
|
|
|
leaves = graph.leaf_nodes()
|
|
|
|
name_map = {}
|
2013-06-19 22:36:22 +08:00
|
|
|
for app_label, migrations in list(changes.items()):
|
2013-06-08 00:56:43 +08:00
|
|
|
if not migrations:
|
|
|
|
continue
|
|
|
|
# Find the app label's current leaf node
|
|
|
|
app_leaf = None
|
|
|
|
for leaf in leaves:
|
|
|
|
if leaf[0] == app_label:
|
|
|
|
app_leaf = leaf
|
|
|
|
break
|
2013-06-19 22:36:22 +08:00
|
|
|
# Do they want an initial migration for this app?
|
|
|
|
if app_leaf is None and not self.questioner.ask_initial(app_label):
|
|
|
|
# They don't.
|
|
|
|
for migration in migrations:
|
|
|
|
name_map[(app_label, migration.name)] = (app_label, "__first__")
|
|
|
|
del changes[app_label]
|
2014-01-08 21:00:12 +08:00
|
|
|
continue
|
2013-06-08 00:56:43 +08:00
|
|
|
# Work out the next number in the sequence
|
|
|
|
if app_leaf is None:
|
|
|
|
next_number = 1
|
|
|
|
else:
|
2013-06-19 22:36:22 +08:00
|
|
|
next_number = (self.parse_number(app_leaf[1]) or 0) + 1
|
2013-06-08 00:56:43 +08:00
|
|
|
# Name each migration
|
|
|
|
for i, migration in enumerate(migrations):
|
|
|
|
if i == 0 and app_leaf:
|
|
|
|
migration.dependencies.append(app_leaf)
|
|
|
|
if i == 0 and not app_leaf:
|
2014-08-19 21:24:31 +08:00
|
|
|
new_name = "0001_%s" % migration_name if migration_name else "0001_initial"
|
2013-06-08 00:56:43 +08:00
|
|
|
else:
|
2014-03-07 05:34:31 +08:00
|
|
|
new_name = "%04i_%s" % (
|
|
|
|
next_number,
|
2014-08-19 21:24:31 +08:00
|
|
|
migration_name or self.suggest_name(migration.operations)[:100],
|
2014-03-07 05:34:31 +08:00
|
|
|
)
|
2013-06-08 00:56:43 +08:00
|
|
|
name_map[(app_label, migration.name)] = (app_label, new_name)
|
2014-03-07 05:22:42 +08:00
|
|
|
next_number += 1
|
2013-06-08 00:56:43 +08:00
|
|
|
migration.name = new_name
|
|
|
|
# Now fix dependencies
|
2017-12-07 06:17:59 +08:00
|
|
|
for migrations in changes.values():
|
2013-06-08 00:56:43 +08:00
|
|
|
for migration in migrations:
|
|
|
|
migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
|
|
|
|
return changes
|
2013-06-19 22:36:22 +08:00
|
|
|
|
2013-08-22 05:25:15 +08:00
|
|
|
def _trim_to_apps(self, changes, app_labels):
|
2013-06-19 22:36:22 +08:00
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Take changes from arrange_for_graph() and set of app labels, and return
|
|
|
|
a modified set of changes which trims out as many migrations that are
|
|
|
|
not in app_labels as possible. Note that some other migrations may
|
|
|
|
still be present as they may be required dependencies.
|
2013-06-19 22:36:22 +08:00
|
|
|
"""
|
|
|
|
# Gather other app dependencies in a first pass
|
|
|
|
app_dependencies = {}
|
|
|
|
for app_label, migrations in changes.items():
|
|
|
|
for migration in migrations:
|
|
|
|
for dep_app_label, name in migration.dependencies:
|
|
|
|
app_dependencies.setdefault(app_label, set()).add(dep_app_label)
|
|
|
|
required_apps = set(app_labels)
|
|
|
|
# Keep resolving till there's no change
|
|
|
|
old_required_apps = None
|
|
|
|
while old_required_apps != required_apps:
|
|
|
|
old_required_apps = set(required_apps)
|
2017-08-23 04:23:02 +08:00
|
|
|
required_apps.update(*[app_dependencies.get(app_label, ()) for app_label in required_apps])
|
2013-06-19 22:36:22 +08:00
|
|
|
# Remove all migrations that aren't needed
|
2017-05-28 07:08:46 +08:00
|
|
|
for app_label in list(changes):
|
2013-06-19 22:36:22 +08:00
|
|
|
if app_label not in required_apps:
|
|
|
|
del changes[app_label]
|
|
|
|
return changes
|
|
|
|
|
|
|
|
@classmethod
def suggest_name(cls, ops):
    """
    Given a set of operations, suggest a name for the migration they might
    represent. Names are not guaranteed to be unique, but put some effort
    into the fallback name to avoid VCS conflicts if possible.
    """
    if len(ops) == 1:
        op = ops[0]
        if isinstance(op, operations.CreateModel):
            return op.name_lower
        if isinstance(op, operations.DeleteModel):
            return "delete_%s" % op.name_lower
        if isinstance(op, operations.AddField):
            return "%s_%s" % (op.model_name_lower, op.name_lower)
        if isinstance(op, operations.RemoveField):
            return "remove_%s_%s" % (op.model_name_lower, op.name_lower)
    elif ops and all(isinstance(op, operations.CreateModel) for op in ops):
        # Several model creations: join their names deterministically.
        return "_".join(sorted(op.name_lower for op in ops))
    # Fall back to a timestamped name, which is unlikely to collide in VCS.
    return "auto_%s" % get_migration_name_timestamp()
|
2013-06-19 22:36:22 +08:00
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def parse_number(cls, name):
|
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Given a migration name, try to extract a number from the beginning of
|
|
|
|
it. If no number is found, return None.
|
2013-06-19 22:36:22 +08:00
|
|
|
"""
|
2015-08-11 12:42:11 +08:00
|
|
|
match = re.match(r'^\d+', name)
|
|
|
|
if match:
|
|
|
|
return int(match.group())
|
2013-06-19 22:36:22 +08:00
|
|
|
return None
|