2018-03-16 03:32:56 +08:00
|
|
|
import pkgutil
|
2013-10-24 05:56:54 +08:00
|
|
|
import sys
|
2017-01-07 19:11:46 +08:00
|
|
|
from importlib import import_module, reload
|
2013-12-12 06:31:34 +08:00
|
|
|
|
2013-12-24 19:25:17 +08:00
|
|
|
from django.apps import apps
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.conf import settings
|
2015-05-02 02:46:07 +08:00
|
|
|
from django.db.migrations.graph import MigrationGraph
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db.migrations.recorder import MigrationRecorder
|
2013-05-10 23:00:55 +08:00
|
|
|
|
2016-04-02 20:46:59 +08:00
|
|
|
from .exceptions import (
|
|
|
|
AmbiguityError, BadMigrationError, InconsistentMigrationHistory,
|
|
|
|
NodeNotFoundError,
|
|
|
|
)
|
2015-05-02 02:46:07 +08:00
|
|
|
|
2013-12-27 22:36:19 +08:00
|
|
|
# Conventional name of the per-app package that holds migration files;
# combined with the app package name as "<app>.migrations" below.
MIGRATIONS_MODULE_NAME = 'migrations'
|
|
|
|
|
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class MigrationLoader:
    """
    Load migration files from disk and their status from the database.

    Migration files are expected to live in the "migrations" directory of
    an app. Their names are entirely unimportant from a code perspective,
    but will probably follow the 1234_name.py convention.

    On initialization, this class will scan those directories, and open and
    read the Python files, looking for a class called Migration, which should
    inherit from django.db.migrations.Migration. See
    django.db.migrations.migration for what that looks like.

    Some migrations will be marked as "replacing" another set of migrations.
    These are loaded into a separate set of migrations away from the main ones.
    If all the migrations they replace are either unapplied or missing from
    disk, then they are injected into the main set, replacing the named migrations.
    Any dependency pointers to the replaced migrations are re-pointed to the
    new migration.

    This does mean that this class MUST also talk to the database as well as
    to disk, but this is probably fine. We're already not just operating
    in memory.
    """

    def __init__(
        self, connection, load=True, ignore_no_migrations=False,
        replace_migrations=True,
    ):
        """
        Store configuration and, unless load=False, immediately build the
        migration graph from disk and (if a connection is given) the database.
        """
        # Database connection used to read applied-migration state. May be
        # None, in which case build_graph() treats no migrations as applied.
        self.connection = connection
        # (app_label, migration_name) -> Migration; populated by load_disk().
        self.disk_migrations = None
        # Applied-migration mapping; populated by build_graph().
        self.applied_migrations = None
        # When True, apps with no discoverable migrations are tolerated
        # (see load_disk() and check_key()).
        self.ignore_no_migrations = ignore_no_migrations
        # When False, build_graph() skips the squashed-migration replacement
        # step entirely.
        self.replace_migrations = replace_migrations
        if load:
            self.build_graph()
|
2013-05-10 23:00:55 +08:00
|
|
|
|
2016-07-14 21:10:15 +08:00
|
|
|
@classmethod
|
|
|
|
def migrations_module(cls, app_label):
|
2016-08-27 05:04:12 +08:00
|
|
|
"""
|
|
|
|
Return the path to the migrations module for the specified app_label
|
|
|
|
and a boolean indicating if the module is specified in
|
|
|
|
settings.MIGRATION_MODULE.
|
|
|
|
"""
|
2014-09-06 02:06:02 +08:00
|
|
|
if app_label in settings.MIGRATION_MODULES:
|
2016-08-27 05:04:12 +08:00
|
|
|
return settings.MIGRATION_MODULES[app_label], True
|
2013-10-19 08:24:38 +08:00
|
|
|
else:
|
2013-12-27 22:36:19 +08:00
|
|
|
app_package_name = apps.get_app_config(app_label).name
|
2016-08-27 05:04:12 +08:00
|
|
|
return '%s.%s' % (app_package_name, MIGRATIONS_MODULE_NAME), False
|
2013-06-19 22:36:02 +08:00
|
|
|
|
2013-05-10 23:00:55 +08:00
|
|
|
    def load_disk(self):
        """Load the migrations from all INSTALLED_APPS from disk."""
        self.disk_migrations = {}
        self.unmigrated_apps = set()
        self.migrated_apps = set()
        for app_config in apps.get_app_configs():
            # Get the migrations module directory
            module_name, explicit = self.migrations_module(app_config.label)
            # A None entry in MIGRATION_MODULES disables migrations for the app.
            if module_name is None:
                self.unmigrated_apps.add(app_config.label)
                continue
            # Remember whether the module was imported before we touch it, so
            # we know to force a reload below (tests mutate migration modules).
            was_loaded = module_name in sys.modules
            try:
                module = import_module(module_name)
            except ModuleNotFoundError as e:
                # Missing module is tolerated if it was explicitly configured
                # and we're ignoring missing migrations, or if the missing
                # piece is the conventional "migrations" package itself
                # (rather than, say, the app package failing to import).
                if (
                    (explicit and self.ignore_no_migrations) or
                    (not explicit and MIGRATIONS_MODULE_NAME in e.name.split('.'))
                ):
                    self.unmigrated_apps.add(app_config.label)
                    continue
                raise
            else:
                # Module is not a package (e.g. migrations.py).
                if not hasattr(module, '__path__'):
                    self.unmigrated_apps.add(app_config.label)
                    continue
                # Force a reload if it's already loaded (tests need this)
                if was_loaded:
                    reload(module)
            # Collect migration module names, skipping subpackages and names
            # starting with '_' (e.g. __init__) or '~' (presumably editor
            # backup files — convention, not enforced elsewhere here).
            migration_names = {
                name for _, name, is_pkg in pkgutil.iter_modules(module.__path__)
                if not is_pkg and name[0] not in '_~'
            }
            if migration_names or self.ignore_no_migrations:
                self.migrated_apps.add(app_config.label)
            else:
                self.unmigrated_apps.add(app_config.label)
            # Load migrations
            for migration_name in migration_names:
                migration_path = '%s.%s' % (module_name, migration_name)
                try:
                    migration_module = import_module(migration_path)
                except ImportError as e:
                    # A stale .pyc compiled by a different Python raises
                    # ImportError with "bad magic number"; surface a clearer
                    # message in that case.
                    if 'bad magic number' in str(e):
                        raise ImportError(
                            "Couldn't import %r as it appears to be a stale "
                            ".pyc file." % migration_path
                        ) from e
                    else:
                        raise
                if not hasattr(migration_module, "Migration"):
                    raise BadMigrationError(
                        "Migration %s in app %s has no Migration class" % (migration_name, app_config.label)
                    )
                # Instantiate and key by (app_label, migration_name).
                self.disk_migrations[app_config.label, migration_name] = migration_module.Migration(
                    migration_name,
                    app_config.label,
                )
|
2013-05-10 23:00:55 +08:00
|
|
|
|
2013-10-16 19:00:07 +08:00
|
|
|
def get_migration(self, app_label, name_prefix):
|
2017-01-25 07:04:12 +08:00
|
|
|
"""Return the named migration or raise NodeNotFoundError."""
|
2013-10-16 19:00:07 +08:00
|
|
|
return self.graph.nodes[app_label, name_prefix]
|
|
|
|
|
2013-07-23 02:43:58 +08:00
|
|
|
def get_migration_by_prefix(self, app_label, name_prefix):
|
2017-01-25 07:04:12 +08:00
|
|
|
"""
|
|
|
|
Return the migration(s) which match the given app label and name_prefix.
|
|
|
|
"""
|
2013-07-23 02:43:58 +08:00
|
|
|
# Do the search
|
|
|
|
results = []
|
2016-11-15 06:40:28 +08:00
|
|
|
for migration_app_label, migration_name in self.disk_migrations:
|
|
|
|
if migration_app_label == app_label and migration_name.startswith(name_prefix):
|
|
|
|
results.append((migration_app_label, migration_name))
|
2013-07-23 02:43:58 +08:00
|
|
|
if len(results) > 1:
|
2014-10-25 12:42:44 +08:00
|
|
|
raise AmbiguityError(
|
|
|
|
"There is more than one migration for '%s' with the prefix '%s'" % (app_label, name_prefix)
|
|
|
|
)
|
2017-11-30 00:54:34 +08:00
|
|
|
elif not results:
|
2013-07-23 02:43:58 +08:00
|
|
|
raise KeyError("There no migrations for '%s' with the prefix '%s'" % (app_label, name_prefix))
|
|
|
|
else:
|
|
|
|
return self.disk_migrations[results[0]]
|
|
|
|
|
2014-05-29 18:30:47 +08:00
|
|
|
    def check_key(self, key, current_app):
        """
        Resolve a dependency key, special-casing the "__first__" and
        "__latest__" pseudo-names.

        Return the key unchanged if it needs no resolution, a concrete
        (app_label, name) node for __first__/__latest__ references to
        migrated apps, or None when the dependency should be ignored.
        Raise ValueError for references to unknown apps (or apps with no
        migrations, unless ignore_no_migrations is set).
        """
        # Real keys, and pseudo-keys that happen to exist as graph nodes,
        # pass through untouched.
        if (key[1] != "__first__" and key[1] != "__latest__") or key in self.graph:
            return key
        # Special-case __first__, which means "the first migration" for
        # migrated apps, and is ignored for unmigrated apps. It allows
        # makemigrations to declare dependencies on apps before they even have
        # migrations.
        if key[0] == current_app:
            # Ignore __first__ references to the same app (#22325)
            return
        if key[0] in self.unmigrated_apps:
            # This app isn't migrated, but something depends on it.
            # The models will get auto-added into the state, though
            # so we're fine.
            return
        if key[0] in self.migrated_apps:
            try:
                if key[1] == "__first__":
                    # First migration of the app = its root node.
                    return self.graph.root_nodes(key[0])[0]
                else:  # "__latest__"
                    # Latest migration of the app = its leaf node.
                    return self.graph.leaf_nodes(key[0])[0]
            except IndexError:
                # The app is "migrated" but has no root/leaf nodes yet.
                if self.ignore_no_migrations:
                    return None
                else:
                    raise ValueError("Dependency on app with no migrations: %s" % key[0])
        raise ValueError("Dependency on unknown app: %s" % key[0])
|
2014-05-29 18:30:47 +08:00
|
|
|
|
2016-05-08 08:56:13 +08:00
|
|
|
def add_internal_dependencies(self, key, migration):
|
|
|
|
"""
|
|
|
|
Internal dependencies need to be added first to ensure `__first__`
|
|
|
|
dependencies find the correct root node.
|
|
|
|
"""
|
|
|
|
for parent in migration.dependencies:
|
2018-01-12 22:05:16 +08:00
|
|
|
# Ignore __first__ references to the same app.
|
|
|
|
if parent[0] == key[0] and parent[1] != '__first__':
|
|
|
|
self.graph.add_dependency(migration, key, parent, skip_validation=True)
|
2016-05-08 08:56:13 +08:00
|
|
|
|
|
|
|
def add_external_dependencies(self, key, migration):
|
|
|
|
for parent in migration.dependencies:
|
|
|
|
# Skip internal dependencies
|
|
|
|
if key[0] == parent[0]:
|
|
|
|
continue
|
|
|
|
parent = self.check_key(parent, key[0])
|
|
|
|
if parent is not None:
|
|
|
|
self.graph.add_dependency(migration, key, parent, skip_validation=True)
|
|
|
|
for child in migration.run_before:
|
|
|
|
child = self.check_key(child, key[0])
|
|
|
|
if child is not None:
|
|
|
|
self.graph.add_dependency(migration, child, key, skip_validation=True)
|
|
|
|
|
2014-05-01 03:25:12 +08:00
|
|
|
    def build_graph(self):
        """
        Build a migration dependency graph using both the disk and database.

        You'll need to rebuild the graph if you apply migrations. This isn't
        usually a problem as generally migration stuff runs in a one-shot process.
        """
        # Load disk data
        self.load_disk()
        # Load database data
        if self.connection is None:
            # No database available: treat nothing as applied.
            self.applied_migrations = {}
        else:
            recorder = MigrationRecorder(self.connection)
            self.applied_migrations = recorder.applied_migrations()
        # To start, populate the migration graph with nodes for ALL migrations
        # and their dependencies. Also make note of replacing migrations at this step.
        self.graph = MigrationGraph()
        self.replacements = {}
        for key, migration in self.disk_migrations.items():
            self.graph.add_node(key, migration)
            # Replacing migrations.
            if migration.replaces:
                self.replacements[key] = migration
        # Internal dependencies must all be in place before external ones so
        # that __first__/__latest__ resolve to the right root/leaf nodes
        # (see add_internal_dependencies' docstring).
        for key, migration in self.disk_migrations.items():
            # Internal (same app) dependencies.
            self.add_internal_dependencies(key, migration)
        # Add external dependencies now that the internal ones have been resolved.
        for key, migration in self.disk_migrations.items():
            self.add_external_dependencies(key, migration)
        # Carry out replacements where possible and if enabled.
        if self.replace_migrations:
            for key, migration in self.replacements.items():
                # Get applied status of each of this migration's replacement
                # targets.
                applied_statuses = [(target in self.applied_migrations) for target in migration.replaces]
                # The replacing migration is only marked as applied if all of
                # its replacement targets are.
                if all(applied_statuses):
                    self.applied_migrations[key] = migration
                else:
                    self.applied_migrations.pop(key, None)
                # A replacing migration can be used if either all or none of
                # its replacement targets have been applied.
                if all(applied_statuses) or (not any(applied_statuses)):
                    self.graph.remove_replaced_nodes(key, migration.replaces)
                else:
                    # This replacing migration cannot be used because it is
                    # partially applied. Remove it from the graph and remap
                    # dependencies to it (#25945).
                    self.graph.remove_replacement_node(key, migration.replaces)
        # Ensure the graph is consistent.
        try:
            self.graph.validate_consistency()
        except NodeNotFoundError as exc:
            # Check if the missing node could have been replaced by any squash
            # migration but wasn't because the squash migration was partially
            # applied before. In that case raise a more understandable exception
            # (#23556).
            # Get reverse replacements.
            reverse_replacements = {}
            for key, migration in self.replacements.items():
                for replaced in migration.replaces:
                    reverse_replacements.setdefault(replaced, set()).add(key)
            # Try to reraise exception with more detail.
            if exc.node in reverse_replacements:
                candidates = reverse_replacements.get(exc.node, set())
                is_replaced = any(candidate in self.graph.nodes for candidate in candidates)
                if not is_replaced:
                    tries = ', '.join('%s.%s' % c for c in candidates)
                    raise NodeNotFoundError(
                        "Migration {0} depends on nonexistent node ('{1}', '{2}'). "
                        "Django tried to replace migration {1}.{2} with any of [{3}] "
                        "but wasn't able to because some of the replaced migrations "
                        "are already applied.".format(
                            exc.origin, exc.node[0], exc.node[1], tries
                        ),
                        exc.node
                    ) from exc
            # Not explainable as a partially-applied squash: re-raise as-is.
            raise
        self.graph.ensure_not_cyclic()
|
2014-09-26 06:24:17 +08:00
|
|
|
|
2016-04-02 20:46:59 +08:00
|
|
|
def check_consistent_history(self, connection):
|
|
|
|
"""
|
|
|
|
Raise InconsistentMigrationHistory if any applied migrations have
|
|
|
|
unapplied dependencies.
|
|
|
|
"""
|
|
|
|
recorder = MigrationRecorder(connection)
|
2016-09-02 04:19:29 +08:00
|
|
|
applied = recorder.applied_migrations()
|
2016-04-02 20:46:59 +08:00
|
|
|
for migration in applied:
|
|
|
|
# If the migration is unknown, skip it.
|
|
|
|
if migration not in self.graph.nodes:
|
|
|
|
continue
|
|
|
|
for parent in self.graph.node_map[migration].parents:
|
|
|
|
if parent not in applied:
|
2016-08-05 07:37:42 +08:00
|
|
|
# Skip unapplied squashed migrations that have all of their
|
|
|
|
# `replaces` applied.
|
|
|
|
if parent in self.replacements:
|
|
|
|
if all(m in applied for m in self.replacements[parent].replaces):
|
|
|
|
continue
|
2016-04-02 20:46:59 +08:00
|
|
|
raise InconsistentMigrationHistory(
|
2016-08-19 06:17:01 +08:00
|
|
|
"Migration {}.{} is applied before its dependency "
|
|
|
|
"{}.{} on database '{}'.".format(
|
2016-04-02 20:46:59 +08:00
|
|
|
migration[0], migration[1], parent[0], parent[1],
|
2016-08-19 06:17:01 +08:00
|
|
|
connection.alias,
|
2016-04-02 20:46:59 +08:00
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2013-12-05 00:01:31 +08:00
|
|
|
def detect_conflicts(self):
|
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Look through the loaded graph and detect any conflicts - apps
|
|
|
|
with more than one leaf migration. Return a dict of the app labels
|
2013-12-05 00:01:31 +08:00
|
|
|
that conflict with the migration names that conflict.
|
|
|
|
"""
|
|
|
|
seen_apps = {}
|
|
|
|
conflicting_apps = set()
|
|
|
|
for app_label, migration_name in self.graph.leaf_nodes():
|
|
|
|
if app_label in seen_apps:
|
|
|
|
conflicting_apps.add(app_label)
|
|
|
|
seen_apps.setdefault(app_label, set()).add(migration_name)
|
2014-12-07 05:00:09 +08:00
|
|
|
return {app_label: seen_apps[app_label] for app_label in conflicting_apps}
|
2013-12-05 00:01:31 +08:00
|
|
|
|
2014-05-01 03:25:12 +08:00
|
|
|
def project_state(self, nodes=None, at_end=True):
|
|
|
|
"""
|
2017-01-25 07:04:12 +08:00
|
|
|
Return a ProjectState object representing the most recent state
|
|
|
|
that the loaded migrations represent.
|
2014-05-01 03:25:12 +08:00
|
|
|
|
2017-01-25 07:04:12 +08:00
|
|
|
See graph.make_state() for the meaning of "nodes" and "at_end".
|
2014-05-01 03:25:12 +08:00
|
|
|
"""
|
|
|
|
return self.graph.make_state(nodes=nodes, at_end=at_end, real_apps=list(self.unmigrated_apps))
|
2020-03-09 05:48:00 +08:00
|
|
|
|
|
|
|
    def collect_sql(self, plan):
        """
        Take a migration plan and return a list of collected SQL statements
        that represent the best-efforts version of that plan.
        """
        statements = []
        state = None
        for migration, backwards in plan:
            # collect_sql=True makes the schema editor accumulate SQL in
            # collected_sql rather than executing it.
            with self.connection.schema_editor(collect_sql=True, atomic=migration.atomic) as schema_editor:
                if state is None:
                    # Lazily compute the starting project state from the first
                    # migration in the plan (state just before it applies).
                    state = self.project_state((migration.app_label, migration.name), at_end=False)
                # Thread the mutated state through each step of the plan.
                if not backwards:
                    state = migration.apply(state, schema_editor, collect_sql=True)
                else:
                    state = migration.unapply(state, schema_editor, collect_sql=True)
            statements.extend(schema_editor.collected_sql)
        return statements
|