diff --git a/django/conf/global_settings.py b/django/conf/global_settings.py index ab3cdab59e..6dd25e18f9 100644 --- a/django/conf/global_settings.py +++ b/django/conf/global_settings.py @@ -609,3 +609,10 @@ STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) + +############## +# MIGRATIONS # +############## + +# Migration module overrides for apps, by app label. +MIGRATION_MODULES = {} diff --git a/django/contrib/auth/management/__init__.py b/django/contrib/auth/management/__init__.py index 8335f35fac..74d0909587 100644 --- a/django/contrib/auth/management/__init__.py +++ b/django/contrib/auth/management/__init__.py @@ -65,7 +65,7 @@ def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kw except UnavailableApp: return - if not router.allow_syncdb(db, auth_app.Permission): + if not router.allow_migrate(db, auth_app.Permission): return from django.contrib.contenttypes.models import ContentType @@ -188,7 +188,7 @@ def get_default_username(check_db=True): return '' return default_username -signals.post_syncdb.connect(create_permissions, +signals.post_migrate.connect(create_permissions, dispatch_uid="django.contrib.auth.management.create_permissions") -signals.post_syncdb.connect(create_superuser, +signals.post_migrate.connect(create_superuser, sender=auth_app, dispatch_uid="django.contrib.auth.management.create_superuser") diff --git a/django/contrib/contenttypes/management.py b/django/contrib/contenttypes/management.py index 64d1c418ef..4278bbd1e7 100644 --- a/django/contrib/contenttypes/management.py +++ b/django/contrib/contenttypes/management.py @@ -16,7 +16,7 @@ def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, * except UnavailableApp: return - if not router.allow_syncdb(db, ContentType): + if not router.allow_migrate(db, ContentType): return ContentType.objects.clear_cache() @@ -88,7 +88,7 @@ def update_all_contenttypes(verbosity=2, **kwargs): for app in get_apps(): update_contenttypes(app, None, verbosity, **kwargs) -signals.post_syncdb.connect(update_contenttypes) +signals.post_migrate.connect(update_contenttypes) if __name__ == "__main__": update_all_contenttypes() diff --git a/django/contrib/gis/db/backends/spatialite/creation.py b/django/contrib/gis/db/backends/spatialite/creation.py index d13a6ae9db..22457dd4de 100644 --- a/django/contrib/gis/db/backends/spatialite/creation.py +++ b/django/contrib/gis/db/backends/spatialite/creation.py @@ -47,7 +47,7 @@ class SpatiaLiteCreation(DatabaseCreation): # We need to then do a flush to ensure that any data installed by # custom SQL has been removed. The only test data should come from - # test fixtures, or autogenerated from post_syncdb triggers. + # test fixtures, or autogenerated from post_migrate triggers. # This has the side effect of loading initial data (which was # intentionally skipped in the syncdb). 
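        # (The flush command re-emits post_migrate unless told otherwise, so
        # receivers such as contenttypes' update_contenttypes run again on
        # the freshly synced schema - see flush.py later in this patch.)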
call_command('flush', diff --git a/django/contrib/gis/tests/layermap/tests.py b/django/contrib/gis/tests/layermap/tests.py index 86b15b24d4..632cb98aeb 100644 --- a/django/contrib/gis/tests/layermap/tests.py +++ b/django/contrib/gis/tests/layermap/tests.py @@ -311,7 +311,7 @@ class OtherRouter(object): def allow_relation(self, obj1, obj2, **hints): return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): return True diff --git a/django/contrib/sites/management.py b/django/contrib/sites/management.py index 7a29e82d4c..d9e3a2126c 100644 --- a/django/contrib/sites/management.py +++ b/django/contrib/sites/management.py @@ -11,7 +11,7 @@ from django.core.management.color import no_style def create_default_site(app, created_models, verbosity, db, **kwargs): # Only create the default sites in databases where Django created the table - if Site in created_models and router.allow_syncdb(db, Site) : + if Site in created_models and router.allow_migrate(db, Site) : # The default settings set SITE_ID = 1, and some tests in Django's test # suite rely on this value. However, if database sequences are reused # (e.g. in the test suite after flush/syncdb), it isn't guaranteed that @@ -33,4 +33,4 @@ def create_default_site(app, created_models, verbosity, db, **kwargs): Site.objects.clear_cache() -signals.post_syncdb.connect(create_default_site, sender=site_app) +signals.post_migrate.connect(create_default_site, sender=site_app) diff --git a/django/core/management/commands/createcachetable.py b/django/core/management/commands/createcachetable.py index d7ce3e93fd..27668f272d 100644 --- a/django/core/management/commands/createcachetable.py +++ b/django/core/management/commands/createcachetable.py @@ -24,7 +24,7 @@ class Command(LabelCommand): def handle_label(self, tablename, **options): db = options.get('database') cache = BaseDatabaseCache(tablename, {}) - if not router.allow_syncdb(db, cache.cache_model_class): + if not router.allow_migrate(db, cache.cache_model_class): return connection = connections[db] fields = ( diff --git a/django/core/management/commands/dumpdata.py b/django/core/management/commands/dumpdata.py index fd9418a728..c74eede846 100644 --- a/django/core/management/commands/dumpdata.py +++ b/django/core/management/commands/dumpdata.py @@ -118,7 +118,7 @@ class Command(BaseCommand): for model in sort_dependencies(app_list.items()): if model in excluded_models: continue - if not model._meta.proxy and router.allow_syncdb(using, model): + if not model._meta.proxy and router.allow_migrate(using, model): if use_base_manager: objects = model._base_manager else: diff --git a/django/core/management/commands/flush.py b/django/core/management/commands/flush.py index e5a7253e73..ea0952cb53 100644 --- a/django/core/management/commands/flush.py +++ b/django/core/management/commands/flush.py @@ -7,7 +7,7 @@ from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS from django.core.management import call_command from django.core.management.base import NoArgsCommand, CommandError from django.core.management.color import no_style -from django.core.management.sql import sql_flush, emit_post_sync_signal +from django.core.management.sql import sql_flush, emit_post_migrate_signal from django.utils.six.moves import input from django.utils import six @@ -23,8 +23,8 @@ class Command(NoArgsCommand): help='Tells Django not to load any initial data after database synchronization.'), ) help = ('Returns the database to the state it was in immediately after ' - 'syncdb 
was executed. This means that all data will be removed ' - 'from the database, any post-synchronization handlers will be ' + 'migrate was first executed. This means that all data will be removed ' + 'from the database, any post-migration handlers will be ' 're-executed, and the initial_data fixture will be re-installed.') def handle_noargs(self, **options): @@ -35,7 +35,7 @@ class Command(NoArgsCommand): # The following are stealth options used by Django's internals. reset_sequences = options.get('reset_sequences', True) allow_cascade = options.get('allow_cascade', False) - inhibit_post_syncdb = options.get('inhibit_post_syncdb', False) + inhibit_post_migrate = options.get('inhibit_post_migrate', False) self.style = no_style() @@ -54,7 +54,7 @@ class Command(NoArgsCommand): if interactive: confirm = input("""You have requested a flush of the database. This will IRREVERSIBLY DESTROY all data currently in the %r database, -and return each table to the state it was in after syncdb. +and return each table to a fresh state. Are you sure you want to do this? Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME']) @@ -77,8 +77,8 @@ Are you sure you want to do this? "The full error: %s") % (connection.settings_dict['NAME'], e) six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2]) - if not inhibit_post_syncdb: - self.emit_post_syncdb(verbosity, interactive, db) + if not inhibit_post_migrate: + self.emit_post_migrate(verbosity, interactive, db) # Reinstall the initial_data fixture. if options.get('load_initial_data'): @@ -89,13 +89,13 @@ Are you sure you want to do this? self.stdout.write("Flush cancelled.\n") @staticmethod - def emit_post_syncdb(verbosity, interactive, database): - # Emit the post sync signal. This allows individual applications to - # respond as if the database had been sync'd from scratch. + def emit_post_migrate(verbosity, interactive, database): + # Emit the post migrate signal. This allows individual applications to + # respond as if the database had been migrated from scratch. 
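+        # (With this patch, that includes receivers like
+        # auth.management.create_permissions and sites' create_default_site,
+        # which are connected to post_migrate earlier in this patch.)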
all_models = [] for app in models.get_apps(): all_models.extend([ m for m in models.get_models(app, include_auto_created=True) - if router.allow_syncdb(database, m) + if router.allow_migrate(database, m) ]) - emit_post_sync_signal(set(all_models), verbosity, interactive, database) + emit_post_migrate_signal(set(all_models), verbosity, interactive, database) diff --git a/django/core/management/commands/loaddata.py b/django/core/management/commands/loaddata.py index 1997f2956b..a6e22d9173 100644 --- a/django/core/management/commands/loaddata.py +++ b/django/core/management/commands/loaddata.py @@ -134,7 +134,7 @@ class Command(BaseCommand): for obj in objects: objects_in_fixture += 1 - if router.allow_syncdb(self.using, obj.object.__class__): + if router.allow_migrate(self.using, obj.object.__class__): loaded_objects_in_fixture += 1 self.models.add(obj.object.__class__) try: diff --git a/django/core/management/commands/makemigrations.py b/django/core/management/commands/makemigrations.py new file mode 100644 index 0000000000..d802e2924a --- /dev/null +++ b/django/core/management/commands/makemigrations.py @@ -0,0 +1,84 @@ +import sys +import os +from optparse import make_option + +from django.core.management.base import BaseCommand +from django.core.exceptions import ImproperlyConfigured +from django.db import connections, DEFAULT_DB_ALIAS +from django.db.migrations.loader import MigrationLoader +from django.db.migrations.autodetector import MigrationAutodetector, InteractiveMigrationQuestioner +from django.db.migrations.state import ProjectState +from django.db.migrations.writer import MigrationWriter +from django.db.models.loading import cache + + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--empty', action='store_true', dest='empty', default=False, + help='Make a blank migration.'), + ) + + help = "Creates new migration(s) for apps." + usage_str = "Usage: ./manage.py makemigrations [--empty] [app [app ...]]" + + def handle(self, *app_labels, **options): + + self.verbosity = int(options.get('verbosity')) + self.interactive = options.get('interactive') + + # Make sure the app they asked for exists + app_labels = set(app_labels) + bad_app_labels = set() + for app_label in app_labels: + try: + cache.get_app(app_label) + except ImproperlyConfigured: + bad_app_labels.add(app_label) + if bad_app_labels: + for app_label in bad_app_labels: + self.stderr.write("App '%s' could not be found. Is it in INSTALLED_APPS?" % app_label) + sys.exit(2) + + # Load the current graph state. Takes a connection, but it's not used + # (makemigrations doesn't look at the database state). + loader = MigrationLoader(connections[DEFAULT_DB_ALIAS]) + + # Detect changes + autodetector = MigrationAutodetector( + loader.graph.project_state(), + ProjectState.from_app_cache(cache), + InteractiveMigrationQuestioner(specified_apps=app_labels), + ) + changes = autodetector.changes(graph=loader.graph, trim_to_apps=app_labels or None) + + # No changes? Tell them. 
+ if not changes: + if len(app_labels) == 1: + self.stdout.write("No changes detected in app '%s'" % app_labels.pop()) + elif len(app_labels) > 1: + self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels))) + else: + self.stdout.write("No changes detected") + return + + directory_created = {} + for app_label, migrations in changes.items(): + self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label) + "\n") + for migration in migrations: + # Describe the migration + writer = MigrationWriter(migration) + self.stdout.write(" %s:\n" % (self.style.MIGRATE_LABEL(writer.filename),)) + for operation in migration.operations: + self.stdout.write(" - %s\n" % operation.describe()) + # Write it + migrations_directory = os.path.dirname(writer.path) + if not directory_created.get(app_label, False): + if not os.path.isdir(migrations_directory): + os.mkdir(migrations_directory) + init_path = os.path.join(migrations_directory, "__init__.py") + if not os.path.isfile(init_path): + open(init_path, "w").close() + # We just do this once per app + directory_created[app_label] = True + with open(writer.path, "w") as fh: + fh.write(writer.as_string()) diff --git a/django/core/management/commands/migrate.py b/django/core/management/commands/migrate.py new file mode 100644 index 0000000000..dbec389bed --- /dev/null +++ b/django/core/management/commands/migrate.py @@ -0,0 +1,245 @@ +from optparse import make_option +from collections import OrderedDict +from importlib import import_module +import itertools +import traceback + +from django.conf import settings +from django.core.management import call_command +from django.core.management.base import BaseCommand, CommandError +from django.core.management.color import no_style +from django.core.management.sql import custom_sql_for_model, emit_post_migrate_signal, emit_pre_migrate_signal +from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS +from django.db.migrations.executor import MigrationExecutor +from django.db.migrations.loader import AmbiguityError +from django.utils.module_loading import module_has_submodule + + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--noinput', action='store_false', dest='interactive', default=True, + help='Tells Django to NOT prompt the user for input of any kind.'), + make_option('--no-initial-data', action='store_false', dest='load_initial_data', default=True, + help='Tells Django not to load any initial data after database synchronization.'), + make_option('--database', action='store', dest='database', + default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. ' + 'Defaults to the "default" database.'), + make_option('--fake', action='store_true', dest='fake', default=False, + help='Mark migrations as run without actually running them'), + ) + + help = "Updates database schema. Manages both apps with migrations and those without." + + def handle(self, *args, **options): + + self.verbosity = int(options.get('verbosity')) + self.interactive = options.get('interactive') + self.show_traceback = options.get('traceback') + self.load_initial_data = options.get('load_initial_data') + self.test_database = options.get('test_database', False) + + # Import the 'management' module within each installed app, to register + # dispatcher events. 
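+        # (For example, importing django.contrib.auth.management connects its
+        # post_migrate receivers, so they fire when the signal is emitted at
+        # the end of this command.)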
+ for app_name in settings.INSTALLED_APPS: + if module_has_submodule(import_module(app_name), "management"): + import_module('.management', app_name) + + # Get the database we're operating from + db = options.get('database') + connection = connections[db] + + # Work out which apps have migrations and which do not + executor = MigrationExecutor(connection, self.migration_progress_callback) + + # If they supplied command line arguments, work out what they mean. + run_syncdb = False + target_app_labels_only = True + if len(args) > 2: + raise CommandError("Too many command-line arguments (expecting 'appname' or 'appname migrationname')") + elif len(args) == 2: + app_label, migration_name = args + if app_label not in executor.loader.migrated_apps: + raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label) + if migration_name == "zero": + targets = [(app_label, None)] + else: + try: + migration = executor.loader.get_migration_by_prefix(app_label, migration_name) + except AmbiguityError: + raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (app_label, migration_name)) + except KeyError: + raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % (app_label, migration_name)) + targets = [(app_label, migration.name)] + target_app_labels_only = False + elif len(args) == 1: + app_label = args[0] + if app_label not in executor.loader.migrated_apps: + raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label) + targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label] + else: + targets = executor.loader.graph.leaf_nodes() + run_syncdb = True + + plan = executor.migration_plan(targets) + + # Print some useful info + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:")) + if run_syncdb: + self.stdout.write(self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") + (", ".join(executor.loader.unmigrated_apps) or "(none)")) + if target_app_labels_only: + self.stdout.write(self.style.MIGRATE_LABEL(" Apply all migrations: ") + (", ".join(set(a for a, n in targets)) or "(none)")) + else: + if targets[0][1] is None: + self.stdout.write(self.style.MIGRATE_LABEL(" Unapply all migrations: ") + "%s" % (targets[0][0], )) + else: + self.stdout.write(self.style.MIGRATE_LABEL(" Target specific migration: ") + "%s, from %s" % (targets[0][1], targets[0][0])) + + # Run the syncdb phase. + # If you ever manage to get rid of this, I owe you many, many drinks. + # Note that pre_migrate is called from inside here, as it needs + # the list of models about to be installed. + if run_syncdb: + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) + created_models = self.sync_apps(connection, executor.loader.unmigrated_apps) + else: + created_models = [] + + # Migrate! + if self.verbosity >= 1: + self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:")) + if not plan: + if self.verbosity >= 1: + self.stdout.write(" No migrations needed.") + else: + executor.migrate(targets, plan, fake=options.get("fake", False)) + + # Send the post_migrate signal, so individual apps can do whatever they need + # to do at this point. 
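+        # (e.g. contrib.auth creates Permission rows and contrib.contenttypes
+        # creates ContentType rows for any newly installed models.)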
+ emit_post_migrate_signal(created_models, self.verbosity, self.interactive, connection.alias) + + def migration_progress_callback(self, action, migration): + if self.verbosity >= 1: + if action == "apply_start": + self.stdout.write(" Applying %s..." % migration, ending="") + self.stdout.flush() + elif action == "apply_success": + self.stdout.write(self.style.MIGRATE_SUCCESS(" OK")) + elif action == "unapply_start": + self.stdout.write(" Unapplying %s..." % migration, ending="") + self.stdout.flush() + elif action == "unapply_success": + self.stdout.write(self.style.MIGRATE_SUCCESS(" OK")) + + def sync_apps(self, connection, apps): + "Runs the old syncdb-style operation on a list of apps." + cursor = connection.cursor() + + # Get a list of already installed *models* so that references work right. + tables = connection.introspection.table_names() + seen_models = connection.introspection.installed_models(tables) + created_models = set() + pending_references = {} + + # Build the manifest of apps and models that are to be synchronized + all_models = [ + (app.__name__.split('.')[-2], + [ + m for m in models.get_models(app, include_auto_created=True) + if router.allow_migrate(connection.alias, m) + ]) + for app in models.get_apps() if app.__name__.split('.')[-2] in apps + ] + + def model_installed(model): + opts = model._meta + converter = connection.introspection.table_name_converter + # Note that if a model is unmanaged we short-circuit and never try to install it + return not ((converter(opts.db_table) in tables) or + (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) + + manifest = OrderedDict( + (app_name, list(filter(model_installed, model_list))) + for app_name, model_list in all_models + ) + + create_models = set([x for x in itertools.chain(*manifest.values())]) + emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias) + + # Create the tables for each model + if self.verbosity >= 1: + self.stdout.write(" Creating tables...\n") + with transaction.atomic(using=connection.alias, savepoint=False): + for app_name, model_list in manifest.items(): + for model in model_list: + # Create the model's database table, if it doesn't already exist. + if self.verbosity >= 3: + self.stdout.write(" Processing %s.%s model\n" % (app_name, model._meta.object_name)) + sql, references = connection.creation.sql_create_model(model, no_style(), seen_models) + seen_models.add(model) + created_models.add(model) + for refto, refs in references.items(): + pending_references.setdefault(refto, []).extend(refs) + if refto in seen_models: + sql.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references)) + sql.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references)) + if self.verbosity >= 1 and sql: + self.stdout.write(" Creating table %s\n" % model._meta.db_table) + for statement in sql: + cursor.execute(statement) + tables.append(connection.introspection.table_name_converter(model._meta.db_table)) + + # We force a commit here, as that was the previous behaviour. + # If you can prove we don't need this, remove it. + transaction.set_dirty(using=connection.alias) + + # The connection may have been closed by a syncdb handler. 
+ cursor = connection.cursor() + + # Install custom SQL for the app (but only if this + # is a model we've just created) + if self.verbosity >= 1: + self.stdout.write(" Installing custom SQL...\n") + for app_name, model_list in manifest.items(): + for model in model_list: + if model in created_models: + custom_sql = custom_sql_for_model(model, no_style(), connection) + if custom_sql: + if self.verbosity >= 2: + self.stdout.write(" Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) + try: + with transaction.commit_on_success_unless_managed(using=connection.alias): + for sql in custom_sql: + cursor.execute(sql) + except Exception as e: + self.stderr.write(" Failed to install custom SQL for %s.%s model: %s\n" % (app_name, model._meta.object_name, e)) + if self.show_traceback: + traceback.print_exc() + else: + if self.verbosity >= 3: + self.stdout.write(" No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) + + if self.verbosity >= 1: + self.stdout.write(" Installing indexes...\n") + + # Install SQL indices for all newly created models + for app_name, model_list in manifest.items(): + for model in model_list: + if model in created_models: + index_sql = connection.creation.sql_indexes_for_model(model, no_style()) + if index_sql: + if self.verbosity >= 2: + self.stdout.write(" Installing index for %s.%s model\n" % (app_name, model._meta.object_name)) + try: + with transaction.commit_on_success_unless_managed(using=connection.alias): + for sql in index_sql: + cursor.execute(sql) + except Exception as e: + self.stderr.write(" Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e)) + + # Load initial_data fixtures (unless that has been disabled) + if self.load_initial_data: + call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=connection.alias, skip_validation=True) + + return created_models diff --git a/django/core/management/commands/syncdb.py b/django/core/management/commands/syncdb.py index d51699e95a..17ea51f4d5 100644 --- a/django/core/management/commands/syncdb.py +++ b/django/core/management/commands/syncdb.py @@ -1,15 +1,8 @@ -from collections import OrderedDict -from importlib import import_module +import warnings from optparse import make_option -import itertools -import traceback - -from django.conf import settings +from django.db import DEFAULT_DB_ALIAS from django.core.management import call_command from django.core.management.base import NoArgsCommand -from django.core.management.color import no_style -from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal, emit_pre_sync_signal -from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS class Command(NoArgsCommand): @@ -22,141 +15,8 @@ class Command(NoArgsCommand): default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. ' 'Defaults to the "default" database.'), ) - help = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created." + help = "Deprecated - use 'migrate' instead." def handle_noargs(self, **options): - - verbosity = int(options.get('verbosity')) - interactive = options.get('interactive') - show_traceback = options.get('traceback') - load_initial_data = options.get('load_initial_data') - - self.style = no_style() - - # Import the 'management' module within each installed app, to register - # dispatcher events. 
- for app_name in settings.INSTALLED_APPS: - try: - import_module('.management', app_name) - except ImportError as exc: - # This is slightly hackish. We want to ignore ImportErrors - # if the "management" module itself is missing -- but we don't - # want to ignore the exception if the management module exists - # but raises an ImportError for some reason. The only way we - # can do this is to check the text of the exception. Note that - # we're a bit broad in how we check the text, because different - # Python implementations may not use the same text. - # CPython uses the text "No module named management" - # PyPy uses "No module named myproject.myapp.management" - msg = exc.args[0] - if not msg.startswith('No module named') or 'management' not in msg: - raise - - db = options.get('database') - connection = connections[db] - cursor = connection.cursor() - - # Get a list of already installed *models* so that references work right. - tables = connection.introspection.table_names() - seen_models = connection.introspection.installed_models(tables) - created_models = set() - pending_references = {} - - # Build the manifest of apps and models that are to be synchronized - all_models = [ - (app.__name__.split('.')[-2], - [m for m in models.get_models(app, include_auto_created=True) - if router.allow_syncdb(db, m)]) - for app in models.get_apps() - ] - - def model_installed(model): - opts = model._meta - converter = connection.introspection.table_name_converter - return not ((converter(opts.db_table) in tables) or - (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)) - - manifest = OrderedDict( - (app_name, list(filter(model_installed, model_list))) - for app_name, model_list in all_models - ) - - create_models = set([x for x in itertools.chain(*manifest.values())]) - emit_pre_sync_signal(create_models, verbosity, interactive, db) - - # Create the tables for each model - if verbosity >= 1: - self.stdout.write("Creating tables ...\n") - with transaction.commit_on_success_unless_managed(using=db): - for app_name, model_list in manifest.items(): - for model in model_list: - # Create the model's database table, if it doesn't already exist. - if verbosity >= 3: - self.stdout.write("Processing %s.%s model\n" % (app_name, model._meta.object_name)) - sql, references = connection.creation.sql_create_model(model, self.style, seen_models) - seen_models.add(model) - created_models.add(model) - for refto, refs in references.items(): - pending_references.setdefault(refto, []).extend(refs) - if refto in seen_models: - sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references)) - sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references)) - if verbosity >= 1 and sql: - self.stdout.write("Creating table %s\n" % model._meta.db_table) - for statement in sql: - cursor.execute(statement) - tables.append(connection.introspection.table_name_converter(model._meta.db_table)) - - # Send the post_syncdb signal, so individual apps can do whatever they need - # to do at this point. - emit_post_sync_signal(created_models, verbosity, interactive, db) - - # The connection may have been closed by a syncdb handler. 
- cursor = connection.cursor() - - # Install custom SQL for the app (but only if this - # is a model we've just created) - if verbosity >= 1: - self.stdout.write("Installing custom SQL ...\n") - for app_name, model_list in manifest.items(): - for model in model_list: - if model in created_models: - custom_sql = custom_sql_for_model(model, self.style, connection) - if custom_sql: - if verbosity >= 2: - self.stdout.write("Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) - try: - with transaction.commit_on_success_unless_managed(using=db): - for sql in custom_sql: - cursor.execute(sql) - except Exception as e: - self.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % \ - (app_name, model._meta.object_name, e)) - if show_traceback: - traceback.print_exc() - else: - if verbosity >= 3: - self.stdout.write("No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)) - - if verbosity >= 1: - self.stdout.write("Installing indexes ...\n") - # Install SQL indices for all newly created models - for app_name, model_list in manifest.items(): - for model in model_list: - if model in created_models: - index_sql = connection.creation.sql_indexes_for_model(model, self.style) - if index_sql: - if verbosity >= 2: - self.stdout.write("Installing index for %s.%s model\n" % (app_name, model._meta.object_name)) - try: - with transaction.commit_on_success_unless_managed(using=db): - for sql in index_sql: - cursor.execute(sql) - except Exception as e: - self.stderr.write("Failed to install index for %s.%s model: %s\n" % \ - (app_name, model._meta.object_name, e)) - - # Load initial_data fixtures (unless that has been disabled) - if load_initial_data: - call_command('loaddata', 'initial_data', verbosity=verbosity, - database=db, skip_validation=True) + warnings.warn("The syncdb command will be removed in Django 1.9", PendingDeprecationWarning) + call_command("migrate", **options) diff --git a/django/core/management/sql.py b/django/core/management/sql.py index c5806086f9..2e977c0c07 100644 --- a/django/core/management/sql.py +++ b/django/core/management/sql.py @@ -206,25 +206,25 @@ def custom_sql_for_model(model, style, connection): return output -def emit_pre_sync_signal(create_models, verbosity, interactive, db): - # Emit the pre_sync signal for every application. +def emit_pre_migrate_signal(create_models, verbosity, interactive, db): + # Emit the pre_migrate signal for every application. for app in models.get_apps(): app_name = app.__name__.split('.')[-2] if verbosity >= 2: - print("Running pre-sync handlers for application %s" % app_name) - models.signals.pre_syncdb.send(sender=app, app=app, + print("Running pre-migrate handlers for application %s" % app_name) + models.signals.pre_migrate.send(sender=app, app=app, create_models=create_models, verbosity=verbosity, interactive=interactive, db=db) -def emit_post_sync_signal(created_models, verbosity, interactive, db): - # Emit the post_sync signal for every application. +def emit_post_migrate_signal(created_models, verbosity, interactive, db): + # Emit the post_migrate signal for every application. 
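+    # A receiver is wired up the same way the contrib apps do it earlier in
+    # this patch; hypothetical app code:
+    #
+    #     from django.db.models import signals
+    #     signals.post_migrate.connect(my_handler, sender=my_app_module)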
for app in models.get_apps(): app_name = app.__name__.split('.')[-2] if verbosity >= 2: - print("Running post-sync handlers for application %s" % app_name) - models.signals.post_syncdb.send(sender=app, app=app, + print("Running post-migrate handlers for application %s" % app_name) + models.signals.post_migrate.send(sender=app, app=app, created_models=created_models, verbosity=verbosity, interactive=interactive, db=db) diff --git a/django/db/backends/__init__.py b/django/db/backends/__init__.py index 07d45c9175..6274d5bc55 100644 --- a/django/db/backends/__init__.py +++ b/django/db/backends/__init__.py @@ -521,6 +521,10 @@ class BaseDatabaseWrapper(object): """ raise NotImplementedError + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + raise NotImplementedError() + class BaseDatabaseFeatures(object): allows_group_by_pk = False @@ -630,11 +634,32 @@ class BaseDatabaseFeatures(object): # when autocommit is disabled? http://bugs.python.org/issue8145#msg109965 autocommits_when_autocommit_is_off = False + # Can we roll back DDL in a transaction? + can_rollback_ddl = False + + # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE? + supports_combined_alters = False + + # What's the maximum length for index names? + max_index_name_length = 63 + + # Does it support foreign keys? + supports_foreign_keys = True + + # Does it support CHECK constraints? + supports_check_constraints = True + # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value}) # parameter passing? Note this can be provided by the backend even if not # supported by the Python driver supports_paramstyle_pyformat = True + # Does the backend require literal defaults, rather than parameterised ones? + requires_literal_defaults = False + + # Does the backend require a connection reset after each material schema change? + connection_persists_old_columns = False + def __init__(self, connection): self.connection = connection @@ -1227,7 +1252,7 @@ class BaseDatabaseIntrospection(object): for model in models.get_models(app): if not model._meta.managed: continue - if not router.allow_syncdb(self.connection.alias, model): + if not router.allow_migrate(self.connection.alias, model): continue tables.add(model._meta.db_table) tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many]) @@ -1247,7 +1272,7 @@ class BaseDatabaseIntrospection(object): all_models = [] for app in models.get_apps(): for model in models.get_models(app): - if router.allow_syncdb(self.connection.alias, model): + if router.allow_migrate(self.connection.alias, model): all_models.append(model) tables = list(map(self.table_name_converter, tables)) return set([ @@ -1268,7 +1293,7 @@ class BaseDatabaseIntrospection(object): continue if model._meta.swapped: continue - if not router.allow_syncdb(self.connection.alias, model): + if not router.allow_migrate(self.connection.alias, model): continue for f in model._meta.local_fields: if isinstance(f, models.AutoField): @@ -1310,6 +1335,25 @@ class BaseDatabaseIntrospection(object): """ raise NotImplementedError + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints or keys (unique, pk, fk, check, index) + across one or more columns. 
+
+        Returns a dict mapping constraint names to their attributes,
+        where attributes is a dict with keys:
+         * columns: List of columns this covers
+         * primary_key: True if primary key, False otherwise
+         * unique: True if this is a unique constraint, False otherwise
+         * foreign_key: (table, column) of target, or None
+         * check: True if check constraint, False otherwise
+         * index: True if index, False otherwise.
+
+        Some backends may return constraint names that don't exist in the
+        database itself, when the database leaves constraints of a certain
+        type unnamed (e.g. SQLite).
+        """
+        raise NotImplementedError
+

 class BaseDatabaseClient(object):
     """
diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py
index 4d646b05dd..1d90f03425 100644
--- a/django/db/backends/creation.py
+++ b/django/db/backends/creation.py
@@ -23,11 +23,13 @@ class BaseDatabaseCreation(object):
     destruction of test databases.
     """
     data_types = {}
+    data_type_check_constraints = {}
 
     def __init__(self, connection):
         self.connection = connection
 
-    def _digest(self, *args):
+    @classmethod
+    def _digest(cls, *args):
         """
         Generates a 32-bit digest of a set of arguments that can be used to
         shorten identifying names.
@@ -330,18 +332,19 @@ class BaseDatabaseCreation(object):
         settings.DATABASES[self.connection.alias]["NAME"] = test_database_name
         self.connection.settings_dict["NAME"] = test_database_name
 
-        # Report syncdb messages at one level lower than that requested.
+        # Report migrate messages at one level lower than that requested.
         # This ensures we don't get flooded with messages during testing
         # (unless you really ask to be flooded)
-        call_command('syncdb',
+        call_command('migrate',
             verbosity=max(verbosity - 1, 0),
             interactive=False,
             database=self.connection.alias,
-            load_initial_data=False)
+            load_initial_data=False,
+            test_database=True)
 
         # We need to then do a flush to ensure that any data installed by
         # custom SQL has been removed. The only test data should come from
-        # test fixtures, or autogenerated from post_syncdb triggers.
+        # test fixtures, or autogenerated from post_migrate triggers.
         # This has the side effect of loading initial data (which was
-        # intentionally skipped in the syncdb).
+        # intentionally skipped in the migrate call above).
         call_command('flush',
diff --git a/django/db/backends/mysql/base.py b/django/db/backends/mysql/base.py
index 719b8ca29b..15eacec679 100644
--- a/django/db/backends/mysql/base.py
+++ b/django/db/backends/mysql/base.py
@@ -44,6 +44,8 @@ from django.db.backends.mysql.creation import DatabaseCreation
 from django.db.backends.mysql.introspection import DatabaseIntrospection
 from django.db.backends.mysql.validation import DatabaseValidation
 from django.utils.encoding import force_str, force_text
+from django.db.backends.mysql.schema import DatabaseSchemaEditor
+from django.utils.functional import cached_property
 from django.utils.safestring import SafeBytes, SafeText
 from django.utils import six
 from django.utils import timezone
@@ -171,6 +173,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     requires_explicit_null_ordering_when_grouping = True
     allows_primary_key_0 = False
     uses_savepoints = True
+    supports_check_constraints = False
 
     def __init__(self, connection):
         super(DatabaseFeatures, self).__init__(connection)
@@ -514,6 +517,10 @@ class DatabaseWrapper(BaseDatabaseWrapper):
                         table_name, column_name, bad_row[1],
                         referenced_table_name, referenced_column_name))
 
+    def schema_editor(self):
+        "Returns a new instance of this backend's SchemaEditor"
+        return DatabaseSchemaEditor(self)
+
     def is_usable(self):
         try:
             self.connection.ping()
diff --git a/django/db/backends/mysql/introspection.py b/django/db/backends/mysql/introspection.py
index ec9f3e99f8..d7a29057de 100644
--- a/django/db/backends/mysql/introspection.py
+++ b/django/db/backends/mysql/introspection.py
@@ -1,6 +1,6 @@
 import re
 from .base import FIELD_TYPE
-
+from django.utils.datastructures import OrderedSet
 from django.db.backends import BaseDatabaseIntrospection, FieldInfo
 from django.utils.encoding import force_text
 
@@ -115,5 +115,71 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         for row in rows:
             if row[2] in multicol_indexes:
                 continue
-            indexes[row[4]] = {'primary_key': (row[2] == 'PRIMARY'), 'unique': not bool(row[1])}
+            if row[4] not in indexes:
+                indexes[row[4]] = {'primary_key': False, 'unique': False}
+            # It's possible to have the unique and PK constraints in separate indexes.
+            if row[2] == 'PRIMARY':
+                indexes[row[4]]['primary_key'] = True
+            if not bool(row[1]):
+                indexes[row[4]]['unique'] = True
         return indexes
+
+    def get_constraints(self, cursor, table_name):
+        """
+        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
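+
+        A sketch of one possible entry, for a table whose only constraint is
+        a single-column primary key (names here are illustrative, not part of
+        the API):
+
+            {'PRIMARY': {'columns': ['id'], 'primary_key': True,
+                         'unique': True, 'foreign_key': None,
+                         'check': False, 'index': True}}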
+ """ + constraints = {} + # Get the actual constraint names and columns + name_query = """ + SELECT kc.`constraint_name`, kc.`column_name`, + kc.`referenced_table_name`, kc.`referenced_column_name` + FROM information_schema.key_column_usage AS kc + WHERE + kc.table_schema = %s AND + kc.table_name = %s + """ + cursor.execute(name_query, [self.connection.settings_dict['NAME'], table_name]) + for constraint, column, ref_table, ref_column in cursor.fetchall(): + if constraint not in constraints: + constraints[constraint] = { + 'columns': OrderedSet(), + 'primary_key': False, + 'unique': False, + 'index': False, + 'check': False, + 'foreign_key': (ref_table, ref_column) if ref_column else None, + } + constraints[constraint]['columns'].add(column) + # Now get the constraint types + type_query = """ + SELECT c.constraint_name, c.constraint_type + FROM information_schema.table_constraints AS c + WHERE + c.table_schema = %s AND + c.table_name = %s + """ + cursor.execute(type_query, [self.connection.settings_dict['NAME'], table_name]) + for constraint, kind in cursor.fetchall(): + if kind.lower() == "primary key": + constraints[constraint]['primary_key'] = True + constraints[constraint]['unique'] = True + elif kind.lower() == "unique": + constraints[constraint]['unique'] = True + # Now add in the indexes + cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name)) + for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]: + if index not in constraints: + constraints[index] = { + 'columns': OrderedSet(), + 'primary_key': False, + 'unique': False, + 'index': True, + 'check': False, + 'foreign_key': None, + } + constraints[index]['index'] = True + constraints[index]['columns'].add(column) + # Convert the sorted sets to lists + for constraint in constraints.values(): + constraint['columns'] = list(constraint['columns']) + return constraints diff --git a/django/db/backends/mysql/schema.py b/django/db/backends/mysql/schema.py new file mode 100644 index 0000000000..dc74b2db2a --- /dev/null +++ b/django/db/backends/mysql/schema.py @@ -0,0 +1,26 @@ +from django.db.backends.schema import BaseDatabaseSchemaEditor + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + + sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s" + + sql_alter_column_null = "MODIFY %(column)s %(type)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL" + sql_alter_column_type = "MODIFY %(column)s %(type)s" + sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s" + + sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s" + + sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)" + sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s" + + sql_delete_index = "DROP INDEX %(name)s ON %(table)s" + + sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" + + alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;' + alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;' + + sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" + sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY" diff --git a/django/db/backends/oracle/base.py b/django/db/backends/oracle/base.py index 8b556b8449..fe6ac0e70c 100644 --- a/django/db/backends/oracle/base.py +++ b/django/db/backends/oracle/base.py @@ -55,6 +55,7 @@ from django.db.backends import * from django.db.backends.oracle.client import 
DatabaseClient from django.db.backends.oracle.creation import DatabaseCreation from django.db.backends.oracle.introspection import DatabaseIntrospection +from django.db.backends.oracle.schema import DatabaseSchemaEditor from django.utils.encoding import force_bytes, force_text @@ -90,6 +91,11 @@ class DatabaseFeatures(BaseDatabaseFeatures): has_bulk_insert = True supports_tablespaces = True supports_sequence_reset = False + supports_combined_alters = False + max_index_name_length = 30 + nulls_order_largest = True + requires_literal_defaults = True + connection_persists_old_columns = True nulls_order_largest = True @@ -621,6 +627,10 @@ class DatabaseWrapper(BaseDatabaseWrapper): and x.code == 2091 and 'ORA-02291' in x.message: six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2]) raise + + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) # Oracle doesn't support savepoint commits. Ignore them. def _savepoint_commit(self, sid): diff --git a/django/db/backends/oracle/creation.py b/django/db/backends/oracle/creation.py index 55f6ee4d7e..b1a8782aa9 100644 --- a/django/db/backends/oracle/creation.py +++ b/django/db/backends/oracle/creation.py @@ -22,7 +22,7 @@ class DatabaseCreation(BaseDatabaseCreation): data_types = { 'AutoField': 'NUMBER(11)', 'BinaryField': 'BLOB', - 'BooleanField': 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))', + 'BooleanField': 'NUMBER(1)', 'CharField': 'NVARCHAR2(%(max_length)s)', 'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)', 'DateField': 'DATE', @@ -35,10 +35,10 @@ class DatabaseCreation(BaseDatabaseCreation): 'BigIntegerField': 'NUMBER(19)', 'IPAddressField': 'VARCHAR2(15)', 'GenericIPAddressField': 'VARCHAR2(39)', - 'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))', + 'NullBooleanField': 'NUMBER(1)', 'OneToOneField': 'NUMBER(11)', - 'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)', - 'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)', + 'PositiveIntegerField': 'NUMBER(11)', + 'PositiveSmallIntegerField': 'NUMBER(11)', 'SlugField': 'NVARCHAR2(%(max_length)s)', 'SmallIntegerField': 'NUMBER(11)', 'TextField': 'NCLOB', @@ -46,6 +46,13 @@ class DatabaseCreation(BaseDatabaseCreation): 'URLField': 'VARCHAR2(%(max_length)s)', } + data_type_check_constraints = { + 'BooleanField': '%(qn_column)s IN (0,1)', + 'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)', + 'PositiveIntegerField': '%(qn_column)s >= 0', + 'PositiveSmallIntegerField': '%(qn_column)s >= 0', + } + def __init__(self, connection): super(DatabaseCreation, self).__init__(connection) diff --git a/django/db/backends/oracle/introspection.py b/django/db/backends/oracle/introspection.py index a2fad92509..70c38c8de8 100644 --- a/django/db/backends/oracle/introspection.py +++ b/django/db/backends/oracle/introspection.py @@ -134,3 +134,143 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): indexes[row[0]] = {'primary_key': bool(row[1]), 'unique': bool(row[2])} return indexes + + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. 
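+
+        (Same return format as BaseDatabaseIntrospection.get_constraints.)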
+ """ + constraints = {} + # Loop over the constraints, getting PKs and uniques + cursor.execute(""" + SELECT + user_constraints.constraint_name, + LOWER(cols.column_name) AS column_name, + CASE user_constraints.constraint_type + WHEN 'P' THEN 1 + ELSE 0 + END AS is_primary_key, + CASE user_indexes.uniqueness + WHEN 'UNIQUE' THEN 1 + ELSE 0 + END AS is_unique, + CASE user_constraints.constraint_type + WHEN 'C' THEN 1 + ELSE 0 + END AS is_check_constraint + FROM + user_constraints + INNER JOIN + user_indexes ON user_indexes.index_name = user_constraints.index_name + LEFT OUTER JOIN + user_cons_columns cols ON user_constraints.constraint_name = cols.constraint_name + WHERE + ( + user_constraints.constraint_type = 'P' OR + user_constraints.constraint_type = 'U' + ) + AND user_constraints.table_name = UPPER(%s) + ORDER BY cols.position + """, [table_name]) + for constraint, column, pk, unique, check in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": pk, + "unique": unique, + "foreign_key": None, + "check": check, + "index": True, # All P and U come with index, see inner join above + } + # Record the details + constraints[constraint]['columns'].append(column) + # Check constraints + cursor.execute(""" + SELECT + cons.constraint_name, + LOWER(cols.column_name) AS column_name + FROM + user_constraints cons + LEFT OUTER JOIN + user_cons_columns cols ON cons.constraint_name = cols.constraint_name + WHERE + cons.constraint_type = 'C' AND + cons.table_name = UPPER(%s) + ORDER BY cols.position + """, [table_name]) + for constraint, column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": None, + "check": True, + "index": False, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Foreign key constraints + cursor.execute(""" + SELECT + cons.constraint_name, + LOWER(cols.column_name) AS column_name, + LOWER(rcons.table_name), + LOWER(rcols.column_name) + FROM + user_constraints cons + INNER JOIN + user_constraints rcons ON cons.r_constraint_name = rcons.constraint_name + INNER JOIN + user_cons_columns rcols ON rcols.constraint_name = rcons.constraint_name + LEFT OUTER JOIN + user_cons_columns cols ON cons.constraint_name = cols.constraint_name + WHERE + cons.constraint_type = 'R' AND + cons.table_name = UPPER(%s) + ORDER BY cols.position + """, [table_name]) + for constraint, column, other_table, other_column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": (other_table, other_column), + "check": False, + "index": False, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Now get indexes + cursor.execute(""" + SELECT + index_name, + LOWER(column_name) + FROM + user_ind_columns cols + WHERE + table_name = UPPER(%s) AND + NOT EXISTS ( + SELECT 1 + FROM user_constraints cons + WHERE cols.index_name = cons.index_name + ) + ORDER BY cols.column_position + """, [table_name]) + for constraint, column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": None, + 
"check": False, + "index": True, + } + # Record the details + constraints[constraint]['columns'].append(column) + return constraints diff --git a/django/db/backends/oracle/schema.py b/django/db/backends/oracle/schema.py new file mode 100644 index 0000000000..18d67b254f --- /dev/null +++ b/django/db/backends/oracle/schema.py @@ -0,0 +1,103 @@ +import copy +import datetime +from django.utils import six +from django.db.backends.schema import BaseDatabaseSchemaEditor +from django.db.utils import DatabaseError + + +class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + + sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s" + sql_alter_column_type = "MODIFY %(column)s %(type)s" + sql_alter_column_null = "MODIFY %(column)s NULL" + sql_alter_column_not_null = "MODIFY %(column)s NOT NULL" + sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s" + sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL" + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s" + sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS" + + def delete_model(self, model): + # Run superclass action + super(DatabaseSchemaEditor, self).delete_model(model) + # Clean up any autoincrement trigger + self.execute(""" + DECLARE + i INTEGER; + BEGIN + SELECT COUNT(*) INTO i FROM USER_CATALOG + WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE'; + IF i = 1 THEN + EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"'; + END IF; + END; + /""" % {'sq_name': self.connection.ops._get_sequence_name(model._meta.db_table)}) + + def alter_field(self, model, old_field, new_field, strict=False): + try: + # Run superclass action + super(DatabaseSchemaEditor, self).alter_field(model, old_field, new_field, strict) + except DatabaseError as e: + description = str(e) + # If we're changing to/from LOB fields, we need to do a + # SQLite-ish workaround + if 'ORA-22858' in description or 'ORA-22859' in description: + self._alter_field_lob_workaround(model, old_field, new_field) + else: + raise + + def _alter_field_lob_workaround(self, model, old_field, new_field): + """ + Oracle refuses to change a column type from/to LOB to/from a regular + column. In Django, this shows up when the field is changed from/to + a TextField. + What we need to do instead is: + - Add the desired field with a temporary name + - Update the table to transfer values from old to new + - Drop old column + - Rename the new column + """ + # Make a new field that's like the new one but with a temporary + # column name. + new_temp_field = copy.deepcopy(new_field) + new_temp_field.column = self._generate_temp_name(new_field.column) + # Add it + self.add_field(model, new_temp_field) + # Transfer values across + self.execute("UPDATE %s set %s=%s" % ( + self.quote_name(model._meta.db_table), + self.quote_name(new_temp_field.column), + self.quote_name(old_field.column), + )) + # Drop the old field + self.remove_field(model, old_field) + # Rename the new field + self.alter_field(model, new_temp_field, new_field) + # Close the connection to force cx_Oracle to get column types right + # on a new cursor + self.connection.close() + + def normalize_name(self, name): + """ + Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes. 
+ """ + nn = self.quote_name(name) + if nn[0] == '"' and nn[-1] == '"': + nn = nn[1:-1] + return nn + + def _generate_temp_name(self, for_name): + """ + Generates temporary names for workarounds that need temp columns + """ + suffix = hex(hash(for_name)).upper()[1:] + return self.normalize_name(for_name + "_" + suffix) + + def prepare_default(self, value): + if isinstance(value, (datetime.date, datetime.time, datetime.datetime)): + return "'%s'" % value + elif isinstance(value, six.string_types): + return repr(value) + elif isinstance(value, bool): + return "1" if value else "0" + else: + return str(value) diff --git a/django/db/backends/postgresql_psycopg2/base.py b/django/db/backends/postgresql_psycopg2/base.py index f0a82c22d6..76b2935a1f 100644 --- a/django/db/backends/postgresql_psycopg2/base.py +++ b/django/db/backends/postgresql_psycopg2/base.py @@ -14,6 +14,7 @@ from django.db.backends.postgresql_psycopg2.client import DatabaseClient from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation from django.db.backends.postgresql_psycopg2.version import get_version from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection +from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.utils.encoding import force_str from django.utils.functional import cached_property from django.utils.safestring import SafeText, SafeBytes @@ -55,6 +56,8 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_tablespaces = True supports_transactions = True can_distinct_on_fields = True + can_rollback_ddl = True + supports_combined_alters = True nulls_order_largest = True @@ -202,6 +205,10 @@ class DatabaseWrapper(BaseDatabaseWrapper): else: return True + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) + @cached_property def psycopg2_version(self): version = psycopg2.__version__.split(' ', 1)[0] diff --git a/django/db/backends/postgresql_psycopg2/creation.py b/django/db/backends/postgresql_psycopg2/creation.py index cbf901555d..954e198764 100644 --- a/django/db/backends/postgresql_psycopg2/creation.py +++ b/django/db/backends/postgresql_psycopg2/creation.py @@ -25,14 +25,19 @@ class DatabaseCreation(BaseDatabaseCreation): 'GenericIPAddressField': 'inet', 'NullBooleanField': 'boolean', 'OneToOneField': 'integer', - 'PositiveIntegerField': 'integer CHECK ("%(column)s" >= 0)', - 'PositiveSmallIntegerField': 'smallint CHECK ("%(column)s" >= 0)', + 'PositiveIntegerField': 'integer', + 'PositiveSmallIntegerField': 'smallint', 'SlugField': 'varchar(%(max_length)s)', 'SmallIntegerField': 'smallint', 'TextField': 'text', 'TimeField': 'time', } + data_type_check_constraints = { + 'PositiveIntegerField': '"%(column)s" >= 0', + 'PositiveSmallIntegerField': '"%(column)s" >= 0', + } + def sql_table_creation_suffix(self): assert self.connection.settings_dict['TEST_COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time." if self.connection.settings_dict['TEST_CHARSET']: diff --git a/django/db/backends/postgresql_psycopg2/introspection.py b/django/db/backends/postgresql_psycopg2/introspection.py index 2401785314..57d9a67abf 100644 --- a/django/db/backends/postgresql_psycopg2/introspection.py +++ b/django/db/backends/postgresql_psycopg2/introspection.py @@ -107,5 +107,100 @@ class DatabaseIntrospection(BaseDatabaseIntrospection): # Here, we skip any indexes across multiple fields. 
if ' ' in row[1]: continue - indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]} + if row[0] not in indexes: + indexes[row[0]] = {'primary_key': False, 'unique': False} + # It's possible to have the unique and PK constraints in separate indexes. + if row[3]: + indexes[row[0]]['primary_key'] = True + if row[2]: + indexes[row[0]]['unique'] = True return indexes + + def get_constraints(self, cursor, table_name): + """ + Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns. + """ + constraints = {} + # Loop over the key table, collecting things as constraints + # This will get PKs, FKs, and uniques, but not CHECK + cursor.execute(""" + SELECT + kc.constraint_name, + kc.column_name, + c.constraint_type, + array(SELECT table_name::text || '.' || column_name::text FROM information_schema.constraint_column_usage WHERE constraint_name = kc.constraint_name) + FROM information_schema.key_column_usage AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + kc.table_schema = %s AND + kc.table_name = %s + """, ["public", table_name]) + for constraint, column, kind, used_cols in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": kind.lower() == "primary key", + "unique": kind.lower() in ["primary key", "unique"], + "foreign_key": tuple(used_cols[0].split(".", 1)) if kind.lower() == "foreign key" else None, + "check": False, + "index": False, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Now get CHECK constraint columns + cursor.execute(""" + SELECT kc.constraint_name, kc.column_name + FROM information_schema.constraint_column_usage AS kc + JOIN information_schema.table_constraints AS c ON + kc.table_schema = c.table_schema AND + kc.table_name = c.table_name AND + kc.constraint_name = c.constraint_name + WHERE + c.constraint_type = 'CHECK' AND + kc.table_schema = %s AND + kc.table_name = %s + """, ["public", table_name]) + for constraint, column in cursor.fetchall(): + # If we're the first column, make the record + if constraint not in constraints: + constraints[constraint] = { + "columns": [], + "primary_key": False, + "unique": False, + "foreign_key": None, + "check": True, + "index": False, + } + # Record the details + constraints[constraint]['columns'].append(column) + # Now get indexes + cursor.execute(""" + SELECT + c2.relname, + ARRAY( + SELECT (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = i AND attrelid = c.oid) + FROM unnest(idx.indkey) i + ), + idx.indisunique, + idx.indisprimary + FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, + pg_catalog.pg_index idx + WHERE c.oid = idx.indrelid + AND idx.indexrelid = c2.oid + AND c.relname = %s + """, [table_name]) + for index, columns, unique, primary in cursor.fetchall(): + if index not in constraints: + constraints[index] = { + "columns": list(columns), + "primary_key": primary, + "unique": unique, + "foreign_key": None, + "check": False, + "index": True, + } + return constraints diff --git a/django/db/backends/postgresql_psycopg2/schema.py b/django/db/backends/postgresql_psycopg2/schema.py new file mode 100644 index 0000000000..b86e0857bb --- /dev/null +++ b/django/db/backends/postgresql_psycopg2/schema.py @@ -0,0 +1,5 @@ +from django.db.backends.schema import BaseDatabaseSchemaEditor + + +class 
DatabaseSchemaEditor(BaseDatabaseSchemaEditor): + pass diff --git a/django/db/backends/schema.py b/django/db/backends/schema.py new file mode 100644 index 0000000000..64098499f6 --- /dev/null +++ b/django/db/backends/schema.py @@ -0,0 +1,729 @@ +import hashlib +import operator +import sys + +from django.db.backends.creation import BaseDatabaseCreation +from django.db.backends.util import truncate_name +from django.db.models.fields.related import ManyToManyField +from django.db.transaction import atomic +from django.utils.log import getLogger +from django.utils.six.moves import reduce + +logger = getLogger('django.db.backends.schema') + + +class BaseDatabaseSchemaEditor(object): + """ + This class (and its subclasses) are responsible for emitting schema-changing + statements to the databases - model creation/removal/alteration, field + renaming, index fiddling, and so on. + + It is intended to eventually completely replace DatabaseCreation. + + This class should be used by creating an instance for each set of schema + changes (e.g. a syncdb run, a migration file), and by first calling start(), + then the relevant actions, and then commit(). This is necessary to allow + things like circular foreign key references - FKs will only be created once + commit() is called. + """ + + # Overrideable SQL templates + sql_create_table = "CREATE TABLE %(table)s (%(definition)s)" + sql_create_table_unique = "UNIQUE (%(columns)s)" + sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s" + sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s" + sql_delete_table = "DROP TABLE %(table)s CASCADE" + + sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s" + sql_alter_column = "ALTER TABLE %(table)s %(changes)s" + sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s" + sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL" + sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL" + sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s" + sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT" + sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE" + sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s" + + sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)" + sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)" + sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED" + sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;" + sql_delete_index = "DROP INDEX %(name)s" + + sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)" + sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s" + + def __init__(self, connection): + self.connection = connection + + # State-managing methods + + def __enter__(self): + self.deferred_sql = [] + atomic(self.connection.alias, self.connection.features.can_rollback_ddl).__enter__() + return self + + def __exit__(self, exc_type, exc_value, traceback): + if exc_type is None: + for sql in self.deferred_sql: + self.execute(sql) + 
atomic(self.connection.alias, self.connection.features.can_rollback_ddl).__exit__(None, None, None) + else: + # Continue propagating exception + return None + + # Core utility functions + + def execute(self, sql, params=[]): + """ + Executes the given SQL statement, with optional parameters. + """ + # Get the cursor + cursor = self.connection.cursor() + # Log the command we're running, then run it + logger.debug("%s; (params %r)" % (sql, params)) + cursor.execute(sql, params) + + def quote_name(self, name): + return self.connection.ops.quote_name(name) + + # Field <-> database mapping functions + + def column_sql(self, model, field, include_default=False): + """ + Takes a field and returns its column definition. + The field must already have had set_attributes_from_name called. + """ + # Get the column's type and use that as the basis of the SQL + db_params = field.db_parameters(connection=self.connection) + sql = db_params['type'] + params = [] + # Check for fields that aren't actually columns (e.g. M2M) + if sql is None: + return None + # Optionally add the tablespace if it's an implicitly indexed column + tablespace = field.db_tablespace or model._meta.db_tablespace + if tablespace and self.connection.features.supports_tablespaces and field.unique: + sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True) + # Work out nullability + null = field.null + # If we were told to include a default value, do so + default_value = self.effective_default(field) + if include_default and default_value is not None: + if self.connection.features.requires_literal_defaults: + # Some databases can't take defaults as a parameter (oracle) + # If this is the case, the individual schema backend should + # implement prepare_default + sql += " DEFAULT %s" % self.prepare_default(default_value) + else: + sql += " DEFAULT %s" + params += [default_value] + # Oracle treats the empty string ('') as null, so coerce the null + # option whenever '' is a possible value. + if (field.empty_strings_allowed and not field.primary_key and + self.connection.features.interprets_empty_strings_as_nulls): + null = True + if null: + sql += " NULL" + else: + sql += " NOT NULL" + # Primary key/unique outputs + if field.primary_key: + sql += " PRIMARY KEY" + elif field.unique: + sql += " UNIQUE" + # Return the sql + return sql, params + + def prepare_default(self, value): + """ + Only used for backends which have requires_literal_defaults feature + """ + raise NotImplementedError() + + def effective_default(self, field): + """ + Returns a field's effective database default value + """ + if field.has_default(): + default = field.get_default() + elif not field.null and field.blank and field.empty_strings_allowed: + default = "" + else: + default = None + # If it's a callable, call it + if callable(default): + default = default() + return default + + # Actions + + def create_model(self, model): + """ + Takes a model and creates a table for it in the database. + Will also create any accompanying indexes or unique constraints. 
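The enter/exit protocol above is the editor's whole lifecycle: statements queued in deferred_sql (foreign keys, for instance, which may point at tables that don't exist yet) only run if the block exits cleanly, and the run is wrapped in a transaction when can_rollback_ddl allows. A toy stand-in that mirrors just the deferral contract, collecting statements in a list instead of sending them to a cursor:

class ToySchemaEditor(object):
    def __enter__(self):
        self.deferred_sql = []
        self.executed = []
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            # Deferred statements only run on a clean exit
            for sql in self.deferred_sql:
                self.execute(sql)

    def execute(self, sql):
        self.executed.append(sql)

with ToySchemaEditor() as editor:
    editor.execute('CREATE TABLE "author" (...)')
    editor.deferred_sql.append('ALTER TABLE "book" ADD CONSTRAINT book_author_fk ...')
# editor.executed now holds both statements, the FK last - which is how
# circular references between tables become creatable.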
+ """ + # Create column SQL, add FK deferreds if needed + column_sqls = [] + params = [] + for field in model._meta.local_fields: + # SQL + definition, extra_params = self.column_sql(model, field) + if definition is None: + continue + # Check constraints can go on the column SQL here + db_params = field.db_parameters(connection=self.connection) + if db_params['check']: + definition += " CHECK (%s)" % db_params['check'] + # Add the SQL to our big list + column_sqls.append("%s %s" % ( + self.quote_name(field.column), + definition, + )) + params.extend(extra_params) + # Indexes + if field.db_index and not field.unique: + self.deferred_sql.append( + self.sql_create_index % { + "name": self._create_index_name(model, [field.column], suffix=""), + "table": self.quote_name(model._meta.db_table), + "columns": self.quote_name(field.column), + "extra": "", + } + ) + # FK + if field.rel and self.connection.features.supports_foreign_keys: + to_table = field.rel.to._meta.db_table + to_column = field.rel.to._meta.get_field(field.rel.field_name).column + self.deferred_sql.append( + self.sql_create_fk % { + "name": self._create_index_name(model, [field.column], suffix="_fk_%s_%s" % (to_table, to_column)), + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + } + ) + # Autoincrement SQL + if field.get_internal_type() == "AutoField": + autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column) + if autoinc_sql: + self.deferred_sql.extend(autoinc_sql) + # Add any unique_togethers + for fields in model._meta.unique_together: + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + column_sqls.append(self.sql_create_table_unique % { + "columns": ", ".join(self.quote_name(column) for column in columns), + }) + # Make the table + sql = self.sql_create_table % { + "table": model._meta.db_table, + "definition": ", ".join(column_sqls) + } + self.execute(sql, params) + # Add any index_togethers + for fields in model._meta.index_together: + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + self.execute(self.sql_create_index % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, columns, suffix="_idx"), + "columns": ", ".join(self.quote_name(column) for column in columns), + "extra": "", + }) + # Make M2M tables + for field in model._meta.local_many_to_many: + self.create_model(field.rel.through) + + def delete_model(self, model): + """ + Deletes a model from the database. + """ + # Delete the table + self.execute(self.sql_delete_table % { + "table": self.quote_name(model._meta.db_table), + }) + + def alter_unique_together(self, model, old_unique_together, new_unique_together): + """ + Deals with a model changing its unique_together. + Note: The input unique_togethers must be doubly-nested, not the single- + nested ["foo", "bar"] format. 
+ """ + olds = set(tuple(fields) for fields in old_unique_together) + news = set(tuple(fields) for fields in new_unique_together) + # Deleted uniques + for fields in olds.difference(news): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + constraint_names = self._constraint_names(model, columns, unique=True) + if len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( + len(constraint_names), + model._meta.db_table, + ", ".join(columns), + )) + self.execute( + self.sql_delete_unique % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_names[0], + }, + ) + # Created uniques + for fields in news.difference(olds): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + self.execute(self.sql_create_unique % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, columns, suffix="_uniq"), + "columns": ", ".join(self.quote_name(column) for column in columns), + }) + + def alter_index_together(self, model, old_index_together, new_index_together): + """ + Deals with a model changing its index_together. + Note: The input index_togethers must be doubly-nested, not the single- + nested ["foo", "bar"] format. + """ + olds = set(tuple(fields) for fields in old_index_together) + news = set(tuple(fields) for fields in new_index_together) + # Deleted indexes + for fields in olds.difference(news): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + constraint_names = self._constraint_names(model, list(columns), index=True) + if len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % ( + len(constraint_names), + model._meta.db_table, + ", ".join(columns), + )) + self.execute( + self.sql_delete_index % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_names[0], + }, + ) + # Created indexes + for fields in news.difference(olds): + columns = [model._meta.get_field_by_name(field)[0].column for field in fields] + self.execute(self.sql_create_index % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, columns, suffix="_idx"), + "columns": ", ".join(self.quote_name(column) for column in columns), + "extra": "", + }) + + def alter_db_table(self, model, old_db_table, new_db_table): + """ + Renames the table a model points to. + """ + self.execute(self.sql_rename_table % { + "old_table": self.quote_name(old_db_table), + "new_table": self.quote_name(new_db_table), + }) + + def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): + """ + Moves a model's table between tablespaces + """ + self.execute(self.sql_retablespace_table % { + "table": self.quote_name(model._meta.db_table), + "old_tablespace": self.quote_name(old_db_tablespace), + "new_tablespace": self.quote_name(new_db_tablespace), + }) + + def add_field(self, model, field): + """ + Creates a field on a model. 
+ Usually involves adding a column, but may involve adding a + table instead (for M2M fields) + """ + # Special-case implicit M2M tables + if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: + return self.create_model(field.rel.through) + # Get the column's definition + definition, params = self.column_sql(model, field, include_default=True) + # It might not actually have a column behind it + if definition is None: + return + # Check constraints can go on the column SQL here + db_params = field.db_parameters(connection=self.connection) + if db_params['check']: + definition += " CHECK (%s)" % db_params['check'] + # Build the SQL and run it + sql = self.sql_create_column % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "definition": definition, + } + self.execute(sql, params) + # Drop the default if we need to + # (Django usually does not use in-database defaults) + if field.default is not None: + sql = self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": self.sql_alter_column_no_default % { + "column": self.quote_name(field.column), + } + } + self.execute(sql) + # Add an index, if required + if field.db_index and not field.unique: + self.deferred_sql.append( + self.sql_create_index % { + "name": self._create_index_name(model, [field.column], suffix=""), + "table": self.quote_name(model._meta.db_table), + "columns": self.quote_name(field.column), + "extra": "", + } + ) + # Add any FK constraints later + if field.rel and self.connection.features.supports_foreign_keys: + to_table = field.rel.to._meta.db_table + to_column = field.rel.to._meta.get_field(field.rel.field_name).column + self.deferred_sql.append( + self.sql_create_fk % { + "name": '%s_refs_%s_%x' % ( + field.column, + to_column, + abs(hash((model._meta.db_table, to_table))) + ), + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + "to_table": self.quote_name(to_table), + "to_column": self.quote_name(to_column), + } + ) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() + + def remove_field(self, model, field): + """ + Removes a field from a model. Usually involves deleting a column, + but for M2Ms may involve deleting a table. + """ + # Special-case implicit M2M tables + if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: + return self.delete_model(field.rel.through) + # It might not actually have a column behind it + if field.db_parameters(connection=self.connection)['type'] is None: + return + # Get the column's definition + definition, params = self.column_sql(model, field) + # Delete the column + sql = self.sql_delete_column % { + "table": self.quote_name(model._meta.db_table), + "column": self.quote_name(field.column), + } + self.execute(sql) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() + + def alter_field(self, model, old_field, new_field, strict=False): + """ + Allows a field's type, uniqueness, nullability, default, column, + constraints etc. to be modified. + Requires a copy of the old field as well so we can only perform + changes that are required. + If strict is true, raises errors if the old column does not match old_field precisely. 
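add_field's treatment of defaults above is a two-step dance: the column is added WITH a default so existing rows get backfilled, then the database default is immediately dropped, because Django applies defaults at the Python level. A sketch using the SQL templates defined earlier in this file (table and column names invented):

sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"

def add_not_null_column(table, column, col_type, default):
    # Step 1: add the column with a default so existing rows get a value
    yield (sql_create_column % {
        "table": table,
        "column": column,
        "definition": "%s DEFAULT %%s NOT NULL" % col_type,
    }, [default])
    # Step 2: drop the default again - it lives in Python from now on
    yield (sql_alter_column % {
        "table": table,
        "changes": sql_alter_column_no_default % {"column": column},
    }, [])

for sql, params in add_not_null_column('"book"', '"pages"', "integer", 0):
    print(sql, params)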
+ """ + # Ensure this field is even column-based + old_db_params = old_field.db_parameters(connection=self.connection) + old_type = old_db_params['type'] + new_db_params = new_field.db_parameters(connection=self.connection) + new_type = new_db_params['type'] + if old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created): + return self._alter_many_to_many(model, old_field, new_field, strict) + elif old_type is None or new_type is None: + raise ValueError("Cannot alter field %s into %s - they are not compatible types (probably means only one is an M2M with implicit through model)" % ( + old_field, + new_field, + )) + # Has unique been removed? + if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)): + # Find the unique constraint for this field + constraint_names = self._constraint_names(model, [old_field.column], unique=True) + if strict and len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % ( + len(constraint_names), + model._meta.db_table, + old_field.column, + )) + for constraint_name in constraint_names: + self.execute( + self.sql_delete_unique % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_name, + }, + ) + # Removed an index? + if old_field.db_index and not new_field.db_index and not old_field.unique and not (not new_field.unique and old_field.unique): + # Find the index for this field + index_names = self._constraint_names(model, [old_field.column], index=True) + if strict and len(index_names) != 1: + raise ValueError("Found wrong number (%s) of indexes for %s.%s" % ( + len(index_names), + model._meta.db_table, + old_field.column, + )) + for index_name in index_names: + self.execute( + self.sql_delete_index % { + "table": self.quote_name(model._meta.db_table), + "name": index_name, + } + ) + # Drop any FK constraints, we'll remake them later + if old_field.rel: + fk_names = self._constraint_names(model, [old_field.column], foreign_key=True) + if strict and len(fk_names) != 1: + raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % ( + len(fk_names), + model._meta.db_table, + old_field.column, + )) + for fk_name in fk_names: + self.execute( + self.sql_delete_fk % { + "table": self.quote_name(model._meta.db_table), + "name": fk_name, + } + ) + # Change check constraints? + if old_db_params['check'] != new_db_params['check'] and old_db_params['check']: + constraint_names = self._constraint_names(model, [old_field.column], check=True) + if strict and len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % ( + len(constraint_names), + model._meta.db_table, + old_field.column, + )) + for constraint_name in constraint_names: + self.execute( + self.sql_delete_check % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_name, + } + ) + # Have they renamed the column? + if old_field.column != new_field.column: + self.execute(self.sql_rename_column % { + "table": self.quote_name(model._meta.db_table), + "old_column": self.quote_name(old_field.column), + "new_column": self.quote_name(new_field.column), + "type": new_type, + }) + # Next, start accumulating actions to do + actions = [] + # Type change? 
+ if old_type != new_type: + actions.append(( + self.sql_alter_column_type % { + "column": self.quote_name(new_field.column), + "type": new_type, + }, + [], + )) + # Default change? + old_default = self.effective_default(old_field) + new_default = self.effective_default(new_field) + if old_default != new_default: + if new_default is None: + actions.append(( + self.sql_alter_column_no_default % { + "column": self.quote_name(new_field.column), + }, + [], + )) + else: + if self.connection.features.requires_literal_defaults: + # Some databases can't take defaults as a parameter (oracle) + # If this is the case, the individual schema backend should + # implement prepare_default + actions.append(( + self.sql_alter_column_default % { + "column": self.quote_name(new_field.column), + "default": self.prepare_default(new_default), + }, + [], + )) + else: + actions.append(( + self.sql_alter_column_default % { + "column": self.quote_name(new_field.column), + "default": "%s", + }, + [new_default], + )) + # Nullability change? + if old_field.null != new_field.null: + if new_field.null: + actions.append(( + self.sql_alter_column_null % { + "column": self.quote_name(new_field.column), + "type": new_type, + }, + [], + )) + else: + actions.append(( + self.sql_alter_column_not_null % { + "column": self.quote_name(new_field.column), + "type": new_type, + }, + [], + )) + if actions: + # Combine actions together if we can (e.g. postgres) + if self.connection.features.supports_combined_alters: + sql, params = tuple(zip(*actions)) + actions = [(", ".join(sql), reduce(operator.add, params))] + # Apply those actions + for sql, params in actions: + self.execute( + self.sql_alter_column % { + "table": self.quote_name(model._meta.db_table), + "changes": sql, + }, + params, + ) + # Added a unique? + if not old_field.unique and new_field.unique: + self.execute( + self.sql_create_unique % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_uniq"), + "columns": self.quote_name(new_field.column), + } + ) + # Added an index? + if not old_field.db_index and new_field.db_index and not new_field.unique and not (not old_field.unique and new_field.unique): + self.execute( + self.sql_create_index % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_uniq"), + "columns": self.quote_name(new_field.column), + "extra": "", + } + ) + # Changed to become primary key? + # Note that we don't detect unsetting of a PK, as we assume another field + # will always come along and replace it. + if not old_field.primary_key and new_field.primary_key: + # First, drop the old PK + constraint_names = self._constraint_names(model, primary_key=True) + if strict and len(constraint_names) != 1: + raise ValueError("Found wrong number (%s) of PK constraints for %s" % ( + len(constraint_names), + model._meta.db_table, + )) + for constraint_name in constraint_names: + self.execute( + self.sql_delete_pk % { + "table": self.quote_name(model._meta.db_table), + "name": constraint_name, + }, + ) + # Make the new one + self.execute( + self.sql_create_pk % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_pk"), + "columns": self.quote_name(new_field.column), + } + ) + # Does it have a foreign key? 
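The supports_combined_alters branch above collapses the accumulated actions into a single ALTER TABLE on backends such as PostgreSQL. The same zip/reduce trick in isolation (action strings invented for the demo):

import operator
from functools import reduce

actions = [
    ('ALTER COLUMN "age" TYPE bigint', []),
    ('ALTER COLUMN "age" SET DEFAULT %s', [0]),
    ('ALTER COLUMN "age" SET NOT NULL', []),
]

# Exactly the combination step from alter_field:
sql, params = tuple(zip(*actions))
combined = [(", ".join(sql), reduce(operator.add, params))]
print(combined[0][0])
# ALTER COLUMN "age" TYPE bigint, ALTER COLUMN "age" SET DEFAULT %s, ALTER COLUMN "age" SET NOT NULL
print(combined[0][1])  # [0]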
+ if new_field.rel: + self.execute( + self.sql_create_fk % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_fk"), + "column": self.quote_name(new_field.column), + "to_table": self.quote_name(new_field.rel.to._meta.db_table), + "to_column": self.quote_name(new_field.rel.get_related_field().column), + } + ) + # Does it have check constraints we need to add? + if old_db_params['check'] != new_db_params['check'] and new_db_params['check']: + self.execute( + self.sql_create_check % { + "table": self.quote_name(model._meta.db_table), + "name": self._create_index_name(model, [new_field.column], suffix="_check"), + "column": self.quote_name(new_field.column), + "check": new_db_params['check'], + } + ) + # Reset connection if required + if self.connection.features.connection_persists_old_columns: + self.connection.close() + + def _alter_many_to_many(self, model, old_field, new_field, strict): + """ + Alters M2Ms to repoint their to= endpoints. + """ + # Rename the through table + self.alter_db_table(old_field.rel.through, old_field.rel.through._meta.db_table, new_field.rel.through._meta.db_table) + # Repoint the FK to the other side + self.alter_field( + new_field.rel.through, + # We need the field that points to the target model, so we can tell alter_field to change it - + # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) + old_field.rel.through._meta.get_field_by_name(old_field.m2m_reverse_field_name())[0], + new_field.rel.through._meta.get_field_by_name(new_field.m2m_reverse_field_name())[0], + ) + + def _create_index_name(self, model, column_names, suffix=""): + """ + Generates a unique name for an index/unique constraint. + """ + # If there is just one column in the index, use a default algorithm from Django + if len(column_names) == 1 and not suffix: + return truncate_name( + '%s_%s' % (model._meta.db_table, BaseDatabaseCreation._digest(column_names[0])), + self.connection.ops.max_name_length() + ) + # Else generate the name for the index using a different algorithm + table_name = model._meta.db_table.replace('"', '').replace('.', '_') + index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names)))) + # If the index name is too long, truncate it + index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_') + if len(index_name) > self.connection.features.max_index_name_length: + part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix)) + index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part) + # It shouldn't start with an underscore (Oracle hates this) + if index_name[0] == "_": + index_name = index_name[1:] + # If it's STILL too long, just hash it down + if len(index_name) > self.connection.features.max_index_name_length: + index_name = hashlib.md5(index_name).hexdigest()[:self.connection.features.max_index_name_length] + # It can't start with a number on Oracle, so prepend D if we need to + if index_name[0].isdigit(): + index_name = "D%s" % index_name[:-1] + return index_name + + def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None): + """ + Returns all constraint names matching the columns and conditions + """ + column_names = list(column_names) if column_names else None + constraints = self.connection.introspection.get_constraints(self.connection.cursor(), 
model._meta.db_table) + result = [] + for name, infodict in constraints.items(): + if column_names is None or column_names == infodict['columns']: + if unique is not None and infodict['unique'] != unique: + continue + if primary_key is not None and infodict['primary_key'] != primary_key: + continue + if index is not None and infodict['index'] != index: + continue + if check is not None and infodict['check'] != check: + continue + if foreign_key is not None and not infodict['foreign_key']: + continue + result.append(name) + return result diff --git a/django/db/backends/sqlite3/base.py b/django/db/backends/sqlite3/base.py index f396bf7408..841ca92f3e 100644 --- a/django/db/backends/sqlite3/base.py +++ b/django/db/backends/sqlite3/base.py @@ -18,6 +18,7 @@ from django.db.backends import (util, BaseDatabaseFeatures, from django.db.backends.sqlite3.client import DatabaseClient from django.db.backends.sqlite3.creation import DatabaseCreation from django.db.backends.sqlite3.introspection import DatabaseIntrospection +from django.db.backends.sqlite3.schema import DatabaseSchemaEditor from django.db.models import fields from django.db.models.sql import aggregates from django.utils.dateparse import parse_date, parse_datetime, parse_time @@ -100,6 +101,8 @@ class DatabaseFeatures(BaseDatabaseFeatures): supports_mixed_date_datetime_comparisons = False has_bulk_insert = True can_combine_inserts_with_and_without_auto_increment_pk = False + supports_foreign_keys = False + supports_check_constraints = False autocommits_when_autocommit_is_off = True supports_paramstyle_pyformat = False @@ -432,6 +435,10 @@ class DatabaseWrapper(BaseDatabaseWrapper): """ self.cursor().execute("BEGIN") + def schema_editor(self): + "Returns a new instance of this backend's SchemaEditor" + return DatabaseSchemaEditor(self) + FORMAT_QMARK_REGEX = re.compile(r'(?>> ") + if not code: + print("Please enter some code, or 'exit' (with no quotes) to exit.") + elif code == "exit": + sys.exit(1) + else: + try: + return eval(code, {}, {"datetime": datetime_safe}) + except (SyntaxError, NameError) as e: + print("Invalid input: %s" % e) + else: + break + + def ask_rename(self, model_name, old_name, new_name, field_instance): + "Was this field really renamed?" + return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)? [y/N]" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False) diff --git a/django/db/migrations/executor.py b/django/db/migrations/executor.py new file mode 100644 index 0000000000..fe0ac6b061 --- /dev/null +++ b/django/db/migrations/executor.py @@ -0,0 +1,90 @@ +from .loader import MigrationLoader +from .recorder import MigrationRecorder + + +class MigrationExecutor(object): + """ + End-to-end migration execution - loads migrations, and runs them + up or down to a specified set of targets. + """ + + def __init__(self, connection, progress_callback=None): + self.connection = connection + self.loader = MigrationLoader(self.connection) + self.loader.load_disk() + self.recorder = MigrationRecorder(self.connection) + self.progress_callback = progress_callback + + def migration_plan(self, targets): + """ + Given a set of targets, returns a list of (Migration instance, backwards?). 
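_constraint_names above is a pure filter over the dict shape that get_constraints returns. A standalone rendition against a hand-written constraints dict (simplified: the real method treats the foreign_key entry as a truthiness test rather than an equality, so that flag is omitted here):

constraints = {
    "book_pkey": {"columns": ["id"], "primary_key": True, "unique": True,
                  "foreign_key": None, "check": False, "index": False},
    "book_title_uniq": {"columns": ["title"], "primary_key": False, "unique": True,
                        "foreign_key": None, "check": False, "index": False},
}

def constraint_names(constraints, column_names=None, **flags):
    result = []
    for name, info in constraints.items():
        if column_names is not None and column_names != info["columns"]:
            continue
        if any(info[flag] != wanted for flag, wanted in flags.items()):
            continue
        result.append(name)
    return result

print(constraint_names(constraints, ["title"], unique=True))  # ['book_title_uniq']
print(constraint_names(constraints, primary_key=True))        # ['book_pkey']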
+ """ + plan = [] + applied = self.recorder.applied_migrations() + for target in targets: + # If the target is (appname, None), that means unmigrate everything + if target[1] is None: + for root in self.loader.graph.root_nodes(): + if root[0] == target[0]: + for migration in self.loader.graph.backwards_plan(root): + if migration in applied: + plan.append((self.loader.graph.nodes[migration], True)) + applied.remove(migration) + # If the migration is already applied, do backwards mode, + # otherwise do forwards mode. + elif target in applied: + backwards_plan = self.loader.graph.backwards_plan(target)[:-1] + # We only do this if the migration is not the most recent one + # in its app - that is, another migration with the same app + # label is in the backwards plan + if any(node[0] == target[0] for node in backwards_plan): + for migration in backwards_plan: + if migration in applied: + plan.append((self.loader.graph.nodes[migration], True)) + applied.remove(migration) + else: + for migration in self.loader.graph.forwards_plan(target): + if migration not in applied: + plan.append((self.loader.graph.nodes[migration], False)) + applied.add(migration) + return plan + + def migrate(self, targets, plan=None, fake=False): + """ + Migrates the database up to the given targets. + """ + if plan is None: + plan = self.migration_plan(targets) + for migration, backwards in plan: + if not backwards: + self.apply_migration(migration, fake=fake) + else: + self.unapply_migration(migration, fake=fake) + + def apply_migration(self, migration, fake=False): + """ + Runs a migration forwards. + """ + if self.progress_callback: + self.progress_callback("apply_start", migration) + if not fake: + with self.connection.schema_editor() as schema_editor: + project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) + migration.apply(project_state, schema_editor) + self.recorder.record_applied(migration.app_label, migration.name) + if self.progress_callback: + self.progress_callback("apply_success", migration) + + def unapply_migration(self, migration, fake=False): + """ + Runs a migration backwards. + """ + if self.progress_callback: + self.progress_callback("unapply_start", migration) + if not fake: + with self.connection.schema_editor() as schema_editor: + project_state = self.loader.graph.project_state((migration.app_label, migration.name), at_end=False) + migration.unapply(project_state, schema_editor) + self.recorder.record_unapplied(migration.app_label, migration.name) + if self.progress_callback: + self.progress_callback("unapply_success", migration) diff --git a/django/db/migrations/graph.py b/django/db/migrations/graph.py new file mode 100644 index 0000000000..fcd83913c8 --- /dev/null +++ b/django/db/migrations/graph.py @@ -0,0 +1,152 @@ +from django.utils.datastructures import OrderedSet +from django.db.migrations.state import ProjectState + + +class MigrationGraph(object): + """ + Represents the digraph of all migrations in a project. + + Each migration is a node, and each dependency is an edge. There are + no implicit dependencies between numbered migrations - the numbering is + merely a convention to aid file listing. Every new numbered migration + has a declared dependency to the previous number, meaning that VCS + branch merges can be detected and resolved. + + Migrations files can be marked as replacing another set of migrations - + this is to support the "squash" feature. 
The graph handler isn't responsible + for these; instead, the code to load them in here should examine the + migration files and if the replaced migrations are all either unapplied + or not present, it should ignore the replaced ones, load in just the + replacing migration, and repoint any dependencies that pointed to the + replaced migrations to point to the replacing one. + + A node should be a tuple: (app_path, migration_name). The tree special-cases + things within an app - namely, root nodes and leaf nodes ignore dependencies + to other apps. + """ + + def __init__(self): + self.nodes = {} + self.dependencies = {} + self.dependents = {} + + def add_node(self, node, implementation): + self.nodes[node] = implementation + + def add_dependency(self, child, parent): + if child not in self.nodes: + raise KeyError("Dependency references nonexistent child node %r" % (child,)) + if parent not in self.nodes: + raise KeyError("Dependency references nonexistent parent node %r" % (parent,)) + self.dependencies.setdefault(child, set()).add(parent) + self.dependents.setdefault(parent, set()).add(child) + + def forwards_plan(self, node): + """ + Given a node, returns a list of which previous nodes (dependencies) + must be applied, ending with the node itself. + This is the list you would follow if applying the migrations to + a database. + """ + if node not in self.nodes: + raise ValueError("Node %r not a valid node" % (node, )) + return self.dfs(node, lambda x: self.dependencies.get(x, set())) + + def backwards_plan(self, node): + """ + Given a node, returns a list of which dependent nodes (dependencies) + must be unapplied, ending with the node itself. + This is the list you would follow if removing the migrations from + a database. + """ + if node not in self.nodes: + raise ValueError("Node %r not a valid node" % (node, )) + return self.dfs(node, lambda x: self.dependents.get(x, set())) + + def root_nodes(self): + """ + Returns all root nodes - that is, nodes with no dependencies inside + their app. These are the starting point for an app. + """ + roots = set() + for node in self.nodes: + if not any(key[0] == node[0] for key in self.dependencies.get(node, set())): + roots.add(node) + return roots + + def leaf_nodes(self): + """ + Returns all leaf nodes - that is, nodes with no dependents in their app. + These are the "most current" version of an app's schema. + Having more than one per app is technically an error, but one that + gets handled further up, in the interactive command - it's usually the + result of a VCS merge and needs some user input. + """ + leaves = set() + for node in self.nodes: + if not any(key[0] == node[0] for key in self.dependents.get(node, set())): + leaves.add(node) + return leaves + + def dfs(self, start, get_children): + """ + Dynamic programming based depth first search, for finding dependencies. 
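A usage sketch for the planning methods above; the node implementations don't matter for plan computation, so None stands in for the Migration instances:

from django.db.migrations.graph import MigrationGraph

graph = MigrationGraph()
graph.add_node(("app", "0001_initial"), None)
graph.add_node(("app", "0002_add_field"), None)
graph.add_node(("other", "0001_initial"), None)
graph.add_dependency(("app", "0002_add_field"), ("app", "0001_initial"))
graph.add_dependency(("app", "0002_add_field"), ("other", "0001_initial"))

# Dependencies first, the target last:
print(graph.forwards_plan(("app", "0002_add_field")))
# e.g. [('other', '0001_initial'), ('app', '0001_initial'), ('app', '0002_add_field')]
# (the relative order of independent roots is an implementation detail)

# Dependents first when unwinding:
print(graph.backwards_plan(("other", "0001_initial")))
# [('app', '0002_add_field'), ('other', '0001_initial')]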
+ """ + cache = {} + def _dfs(start, get_children, path): + # If we already computed this, use that (dynamic programming) + if (start, get_children) in cache: + return cache[(start, get_children)] + # If we've traversed here before, that's a circular dep + if start in path: + raise CircularDependencyError(path[path.index(start):] + [start]) + # Build our own results list, starting with us + results = [] + results.append(start) + # We need to add to results all the migrations this one depends on + children = sorted(get_children(start)) + path.append(start) + for n in children: + results = _dfs(n, get_children, path) + results + path.pop() + # Use OrderedSet to ensure only one instance of each result + results = list(OrderedSet(results)) + # Populate DP cache + cache[(start, get_children)] = results + # Done! + return results + return _dfs(start, get_children, []) + + def __str__(self): + return "Graph: %s nodes, %s edges" % (len(self.nodes), sum(len(x) for x in self.dependencies.values())) + + def project_state(self, nodes=None, at_end=True): + """ + Given a migration node or nodes, returns a complete ProjectState for it. + If at_end is False, returns the state before the migration has run. + If nodes is not provided, returns the overall most current project state. + """ + if nodes is None: + nodes = list(self.leaf_nodes()) + if len(nodes) == 0: + return ProjectState() + if not isinstance(nodes[0], tuple): + nodes = [nodes] + plan = [] + for node in nodes: + for migration in self.forwards_plan(node): + if migration not in plan: + if not at_end and migration in nodes: + continue + plan.append(migration) + project_state = ProjectState() + for node in plan: + project_state = self.nodes[node].mutate_state(project_state) + return project_state + + +class CircularDependencyError(Exception): + """ + Raised when there's an impossible-to-resolve circular dependency. + """ + pass diff --git a/django/db/migrations/loader.py b/django/db/migrations/loader.py new file mode 100644 index 0000000000..6ad6959787 --- /dev/null +++ b/django/db/migrations/loader.py @@ -0,0 +1,167 @@ +import os +from importlib import import_module +from django.utils.functional import cached_property +from django.db.models.loading import cache +from django.db.migrations.recorder import MigrationRecorder +from django.db.migrations.graph import MigrationGraph +from django.conf import settings + + +class MigrationLoader(object): + """ + Loads migration files from disk, and their status from the database. + + Migration files are expected to live in the "migrations" directory of + an app. Their names are entirely unimportant from a code perspective, + but will probably follow the 1234_name.py convention. + + On initialisation, this class will scan those directories, and open and + read the python files, looking for a class called Migration, which should + inherit from django.db.migrations.Migration. See + django.db.migrations.migration for what that looks like. + + Some migrations will be marked as "replacing" another set of migrations. + These are loaded into a separate set of migrations away from the main ones. + If all the migrations they replace are either unapplied or missing from + disk, then they are injected into the main set, replacing the named migrations. + Any dependency pointers to the replaced migrations are re-pointed to the + new migration. + + This does mean that this class MUST also talk to the database as well as + to disk, but this is probably fine. We're already not just operating + in memory. 
+ """ + + def __init__(self, connection): + self.connection = connection + self.disk_migrations = None + self.applied_migrations = None + + @classmethod + def migrations_module(cls, app_label): + if app_label in settings.MIGRATION_MODULES: + return settings.MIGRATION_MODULES[app_label] + app = cache.get_app(app_label) + return ".".join(app.__name__.split(".")[:-1] + ["migrations"]) + + def load_disk(self): + """ + Loads the migrations from all INSTALLED_APPS from disk. + """ + self.disk_migrations = {} + self.unmigrated_apps = set() + self.migrated_apps = set() + for app in cache.get_apps(): + # Get the migrations module directory + app_label = app.__name__.split(".")[-2] + module_name = self.migrations_module(app_label) + try: + module = import_module(module_name) + except ImportError as e: + # I hate doing this, but I don't want to squash other import errors. + # Might be better to try a directory check directly. + if "No module named" in str(e) and "migrations" in str(e): + self.unmigrated_apps.add(app_label) + continue + self.migrated_apps.add(app_label) + directory = os.path.dirname(module.__file__) + # Scan for .py[c|o] files + migration_names = set() + for name in os.listdir(directory): + if name.endswith(".py") or name.endswith(".pyc") or name.endswith(".pyo"): + import_name = name.rsplit(".", 1)[0] + if import_name[0] not in "_.~": + migration_names.add(import_name) + # Load them + for migration_name in migration_names: + migration_module = import_module("%s.%s" % (module_name, migration_name)) + if not hasattr(migration_module, "Migration"): + raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_label)) + self.disk_migrations[app_label, migration_name] = migration_module.Migration(migration_name, app_label) + + def get_migration_by_prefix(self, app_label, name_prefix): + "Returns the migration(s) which match the given app label and name _prefix_" + # Make sure we have the disk data + if self.disk_migrations is None: + self.load_disk() + # Do the search + results = [] + for l, n in self.disk_migrations: + if l == app_label and n.startswith(name_prefix): + results.append((l, n)) + if len(results) > 1: + raise AmbiguityError("There is more than one migration for '%s' with the prefix '%s'" % (app_label, name_prefix)) + elif len(results) == 0: + raise KeyError("There no migrations for '%s' with the prefix '%s'" % (app_label, name_prefix)) + else: + return self.disk_migrations[results[0]] + + @cached_property + def graph(self): + """ + Builds a migration dependency graph using both the disk and database. + """ + # Make sure we have the disk data + if self.disk_migrations is None: + self.load_disk() + # And the database data + if self.applied_migrations is None: + recorder = MigrationRecorder(self.connection) + self.applied_migrations = recorder.applied_migrations() + # Do a first pass to separate out replacing and non-replacing migrations + normal = {} + replacing = {} + for key, migration in self.disk_migrations.items(): + if migration.replaces: + replacing[key] = migration + else: + normal[key] = migration + # Calculate reverse dependencies - i.e., for each migration, what depends on it? + # This is just for dependency re-pointing when applying replacements, + # so we ignore run_before here. 
+ reverse_dependencies = {} + for key, migration in normal.items(): + for parent in migration.dependencies: + reverse_dependencies.setdefault(parent, set()).add(key) + # Carry out replacements if we can - that is, if all replaced migrations + # are either unapplied or missing. + for key, migration in replacing.items(): + # Do the check + can_replace = True + for target in migration.replaces: + if target in self.applied_migrations: + can_replace = False + break + if not can_replace: + continue + # Alright, time to replace. Step through the replaced migrations + # and remove, repointing dependencies if needs be. + for replaced in migration.replaces: + if replaced in normal: + del normal[replaced] + for child_key in reverse_dependencies.get(replaced, set()): + normal[child_key].dependencies.remove(replaced) + normal[child_key].dependencies.append(key) + normal[key] = migration + # Finally, make a graph and load everything into it + graph = MigrationGraph() + for key, migration in normal.items(): + graph.add_node(key, migration) + for key, migration in normal.items(): + for parent in migration.dependencies: + graph.add_dependency(key, parent) + return graph + + +class BadMigrationError(Exception): + """ + Raised when there's a bad migration (unreadable/bad format/etc.) + """ + pass + + +class AmbiguityError(Exception): + """ + Raised when more than one migration matches a name prefix + """ + pass diff --git a/django/db/migrations/migration.py b/django/db/migrations/migration.py new file mode 100644 index 0000000000..277c5faa3f --- /dev/null +++ b/django/db/migrations/migration.py @@ -0,0 +1,101 @@ +class Migration(object): + """ + The base class for all migrations. + + Migration files will import this from django.db.migrations.Migration + and subclass it as a class called Migration. It will have one or more + of the following attributes: + + - operations: A list of Operation instances, probably from django.db.migrations.operations + - dependencies: A list of tuples of (app_path, migration_name) + - run_before: A list of tuples of (app_path, migration_name) + - replaces: A list of migration_names + + Note that all migrations come out of migrations and into the Loader or + Graph as instances, having been initialised with their app label and name. + """ + + # Operations to apply during this migration, in order. + operations = [] + + # Other migrations that should be run before this migration. + # Should be a list of (app, migration_name). + dependencies = [] + + # Other migrations that should be run after this one (i.e. have + # this migration added to their dependencies). Useful to make third-party + # apps' migrations run after your AUTH_USER replacement, for example. + run_before = [] + + # Migration names in this app that this migration replaces. If this is + # non-empty, this migration will only be applied if all these migrations + # are not applied. 
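Per the class docstring above, a migration file is just a module with a Migration subclass; the loader instantiates it with (name, app_label). A minimal example file under this patch's module layout (model and field names invented; a real file might import the base class via a different path):

from django.db import models
from django.db.migrations.migration import Migration as MigrationBase
from django.db.migrations.operations import CreateModel, AddField


class Migration(MigrationBase):

    dependencies = []

    operations = [
        CreateModel(
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=255)),
            ],
        ),
        AddField("author", "age", models.IntegerField(null=True)),
    ]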
+ replaces = [] + + def __init__(self, name, app_label): + self.name = name + self.app_label = app_label + + def __eq__(self, other): + if not isinstance(other, Migration): + return False + return (self.name == other.name) and (self.app_label == other.app_label) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "<Migration %s.%s>" % (self.app_label, self.name) + + def __str__(self): + return "%s.%s" % (self.app_label, self.name) + + def __hash__(self): + return hash("%s.%s" % (self.app_label, self.name)) + + def mutate_state(self, project_state): + """ + Takes a ProjectState and returns a new one with the migration's + operations applied to it. + """ + new_state = project_state.clone() + for operation in self.operations: + operation.state_forwards(self.app_label, new_state) + return new_state + + def apply(self, project_state, schema_editor): + """ + Takes a project_state representing all migrations prior to this one + and a schema_editor for a live database and applies the migration + in a forwards order. + + Returns the resulting project state for efficient re-use by following + Migrations. + """ + for operation in self.operations: + # Get the state after the operation has run + new_state = project_state.clone() + operation.state_forwards(self.app_label, new_state) + # Run the operation + operation.database_forwards(self.app_label, schema_editor, project_state, new_state) + # Switch states + project_state = new_state + return project_state + + def unapply(self, project_state, schema_editor): + """ + Takes a project_state representing all migrations prior to this one + and a schema_editor for a live database and applies the migration + in a reverse order. + """ + # We need to pre-calculate the stack of project states + to_run = [] + for operation in self.operations: + new_state = project_state.clone() + operation.state_forwards(self.app_label, new_state) + to_run.append((operation, project_state, new_state)) + project_state = new_state + # Now run them in reverse + to_run.reverse() + for operation, to_state, from_state in to_run: + operation.database_backwards(self.app_label, schema_editor, from_state, to_state) diff --git a/django/db/migrations/operations/__init__.py b/django/db/migrations/operations/__init__.py new file mode 100644 index 0000000000..1240a5d1f5 --- /dev/null +++ b/django/db/migrations/operations/__init__.py @@ -0,0 +1,2 @@ +from .models import CreateModel, DeleteModel, AlterModelTable, AlterUniqueTogether, AlterIndexTogether +from .fields import AddField, RemoveField, AlterField, RenameField diff --git a/django/db/migrations/operations/base.py b/django/db/migrations/operations/base.py new file mode 100644 index 0000000000..dcdb1ad30b --- /dev/null +++ b/django/db/migrations/operations/base.py @@ -0,0 +1,62 @@ +class Operation(object): + """ + Base class for migration operations. + + It's responsible for both mutating the in-memory model state + (see db/migrations/state.py) to represent what it performs, as well + as actually performing it against a live database. + + Note that some operations won't modify memory state at all (e.g. data + copying operations), and some will need their modifications to be + optionally specified by the user (e.g. custom Python code snippets) + """ + + # If this migration can be run in reverse. + # Some operations are impossible to reverse, like deleting data. 
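The __new__ override that follows captures constructor arguments before __init__ ever runs, which is what makes deconstruct() free for every subclass. The same pattern in isolation (AddColumn is an invented demo class):

class Operation(object):
    def __new__(cls, *args, **kwargs):
        # Capture the arguments so deconstruct() can return them verbatim
        self = object.__new__(cls)
        self._constructor_args = (args, kwargs)
        return self

    def deconstruct(self):
        return (self.__class__.__name__,
                self._constructor_args[0],
                self._constructor_args[1])


class AddColumn(Operation):
    def __init__(self, model_name, name):
        self.model_name = model_name
        self.name = name


print(AddColumn("author", "age").deconstruct())
# ('AddColumn', ('author', 'age'), {})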
+ reversible = True + + def __new__(cls, *args, **kwargs): + # We capture the arguments to make returning them trivial + self = object.__new__(cls) + self._constructor_args = (args, kwargs) + return self + + def deconstruct(self): + """ + Returns a 3-tuple of class import path (or just name if it lives + under django.db.migrations), positional arguments, and keyword + arguments. + """ + return ( + self.__class__.__name__, + self._constructor_args[0], + self._constructor_args[1], + ) + + def state_forwards(self, app_label, state): + """ + Takes the state from the previous migration, and mutates it + so that it matches what this migration would perform. + """ + raise NotImplementedError() + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + """ + Performs the mutation on the database schema in the normal + (forwards) direction. + """ + raise NotImplementedError() + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + """ + Performs the mutation on the database schema in the reverse + direction - e.g. if this were CreateModel, it would in fact + drop the model's table. + """ + raise NotImplementedError() + + def describe(self): + """ + Outputs a brief summary of what the action does. + """ + return "%s: %s" % (self.__class__.__name__, self._constructor_args) diff --git a/django/db/migrations/operations/fields.py b/django/db/migrations/operations/fields.py new file mode 100644 index 0000000000..7c619d49ce --- /dev/null +++ b/django/db/migrations/operations/fields.py @@ -0,0 +1,132 @@ +from django.db import router +from .base import Operation + + +class AddField(Operation): + """ + Adds a field to a model. + """ + + def __init__(self, model_name, name, field): + self.model_name = model_name.lower() + self.name = name + self.field = field + + def state_forwards(self, app_label, state): + state.models[app_label, self.model_name.lower()].fields.append((self.name, self.field)) + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, from_model): + schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) + + def describe(self): + return "Add field %s to %s" % (self.name, self.model_name) + + +class RemoveField(Operation): + """ + Removes a field from a model. 
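Each field operation above does its work twice: once against the in-memory state, once against the database. A toy stand-in for the state half of AddField shows the shape of the mutation (FakeModelState approximates the entries in ProjectState.models):

class FakeModelState(object):
    def __init__(self, fields):
        self.fields = fields

state_models = {
    ("app", "author"): FakeModelState([("id", "AutoField"), ("name", "CharField")]),
}

# What AddField("Author", "age", field).state_forwards("app", state) amounts
# to - note the (app_label, lower-cased model name) key convention:
state_models["app", "author"].fields.append(("age", "IntegerField"))

print(state_models["app", "author"].fields)
# [('id', 'AutoField'), ('name', 'CharField'), ('age', 'IntegerField')]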
+ """ + + def __init__(self, model_name, name): + self.model_name = model_name.lower() + self.name = name + + def state_forwards(self, app_label, state): + new_fields = [] + for name, instance in state.models[app_label, self.model_name.lower()].fields: + if name != self.name: + new_fields.append((name, instance)) + state.models[app_label, self.model_name.lower()].fields = new_fields + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, from_model): + schema_editor.remove_field(from_model, from_model._meta.get_field_by_name(self.name)[0]) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.add_field(from_model, to_model._meta.get_field_by_name(self.name)[0]) + + def describe(self): + return "Remove field %s from %s" % (self.name, self.model_name) + + +class AlterField(Operation): + """ + Alters a field's database column (e.g. null, max_length) to the provided new field + """ + + def __init__(self, model_name, name, field): + self.model_name = model_name.lower() + self.name = name + self.field = field + + def state_forwards(self, app_label, state): + state.models[app_label, self.model_name.lower()].fields = [ + (n, self.field if n == self.name else f) for n, f in state.models[app_label, self.model_name.lower()].fields + ] + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.name)[0], + to_model._meta.get_field_by_name(self.name)[0], + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Alter field %s on %s" % (self.name, self.model_name) + + +class RenameField(Operation): + """ + Renames a field on the model. Might affect db_column too. 
+ """ + + def __init__(self, model_name, old_name, new_name): + self.model_name = model_name.lower() + self.old_name = old_name + self.new_name = new_name + + def state_forwards(self, app_label, state): + state.models[app_label, self.model_name.lower()].fields = [ + (self.new_name if n == self.old_name else n, f) for n, f in state.models[app_label, self.model_name.lower()].fields + ] + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.old_name)[0], + to_model._meta.get_field_by_name(self.new_name)[0], + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + from_model = from_state.render().get_model(app_label, self.model_name) + to_model = to_state.render().get_model(app_label, self.model_name) + if router.allow_migrate(schema_editor.connection.alias, to_model): + schema_editor.alter_field( + from_model, + from_model._meta.get_field_by_name(self.new_name)[0], + to_model._meta.get_field_by_name(self.old_name)[0], + ) + + def describe(self): + return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name) diff --git a/django/db/migrations/operations/models.py b/django/db/migrations/operations/models.py new file mode 100644 index 0000000000..406efa6ef1 --- /dev/null +++ b/django/db/migrations/operations/models.py @@ -0,0 +1,157 @@ +from .base import Operation +from django.db import models, router +from django.db.migrations.state import ModelState + + +class CreateModel(Operation): + """ + Create a model's table. + """ + + def __init__(self, name, fields, options=None, bases=None): + self.name = name + self.fields = fields + self.options = options or {} + self.bases = bases or (models.Model,) + + def state_forwards(self, app_label, state): + state.models[app_label, self.name.lower()] = ModelState(app_label, self.name, self.fields, self.options, self.bases) + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + app_cache = to_state.render() + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.create_model(model) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + app_cache = from_state.render() + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.delete_model(model) + + def describe(self): + return "Create model %s" % (self.name, ) + + +class DeleteModel(Operation): + """ + Drops a model's table. 
+ """ + + def __init__(self, name): + self.name = name + + def state_forwards(self, app_label, state): + del state.models[app_label, self.name.lower()] + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + app_cache = from_state.render() + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.delete_model(model) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + app_cache = to_state.render() + model = app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, model): + schema_editor.create_model(model) + + def describe(self): + return "Delete model %s" % (self.name, ) + + +class AlterModelTable(Operation): + """ + Renames a model's table + """ + + def __init__(self, name, table): + self.name = name + self.table = table + + def state_forwards(self, app_label, state): + state.models[app_label, self.name.lower()].options["db_table"] = self.table + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + old_app_cache = from_state.render() + new_app_cache = to_state.render() + old_model = old_app_cache.get_model(app_label, self.name) + new_model = new_app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, new_model): + schema_editor.alter_db_table( + new_model, + old_model._meta.db_table, + new_model._meta.db_table, + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Rename table for %s to %s" % (self.name, self.table) + + +class AlterUniqueTogether(Operation): + """ + Changes the value of index_together to the target one. + Input value of unique_together must be a set of tuples. + """ + + def __init__(self, name, unique_together): + self.name = name + self.unique_together = set(tuple(cons) for cons in unique_together) + + def state_forwards(self, app_label, state): + model_state = state.models[app_label, self.name.lower()] + model_state.options["unique_together"] = self.unique_together + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + old_app_cache = from_state.render() + new_app_cache = to_state.render() + old_model = old_app_cache.get_model(app_label, self.name) + new_model = new_app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, new_model): + schema_editor.alter_unique_together( + new_model, + getattr(old_model._meta, "unique_together", set()), + getattr(new_model._meta, "unique_together", set()), + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Alter unique_together for %s (%s constraints)" % (self.name, len(self.unique_together)) + + +class AlterIndexTogether(Operation): + """ + Changes the value of index_together to the target one. + Input value of index_together must be a set of tuples. 
+ """ + + def __init__(self, name, index_together): + self.name = name + self.index_together = set(tuple(cons) for cons in index_together) + + def state_forwards(self, app_label, state): + model_state = state.models[app_label, self.name.lower()] + model_state.options["index_together"] = self.index_together + + def database_forwards(self, app_label, schema_editor, from_state, to_state): + old_app_cache = from_state.render() + new_app_cache = to_state.render() + old_model = old_app_cache.get_model(app_label, self.name) + new_model = new_app_cache.get_model(app_label, self.name) + if router.allow_migrate(schema_editor.connection.alias, new_model): + schema_editor.alter_index_together( + new_model, + getattr(old_model._meta, "index_together", set()), + getattr(new_model._meta, "index_together", set()), + ) + + def database_backwards(self, app_label, schema_editor, from_state, to_state): + return self.database_forwards(app_label, schema_editor, from_state, to_state) + + def describe(self): + return "Alter index_together for %s (%s constraints)" % (self.name, len(self.index_together)) diff --git a/django/db/migrations/recorder.py b/django/db/migrations/recorder.py new file mode 100644 index 0000000000..c66d122068 --- /dev/null +++ b/django/db/migrations/recorder.py @@ -0,0 +1,69 @@ +from django.db import models +from django.db.models.loading import BaseAppCache +from django.utils.timezone import now + + +class MigrationRecorder(object): + """ + Deals with storing migration records in the database. + + Because this table is actually itself used for dealing with model + creation, it's the one thing we can't do normally via syncdb or migrations. + We manually handle table creation/schema updating (using schema backend) + and then have a floating model to do queries with. + + If a migration is unapplied its row is removed from the table. Having + a row in the table always means a migration is applied. + """ + + class Migration(models.Model): + app = models.CharField(max_length=255) + name = models.CharField(max_length=255) + applied = models.DateTimeField(default=now) + class Meta: + app_cache = BaseAppCache() + app_label = "migrations" + db_table = "django_migrations" + + def __init__(self, connection): + self.connection = connection + + def ensure_schema(self): + """ + Ensures the table exists and has the correct schema. + """ + # If the table's there, that's fine - we've never changed its schema + # in the codebase. + if self.Migration._meta.db_table in self.connection.introspection.get_table_list(self.connection.cursor()): + return + # Make the table + with self.connection.schema_editor() as editor: + editor.create_model(self.Migration) + + def applied_migrations(self): + """ + Returns a set of (app, name) of applied migrations. + """ + self.ensure_schema() + return set(tuple(x) for x in self.Migration.objects.values_list("app", "name")) + + def record_applied(self, app, name): + """ + Records that a migration was applied. + """ + self.ensure_schema() + self.Migration.objects.create(app=app, name=name) + + def record_unapplied(self, app, name): + """ + Records that a migration was unapplied. + """ + self.ensure_schema() + self.Migration.objects.filter(app=app, name=name).delete() + + @classmethod + def flush(cls): + """ + Deletes all migration records. Useful if you're testing migrations. 
+ """ + cls.Migration.objects.all().delete() diff --git a/django/db/migrations/state.py b/django/db/migrations/state.py new file mode 100644 index 0000000000..8f0078d731 --- /dev/null +++ b/django/db/migrations/state.py @@ -0,0 +1,142 @@ +from django.db import models +from django.db.models.loading import BaseAppCache +from django.db.models.options import DEFAULT_NAMES +from django.utils.module_loading import import_by_path + + +class ProjectState(object): + """ + Represents the entire project's overall state. + This is the item that is passed around - we do it here rather than at the + app level so that cross-app FKs/etc. resolve properly. + """ + + def __init__(self, models=None): + self.models = models or {} + self.app_cache = None + + def add_model_state(self, model_state): + self.models[(model_state.app_label, model_state.name.lower())] = model_state + + def clone(self): + "Returns an exact copy of this ProjectState" + return ProjectState( + models = dict((k, v.clone()) for k, v in self.models.items()) + ) + + def render(self): + "Turns the project state into actual models in a new AppCache" + if self.app_cache is None: + self.app_cache = BaseAppCache() + for model in self.models.values(): + model.render(self.app_cache) + return self.app_cache + + @classmethod + def from_app_cache(cls, app_cache): + "Takes in an AppCache and returns a ProjectState matching it" + models = {} + for model in app_cache.get_models(): + model_state = ModelState.from_model(model) + models[(model_state.app_label, model_state.name.lower())] = model_state + return cls(models) + + +class ModelState(object): + """ + Represents a Django Model. We don't use the actual Model class + as it's not designed to have its options changed - instead, we + mutate this one and then render it into a Model as required. + + Note that while you are allowed to mutate .fields, you are not allowed + to mutate the Field instances inside there themselves - you must instead + assign new ones, as these are not detached during a clone. + """ + + def __init__(self, app_label, name, fields, options=None, bases=None): + self.app_label = app_label + self.name = name + self.fields = fields + self.options = options or {} + self.bases = bases or (models.Model, ) + # Sanity-check that fields is NOT a dict. It must be ordered. + if isinstance(self.fields, dict): + raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.") + + @classmethod + def from_model(cls, model): + """ + Feed me a model, get a ModelState representing it out. 
+ """ + # Deconstruct the fields + fields = [] + for field in model._meta.fields: + name, path, args, kwargs = field.deconstruct() + field_class = import_by_path(path) + fields.append((name, field_class(*args, **kwargs))) + # Extract the options + options = {} + for name in DEFAULT_NAMES: + # Ignore some special options + if name in ["app_cache", "app_label"]: + continue + elif name in model._meta.original_attrs: + if name == "unique_together": + options[name] = set(model._meta.original_attrs["unique_together"]) + else: + options[name] = model._meta.original_attrs[name] + # Make our record + bases = tuple(model for model in model.__bases__ if (not hasattr(model, "_meta") or not model._meta.abstract)) + if not bases: + bases = (models.Model, ) + return cls( + model._meta.app_label, + model._meta.object_name, + fields, + options, + bases, + ) + + def clone(self): + "Returns an exact copy of this ModelState" + # We deep-clone the fields using deconstruction + fields = [] + for name, field in self.fields: + _, path, args, kwargs = field.deconstruct() + field_class = import_by_path(path) + fields.append((name, field_class(*args, **kwargs))) + # Now make a copy + return self.__class__( + app_label = self.app_label, + name = self.name, + fields = fields, + options = dict(self.options), + bases = self.bases, + ) + + def render(self, app_cache): + "Creates a Model object from our current state into the given app_cache" + # First, make a Meta object + meta_contents = {'app_label': self.app_label, "app_cache": app_cache} + meta_contents.update(self.options) + if "unique_together" in meta_contents: + meta_contents["unique_together"] = list(meta_contents["unique_together"]) + meta = type("Meta", tuple(), meta_contents) + # Then, work out our bases + # TODO: Use the actual bases + # Turn fields into a dict for the body, add other bits + body = dict(self.fields) + body['Meta'] = meta + body['__module__'] = "__fake__" + # Then, make a Model object + return type( + self.name, + tuple(self.bases), + body, + ) + + def get_field_by_name(self, name): + for fname, field in self.fields: + if fname == name: + return field + raise ValueError("No field called %s on model %s" % (name, self.name)) diff --git a/django/db/migrations/writer.py b/django/db/migrations/writer.py new file mode 100644 index 0000000000..753aeacd04 --- /dev/null +++ b/django/db/migrations/writer.py @@ -0,0 +1,180 @@ +from __future__ import unicode_literals +import datetime +import types +import os +from importlib import import_module +from django.utils import six +from django.db import models +from django.db.models.loading import cache +from django.db.migrations.loader import MigrationLoader + + +class MigrationWriter(object): + """ + Takes a Migration instance and is able to produce the contents + of the migration file from it. + """ + + def __init__(self, migration): + self.migration = migration + + def as_string(self): + """ + Returns a string of the file contents. 
+ """ + items = { + "dependencies": repr(self.migration.dependencies), + } + imports = set() + # Deconstruct operations + operation_strings = [] + for operation in self.migration.operations: + name, args, kwargs = operation.deconstruct() + arg_strings = [] + for arg in args: + arg_string, arg_imports = self.serialize(arg) + arg_strings.append(arg_string) + imports.update(arg_imports) + for kw, arg in kwargs.items(): + arg_string, arg_imports = self.serialize(arg) + imports.update(arg_imports) + arg_strings.append("%s = %s" % (kw, arg_string)) + operation_strings.append("migrations.%s(%s\n )" % (name, "".join("\n %s," % arg for arg in arg_strings))) + items["operations"] = "[%s\n ]" % "".join("\n %s," % s for s in operation_strings) + # Format imports nicely + imports.discard("from django.db import models") + if not imports: + items["imports"] = "" + else: + items["imports"] = "\n".join(imports) + "\n" + return (MIGRATION_TEMPLATE % items).encode("utf8") + + @property + def filename(self): + return "%s.py" % self.migration.name + + @property + def path(self): + migrations_module_name = MigrationLoader.migrations_module(self.migration.app_label) + app_module = cache.get_app(self.migration.app_label) + # See if we can import the migrations module directly + try: + migrations_module = import_module(migrations_module_name) + basedir = os.path.dirname(migrations_module.__file__) + except ImportError: + # Alright, see if it's a direct submodule of the app + oneup = ".".join(migrations_module_name.split(".")[:-1]) + app_oneup = ".".join(app_module.__name__.split(".")[:-1]) + if oneup == app_oneup: + basedir = os.path.join(os.path.dirname(app_module.__file__), migrations_module_name.split(".")[-1]) + else: + raise ImportError("Cannot open migrations module %s for app %s" % (migrations_module_name, self.migration.app_label)) + return os.path.join(basedir, self.filename) + + @classmethod + def serialize(cls, value): + """ + Serializes the value to a string that's parsable by Python, along + with any needed imports to make that string work. + More advanced than repr() as it can encode things + like datetime.datetime.now. 
+ """ + # Sequences + if isinstance(value, (list, set, tuple)): + imports = set() + strings = [] + for item in value: + item_string, item_imports = cls.serialize(item) + imports.update(item_imports) + strings.append(item_string) + if isinstance(value, set): + format = "set([%s])" + elif isinstance(value, tuple): + format = "(%s,)" + else: + format = "[%s]" + return format % (", ".join(strings)), imports + # Dictionaries + elif isinstance(value, dict): + imports = set() + strings = [] + for k, v in value.items(): + k_string, k_imports = cls.serialize(k) + v_string, v_imports = cls.serialize(v) + imports.update(k_imports) + imports.update(v_imports) + strings.append((k_string, v_string)) + return "{%s}" % (", ".join(["%s: %s" % (k, v) for k, v in strings])), imports + # Datetimes + elif isinstance(value, (datetime.datetime, datetime.date)): + return repr(value), set(["import datetime"]) + # Simple types + elif isinstance(value, six.integer_types + (float, six.binary_type, six.text_type, bool, type(None))): + return repr(value), set() + # Django fields + elif isinstance(value, models.Field): + attr_name, path, args, kwargs = value.deconstruct() + module, name = path.rsplit(".", 1) + if module == "django.db.models": + imports = set(["from django.db import models"]) + name = "models.%s" % name + else: + imports = set(["import %s" % module]) + name = path + arg_strings = [] + for arg in args: + arg_string, arg_imports = cls.serialize(arg) + arg_strings.append(arg_string) + imports.update(arg_imports) + for kw, arg in kwargs.items(): + arg_string, arg_imports = cls.serialize(arg) + imports.update(arg_imports) + arg_strings.append("%s=%s" % (kw, arg_string)) + return "%s(%s)" % (name, ", ".join(arg_strings)), imports + # Functions + elif isinstance(value, (types.FunctionType, types.BuiltinFunctionType)): + # Special-cases, as these don't have im_class + special_cases = [ + (datetime.datetime.now, "datetime.datetime.now", ["import datetime"]), + (datetime.datetime.utcnow, "datetime.datetime.utcnow", ["import datetime"]), + (datetime.date.today, "datetime.date.today", ["import datetime"]), + ] + for func, string, imports in special_cases: + if func == value: # For some reason "utcnow is not utcnow" + return string, set(imports) + # Method? + if hasattr(value, "im_class"): + klass = value.im_class + module = klass.__module__ + return "%s.%s.%s" % (module, klass.__name__, value.__name__), set(["import %s" % module]) + else: + module = value.__module__ + if module is None: + raise ValueError("Cannot serialize function %r: No module" % value) + return "%s.%s" % (module, value.__name__), set(["import %s" % module]) + # Classes + elif isinstance(value, type): + special_cases = [ + (models.Model, "models.Model", []), + ] + for case, string, imports in special_cases: + if case is value: + return string, set(imports) + if hasattr(value, "__module__"): + module = value.__module__ + return "%s.%s" % (module, value.__name__), set(["import %s" % module]) + # Uh oh. 
+ else: + raise ValueError("Cannot serialize: %r" % value) + + +MIGRATION_TEMPLATE = """# encoding: utf8 +from django.db import models, migrations +%(imports)s + +class Migration(migrations.Migration): + + dependencies = %(dependencies)s + + operations = %(operations)s +""" diff --git a/django/db/models/base.py b/django/db/models/base.py index a5b0f188b4..d63017c438 100644 --- a/django/db/models/base.py +++ b/django/db/models/base.py @@ -146,7 +146,7 @@ class ModelBase(type): new_class._base_manager = new_class._base_manager._copy_to_model(new_class) # Bail out early if we have already created this class. - m = get_model(new_class._meta.app_label, name, + m = new_class._meta.app_cache.get_model(new_class._meta.app_label, name, seed_cache=False, only_installed=False) if m is not None: return m @@ -264,13 +264,13 @@ class ModelBase(type): return new_class new_class._prepare() - register_models(new_class._meta.app_label, new_class) - + + new_class._meta.app_cache.register_models(new_class._meta.app_label, new_class) # Because of the way imports happen (recursively), we may or may not be # the first time this model tries to register with the framework. There # should only be one class for each model, so we always return the # registered version. - return get_model(new_class._meta.app_label, name, + return new_class._meta.app_cache.get_model(new_class._meta.app_label, name, seed_cache=False, only_installed=False) def copy_managers(cls, base_managers): diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py index fb4cbbb11a..4135c60ad3 100644 --- a/django/db/models/fields/__init__.py +++ b/django/db/models/fields/__init__.py @@ -368,12 +368,32 @@ class Field(object): # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. 
+ params = self.db_parameters(connection) + if params['type']: + if params['check']: + return "%s CHECK (%s)" % (params['type'], params['check']) + else: + return params['type'] + return None + + def db_parameters(self, connection): + """ + Replacement for db_type, providing a range of different return + values (type, checks) + """ data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_") try: - return (connection.creation.data_types[self.get_internal_type()] - % data) + type_string = connection.creation.data_types[self.get_internal_type()] % data except KeyError: - return None + type_string = None + try: + check_string = connection.creation.data_type_check_constraints[self.get_internal_type()] % data + except KeyError: + check_string = None + return { + "type": type_string, + "check": check_string, + } @property def unique(self): diff --git a/django/db/models/fields/related.py b/django/db/models/fields/related.py index 78569042a5..4ff93e701f 100644 --- a/django/db/models/fields/related.py +++ b/django/db/models/fields/related.py @@ -2,7 +2,7 @@ from operator import attrgetter from django.db import connection, connections, router from django.db.backends import util -from django.db.models import signals, get_model +from django.db.models import signals from django.db.models.fields import (AutoField, Field, IntegerField, PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist) from django.db.models.related import RelatedObject, PathInfo @@ -18,8 +18,6 @@ from django import forms RECURSIVE_RELATIONSHIP_CONSTANT = 'self' -pending_lookups = {} - def add_lazy_relation(cls, field, relation, operation): """ @@ -70,14 +68,14 @@ def add_lazy_relation(cls, field, relation, operation): # string right away. If get_model returns None, it means that the related # model isn't loaded yet, so we need to pend the relation until the class # is prepared. - model = get_model(app_label, model_name, + model = cls._meta.app_cache.get_model(app_label, model_name, seed_cache=False, only_installed=False) if model: operation(field, model, cls) else: key = (app_label, model_name) value = (cls, field, operation) - pending_lookups.setdefault(key, []).append(value) + cls._meta.app_cache.pending_lookups.setdefault(key, []).append(value) def do_pending_lookups(sender, **kwargs): @@ -85,7 +83,7 @@ def do_pending_lookups(sender, **kwargs): Handle any pending relations to the sending model. Sent from class_prepared. 
""" key = (sender._meta.app_label, sender.__name__) - for cls, field, operation in pending_lookups.pop(key, []): + for cls, field, operation in sender._meta.app_cache.pending_lookups.pop(key, []): operation(field, sender, cls) signals.class_prepared.connect(do_pending_lookups) @@ -941,6 +939,8 @@ class ForeignObject(RelatedField): def resolve_related_fields(self): if len(self.from_fields) < 1 or len(self.from_fields) != len(self.to_fields): raise ValueError('Foreign Object from and to fields must be the same non-zero length') + if isinstance(self.rel.to, six.string_types): + raise ValueError('Related model %r cannot been resolved' % self.rel.to) related_fields = [] for index in range(len(self.from_fields)): from_field_name = self.from_fields[index] @@ -1281,6 +1281,9 @@ class ForeignKey(ForeignObject): return IntegerField().db_type(connection=connection) return rel_field.db_type(connection=connection) + def db_parameters(self, connection): + return {"type": self.db_type(connection), "check": []} + class OneToOneField(ForeignKey): """ @@ -1351,6 +1354,7 @@ def create_many_to_many_intermediary_model(field, klass): 'unique_together': (from_, to), 'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to}, 'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to}, + 'app_cache': field.model._meta.app_cache, }) # Construct and return the new class. return type(str(name), (models.Model,), { @@ -1561,3 +1565,11 @@ class ManyToManyField(RelatedField): initial = initial() defaults['initial'] = [i._get_pk_val() for i in initial] return super(ManyToManyField, self).formfield(**defaults) + + def db_type(self, connection): + # A ManyToManyField is not represented by a single column, + # so return None. + return None + + def db_parameters(self, connection): + return {"type": None, "check": None} diff --git a/django/db/models/loading.py b/django/db/models/loading.py index 6819134ae0..2858b8b699 100644 --- a/django/db/models/loading.py +++ b/django/db/models/loading.py @@ -31,28 +31,30 @@ class UnavailableApp(Exception): pass -class AppCache(object): +def _initialize(): """ - A cache that stores installed applications and their models. Used to - provide reverse-relations and for app introspection (e.g. admin). + Returns a dictionary to be used as the initial value of the + [shared] state of the app cache. """ - # Use the Borg pattern to share state between all instances. Details at - # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66531. - __shared_state = dict( + return dict( # Keys of app_store are the model modules for each application. app_store=ModelDict(), # Mapping of installed app_labels to model modules for that app. - app_labels={}, + app_labels = {}, # Mapping of app_labels to a dictionary of model names to model code. # May contain apps that are not installed. app_models=ModelDict(), # Mapping of app_labels to errors raised when trying to import the app. - app_errors={}, + app_errors = {}, + + # Pending lookups for lazy relations + pending_lookups = {}, # -- Everything below here is only used when populating the cache -- + loads_installed = True, loaded=False, handled=set(), postponed=[], @@ -61,10 +63,27 @@ class AppCache(object): available_apps=None, ) + +class BaseAppCache(object): + """ + A cache that stores installed applications and their models. Used to + provide reverse-relations and for app introspection (e.g. admin). 
+ + This provides the base (non-Borg) AppCache class - the AppCache + subclass adds borg-like behaviour for the few cases where it's needed, + and adds the code that auto-loads from INSTALLED_APPS. + """ + def __init__(self): - self.__dict__ = self.__shared_state + self.__dict__ = _initialize() + # This stops _populate loading from INSTALLED_APPS and ignores the + # only_installed arguments to get_model[s] + self.loads_installed = False def _populate(self): + """ + Stub method - this base class does no auto-loading. + """ """ Fill in all the cache information. This method is threadsafe, in the sense that every caller will see the same state upon return, and if the @@ -72,6 +91,9 @@ class AppCache(object): """ if self.loaded: return + if not self.loads_installed: + self.loaded = True + return # Note that we want to use the import lock here - the app loading is # in many cases initiated implicitly by importing, and thus it is # possible to end up in deadlock when one thread initiates loading @@ -233,12 +255,15 @@ class AppCache(object): By default, models that aren't part of installed apps will *not* be included in the list of models. However, if you specify - only_installed=False, they will be. + only_installed=False, they will be. If you're using a non-default + AppCache, this argument does nothing - all models will be included. By default, models that have been swapped out will *not* be included in the list of models. However, if you specify include_swapped, they will be. """ + if not self.loads_installed: + only_installed = False cache_key = (app_mod, include_auto_created, include_deferred, only_installed, include_swapped) model_list = None try: @@ -287,6 +312,8 @@ class AppCache(object): Raises UnavailableApp when set_available_apps() in in effect and doesn't include app_label. """ + if not self.loads_installed: + only_installed = False if seed_cache: self._populate() if only_installed and app_label not in self.app_labels: @@ -332,8 +359,24 @@ class AppCache(object): def unset_available_apps(self): self.available_apps = None + +class AppCache(BaseAppCache): + """ + A cache that stores installed applications and their models. Used to + provide reverse-relations and for app introspection (e.g. admin). + + Borg version of the BaseAppCache class. + """ + + __shared_state = _initialize() + + def __init__(self): + self.__dict__ = self.__shared_state + + cache = AppCache() + # These methods were always module level, so are kept that way for backwards # compatibility. 
get_apps = cache.get_apps diff --git a/django/db/models/options.py b/django/db/models/options.py index 5f269f83b9..14f73c301f 100644 --- a/django/db/models/options.py +++ b/django/db/models/options.py @@ -9,7 +9,7 @@ from django.conf import settings from django.db.models.fields.related import ManyToManyRel from django.db.models.fields import AutoField, FieldDoesNotExist from django.db.models.fields.proxy import OrderWrt -from django.db.models.loading import get_models, app_cache_ready +from django.db.models.loading import app_cache_ready, cache from django.utils import six from django.utils.functional import cached_property from django.utils.encoding import force_text, smart_text, python_2_unicode_compatible @@ -22,8 +22,7 @@ DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering', 'unique_together', 'permissions', 'get_latest_by', 'order_with_respect_to', 'app_label', 'db_tablespace', 'abstract', 'managed', 'proxy', 'swappable', 'auto_created', - 'index_together', 'default_permissions') - + 'index_together', 'app_cache', 'default_permissions') @python_2_unicode_compatible class Options(object): @@ -71,6 +70,9 @@ class Options(object): # from *other* models. Needed for some admin checks. Internal use only. self.related_fkey_lookups = [] + # A custom AppCache to use, if you're making a separate model set. + self.app_cache = cache + def contribute_to_class(self, cls, name): from django.db import connection from django.db.backends.util import truncate_name @@ -83,6 +85,10 @@ class Options(object): self.model_name = self.object_name.lower() self.verbose_name = get_verbose_name(self.object_name) + # Store the original user-defined values for each option, + # for use when serializing the model definition + self.original_attrs = {} + # Next, apply any overridden values from 'class Meta'. if self.meta: meta_attrs = self.meta.__dict__.copy() @@ -95,8 +101,10 @@ class Options(object): for attr_name in DEFAULT_NAMES: if attr_name in meta_attrs: setattr(self, attr_name, meta_attrs.pop(attr_name)) + self.original_attrs[attr_name] = getattr(self, attr_name) elif hasattr(self.meta, attr_name): setattr(self, attr_name, getattr(self.meta, attr_name)) + self.original_attrs[attr_name] = getattr(self, attr_name) # unique_together can be either a tuple of tuples, or a single # tuple of two strings. Normalize it to a tuple of tuples, so that @@ -487,7 +495,7 @@ class Options(object): cache[obj] = model # Collect also objects which are in relation to some proxy child/parent of self. 
proxy_cache = cache.copy() - for klass in get_models(include_auto_created=True, only_installed=False): + for klass in self.app_cache.get_models(include_auto_created=True, only_installed=False): if not klass._meta.swapped: for f in klass._meta.local_fields: if f.rel and not isinstance(f.rel.to, six.string_types) and f.generate_reverse_relation: @@ -530,7 +538,7 @@ class Options(object): cache[obj] = parent else: cache[obj] = model - for klass in get_models(only_installed=False): + for klass in self.app_cache.get_models(only_installed=False): if not klass._meta.swapped: for f in klass._meta.local_many_to_many: if (f.rel diff --git a/django/db/models/signals.py b/django/db/models/signals.py index 07824421d8..6b7605839c 100644 --- a/django/db/models/signals.py +++ b/django/db/models/signals.py @@ -12,7 +12,9 @@ post_save = Signal(providing_args=["instance", "raw", "created", "using", "updat pre_delete = Signal(providing_args=["instance", "using"], use_caching=True) post_delete = Signal(providing_args=["instance", "using"], use_caching=True) -pre_syncdb = Signal(providing_args=["app", "create_models", "verbosity", "interactive", "db"]) -post_syncdb = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive", "db"]) +pre_migrate = Signal(providing_args=["app", "create_models", "verbosity", "interactive", "db"]) +pre_syncdb = pre_migrate +post_migrate = Signal(providing_args=["class", "app", "created_models", "verbosity", "interactive", "db"]) +post_syncdb = post_migrate m2m_changed = Signal(providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True) diff --git a/django/db/utils.py b/django/db/utils.py index a1a2c0b564..bcfb06f584 100644 --- a/django/db/utils.py +++ b/django/db/utils.py @@ -262,10 +262,13 @@ class ConnectionRouter(object): return allow return obj1._state.db == obj2._state.db - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): for router in self.routers: try: - method = router.allow_syncdb + try: + method = router.allow_migrate + except AttributeError: + method = router.allow_syncdb except AttributeError: # If the router doesn't have a method, skip to the next one. pass diff --git a/django/test/testcases.py b/django/test/testcases.py index 6f3f1c00e4..5b72d4e8b8 100644 --- a/django/test/testcases.py +++ b/django/test/testcases.py @@ -718,7 +718,7 @@ class TransactionTestCase(SimpleTestCase): """Performs any pre-test setup. This includes: * If the class has an 'available_apps' attribute, restricting the app - cache to these applications, then firing post_syncdb -- it must run + cache to these applications, then firing post_migrate -- it must run with the correct set of applications for the test case. * If the class has a 'fixtures' attribute, installing these fixtures. """ @@ -726,8 +726,7 @@ class TransactionTestCase(SimpleTestCase): if self.available_apps is not None: cache.set_available_apps(self.available_apps) for db_name in self._databases_names(include_mirrors=False): - flush.Command.emit_post_syncdb( - verbosity=0, interactive=False, database=db_name) + flush.Command.emit_post_migrate(verbosity=0, interactive=False, database=db_name) try: self._fixture_setup() except Exception: @@ -772,7 +771,7 @@ class TransactionTestCase(SimpleTestCase): """Performs any post-test things. This includes: * Flushing the contents of the database, to leave a clean slate. If - the class has an 'available_apps' attribute, post_syncdb isn't fired. 
+ the class has an 'available_apps' attribute, post_migrate isn't fired. * Force-closing the connection, so the next test gets a clean cursor. """ try: @@ -790,14 +789,14 @@ class TransactionTestCase(SimpleTestCase): cache.unset_available_apps() def _fixture_teardown(self): - # Allow TRUNCATE ... CASCADE and don't emit the post_syncdb signal + # Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal # when flushing only a subset of the apps for db_name in self._databases_names(include_mirrors=False): call_command('flush', verbosity=0, interactive=False, database=db_name, skip_validation=True, reset_sequences=False, allow_cascade=self.available_apps is not None, - inhibit_post_syncdb=self.available_apps is not None) + inhibit_post_migrate=self.available_apps is not None) def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True): items = six.moves.map(transform, qs) diff --git a/django/utils/datastructures.py b/django/utils/datastructures.py index d6447ec0c7..a0ee3e06ef 100644 --- a/django/utils/datastructures.py +++ b/django/utils/datastructures.py @@ -1,5 +1,6 @@ import copy import warnings +from collections import OrderedDict from django.utils import six class MergeDict(object): @@ -236,6 +237,36 @@ class SortedDict(dict): super(SortedDict, self).clear() self.keyOrder = [] +class OrderedSet(object): + """ + A set which keeps the ordering of the inserted items. + Currently backs onto OrderedDict. + """ + + def __init__(self, iterable=None): + self.dict = OrderedDict(((x, None) for x in iterable) if iterable else []) + + def add(self, item): + self.dict[item] = None + + def remove(self, item): + del self.dict[item] + + def discard(self, item): + try: + self.remove(item) + except KeyError: + pass + + def __iter__(self): + return iter(self.dict.keys()) + + def __contains__(self, item): + return item in self.dict + + def __nonzero__(self): + return bool(self.dict) + class MultiValueDictKeyError(KeyError): pass diff --git a/django/utils/functional.py b/django/utils/functional.py index fd10a84b26..9cc703fe84 100644 --- a/django/utils/functional.py +++ b/django/utils/functional.py @@ -148,6 +148,11 @@ def lazy(func, *resultclasses): else: return func(*self.__args, **self.__kw) + def __ne__(self, other): + if isinstance(other, Promise): + other = other.__cast() + return self.__cast() != other + def __eq__(self, other): if isinstance(other, Promise): other = other.__cast() diff --git a/django/utils/termcolors.py b/django/utils/termcolors.py index bb14837716..95d0d17f0f 100644 --- a/django/utils/termcolors.py +++ b/django/utils/termcolors.py @@ -86,6 +86,10 @@ PALETTES = { 'HTTP_BAD_REQUEST': {}, 'HTTP_NOT_FOUND': {}, 'HTTP_SERVER_ERROR': {}, + 'MIGRATE_HEADING': {}, + 'MIGRATE_LABEL': {}, + 'MIGRATE_SUCCESS': {}, + 'MIGRATE_FAILURE': {}, }, DARK_PALETTE: { 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, @@ -101,6 +105,10 @@ PALETTES = { 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, 'HTTP_NOT_FOUND': { 'fg': 'yellow' }, 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, + 'MIGRATE_HEADING': { 'fg': 'cyan', 'opts': ('bold',) }, + 'MIGRATE_LABEL': { 'opts': ('bold',) }, + 'MIGRATE_SUCCESS': { 'fg': 'green', 'opts': ('bold',) }, + 'MIGRATE_FAILURE': { 'fg': 'red', 'opts': ('bold',) }, }, LIGHT_PALETTE: { 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, @@ -116,6 +124,10 @@ PALETTES = { 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, 'HTTP_NOT_FOUND': { 'fg': 'red' }, 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, + 'MIGRATE_HEADING': { 'fg': 
'cyan', 'opts': ('bold',) },
+        'MIGRATE_LABEL': { 'opts': ('bold',) },
+        'MIGRATE_SUCCESS': { 'fg': 'green', 'opts': ('bold',) },
+        'MIGRATE_FAILURE': { 'fg': 'red', 'opts': ('bold',) },
     }
 }
 DEFAULT_PALETTE = DARK_PALETTE
diff --git a/docs/howto/legacy-databases.txt b/docs/howto/legacy-databases.txt
index 0bea8b41c4..1cf8329e79 100644
--- a/docs/howto/legacy-databases.txt
+++ b/docs/howto/legacy-databases.txt
@@ -81,10 +81,10 @@ access to your precious data on a model by model basis.
 Install the core Django tables
 ==============================
 
-Next, run the :djadmin:`syncdb` command to install any extra needed database
+Next, run the :djadmin:`migrate` command to install any extra needed database
 records such as admin permissions and content types::
 
-    python manage.py syncdb
+    python manage.py migrate
 
 Test and tweak
 ==============
diff --git a/docs/index.txt b/docs/index.txt
index 8f46db8eb9..c58be5fcfc 100644
--- a/docs/index.txt
+++ b/docs/index.txt
@@ -71,6 +71,9 @@ manipulating the data of your Web application. Learn more about it below:
       :doc:`Instance methods <ref/models/instances>` |
       :doc:`Accessing related objects <ref/models/relations>`
 
+* **Migrations:**
+  :doc:`Introduction to Migrations <topics/migrations>`
+
 * **Advanced:**
   :doc:`Managers <topics/db/managers>` |
   :doc:`Raw SQL <topics/db/sql>` |
diff --git a/docs/internals/contributing/writing-documentation.txt b/docs/internals/contributing/writing-documentation.txt
index 2944dea504..d2cfaddc89 100644
--- a/docs/internals/contributing/writing-documentation.txt
+++ b/docs/internals/contributing/writing-documentation.txt
@@ -165,9 +165,9 @@ __ http://sphinx.pocoo.org/markup/desc.html
 
 * ``django-admin`` commands::
 
-    .. django-admin:: syncdb
+    .. django-admin:: migrate
 
-  To link, use ``:djadmin:`syncdb```.
+  To link, use ``:djadmin:`migrate```.
 
 * ``django-admin`` command-line options::
 
diff --git a/docs/internals/deprecation.txt b/docs/internals/deprecation.txt
index 29e9231896..7b8298597d 100644
--- a/docs/internals/deprecation.txt
+++ b/docs/internals/deprecation.txt
@@ -419,6 +419,17 @@ these changes.
 
 * ``django.utils.unittest`` will be removed.
 
+* The ``syncdb`` command will be removed.
+
+* ``django.db.models.signals.pre_syncdb`` and
+  ``django.db.models.signals.post_syncdb`` will be removed, and
+  ``django.db.models.signals.pre_migrate`` and
+  ``django.db.models.signals.post_migrate`` will lose their
+  ``create_models`` and ``created_models`` arguments.
+
+* ``allow_syncdb`` on database routers will no longer automatically become
+  ``allow_migrate``.
+
 * If models are organized in a package, Django will no longer look for
   :ref:`initial SQL data <initial-sql>` in ``myapp/models/sql/``. Move your
   custom SQL files to ``myapp/sql/``.
diff --git a/docs/intro/overview.txt b/docs/intro/overview.txt
index 55366fb2c6..415e831faf 100644
--- a/docs/intro/overview.txt
+++ b/docs/intro/overview.txt
@@ -53,10 +53,11 @@ automatically:
 
 .. code-block:: bash
 
-    manage.py syncdb
+    manage.py migrate
 
-The :djadmin:`syncdb` command looks at all your available models and creates
-tables in your database for whichever tables don't already exist.
+The :djadmin:`migrate` command looks at all your available models and creates
+tables in your database for whichever tables don't already exist, as well as
+optionally providing :doc:`much richer schema control </topics/migrations>`.
 
 Enjoy the free API
 ==================
diff --git a/docs/intro/reusable-apps.txt b/docs/intro/reusable-apps.txt
index 7fa1ffc8d9..51c1228cc1 100644
--- a/docs/intro/reusable-apps.txt
+++ b/docs/intro/reusable-apps.txt
@@ -155,7 +155,7 @@ this. For a small app like polls, this process isn't too difficult.
url(r'^polls/', include('polls.urls')), - 3. Run `python manage.py syncdb` to create the polls models. + 3. Run `python manage.py migrate` to create the polls models. 4. Start the development server and visit http://127.0.0.1:8000/admin/ to create a poll (you'll need the Admin app enabled). diff --git a/docs/man/django-admin.1 b/docs/man/django-admin.1 index 4d937b488b..f1b568daf5 100644 --- a/docs/man/django-admin.1 +++ b/docs/man/django-admin.1 @@ -45,8 +45,7 @@ Outputs to standard output all data in the database associated with the named application(s). .TP .BI flush -Returns the database to the state it was in immediately after syncdb was -executed. +Removes all data from the database and then re-installs any initial data. .TP .B inspectdb Introspects the database tables in the database specified in settings.py and outputs a Django @@ -114,9 +113,9 @@ the current directory or the optional destination. Creates a Django project directory structure for the given project name in the current directory or the optional destination. .TP -.BI syncdb -Creates the database tables for all apps in INSTALLED_APPS whose tables -haven't already been created. +.BI migrate +Runs migrations for apps containing migrations, and just creates missing tables +for apps without migrations. .TP .BI "test [" "\-\-verbosity" "] [" "\-\-failfast" "] [" "appname ..." "]" Runs the test suite for the specified applications, or the entire project if diff --git a/docs/ref/contrib/comments/index.txt b/docs/ref/contrib/comments/index.txt index 6db69d8168..c08ac21d4e 100644 --- a/docs/ref/contrib/comments/index.txt +++ b/docs/ref/contrib/comments/index.txt @@ -31,7 +31,7 @@ To get started using the ``comments`` app, follow these steps: #. Install the comments framework by adding ``'django.contrib.comments'`` to :setting:`INSTALLED_APPS`. -#. Run ``manage.py syncdb`` so that Django will create the comment tables. +#. Run ``manage.py migrate`` so that Django will create the comment tables. #. Add the comment app's URLs to your project's ``urls.py``: diff --git a/docs/ref/contrib/contenttypes.txt b/docs/ref/contrib/contenttypes.txt index bfe92b88a6..21e65f168b 100644 --- a/docs/ref/contrib/contenttypes.txt +++ b/docs/ref/contrib/contenttypes.txt @@ -86,7 +86,7 @@ The ``ContentType`` model Let's look at an example to see how this works. If you already have the :mod:`~django.contrib.contenttypes` application installed, and then add :mod:`the sites application ` to your -:setting:`INSTALLED_APPS` setting and run ``manage.py syncdb`` to install it, +:setting:`INSTALLED_APPS` setting and run ``manage.py migrate`` to install it, the model :class:`django.contrib.sites.models.Site` will be installed into your database. Along with it a new instance of :class:`~django.contrib.contenttypes.models.ContentType` will be diff --git a/docs/ref/contrib/flatpages.txt b/docs/ref/contrib/flatpages.txt index 11d74d75c3..be9fe0c636 100644 --- a/docs/ref/contrib/flatpages.txt +++ b/docs/ref/contrib/flatpages.txt @@ -55,14 +55,14 @@ or: 3. Add ``'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'`` to your :setting:`MIDDLEWARE_CLASSES` setting. -4. Run the command :djadmin:`manage.py syncdb `. +4. Run the command :djadmin:`manage.py migrate `. .. currentmodule:: django.contrib.flatpages.middleware How it works ============ -``manage.py syncdb`` creates two tables in your database: ``django_flatpage`` +``manage.py migrate`` creates two tables in your database: ``django_flatpage`` and ``django_flatpage_sites``. 
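To see where the data ends up once ``migrate`` has run, you can create a
flatpage from the ORM (a minimal sketch using the public flatpages API; the
URL, title, and content values are purely illustrative)::

    from django.contrib.flatpages.models import FlatPage
    from django.contrib.sites.models import Site

    # This row is stored in the django_flatpage table...
    page = FlatPage.objects.create(
        url='/about/',
        title='About us',
        content='<p>We power the web.</p>',
    )
    # ...and each site association becomes a row in django_flatpage_sites.
    page.sites.add(Site.objects.get_current())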
``django_flatpage`` is a simple lookup table that simply maps a URL to a title and bunch of text content. ``django_flatpage_sites`` associates a flatpage with a site. diff --git a/docs/ref/contrib/index.txt b/docs/ref/contrib/index.txt index e5cea01ead..727fab01dc 100644 --- a/docs/ref/contrib/index.txt +++ b/docs/ref/contrib/index.txt @@ -15,7 +15,7 @@ those packages have. For most of these add-ons -- specifically, the add-ons that include either models or template tags -- you'll need to add the package name (e.g., ``'django.contrib.admin'``) to your :setting:`INSTALLED_APPS` setting and - re-run ``manage.py syncdb``. + re-run ``manage.py migrate``. .. _"batteries included" philosophy: http://docs.python.org/tutorial/stdlib.html#batteries-included diff --git a/docs/ref/contrib/redirects.txt b/docs/ref/contrib/redirects.txt index 0c0cb2a3c2..eefbb96721 100644 --- a/docs/ref/contrib/redirects.txt +++ b/docs/ref/contrib/redirects.txt @@ -18,12 +18,12 @@ To install the redirects app, follow these steps: 2. Add ``'django.contrib.redirects'`` to your :setting:`INSTALLED_APPS` setting. 3. Add ``'django.contrib.redirects.middleware.RedirectFallbackMiddleware'`` to your :setting:`MIDDLEWARE_CLASSES` setting. -4. Run the command :djadmin:`manage.py syncdb `. +4. Run the command :djadmin:`manage.py migrate `. How it works ============ -``manage.py syncdb`` creates a ``django_redirect`` table in your database. This +``manage.py migrate`` creates a ``django_redirect`` table in your database. This is a simple lookup table with ``site_id``, ``old_path`` and ``new_path`` fields. The ``RedirectFallbackMiddleware`` does all of the work. Each time any Django diff --git a/docs/ref/contrib/sites.txt b/docs/ref/contrib/sites.txt index 131c9645e8..fd2e917c1d 100644 --- a/docs/ref/contrib/sites.txt +++ b/docs/ref/contrib/sites.txt @@ -264,10 +264,10 @@ To enable the sites framework, follow these steps: SITE_ID = 1 -3. Run :djadmin:`syncdb`. +3. Run :djadmin:`migrate`. ``django.contrib.sites`` registers a -:data:`~django.db.models.signals.post_syncdb` signal handler which creates a +:data:`~django.db.models.signals.post_migrate` signal handler which creates a default site named ``example.com`` with the domain ``example.com``. This site will also be created after Django creates the test database. To set the correct name and domain for your project, you can use an :doc:`initial data diff --git a/docs/ref/databases.txt b/docs/ref/databases.txt index 545f5df84c..707184c3ac 100644 --- a/docs/ref/databases.txt +++ b/docs/ref/databases.txt @@ -220,7 +220,7 @@ If you upgrade an existing project to MySQL 5.5.5 and subsequently add some tables, ensure that your tables are using the same storage engine (i.e. MyISAM vs. InnoDB). Specifically, if tables that have a ``ForeignKey`` between them use different storage engines, you may see an error like the following when -running ``syncdb``:: +running ``migrate``:: _mysql_exceptions.OperationalError: ( 1005, "Can't create table '\\db_name\\.#sql-4a8_ab' (errno: 150)" @@ -659,7 +659,7 @@ required. .. _`Oracle Database Server`: http://www.oracle.com/ .. _`cx_Oracle`: http://cx-oracle.sourceforge.net/ -In order for the ``python manage.py syncdb`` command to work, your Oracle +In order for the ``python manage.py migrate`` command to work, your Oracle database user must have privileges to run the following commands: * CREATE TABLE @@ -748,7 +748,7 @@ Oracle imposes a name length limit of 30 characters. 
To accommodate this, the backend truncates database identifiers to fit,
replacing the final four characters of the truncated name with a repeatable
MD5 hash value.
 
-When running syncdb, an ``ORA-06552`` error may be encountered if
+When running ``migrate``, an ``ORA-06552`` error may be encountered if
 certain Oracle keywords are used as the name of a model field or the
 value of a ``db_column`` option. Django quotes all identifiers used
 in queries to prevent most such problems, but this error can still
diff --git a/docs/ref/django-admin.txt b/docs/ref/django-admin.txt
index 146740b8eb..1385648d5d 100644
--- a/docs/ref/django-admin.txt
+++ b/docs/ref/django-admin.txt
@@ -242,10 +242,8 @@ flush
 
 .. django-admin:: flush
 
-Returns the database to the state it was in immediately after :djadmin:`syncdb`
-was executed. This means that all data will be removed from the database, any
-post-synchronization handlers will be re-executed, and the ``initial_data``
-fixture will be re-installed.
+Removes all data from the database, re-executes any post-synchronization
+handlers, and reinstalls any initial data fixtures.
 
 The :djadminopt:`--noinput` option may be provided to suppress all user
 prompts.
@@ -568,6 +566,52 @@
 Use the ``--keep-pot`` option to prevent django from deleting the temporary
 .pot file it generates before creating the .po file. This is useful for
 debugging errors which may prevent the final language files from being created.
 
+makemigrations [<app_label>]
+----------------------------
+
+.. django-admin:: makemigrations
+
+.. versionadded:: 1.7
+
+Creates new migrations based on the changes detected to your models.
+Migrations, their relationship with apps and more are covered in depth in
+:doc:`the migrations documentation </topics/migrations>`.
+
+Providing one or more app names as arguments will limit the migrations created
+to the app(s) specified and any dependencies needed (the table at the other end
+of a ``ForeignKey``, for example).
+
+.. django-admin-option:: --empty
+
+The ``--empty`` option will cause ``makemigrations`` to output an empty
+migration for the specified apps, for manual editing. This option is only
+for advanced users and should not be used unless you are familiar with
+the migration format, migration operations, and the dependencies between
+your migrations.
+
+migrate [<app_label> [<migrationname>]]
+---------------------------------------
+
+.. django-admin:: migrate
+
+.. versionadded:: 1.7
+
+Synchronizes the database state with the current set of models and migrations.
+Migrations, their relationship with apps and more are covered in depth in
+:doc:`the migrations documentation </topics/migrations>`.
+
+The behavior of this command changes depending on the arguments provided:
+
+* No arguments: All migrated apps have all of their migrations run,
+  and all unmigrated apps are synchronized with the database,
+* ``<app_label>``: The specified app has its migrations run, up to the most
+  recent migration. This may involve running other apps' migrations too, due
+  to dependencies.
+* ``<app_label> <migrationname>``: Brings the database schema to a state where
+  it would have just run the given migration, but no further - this may involve
+  unapplying migrations if you have previously migrated past the named
+  migration. Use the name `zero` to unapply all migrations for an app.
+
 runfcgi [options]
 -----------------
@@ -1102,45 +1146,13 @@ syncdb
 
 .. django-admin:: syncdb
 
-Creates the database tables for all apps in :setting:`INSTALLED_APPS` whose
-tables have not already been created.
+.. deprecated:: 1.7
 
-Use this command when you've added new applications to your project and want to
-install them in the database. This includes any apps shipped with Django that
-might be in :setting:`INSTALLED_APPS` by default. When you start a new project,
-run this command to install the default apps.
+    This command has been deprecated in favour of the :djadmin:`migrate`
+    command, which performs both the old behaviour as well as executing
+    migrations. It is now just an alias to that command.
 
-.. admonition:: Syncdb will not alter existing tables
-
-    ``syncdb`` will only create tables for models which have not yet been
-    installed. It will *never* issue ``ALTER TABLE`` statements to match
-    changes made to a model class after installation. Changes to model classes
-    and database schemas often involve some form of ambiguity and, in those
-    cases, Django would have to guess at the correct changes to make. There is
-    a risk that critical data would be lost in the process.
-
-    If you have made changes to a model and wish to alter the database tables
-    to match, use the ``sql`` command to display the new SQL structure and
-    compare that to your existing table schema to work out the changes.
-
-If you're installing the ``django.contrib.auth`` application, ``syncdb`` will
-give you the option of creating a superuser immediately.
-
-``syncdb`` will also search for and install any fixture named ``initial_data``
-with an appropriate extension (e.g. ``json`` or ``xml``). See the
-documentation for ``loaddata`` for details on the specification of fixture
-data files.
-
-The :djadminopt:`--noinput` option may be provided to suppress all user
-prompts.
-
-The :djadminopt:`--database` option can be used to specify the database to
-synchronize.
-
-``--no-initial-data``
-~~~~~~~~~~~~~~~~~~~~~
-
-Use ``--no-initial-data`` to avoid loading the initial_data fixture.
+Alias for :djadmin:`migrate`.
 
 test <app or test identifier>
 -----------------------------
@@ -1278,7 +1290,7 @@ This command is only available if Django's :doc:`authentication system
 
 Creates a superuser account (a user who has all permissions). This is useful
 if you need to create an initial superuser account but did not
-do so during ``syncdb``, or if you need to programmatically generate
+do so during the first ``migrate``, or if you need to programmatically generate
 superuser accounts for your site(s).
 
 When run interactively, this command will prompt for a password for
@@ -1362,7 +1374,7 @@ allows for the following options:
 
 Example usage::
 
-    django-admin.py syncdb --pythonpath='/home/djangoprojects/myproject'
+    django-admin.py migrate --pythonpath='/home/djangoprojects/myproject'
 
 Adds the given filesystem path to the Python `import search path`_. If this
 isn't provided, ``django-admin.py`` will use the ``PYTHONPATH`` environment
@@ -1377,7 +1389,7 @@ setting the Python path for you.
 
 Example usage::
 
-    django-admin.py syncdb --settings=mysite.settings
+    django-admin.py migrate --settings=mysite.settings
 
 Explicitly specifies the settings module to use. The settings module should be
 in Python package syntax, e.g. ``mysite.settings``.
If this isn't provided, @@ -1391,7 +1403,7 @@ Note that this option is unnecessary in ``manage.py``, because it uses Example usage:: - django-admin.py syncdb --traceback + django-admin.py migrate --traceback By default, ``django-admin.py`` will show a simple error message whenever an :class:`~django.core.management.CommandError` occurs, but a full stack trace @@ -1407,7 +1419,7 @@ will also output a full stack trace when a ``CommandError`` is raised. Example usage:: - django-admin.py syncdb --verbosity 2 + django-admin.py migrate --verbosity 2 Use ``--verbosity`` to specify the amount of notification and debug information that ``django-admin.py`` should print to the console. diff --git a/docs/ref/models/options.txt b/docs/ref/models/options.txt index 7ad0242df7..baa24f63cb 100644 --- a/docs/ref/models/options.txt +++ b/docs/ref/models/options.txt @@ -106,9 +106,9 @@ Django quotes column and table names behind the scenes. .. attribute:: Options.managed Defaults to ``True``, meaning Django will create the appropriate database - tables in :djadmin:`syncdb` and remove them as part of a :djadmin:`flush` - management command. That is, Django *manages* the database tables' - lifecycles. + tables in :djadmin:`migrate` or as part of migrations and remove them as + part of a :djadmin:`flush` management command. That is, Django + *manages* the database tables' lifecycles. If ``False``, no database table creation or deletion operations will be performed for this model. This is useful if the model represents an existing @@ -192,9 +192,9 @@ Django quotes column and table names behind the scenes. .. admonition:: Changing order_with_respect_to ``order_with_respect_to`` adds an additional field/database column - named ``_order``, so be sure to handle that as you would any other - change to your models if you add or change ``order_with_respect_to`` - after your initial :djadmin:`syncdb`. + named ``_order``, so be sure to make and apply the appropriate + migrations if you add or change ``order_with_respect_to`` + after your initial :djadmin:`migrate`. ``ordering`` ------------ diff --git a/docs/ref/signals.txt b/docs/ref/signals.txt index 71756c98c6..8381a49a09 100644 --- a/docs/ref/signals.txt +++ b/docs/ref/signals.txt @@ -356,40 +356,36 @@ Management signals Signals sent by :doc:`django-admin `. -pre_syncdb ----------- +pre_migrate +----------- -.. data:: django.db.models.signals.pre_syncdb +.. data:: django.db.models.signals.pre_migrate :module: -Sent by the :djadmin:`syncdb` command before it starts to install an +Sent by the :djadmin:`migrate` command before it starts to install an application. Any handlers that listen to this signal need to be written in a particular place: a ``management`` module in one of your :setting:`INSTALLED_APPS`. If handlers are registered anywhere else they may not be loaded by -:djadmin:`syncdb`. +:djadmin:`migrate`. Arguments sent with this signal: ``sender`` - The ``models`` module that was just installed. That is, if - :djadmin:`syncdb` just installed an app called ``"foo.bar.myapp"``, - ``sender`` will be the ``foo.bar.myapp.models`` module. + The ``models`` module of the app about to be migrated/synced. + For example, if :djadmin:`migrate` is about to install + an app called ``"foo.bar.myapp"``, ``sender`` will be the + ``foo.bar.myapp.models`` module. ``app`` Same as ``sender``. -``create_models`` - A list of the model classes from any app which :djadmin:`syncdb` plans to - create. - - ``verbosity`` Indicates how much information manage.py is printing on screen. 
See the :djadminopt:`--verbosity` flag for details. - Functions which listen for :data:`pre_syncdb` should adjust what they + Functions which listen for :data:`pre_migrate` should adjust what they output to the screen based on the value of this argument. ``interactive`` @@ -403,42 +399,55 @@ Arguments sent with this signal: ``db`` The alias of database on which a command will operate. -post_syncdb ------------ +pre_syncdb +---------- -.. data:: django.db.models.signals.post_syncdb +.. data:: django.db.models.signals.pre_syncdb :module: -Sent by the :djadmin:`syncdb` command after it installs an application, and the +.. deprecated:: 1.7 + + This signal has been renamed to :data:`~django.db.models.signals.pre_migrate`. + +Alias of :data:`django.db.models.signals.pre_migrate`. As long as this alias +is present, for backwards compatibility this signal sends an extra argument: + +``create_models`` + A list of the model classes from any app which :djadmin:`migrate` is + going to create, **only if the app has no migrations**. + +post_migrate +------------ + +.. data:: django.db.models.signals.post_migrate + :module: + +Sent by the :djadmin:`migrate` command after it installs an application, and by the :djadmin:`flush` command. Any handlers that listen to this signal need to be written in a particular place: a ``management`` module in one of your :setting:`INSTALLED_APPS`. If handlers are registered anywhere else they may not be loaded by -:djadmin:`syncdb`. It is important that handlers of this signal perform +:djadmin:`migrate`. It is important that handlers of this signal perform idempotent changes (e.g. no database alterations) as this may cause the :djadmin:`flush` management command to fail if it also ran during the -:djadmin:`syncdb` command. +:djadmin:`migrate` command. Arguments sent with this signal: ``sender`` The ``models`` module that was just installed. That is, if - :djadmin:`syncdb` just installed an app called ``"foo.bar.myapp"``, + :djadmin:`migrate` just installed an app called ``"foo.bar.myapp"``, ``sender`` will be the ``foo.bar.myapp.models`` module. ``app`` Same as ``sender``. -``created_models`` - A list of the model classes from any app which :djadmin:`syncdb` has - created so far. - ``verbosity`` Indicates how much information manage.py is printing on screen. See the :djadminopt:`--verbosity` flag for details. - Functions which listen for :data:`post_syncdb` should adjust what they + Functions which listen for :data:`post_migrate` should adjust what they output to the screen based on the value of this argument. ``interactive`` @@ -455,14 +464,31 @@ Arguments sent with this signal: For example, ``yourapp/management/__init__.py`` could be written like:: - from django.db.models.signals import post_syncdb + from django.db.models.signals import post_migrate import yourapp.models def my_callback(sender, **kwargs): # Your specific logic here pass - post_syncdb.connect(my_callback, sender=yourapp.models) + post_migrate.connect(my_callback, sender=yourapp.models) + +post_syncdb +----------- + +.. data:: django.db.models.signals.post_syncdb + :module: + +.. deprecated:: 1.7 + + This signal has been renamed to :data:`~django.db.models.signals.post_migrate`. + +Alias of :data:`django.db.models.signals.post_migrate`. As long as this alias +is present, for backwards compatibility this signal sends an extra argument: + +``created_models`` + A list of the model classes from any app which :djadmin:`migrate` has + created, **only if the app has no migrations**.
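To illustrate the ``post_migrate`` arguments described above, the handler stub could be fleshed out like this (a minimal sketch; ``yourapp`` and the handler name are illustrative placeholders, not part of this patch)::

    from django.db import DEFAULT_DB_ALIAS
    from django.db.models.signals import post_migrate

    import yourapp.models

    def report_migrate(sender, verbosity=1, db=DEFAULT_DB_ALIAS, **kwargs):
        # Honour the documented ``verbosity`` argument: stay quiet unless
        # the user asked for extra output.
        if verbosity >= 2:
            print("post_migrate fired for %s on database %r" % (sender.__name__, db))

    post_migrate.connect(report_migrate, sender=yourapp.models)

Because the handler accepts ``**kwargs``, it keeps working whether or not the deprecated ``created_models`` argument is sent.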
Request/response signals ======================== diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index 6480a2505f..4c295f58e0 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -30,6 +30,61 @@ security support until the release of Django 1.8. What's new in Django 1.7 ======================== +Schema migrations +~~~~~~~~~~~~~~~~~ + +Django now has built-in support for schema migrations. It allows models +to be updated, changed, and deleted by creating migration files that represent +the model changes and which can be run on any development, staging or production +database. + +Migrations are covered in :doc:`their own documentation </topics/migrations>`, +but a few of the key features are: + +* ``syncdb`` has been deprecated and replaced by ``migrate``. Don't worry - + calls to ``syncdb`` will still work as before. + +* A new ``makemigrations`` command provides an easy way to autodetect changes + to your models and make migrations for them. + +* :data:`~django.db.models.signals.pre_syncdb` and + :data:`~django.db.models.signals.post_syncdb` have been renamed to + :data:`~django.db.models.signals.pre_migrate` and + :data:`~django.db.models.signals.post_migrate` respectively. The + ``create_models``/``created_models`` argument has also been deprecated. + +* The ``allow_syncdb`` method on database routers is now called ``allow_migrate``, + but still performs the same function. Routers with ``allow_syncdb`` methods + will still work, but that method name is deprecated and you should change + it as soon as possible (nothing more than renaming is required). + +New method on Field subclasses +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To help power both schema migrations and composite keys, the :class:`~django.db.models.Field` API now +has a new required method: ``deconstruct()``. + +This method takes no arguments, and returns a tuple of four items: + +* ``name``: The field's attribute name on its parent model, or ``None`` if it is not part of a model. + ``path``: A dotted Python path to the class of this field, including the class name. +* ``args``: Positional arguments, as a list. +* ``kwargs``: Keyword arguments, as a dict. + +These four values allow any field to be serialized into a file, as well as +allowing the field to be copied safely, both essential parts of these new features. + +This change should not affect you unless you write custom Field subclasses; +if you do, you may need to reimplement the ``deconstruct()`` method if your +subclass changes the method signature of ``__init__`` in any way. If your +field just inherits from a built-in Django field and doesn't override ``__init__``, +no changes are necessary. + +If you do need to override ``deconstruct()``, a good place to start is the +built-in Django fields (``django/db/models/fields/__init__.py``) as several +fields, including ``DecimalField`` and ``DateField``, override it and show how +to call the method on the superclass and simply add or remove extra arguments. + Calling custom ``QuerySet`` methods from the ``Manager`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -170,6 +225,18 @@ Backwards incompatible changes in 1.7 deprecation timeline for a given feature, its removal may appear as a backwards incompatible change. +allow_syncdb/allow_migrate +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +While Django will still look at ``allow_syncdb`` methods even though they +should be renamed to ``allow_migrate``, there is a subtle difference in which +models get passed to these methods.
+ +For apps with migrations, ``allow_migrate`` will now get passed +:ref:`historical models <historical-models>`, which are special versioned models +without custom attributes, methods or managers. Make sure your ``allow_migrate`` +methods are only referring to fields or other items in ``model._meta``. + Miscellaneous ~~~~~~~~~~~~~ @@ -249,3 +316,10 @@ work until Django 1.9. it will go through a regular deprecation path. This attribute was mostly used by methods that bypassed ``ModelAdmin.get_fieldsets()`` but this was considered a bug and has been addressed. + +``syncdb`` +~~~~~~~~~~ + +The ``syncdb`` command has been deprecated in favour of the new ``migrate`` +command. ``migrate`` takes the same arguments as ``syncdb`` used to plus a few +more, so it's safe to just change the name you're calling and nothing else. diff --git a/docs/topics/auth/customizing.txt b/docs/topics/auth/customizing.txt index 8a44d58ef6..746e92e637 100644 --- a/docs/topics/auth/customizing.txt +++ b/docs/topics/auth/customizing.txt @@ -275,7 +275,7 @@ can or cannot do with Task instances, specific to your application:: ) The only thing this does is create those extra permissions when you run -:djadmin:`manage.py syncdb <syncdb>`. Your code is in charge of checking the +:djadmin:`manage.py migrate <migrate>`. Your code is in charge of checking the value of these permissions when a user is trying to access the functionality provided by the application (viewing tasks, changing the status of tasks, closing tasks.) Continuing the above example, the following checks if a user may @@ -378,14 +378,12 @@ use as your User model. Changing :setting:`AUTH_USER_MODEL` has a big effect on your database structure. It changes the tables that are available, and it will affect the construction of foreign keys and many-to-many relationships. If you intend - to set :setting:`AUTH_USER_MODEL`, you should set it before running - ``manage.py syncdb`` for the first time. + to set :setting:`AUTH_USER_MODEL`, you should set it before creating + any migrations or running ``manage.py migrate`` for the first time. - If you have an existing project and you want to migrate to using a custom - User model, you may need to look into using a migration tool like South_ - to ease the transition. - -.. _South: http://south.aeracode.org + Changing this setting after you have tables created is not supported + by :djadmin:`makemigrations` and will result in you having to manually + write a set of migrations to fix your schema. Referencing the User model -------------------------- diff --git a/docs/topics/auth/default.txt b/docs/topics/auth/default.txt index fdeb20fd10..4d86a7330e 100644 --- a/docs/topics/auth/default.txt +++ b/docs/topics/auth/default.txt @@ -65,7 +65,7 @@ interactively `. Creating superusers ------------------- -:djadmin:`manage.py syncdb <syncdb>` prompts you to create a superuser the +:djadmin:`manage.py migrate <migrate>` prompts you to create a superuser the first time you run it with ``'django.contrib.auth'`` in your :setting:`INSTALLED_APPS`. If you need to create a superuser at a later date, you can use a command line utility:: @@ -190,13 +190,13 @@ setting, it will ensure that three default permissions -- add, change and delete -- are created for each Django model defined in one of your installed applications.
-These permissions will be created when you run :djadmin:`manage.py syncdb -<syncdb>`; the first time you run ``syncdb`` after adding +These permissions will be created when you run :djadmin:`manage.py migrate +<migrate>`; the first time you run ``migrate`` after adding ``django.contrib.auth`` to :setting:`INSTALLED_APPS`, the default permissions will be created for all previously-installed models, as well as for any new models being installed at that time. Afterward, it will create default -permissions for new models each time you run :djadmin:`manage.py syncdb -<syncdb>`. +permissions for new models each time you run :djadmin:`manage.py migrate +<migrate>`. Assuming you have an application with an :attr:`~django.db.models.Options.app_label` ``foo`` and a model named ``Bar``, diff --git a/docs/topics/auth/index.txt b/docs/topics/auth/index.txt index 8447d449ce..81b6996d00 100644 --- a/docs/topics/auth/index.txt +++ b/docs/topics/auth/index.txt @@ -67,7 +67,7 @@ and two items in your :setting:`MIDDLEWARE_CLASSES` setting: 2. :class:`~django.contrib.auth.middleware.AuthenticationMiddleware` associates users with requests using sessions. -With these settings in place, running the command ``manage.py syncdb`` creates +With these settings in place, running the command ``manage.py migrate`` creates the necessary database tables for auth related models, creates permissions for any models defined in your installed apps, and prompts you to create a superuser account the first time you run it. diff --git a/docs/topics/cache.txt b/docs/topics/cache.txt index a2491f2198..092df1f876 100644 --- a/docs/topics/cache.txt +++ b/docs/topics/cache.txt @@ -219,8 +219,8 @@ operations to ``cache_slave``, and all write operations to return 'cache_master' return None - def allow_syncdb(self, db, model): - "Only synchronize the cache model on master" + def allow_migrate(self, db, model): + "Only install the cache model on master" if model._meta.app_label in ('django_cache',): return db == 'cache_master' return None diff --git a/docs/topics/db/models.txt b/docs/topics/db/models.txt index 2b565758e7..b0011e1098 100644 --- a/docs/topics/db/models.txt +++ b/docs/topics/db/models.txt @@ -77,7 +77,8 @@ application by the :djadmin:`manage.py startapp <startapp>` script), ) When you add new apps to :setting:`INSTALLED_APPS`, be sure to run -:djadmin:`manage.py syncdb <syncdb>`. +:djadmin:`manage.py migrate <migrate>`, optionally making migrations +for them first with :djadmin:`manage.py makemigrations <makemigrations>`. Fields ====== @@ -956,7 +957,7 @@ The reverse name of the ``common.ChildA.m2m`` field will be reverse name of the ``rare.ChildB.m2m`` field will be ``rare_childb_related``. It is up to you how you use the ``'%(class)s'`` and ``'%(app_label)s`` portion to construct your related name, but if you forget to use it, Django will raise -errors when you validate your models (or run :djadmin:`syncdb`). +errors when you validate your models (or run :djadmin:`migrate`). If you don't specify a :attr:`~django.db.models.ForeignKey.related_name` attribute for a field in an abstract base class, the default reverse name will @@ -1049,7 +1050,7 @@ are putting those types of relations on a subclass of another model, you **must** specify the :attr:`~django.db.models.ForeignKey.related_name` attribute on each such field. If you forget, Django will raise an error when you run -:djadmin:`validate` or :djadmin:`syncdb`. +:djadmin:`validate` or :djadmin:`migrate`.
For example, using the above ``Place`` class again, let's create another subclass with a :class:`~django.db.models.ManyToManyField`:: diff --git a/docs/topics/db/multi-db.txt b/docs/topics/db/multi-db.txt index 3a902a9793..c098aa33e3 100644 --- a/docs/topics/db/multi-db.txt +++ b/docs/topics/db/multi-db.txt @@ -155,14 +155,23 @@ A database Router is a class that provides up to four methods: used by foreign key and many to many operations to determine if a relation should be allowed between two objects. -.. method:: allow_syncdb(db, model) +.. method:: allow_migrate(db, model) - Determine if the ``model`` should be synchronized onto the + Determine if the ``model`` should have tables/indexes created in the database with alias ``db``. Return True if the model should be - synchronized, False if it should not be synchronized, or None if + migrated, False if it should not be migrated, or None if the router has no opinion. This method can be used to determine the availability of a model on a given database. + Note that migrations will just silently not perform any operations + on a model for which this returns ``False``. This may result in broken + ForeignKeys, extra tables or missing tables if you change it once you + have applied some migrations. + + The value passed for ``model`` may be a + :ref:`historical model <historical-models>`, and thus not have any + custom attributes, methods or managers. You should only rely on ``_meta``. + A router doesn't have to provide *all* these methods -- it may omit one or more of them. If one of the methods is omitted, Django will skip that router when performing the relevant check. @@ -288,7 +297,7 @@ send queries for the ``auth`` app to ``auth_db``:: return True return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): """ Make sure the auth app only appears in the 'auth_db' database. @@ -328,7 +337,7 @@ from:: return True return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): """ All non-auth models end up in this pool. """ @@ -347,7 +356,7 @@ be queried in the order they are listed in the result, decisions concerning the models in ``auth`` are processed before any other decision is made. If the :setting:`DATABASE_ROUTERS` setting listed the two routers in the other order, -``MasterSlaveRouter.allow_syncdb()`` would be processed first. The +``MasterSlaveRouter.allow_migrate()`` would be processed first. The catch-all nature of the MasterSlaveRouter implementation would mean that all models would be available on all databases. diff --git a/docs/topics/http/sessions.txt b/docs/topics/http/sessions.txt index 637991b3b5..24b9ef4462 100644 --- a/docs/topics/http/sessions.txt +++ b/docs/topics/http/sessions.txt @@ -44,7 +44,7 @@ Using database-backed sessions If you want to use a database-backed session, you need to add ``'django.contrib.sessions'`` to your :setting:`INSTALLED_APPS` setting. -Once you have configured your installation, run ``manage.py syncdb`` +Once you have configured your installation, run ``manage.py migrate`` to install the single database table that stores session data. ..
_cached-sessions-backend: diff --git a/docs/topics/index.txt b/docs/topics/index.txt index f8f60b2953..b248e10268 100644 --- a/docs/topics/index.txt +++ b/docs/topics/index.txt @@ -12,6 +12,7 @@ Introductions to all the key parts of Django you'll need to know: forms/index templates class-based-views/index + migrations files testing/index auth/index diff --git a/docs/topics/install.txt b/docs/topics/install.txt index 5bcc3f64ec..3e813b2a41 100644 --- a/docs/topics/install.txt +++ b/docs/topics/install.txt @@ -121,14 +121,12 @@ database bindings are installed. * If you're using an unofficial 3rd party backend, please consult the documentation provided for any additional requirements. -If you plan to use Django's ``manage.py syncdb`` command to automatically +If you plan to use Django's ``manage.py migrate`` command to automatically create database tables for your models (after first installing Django and creating a project), you'll need to ensure that Django has permission to create and alter tables in the database you're using; if you plan to manually create the tables, you can simply grant Django ``SELECT``, ``INSERT``, ``UPDATE`` and -``DELETE`` permissions. On some databases, Django will need ``ALTER TABLE`` -privileges during ``syncdb`` but won't issue ``ALTER TABLE`` statements on a -table once ``syncdb`` has created it. After creating a database user with these +``DELETE`` permissions. After creating a database user with these permissions, you'll specify the details in your project's settings file, see :setting:`DATABASES` for details. diff --git a/docs/topics/migrations.txt b/docs/topics/migrations.txt new file mode 100644 index 0000000000..5862c5defe --- /dev/null +++ b/docs/topics/migrations.txt @@ -0,0 +1,297 @@ +========== +Migrations +========== + +.. module:: django.db.migrations + :synopsis: Schema migration support for Django models + +.. versionadded:: 1.7 + +Migrations are Django's way of propagating changes you make to your models +(adding a field, deleting a model, etc.) into your database schema. They're +designed to be mostly automatic, but you'll need to know when to make +migrations, when to run them, and the common problems you might run into. + +A Brief History +--------------- + +Prior to version 1.7, Django only supported adding new models to the +database; it was not possible to alter or remove existing models via the +``syncdb`` command (the predecessor to ``migrate``). + +Third-party tools, most notably `South <http://south.aeracode.org>`_, +provided support for these additional types of change, but it was considered +important enough that support was brought into core Django. + +Two Commands +------------ + +There are two commands which you will use to interact with migrations +and Django's handling of database schema: + +* :djadmin:`migrate`, which is responsible for applying migrations, as well as + unapplying and listing their status. + +* :djadmin:`makemigrations`, which is responsible for creating new migrations + based on the changes you have made to your models. + +It's worth noting that migrations are created and run on a per-app basis. +In particular, it's possible to have apps that *do not use migrations* (these +are referred to as "unmigrated" apps) - these apps will instead mimic the +legacy behaviour of just adding new models. + +You should think of migrations as a version control system for your database +schema.
``makemigrations`` is responsible for packaging up your model changes +into individual migration files - analogous to commits - and ``migrate`` is +responsible for applying those to your database. + +The migration files for each app live in a "migrations" directory inside +of that app, and are designed to be committed to, and distributed as part +of, its codebase. You should be making them once on your development machine +and then running the same migrations on your colleagues' machines, your +staging machines, and eventually your production machines. + +Migrations will run the same way every time and produce consistent results, +meaning that what you see in development and staging is exactly what will +happen in production - no unexpected surprises. + +Backend Support +--------------- + +Migrations are supported on all backends that Django ships with, as well +as any third-party backends if they have programmed in support for schema +alteration (done via the ``SchemaEditor`` class). + +However, some databases are more capable than others when it comes to +schema migrations; some of the caveats are covered below. + +PostgreSQL +~~~~~~~~~~ + +PostgreSQL is the most capable of all the databases here in terms of schema +support; the only caveat is that adding columns with default values will +lock a table for a time proportional to the number of rows in it. + +For this reason, it's recommended you always create new columns with +``null=True``, as this way they will be added immediately. + +MySQL +~~~~~ + +MySQL lacks support for transactions around schema alteration operations, +meaning that if a migration fails to apply you will have to manually unpick +the changes in order to try again (it's impossible to roll back to an +earlier point). + +In addition, MySQL will lock tables for almost every schema operation and +generally takes a time proportional to the number of rows in the table to +add or remove columns. On slower hardware this can be worse than a minute +per million rows - adding a few columns to a table with just a few million +rows could lock your site up for over ten minutes. + +Finally, MySQL has reasonably small limits on name lengths for columns, tables +and indexes, as well as a limit on the combined size of all columns an index +covers. This means that indexes that are possible on other backends will +fail to be created under MySQL. + +SQLite +~~~~~~ + +SQLite has very little built-in schema alteration support, and so Django +attempts to emulate it by: + +* Creating a new table with the new schema +* Copying the data across +* Dropping the old table +* Renaming the new table to match the original name + +This process generally works well, but it can be slow and occasionally +buggy. It is not recommended that you run and migrate SQLite in a +production environment unless you are very aware of the risks and +its limitations; the support Django ships with is designed to allow +developers to use SQLite on their local machines to develop less complex +Django projects without the need for a full database. + +Workflow +-------- + +Working with migrations is simple. Make changes to your models - say, add +a field and remove a model - and then run :djadmin:`makemigrations`:: + + $ python manage.py makemigrations + Migrations for 'books': + 0003_auto.py: + - Alter field author on book + +Your models will be scanned and compared to the versions currently +contained in your migration files, and then a new set of migrations +will be written out.
Make sure to read the output to see what +``makemigrations`` thinks you have changed - it's not perfect, and for +complex changes it might not detect what you expect. + +Once you have your new migration files, you should apply them to your +database to make sure they work as expected:: + + $ python manage.py migrate + Operations to perform: + Synchronize unmigrated apps: sessions, admin, messages, auth, staticfiles, contenttypes + Apply all migrations: books + Synchronizing apps without migrations: + Creating tables... + Installing custom SQL... + Installing indexes... + Installed 0 object(s) from 0 fixture(s) + Running migrations: + Applying books.0003_auto... OK + +The command runs in two stages; first, it synchronizes unmigrated apps +(performing the same functionality that ``syncdb`` used to provide), and +then it runs any migrations that have not yet been applied. + +Once the migration is applied, commit the migration and the models change +to your version control system as a single commit - that way, when other +developers (or your production servers) check out the code, they'll +get both the changes to your models and the accompanying migration at the +same time. + +Version control +~~~~~~~~~~~~~~~ + +Because migrations are stored in version control, you'll occasionally +come across situations where you and another developer have both committed +a migration to the same app at the same time, resulting in two migrations +with the same number. + +Don't worry - the numbers are just there for developers' reference; Django +just cares that each migration has a different name. Migrations specify which +other migrations they depend on - including earlier migrations in the same +app - in the file, so it's possible to detect when there are two new migrations +for the same app that aren't ordered. + +When this happens, Django will prompt you and give you some options. If it +thinks it's safe enough, it will offer to automatically linearize the two +migrations for you. If not, you'll have to go in and modify the migrations +yourself - don't worry, this isn't difficult, and is explained more in +:ref:`migration-files` below. + +Dependencies +------------ + +While migrations are per-app, the tables and relationships implied by +your models are too complex to be created for just one app at a time. When +you make a migration that requires something else to run - for example, +you add a ForeignKey in your ``books`` app to your ``authors`` app - the +resulting migration will contain a dependency on a migration in ``authors``. + +This means that when you run the migrations, the ``authors`` migration runs +first and creates the table the ``ForeignKey`` references, and then the migration +that makes the ``ForeignKey`` column runs afterwards and creates the constraint. +If this didn't happen, the migration would try to create the ForeignKey column +before the table it references exists, and your database would +throw an error. + +This dependency behaviour affects most migration operations where you +restrict to a single app. Restricting to a single app (either in +``makemigrations`` or ``migrate``) is a best-efforts promise, and not +a guarantee; any other apps that need to be migrated to get dependencies correct +will be. + +.. _migration-files: + +Migration files +--------------- + +Migrations are stored as an on-disk format, referred to here as +"migration files". These files are actually just normal Python files with +an agreed-upon object layout, written in a declarative style.
+ +A basic migration file looks like this:: + + from django.db import migrations, models + + class Migration(migrations.Migration): + + dependencies = [("migrations", "0001_initial")] + + operations = [ + migrations.DeleteModel("Tribble"), + migrations.AddField("Author", "rating", models.IntegerField(default=0)), + ] + +What Django looks for when it loads a migration file (as a Python module) is +a subclass of ``django.db.migrations.Migration`` called ``Migration``. It then +inspects this object for four attributes, only two of which are used +most of the time: + +* ``dependencies``, a list of migrations this one depends on. +* ``operations``, a list of Operation classes that define what this migration + does. + +The operations are the key; they are a set of declarative instructions which +tell Django what schema changes need to be made. Django scans them and +builds an in-memory representation of all of the schema changes to all apps, +and uses this to generate the SQL which makes the schema changes. + +That in-memory structure is also used to work out what the differences are +between your models and the current state of your migrations; Django runs +through all the changes, in order, on an in-memory set of models to come +up with the state of your models last time you ran ``makemigrations``. It +then uses these models to compare against the ones in your ``models.py`` files +to work out what you have changed. + +You should rarely, if ever, need to edit migration files by hand, but +it's entirely possible to write them manually if you need to. Some of the +more complex operations are not autodetectable and are only available via +a hand-written migration, so don't be scared about editing them if you have to. + +Adding migrations to apps +------------------------- + +Adding migrations to new apps is straightforward - they come preconfigured to +accept migrations, and so just run :djadmin:`makemigrations` once you've made +some changes. + +If your app already has models and database tables, and doesn't have migrations +yet (for example, you created it against a previous Django version), you'll +need to convert it to use migrations; this is a simple process:: + + python manage.py makemigrations --force yourappname + +This will make a new initial migration for your app (the ``--force`` argument +is to override Django's default behaviour, as it thinks your app does not want +migrations). Now, when you run :djadmin:`migrate`, Django will detect that +you have an initial migration *and* that the tables it wants to create already +exist, and will mark the migration as already applied. + +Note that this only works given two things: + +* You have not changed your models since you made their tables. For migrations + to work, you must make the initial migration *first* and then make changes, + as Django compares changes against migration files, not the database. + +* You have not manually edited your database - Django won't be able to detect + that your database doesn't match your models; you'll just get errors when + migrations try to modify those tables. + + +.. _historical-models: + +Historical models +----------------- + +When you run migrations, Django is working from historical versions of +your models stored in the migration files. If you write Python code +using the ``django.db.migrations.RunPython`` operation, or if you have +``allow_migrate`` methods on your database routers, you will be exposed +to these versions of your models.
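For example, a data migration touching historical models might look something like this sketch (assuming the ``RunPython`` calling convention in which the callable receives the historical app registry and a schema editor; the app, model and migration names here are hypothetical)::

    from django.db import migrations

    def set_default_rating(apps, schema_editor):
        # ``apps`` yields historical models: the same fields as when this
        # migration was written, but no custom methods or managers.
        Author = apps.get_model("yourapp", "Author")
        Author.objects.filter(rating=None).update(rating=0)

    class Migration(migrations.Migration):

        dependencies = [("yourapp", "0002_author_rating")]

        operations = [
            migrations.RunPython(set_default_rating),
        ]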
+ +Because it's impossible to serialize arbitrary Python code, these historical +models will not have any custom methods or managers that you have defined. +They will, however, have the same fields, relationships and ``Meta`` options +(also versioned, so they may be different from your current ones). + +In addition, the base classes of the model are just stored as pointers, +so you must always keep base classes around for as long as there is a migration +that contains a reference to them. On the plus side, methods and managers +from these base classes inherit normally, so if you absolutely need access +to these you can opt to move them into a superclass. diff --git a/docs/topics/serialization.txt b/docs/topics/serialization.txt index 8078e0f2c6..0f24715599 100644 --- a/docs/topics/serialization.txt +++ b/docs/topics/serialization.txt @@ -293,7 +293,7 @@ serialize an object that refers to a content type, then you need to have a way to refer to that content type to begin with. Since ``ContentType`` objects are automatically created by Django during the database synchronization process, the primary key of a given content type isn't easy to predict; it will -depend on how and when :djadmin:`syncdb` was executed. This is true for all +depend on how and when :djadmin:`migrate` was executed. This is true for all models which automatically generate objects, notably including :class:`~django.contrib.auth.models.Permission`, :class:`~django.contrib.auth.models.Group`, and diff --git a/docs/topics/testing/advanced.txt b/docs/topics/testing/advanced.txt index 9ed06447c0..d8d59c6872 100644 --- a/docs/topics/testing/advanced.txt +++ b/docs/topics/testing/advanced.txt @@ -182,7 +182,7 @@ Advanced features of ``TransactionTestCase`` By default, ``available_apps`` is set to ``None``. After each test, Django calls :djadmin:`flush` to reset the database state. This empties all tables - and emits the :data:`~django.db.models.signals.post_syncdb` signal, which + and emits the :data:`~django.db.models.signals.post_migrate` signal, which re-creates one content type and three permissions for each model. This operation gets expensive proportionally to the number of models. @@ -190,13 +190,13 @@ Advanced features of ``TransactionTestCase`` behave as if only the models from these applications were available. The behavior of ``TransactionTestCase`` changes as follows: - - :data:`~django.db.models.signals.post_syncdb` is fired before each + - :data:`~django.db.models.signals.post_migrate` is fired before each test to create the content types and permissions for each model in available apps, in case they're missing. - After each test, Django empties only tables corresponding to models in available apps. However, at the database level, truncation may cascade to related models in unavailable apps. Furthermore - :data:`~django.db.models.signals.post_syncdb` isn't fired; it will be + :data:`~django.db.models.signals.post_migrate` isn't fired; it will be fired by the next ``TransactionTestCase``, after the correct set of applications is selected. @@ -205,10 +205,10 @@ Advanced features of ``TransactionTestCase`` cause unrelated tests to fail. Be careful with tests that use sessions; the default session engine stores them in the database. 
- Since :data:`~django.db.models.signals.post_syncdb` isn't emitted after + Since :data:`~django.db.models.signals.post_migrate` isn't emitted after flushing the database, its state after a ``TransactionTestCase`` isn't the same as after a ``TestCase``: it's missing the rows created by listeners - to :data:`~django.db.models.signals.post_syncdb`. Considering the + to :data:`~django.db.models.signals.post_migrate`. Considering the :ref:`order in which tests are executed `, this isn't an issue, provided either all ``TransactionTestCase`` in a given test suite declare ``available_apps``, or none of them. @@ -276,7 +276,7 @@ testing behavior. This behavior involves: #. Creating the test databases. -#. Running ``syncdb`` to install models and initial data into the test +#. Running ``migrate`` to install models and initial data into the test databases. #. Running the tests that were found. @@ -467,7 +467,7 @@ can be useful during testing. .. function:: create_test_db([verbosity=1, autoclobber=False]) - Creates a new test database and runs ``syncdb`` against it. + Creates a new test database and runs ``migrate`` against it. ``verbosity`` has the same behavior as in ``run_tests()``. diff --git a/docs/topics/testing/overview.txt b/docs/topics/testing/overview.txt index 89b43c42c5..89b38f7573 100644 --- a/docs/topics/testing/overview.txt +++ b/docs/topics/testing/overview.txt @@ -1170,9 +1170,9 @@ documentation` for more details. .. note:: - If you've ever run :djadmin:`manage.py syncdb`, you've + If you've ever run :djadmin:`manage.py migrate`, you've already used a fixture without even knowing it! When you call - :djadmin:`syncdb` in the database for the first time, Django + :djadmin:`migrate` in the database for the first time, Django installs a fixture called ``initial_data``. This gives you a way of populating a new database with any initial data, such as a default set of categories. diff --git a/tests/app_cache/__init__.py b/tests/app_cache/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/app_cache/models.py b/tests/app_cache/models.py new file mode 100644 index 0000000000..1b4d33c2f9 --- /dev/null +++ b/tests/app_cache/models.py @@ -0,0 +1,17 @@ +from django.db import models +from django.db.models.loading import BaseAppCache + +# We're testing app cache presence on load, so this is handy. + +new_app_cache = BaseAppCache() + + +class TotallyNormal(models.Model): + name = models.CharField(max_length=255) + + +class SoAlternative(models.Model): + name = models.CharField(max_length=255) + + class Meta: + app_cache = new_app_cache diff --git a/tests/app_cache/tests.py b/tests/app_cache/tests.py new file mode 100644 index 0000000000..b72b862de3 --- /dev/null +++ b/tests/app_cache/tests.py @@ -0,0 +1,44 @@ +from __future__ import absolute_import +from django.test import TestCase +from django.db.models.loading import cache, BaseAppCache +from django.db import models +from .models import TotallyNormal, SoAlternative, new_app_cache + + +class AppCacheTests(TestCase): + """ + Tests the AppCache borg and non-borg versions + """ + + def test_models_py(self): + """ + Tests that the models in the models.py file were loaded correctly. 
+ """ + self.assertEqual(cache.get_model("app_cache", "TotallyNormal"), TotallyNormal) + self.assertEqual(cache.get_model("app_cache", "SoAlternative"), None) + + self.assertEqual(new_app_cache.get_model("app_cache", "TotallyNormal"), None) + self.assertEqual(new_app_cache.get_model("app_cache", "SoAlternative"), SoAlternative) + + def test_dynamic_load(self): + """ + Makes a new model at runtime and ensures it goes into the right place. + """ + old_models = cache.get_models(cache.get_app("app_cache")) + # Construct a new model in a new app cache + body = {} + new_app_cache = BaseAppCache() + meta_contents = { + 'app_label': "app_cache", + 'app_cache': new_app_cache, + } + meta = type("Meta", tuple(), meta_contents) + body['Meta'] = meta + body['__module__'] = TotallyNormal.__module__ + temp_model = type("SouthPonies", (models.Model,), body) + # Make sure it appeared in the right place! + self.assertEqual( + old_models, + cache.get_models(cache.get_app("app_cache")), + ) + self.assertEqual(new_app_cache.get_model("app_cache", "SouthPonies"), temp_model) diff --git a/tests/cache/tests.py b/tests/cache/tests.py index a225666521..287a078617 100644 --- a/tests/cache/tests.py +++ b/tests/cache/tests.py @@ -895,7 +895,7 @@ class DBCacheRouter(object): if model._meta.app_label == 'django_cache': return 'other' - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): if model._meta.app_label == 'django_cache': return db == 'other' diff --git a/tests/migrations/__init__.py b/tests/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/migrations/models.py b/tests/migrations/models.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/migrations/test_autodetector.py b/tests/migrations/test_autodetector.py new file mode 100644 index 0000000000..de3b156ba6 --- /dev/null +++ b/tests/migrations/test_autodetector.py @@ -0,0 +1,274 @@ +# encoding: utf8 +from django.test import TestCase +from django.db.migrations.autodetector import MigrationAutodetector, MigrationQuestioner +from django.db.migrations.state import ProjectState, ModelState +from django.db.migrations.graph import MigrationGraph +from django.db import models + + +class AutodetectorTests(TestCase): + """ + Tests the migration autodetector. 
+ """ + + author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) + author_name = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200))]) + author_name_longer = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400))]) + author_name_renamed = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200))]) + author_with_book = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book"))]) + other_pony = ModelState("otherapp", "Pony", [("id", models.AutoField(primary_key=True))]) + other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) + third_thing = ModelState("thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]) + book = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))]) + book_unique = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))], {"unique_together": [("author", "title")]}) + book_unique_2 = ModelState("otherapp", "Book", [("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author")), ("title", models.CharField(max_length=200))], {"unique_together": [("title", "author")]}) + edition = ModelState("thirdapp", "Edition", [("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book"))]) + + def make_project_state(self, model_states): + "Shortcut to make ProjectStates from lists of predefined models" + project_state = ProjectState() + for model_state in model_states: + project_state.add_model_state(model_state.clone()) + return project_state + + def test_arrange_for_graph(self): + "Tests auto-naming of migrations for graph matching." + # Make a fake graph + graph = MigrationGraph() + graph.add_node(("testapp", "0001_initial"), None) + graph.add_node(("testapp", "0002_foobar"), None) + graph.add_node(("otherapp", "0001_initial"), None) + graph.add_dependency(("testapp", "0002_foobar"), ("testapp", "0001_initial")) + graph.add_dependency(("testapp", "0002_foobar"), ("otherapp", "0001_initial")) + # Use project state to make a new migration change set + before = self.make_project_state([]) + after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Run through arrange_for_graph + changes = autodetector._arrange_for_graph(changes, graph) + # Make sure there's a new name, deps match, etc. 
+ self.assertEqual(changes["testapp"][0].name, "0003_author") + self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) + self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable") + self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) + + def test_trim_apps(self): + "Tests that trim does not remove dependencies but does remove unwanted apps" + # Use project state to make a new migration change set + before = self.make_project_state([]) + after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) + autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) + changes = autodetector._detect_changes() + # Run through arrange_for_graph + graph = MigrationGraph() + changes = autodetector._arrange_for_graph(changes, graph) + changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) + changes = autodetector._trim_to_apps(changes, set(["testapp"])) + # Make sure there's the right set of migrations + self.assertEqual(changes["testapp"][0].name, "0001_initial") + self.assertEqual(changes["otherapp"][0].name, "0001_initial") + self.assertNotIn("thirdapp", changes) + + def test_new_model(self): + "Tests autodetection of new models" + # Make state + before = self.make_project_state([]) + after = self.make_project_state([self.author_empty]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + self.assertEqual(action.name, "Author") + + def test_old_model(self): + "Tests deletion of old models" + # Make state + before = self.make_project_state([self.author_empty]) + after = self.make_project_state([]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "DeleteModel") + self.assertEqual(action.name, "Author") + + def test_add_field(self): + "Tests autodetection of new fields" + # Make state + before = self.make_project_state([self.author_empty]) + after = self.make_project_state([self.author_name]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AddField") + self.assertEqual(action.name, "name") + + def test_remove_field(self): + "Tests autodetection of removed fields" + # Make state + before = self.make_project_state([self.author_name]) + after = self.make_project_state([self.author_empty]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? 
+ migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "RemoveField") + self.assertEqual(action.name, "name") + + def test_alter_field(self): + "Tests autodetection of altered fields" + # Make state + before = self.make_project_state([self.author_name]) + after = self.make_project_state([self.author_name_longer]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AlterField") + self.assertEqual(action.name, "name") + + def test_rename_field(self): + "Tests autodetection of renamed fields" + # Make state + before = self.make_project_state([self.author_name]) + after = self.make_project_state([self.author_name_renamed]) + autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_rename": True})) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + # Right number of actions? + migration = changes['testapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "RenameField") + self.assertEqual(action.old_name, "name") + self.assertEqual(action.new_name, "names") + + def test_fk_dependency(self): + "Tests that having a ForeignKey automatically adds a dependency" + # Make state + before = self.make_project_state([]) + after = self.make_project_state([self.author_name, self.book, self.edition]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + self.assertEqual(len(changes['otherapp']), 1) + self.assertEqual(len(changes['thirdapp']), 1) + # Right number of actions? + migration1 = changes['testapp'][0] + self.assertEqual(len(migration1.operations), 1) + migration2 = changes['otherapp'][0] + self.assertEqual(len(migration2.operations), 1) + migration3 = changes['thirdapp'][0] + self.assertEqual(len(migration3.operations), 1) + # Right actions? + action = migration1.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + action = migration2.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + action = migration3.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + # Right dependencies? + self.assertEqual(migration1.dependencies, []) + self.assertEqual(migration2.dependencies, [("testapp", "auto_1")]) + self.assertEqual(migration3.dependencies, [("otherapp", "auto_1")]) + + def test_circular_fk_dependency(self): + """ + Tests that having a circular ForeignKey dependency automatically + resolves the situation into 2 migrations on one side and 1 on the other. + """ + # Make state + before = self.make_project_state([]) + after = self.make_project_state([self.author_with_book, self.book]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['testapp']), 1) + self.assertEqual(len(changes['otherapp']), 2) + # Right number of actions?
+ migration1 = changes['testapp'][0] + self.assertEqual(len(migration1.operations), 1) + migration2 = changes['otherapp'][0] + self.assertEqual(len(migration2.operations), 1) + migration3 = changes['otherapp'][1] + self.assertEqual(len(migration3.operations), 1) + # Right actions? + action = migration1.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + action = migration2.operations[0] + self.assertEqual(action.__class__.__name__, "CreateModel") + self.assertEqual(len(action.fields), 2) + action = migration3.operations[0] + self.assertEqual(action.__class__.__name__, "AddField") + self.assertEqual(action.name, "author") + # Right dependencies? + self.assertEqual(migration1.dependencies, [("otherapp", "auto_1")]) + self.assertEqual(migration2.dependencies, []) + self.assertEqual(set(migration3.dependencies), set([("otherapp", "auto_1"), ("testapp", "auto_1")])) + + def test_unique_together(self): + "Tests unique_together detection" + # Make state + before = self.make_project_state([self.author_empty, self.book]) + after = self.make_project_state([self.author_empty, self.book_unique]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['otherapp']), 1) + # Right number of actions? + migration = changes['otherapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AlterUniqueTogether") + self.assertEqual(action.name, "book") + self.assertEqual(action.unique_together, set([("author", "title")])) + + def test_unique_together_ordering(self): + "Tests that unique_together also triggers on ordering changes" + # Make state + before = self.make_project_state([self.author_empty, self.book_unique]) + after = self.make_project_state([self.author_empty, self.book_unique_2]) + autodetector = MigrationAutodetector(before, after) + changes = autodetector._detect_changes() + # Right number of migrations? + self.assertEqual(len(changes['otherapp']), 1) + # Right number of actions? + migration = changes['otherapp'][0] + self.assertEqual(len(migration.operations), 1) + # Right action? + action = migration.operations[0] + self.assertEqual(action.__class__.__name__, "AlterUniqueTogether") + self.assertEqual(action.name, "book") + self.assertEqual(action.unique_together, set([("title", "author")])) diff --git a/tests/migrations/test_base.py b/tests/migrations/test_base.py new file mode 100644 index 0000000000..7ab09b04a5 --- /dev/null +++ b/tests/migrations/test_base.py @@ -0,0 +1,41 @@ +from django.test import TransactionTestCase +from django.db import connection + + +class MigrationTestBase(TransactionTestCase): + """ + Contains an extended set of asserts for testing migrations and schema operations.
+ """ + + available_apps = ["migrations"] + + def assertTableExists(self, table): + self.assertIn(table, connection.introspection.get_table_list(connection.cursor())) + + def assertTableNotExists(self, table): + self.assertNotIn(table, connection.introspection.get_table_list(connection.cursor())) + + def assertColumnExists(self, table, column): + self.assertIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + + def assertColumnNotExists(self, table, column): + self.assertNotIn(column, [c.name for c in connection.introspection.get_table_description(connection.cursor(), table)]) + + def assertColumnNull(self, table, column): + self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], True) + + def assertColumnNotNull(self, table, column): + self.assertEqual([c.null_ok for c in connection.introspection.get_table_description(connection.cursor(), table) if c.name == column][0], False) + + def assertIndexExists(self, table, columns, value=True): + self.assertEqual( + value, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), table).values() + if c['columns'] == list(columns) + ), + ) + + def assertIndexNotExists(self, table, columns): + return self.assertIndexExists(table, columns, False) diff --git a/tests/migrations/test_commands.py b/tests/migrations/test_commands.py new file mode 100644 index 0000000000..d775d1eba7 --- /dev/null +++ b/tests/migrations/test_commands.py @@ -0,0 +1,37 @@ +from django.core.management import call_command +from django.test.utils import override_settings +from .test_base import MigrationTestBase + + +class CommandTests(MigrationTestBase): + """ + Tests running the commands (migrate, makemigrations). + """ + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) + def test_migrate(self): + """ + Tests basic usage of the migrate command. + """ + # Make sure no tables are created + self.assertTableNotExists("migrations_author") + self.assertTableNotExists("migrations_tribble") + self.assertTableNotExists("migrations_book") + # Run the migrations to 0001 only + call_command("migrate", "migrations", "0001", verbosity=0) + # Make sure the right tables exist + self.assertTableExists("migrations_author") + self.assertTableExists("migrations_tribble") + self.assertTableNotExists("migrations_book") + # Run migrations all the way + call_command("migrate", verbosity=0) + # Make sure the right tables exist + self.assertTableExists("migrations_author") + self.assertTableNotExists("migrations_tribble") + self.assertTableExists("migrations_book") + # Unmigrate everything + call_command("migrate", "migrations", "zero", verbosity=0) + # Make sure it's all gone + self.assertTableNotExists("migrations_author") + self.assertTableNotExists("migrations_tribble") + self.assertTableNotExists("migrations_book") diff --git a/tests/migrations/test_executor.py b/tests/migrations/test_executor.py new file mode 100644 index 0000000000..dbdea900a5 --- /dev/null +++ b/tests/migrations/test_executor.py @@ -0,0 +1,77 @@ +from django.test import TransactionTestCase +from django.test.utils import override_settings +from django.db import connection +from django.db.migrations.executor import MigrationExecutor + + +class ExecutorTests(TransactionTestCase): + """ + Tests the migration executor (full end-to-end running). 
+ + Bear in mind that if these are failing you should fix the other + test failures first, as they may be propagating into here. + """ + + available_apps = ["migrations", "django.contrib.sessions"] + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) + def test_run(self): + """ + Tests running a simple set of migrations. + """ + executor = MigrationExecutor(connection) + executor.recorder.flush() + # Let's look at the plan first and make sure it's up to scratch + plan = executor.migration_plan([("migrations", "0002_second")]) + self.assertEqual( + plan, + [ + (executor.loader.graph.nodes["migrations", "0001_initial"], False), + (executor.loader.graph.nodes["migrations", "0002_second"], False), + ], + ) + # Were the tables there before? + self.assertNotIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) + self.assertNotIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) + # Alright, let's try running it + executor.migrate([("migrations", "0002_second")]) + # Are the tables there now? + self.assertIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) + self.assertIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) + # Alright, let's undo what we did + executor.migrate([("migrations", None)]) + # Are the tables gone? + self.assertNotIn("migrations_author", connection.introspection.get_table_list(connection.cursor())) + self.assertNotIn("migrations_book", connection.introspection.get_table_list(connection.cursor())) + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations", "sessions": "migrations.test_migrations_2"}) + def test_empty_plan(self): + """ + Tests that re-planning a full migration of a fully-migrated set doesn't + perform spurious unmigrations and remigrations. + + There was previously a bug where the executor just always performed the + backwards plan for applied migrations - which even for the most recent + migration in an app, might include other, dependent apps, and these + were being unmigrated. + """ + # Make the initial plan, check it + # We use 'sessions' here as the second app as it's always present + # in INSTALLED_APPS, so we can happily assign it test migrations. + executor = MigrationExecutor(connection) + plan = executor.migration_plan([("migrations", "0002_second"), ("sessions", "0001_initial")]) + self.assertEqual( + plan, + [ + (executor.loader.graph.nodes["migrations", "0001_initial"], False), + (executor.loader.graph.nodes["migrations", "0002_second"], False), + (executor.loader.graph.nodes["sessions", "0001_initial"], False), + ], + ) + # Fake-apply all migrations + executor.migrate([("migrations", "0002_second"), ("sessions", "0001_initial")], fake=True) + # Now plan a second time and make sure it's empty + plan = executor.migration_plan([("migrations", "0002_second"), ("sessions", "0001_initial")]) + self.assertEqual(plan, []) + # Erase all the fake records + executor.recorder.flush() diff --git a/tests/migrations/test_graph.py b/tests/migrations/test_graph.py new file mode 100644 index 0000000000..e3d5a28283 --- /dev/null +++ b/tests/migrations/test_graph.py @@ -0,0 +1,135 @@ +from django.test import TestCase +from django.db.migrations.graph import MigrationGraph, CircularDependencyError + + +class GraphTests(TestCase): + """ + Tests the digraph structure. 
+ """ + + def test_simple_graph(self): + """ + Tests a basic dependency graph: + + app_a: 0001 <-- 0002 <--- 0003 <-- 0004 + / + app_b: 0001 <-- 0002 <-/ + """ + # Build graph + graph = MigrationGraph() + graph.add_node(("app_a", "0001"), None) + graph.add_node(("app_a", "0002"), None) + graph.add_node(("app_a", "0003"), None) + graph.add_node(("app_a", "0004"), None) + graph.add_node(("app_b", "0001"), None) + graph.add_node(("app_b", "0002"), None) + graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) + graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) + graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) + graph.add_dependency(("app_a", "0003"), ("app_b", "0002")) + graph.add_dependency(("app_b", "0002"), ("app_b", "0001")) + # Test root migration case + self.assertEqual( + graph.forwards_plan(("app_a", "0001")), + [('app_a', '0001')], + ) + # Test branch B only + self.assertEqual( + graph.forwards_plan(("app_b", "0002")), + [("app_b", "0001"), ("app_b", "0002")], + ) + # Test whole graph + self.assertEqual( + graph.forwards_plan(("app_a", "0004")), + [('app_b', '0001'), ('app_b', '0002'), ('app_a', '0001'), ('app_a', '0002'), ('app_a', '0003'), ('app_a', '0004')], + ) + # Test reverse to b:0002 + self.assertEqual( + graph.backwards_plan(("app_b", "0002")), + [('app_a', '0004'), ('app_a', '0003'), ('app_b', '0002')], + ) + # Test roots and leaves + self.assertEqual( + graph.root_nodes(), + set([('app_a', '0001'), ('app_b', '0001')]), + ) + self.assertEqual( + graph.leaf_nodes(), + set([('app_a', '0004'), ('app_b', '0002')]), + ) + + def test_complex_graph(self): + """ + Tests a complex dependency graph: + + app_a: 0001 <-- 0002 <--- 0003 <-- 0004 + \ \ / / + app_b: 0001 <-\ 0002 <-X / + \ \ / + app_c: \ 0001 <-- 0002 <- + """ + # Build graph + graph = MigrationGraph() + graph.add_node(("app_a", "0001"), None) + graph.add_node(("app_a", "0002"), None) + graph.add_node(("app_a", "0003"), None) + graph.add_node(("app_a", "0004"), None) + graph.add_node(("app_b", "0001"), None) + graph.add_node(("app_b", "0002"), None) + graph.add_node(("app_c", "0001"), None) + graph.add_node(("app_c", "0002"), None) + graph.add_dependency(("app_a", "0004"), ("app_a", "0003")) + graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) + graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) + graph.add_dependency(("app_a", "0003"), ("app_b", "0002")) + graph.add_dependency(("app_b", "0002"), ("app_b", "0001")) + graph.add_dependency(("app_a", "0004"), ("app_c", "0002")) + graph.add_dependency(("app_c", "0002"), ("app_c", "0001")) + graph.add_dependency(("app_c", "0001"), ("app_b", "0001")) + graph.add_dependency(("app_c", "0002"), ("app_a", "0002")) + # Test branch C only + self.assertEqual( + graph.forwards_plan(("app_c", "0002")), + [('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002')], + ) + # Test whole graph + self.assertEqual( + graph.forwards_plan(("app_a", "0004")), + [('app_b', '0001'), ('app_c', '0001'), ('app_a', '0001'), ('app_a', '0002'), ('app_c', '0002'), ('app_b', '0002'), ('app_a', '0003'), ('app_a', '0004')], + ) + # Test reverse to b:0001 + self.assertEqual( + graph.backwards_plan(("app_b", "0001")), + [('app_a', '0004'), ('app_c', '0002'), ('app_c', '0001'), ('app_a', '0003'), ('app_b', '0002'), ('app_b', '0001')], + ) + # Test roots and leaves + self.assertEqual( + graph.root_nodes(), + set([('app_a', '0001'), ('app_b', '0001'), ('app_c', '0001')]), + ) + self.assertEqual( + graph.leaf_nodes(), + set([('app_a', 
'0004'), ('app_b', '0002'), ('app_c', '0002')]), + ) + + def test_circular_graph(self): + """ + Tests a circular dependency graph. + """ + # Build graph + graph = MigrationGraph() + graph.add_node(("app_a", "0001"), None) + graph.add_node(("app_a", "0002"), None) + graph.add_node(("app_a", "0003"), None) + graph.add_node(("app_b", "0001"), None) + graph.add_node(("app_b", "0002"), None) + graph.add_dependency(("app_a", "0003"), ("app_a", "0002")) + graph.add_dependency(("app_a", "0002"), ("app_a", "0001")) + graph.add_dependency(("app_a", "0001"), ("app_b", "0002")) + graph.add_dependency(("app_b", "0002"), ("app_b", "0001")) + graph.add_dependency(("app_b", "0001"), ("app_a", "0003")) + # Test whole graph + self.assertRaises( + CircularDependencyError, + graph.forwards_plan, ("app_a", "0003"), + ) diff --git a/tests/migrations/test_loader.py b/tests/migrations/test_loader.py new file mode 100644 index 0000000000..b9ad9726ae --- /dev/null +++ b/tests/migrations/test_loader.py @@ -0,0 +1,79 @@ +from django.test import TestCase +from django.test.utils import override_settings +from django.db import connection +from django.db.migrations.loader import MigrationLoader, AmbiguityError +from django.db.migrations.recorder import MigrationRecorder + + +class RecorderTests(TestCase): + """ + Tests recording migrations as applied or not. + """ + + def test_apply(self): + """ + Tests marking migrations as applied/unapplied. + """ + recorder = MigrationRecorder(connection) + self.assertEqual( + recorder.applied_migrations(), + set(), + ) + recorder.record_applied("myapp", "0432_ponies") + self.assertEqual( + recorder.applied_migrations(), + set([("myapp", "0432_ponies")]), + ) + recorder.record_unapplied("myapp", "0432_ponies") + self.assertEqual( + recorder.applied_migrations(), + set(), + ) + + +class LoaderTests(TestCase): + """ + Tests the disk and database loader, and running through migrations + in memory. + """ + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) + def test_load(self): + """ + Makes sure the loader can load the migrations for the test apps, + and then render them out to a new AppCache. + """ + # Load and test the plan + migration_loader = MigrationLoader(connection) + self.assertEqual( + migration_loader.graph.forwards_plan(("migrations", "0002_second")), + [("migrations", "0001_initial"), ("migrations", "0002_second")], + ) + # Now render it out! 
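+        # project_state() replays each operation in the plan in memory, so
+        # the model states below should reflect 0001_initial plus
+        # 0002_second (Tribble and silly_field gone, rating and Book added)
+        # without touching the database.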
+ project_state = migration_loader.graph.project_state(("migrations", "0002_second")) + self.assertEqual(len(project_state.models), 2) + + author_state = project_state.models["migrations", "author"] + self.assertEqual( + [x for x, y in author_state.fields], + ["id", "name", "slug", "age", "rating"] + ) + + book_state = project_state.models["migrations", "book"] + self.assertEqual( + [x for x, y in book_state.fields], + ["id", "author"] + ) + + @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) + def test_name_match(self): + "Tests prefix name matching" + migration_loader = MigrationLoader(connection) + self.assertEqual( + migration_loader.get_migration_by_prefix("migrations", "0001").name, + "0001_initial", + ) + with self.assertRaises(AmbiguityError): + migration_loader.get_migration_by_prefix("migrations", "0") + with self.assertRaises(KeyError): + migration_loader.get_migration_by_prefix("migrations", "blarg") diff --git a/tests/migrations/test_migrations/0001_initial.py b/tests/migrations/test_migrations/0001_initial.py new file mode 100644 index 0000000000..f20bac8aec --- /dev/null +++ b/tests/migrations/test_migrations/0001_initial.py @@ -0,0 +1,27 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + operations = [ + + migrations.CreateModel( + "Author", + [ + ("id", models.AutoField(primary_key=True)), + ("name", models.CharField(max_length=255)), + ("slug", models.SlugField(null=True)), + ("age", models.IntegerField(default=0)), + ("silly_field", models.BooleanField(default=False)), + ], + ), + + migrations.CreateModel( + "Tribble", + [ + ("id", models.AutoField(primary_key=True)), + ("fluffy", models.BooleanField(default=True)), + ], + ) + + ] diff --git a/tests/migrations/test_migrations/0002_second.py b/tests/migrations/test_migrations/0002_second.py new file mode 100644 index 0000000000..ace9a83347 --- /dev/null +++ b/tests/migrations/test_migrations/0002_second.py @@ -0,0 +1,24 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("migrations", "0001_initial")] + + operations = [ + + migrations.DeleteModel("Tribble"), + + migrations.RemoveField("Author", "silly_field"), + + migrations.AddField("Author", "rating", models.IntegerField(default=0)), + + migrations.CreateModel( + "Book", + [ + ("id", models.AutoField(primary_key=True)), + ("author", models.ForeignKey("migrations.Author", null=True)), + ], + ) + + ] diff --git a/tests/migrations/test_migrations/__init__.py b/tests/migrations/test_migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/migrations/test_migrations_2/0001_initial.py b/tests/migrations/test_migrations_2/0001_initial.py new file mode 100644 index 0000000000..94c4bc0746 --- /dev/null +++ b/tests/migrations/test_migrations_2/0001_initial.py @@ -0,0 +1,21 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("migrations", "0002_second")] + + operations = [ + + migrations.CreateModel( + "OtherAuthor", + [ + ("id", models.AutoField(primary_key=True)), + ("name", models.CharField(max_length=255)), + ("slug", models.SlugField(null=True)), + ("age", models.IntegerField(default=0)), + ("silly_field", models.BooleanField(default=False)), + ], + ), + + ] diff --git a/tests/migrations/test_migrations_2/__init__.py b/tests/migrations/test_migrations_2/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/tests/migrations/test_operations.py b/tests/migrations/test_operations.py
new file mode 100644
index 0000000000..1bc4a42d7e
--- /dev/null
+++ b/tests/migrations/test_operations.py
@@ -0,0 +1,327 @@
+from django.db import connection, models, migrations, router
+from django.db.transaction import atomic
+from django.db.utils import IntegrityError
+from django.db.migrations.state import ProjectState
+from .test_base import MigrationTestBase
+
+
+class OperationTests(MigrationTestBase):
+    """
+    Tests running the operations and making sure they do what they say they do.
+    Each test checks the state change, and then the database operation,
+    both forwards and backwards.
+    """
+
+    def set_up_test_model(self, app_label, second_model=False):
+        """
+        Creates a test model state and database table.
+        """
+        # Make the "current" state
+        operations = [migrations.CreateModel(
+            "Pony",
+            [
+                ("id", models.AutoField(primary_key=True)),
+                ("pink", models.IntegerField(default=3)),
+                ("weight", models.FloatField()),
+            ],
+        )]
+        if second_model:
+            operations.append(migrations.CreateModel("Stable", [("id", models.AutoField(primary_key=True))]))
+        project_state = ProjectState()
+        for operation in operations:
+            operation.state_forwards(app_label, project_state)
+        # Set up the database
+        with connection.schema_editor() as editor:
+            for operation in operations:
+                operation.database_forwards(app_label, editor, ProjectState(), project_state)
+        return project_state
+
+    def test_create_model(self):
+        """
+        Tests the CreateModel operation.
+        Most other tests use this operation as part of setup, so check failures here first.
+        """
+        operation = migrations.CreateModel(
+            "Pony",
+            [
+                ("id", models.AutoField(primary_key=True)),
+                ("pink", models.IntegerField(default=1)),
+            ],
+        )
+        # Test the state alteration
+        project_state = ProjectState()
+        new_state = project_state.clone()
+        operation.state_forwards("test_crmo", new_state)
+        self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
+        self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
+        # Test the database alteration
+        self.assertTableNotExists("test_crmo_pony")
+        with connection.schema_editor() as editor:
+            operation.database_forwards("test_crmo", editor, project_state, new_state)
+        self.assertTableExists("test_crmo_pony")
+        # And test reversal
+        with connection.schema_editor() as editor:
+            operation.database_backwards("test_crmo", editor, new_state, project_state)
+        self.assertTableNotExists("test_crmo_pony")
+        # And deconstruction
+        definition = operation.deconstruct()
+        self.assertEqual(definition[0], "CreateModel")
+        self.assertEqual(len(definition[1]), 2)
+        self.assertEqual(len(definition[2]), 0)
+        self.assertEqual(definition[1][0], "Pony")
+
+    def test_delete_model(self):
+        """
+        Tests the DeleteModel operation.
+ """ + project_state = self.set_up_test_model("test_dlmo") + # Test the state alteration + operation = migrations.DeleteModel("Pony") + new_state = project_state.clone() + operation.state_forwards("test_dlmo", new_state) + self.assertNotIn(("test_dlmo", "pony"), new_state.models) + # Test the database alteration + self.assertTableExists("test_dlmo_pony") + with connection.schema_editor() as editor: + operation.database_forwards("test_dlmo", editor, project_state, new_state) + self.assertTableNotExists("test_dlmo_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_dlmo", editor, new_state, project_state) + self.assertTableExists("test_dlmo_pony") + + def test_add_field(self): + """ + Tests the AddField operation. + """ + project_state = self.set_up_test_model("test_adfl") + # Test the state alteration + operation = migrations.AddField("Pony", "height", models.FloatField(null=True)) + new_state = project_state.clone() + operation.state_forwards("test_adfl", new_state) + self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4) + # Test the database alteration + self.assertColumnNotExists("test_adfl_pony", "height") + with connection.schema_editor() as editor: + operation.database_forwards("test_adfl", editor, project_state, new_state) + self.assertColumnExists("test_adfl_pony", "height") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_adfl", editor, new_state, project_state) + self.assertColumnNotExists("test_adfl_pony", "height") + + def test_add_field_m2m(self): + """ + Tests the AddField operation with a ManyToManyField. + """ + project_state = self.set_up_test_model("test_adflmm", second_model=True) + # Test the state alteration + operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) + new_state = project_state.clone() + operation.state_forwards("test_adflmm", new_state) + self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) + # Test the database alteration + self.assertTableNotExists("test_adflmm_pony_stables") + with connection.schema_editor() as editor: + operation.database_forwards("test_adflmm", editor, project_state, new_state) + self.assertTableExists("test_adflmm_pony_stables") + self.assertColumnNotExists("test_adflmm_pony", "stables") + # Make sure the M2M field actually works + with atomic(): + app_cache = new_state.render() + Pony = app_cache.get_model("test_adflmm", "Pony") + p = Pony.objects.create(pink=False, weight=4.55) + p.stables.create() + self.assertEqual(p.stables.count(), 1) + p.stables.all().delete() + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_adflmm", editor, new_state, project_state) + self.assertTableNotExists("test_adflmm_pony_stables") + + def test_remove_field(self): + """ + Tests the RemoveField operation. 
+ """ + project_state = self.set_up_test_model("test_rmfl") + # Test the state alteration + operation = migrations.RemoveField("Pony", "pink") + new_state = project_state.clone() + operation.state_forwards("test_rmfl", new_state) + self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) + # Test the database alteration + self.assertColumnExists("test_rmfl_pony", "pink") + with connection.schema_editor() as editor: + operation.database_forwards("test_rmfl", editor, project_state, new_state) + self.assertColumnNotExists("test_rmfl_pony", "pink") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_rmfl", editor, new_state, project_state) + self.assertColumnExists("test_rmfl_pony", "pink") + + def test_alter_model_table(self): + """ + Tests the AlterModelTable operation. + """ + project_state = self.set_up_test_model("test_almota") + # Test the state alteration + operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") + new_state = project_state.clone() + operation.state_forwards("test_almota", new_state) + self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") + # Test the database alteration + self.assertTableExists("test_almota_pony") + self.assertTableNotExists("test_almota_pony_2") + with connection.schema_editor() as editor: + operation.database_forwards("test_almota", editor, project_state, new_state) + self.assertTableNotExists("test_almota_pony") + self.assertTableExists("test_almota_pony_2") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_almota", editor, new_state, project_state) + self.assertTableExists("test_almota_pony") + self.assertTableNotExists("test_almota_pony_2") + + def test_alter_field(self): + """ + Tests the AlterField operation. + """ + project_state = self.set_up_test_model("test_alfl") + # Test the state alteration + operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) + new_state = project_state.clone() + operation.state_forwards("test_alfl", new_state) + self.assertEqual(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False) + self.assertEqual(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True) + # Test the database alteration + self.assertColumnNotNull("test_alfl_pony", "pink") + with connection.schema_editor() as editor: + operation.database_forwards("test_alfl", editor, project_state, new_state) + self.assertColumnNull("test_alfl_pony", "pink") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_alfl", editor, new_state, project_state) + self.assertColumnNotNull("test_alfl_pony", "pink") + + def test_rename_field(self): + """ + Tests the RenameField operation. 
+ """ + project_state = self.set_up_test_model("test_rnfl") + # Test the state alteration + operation = migrations.RenameField("Pony", "pink", "blue") + new_state = project_state.clone() + operation.state_forwards("test_rnfl", new_state) + self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) + self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields]) + # Test the database alteration + self.assertColumnExists("test_rnfl_pony", "pink") + self.assertColumnNotExists("test_rnfl_pony", "blue") + with connection.schema_editor() as editor: + operation.database_forwards("test_rnfl", editor, project_state, new_state) + self.assertColumnExists("test_rnfl_pony", "blue") + self.assertColumnNotExists("test_rnfl_pony", "pink") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_rnfl", editor, new_state, project_state) + self.assertColumnExists("test_rnfl_pony", "pink") + self.assertColumnNotExists("test_rnfl_pony", "blue") + + def test_alter_unique_together(self): + """ + Tests the AlterUniqueTogether operation. + """ + project_state = self.set_up_test_model("test_alunto") + # Test the state alteration + operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) + new_state = project_state.clone() + operation.state_forwards("test_alunto", new_state) + self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) + self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) + # Make sure we can insert duplicate rows + cursor = connection.cursor() + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + cursor.execute("DELETE FROM test_alunto_pony") + # Test the database alteration + with connection.schema_editor() as editor: + operation.database_forwards("test_alunto", editor, project_state, new_state) + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") + with self.assertRaises(IntegrityError): + with atomic(): + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + cursor.execute("DELETE FROM test_alunto_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_alunto", editor, new_state, project_state) + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)") + cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)") + cursor.execute("DELETE FROM test_alunto_pony") + + def test_alter_index_together(self): + """ + Tests the AlterIndexTogether operation. 
+ """ + project_state = self.set_up_test_model("test_alinto") + # Test the state alteration + operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) + new_state = project_state.clone() + operation.state_forwards("test_alinto", new_state) + self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) + self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) + # Make sure there's no matching index + self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) + # Test the database alteration + with connection.schema_editor() as editor: + operation.database_forwards("test_alinto", editor, project_state, new_state) + self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_alinto", editor, new_state, project_state) + self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) + + +class MigrateNothingRouter(object): + """ + A router that sends all writes to the other database. + """ + def allow_migrate(self, db, model): + return False + + +class MultiDBOperationTests(MigrationTestBase): + multi_db = True + + def setUp(self): + # Make the 'other' database appear to be a slave of the 'default' + self.old_routers = router.routers + router.routers = [MigrateNothingRouter()] + + def tearDown(self): + # Restore the 'other' database as an independent database + router.routers = self.old_routers + + def test_create_model(self): + """ + Tests that CreateModel honours multi-db settings. + """ + operation = migrations.CreateModel( + "Pony", + [ + ("id", models.AutoField(primary_key=True)), + ("pink", models.IntegerField(default=1)), + ], + ) + # Test the state alteration + project_state = ProjectState() + new_state = project_state.clone() + operation.state_forwards("test_crmo", new_state) + # Test the database alteration + self.assertTableNotExists("test_crmo_pony") + with connection.schema_editor() as editor: + operation.database_forwards("test_crmo", editor, project_state, new_state) + self.assertTableNotExists("test_crmo_pony") + # And test reversal + with connection.schema_editor() as editor: + operation.database_backwards("test_crmo", editor, new_state, project_state) + self.assertTableNotExists("test_crmo_pony") diff --git a/tests/migrations/test_state.py b/tests/migrations/test_state.py new file mode 100644 index 0000000000..e5b3fbfa08 --- /dev/null +++ b/tests/migrations/test_state.py @@ -0,0 +1,77 @@ +from django.test import TestCase +from django.db import models +from django.db.models.loading import BaseAppCache +from django.db.migrations.state import ProjectState, ModelState + + +class StateTests(TestCase): + """ + Tests state construction, rendering and modification by operations. 
+ """ + + def test_create(self): + """ + Tests making a ProjectState from an AppCache + """ + + new_app_cache = BaseAppCache() + + class Author(models.Model): + name = models.CharField(max_length=255) + bio = models.TextField() + age = models.IntegerField(blank=True, null=True) + class Meta: + app_label = "migrations" + app_cache = new_app_cache + unique_together = ["name", "bio"] + + class Book(models.Model): + title = models.CharField(max_length=1000) + author = models.ForeignKey(Author) + class Meta: + app_label = "migrations" + app_cache = new_app_cache + verbose_name = "tome" + db_table = "test_tome" + + project_state = ProjectState.from_app_cache(new_app_cache) + author_state = project_state.models['migrations', 'author'] + book_state = project_state.models['migrations', 'book'] + + self.assertEqual(author_state.app_label, "migrations") + self.assertEqual(author_state.name, "Author") + self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"]) + self.assertEqual(author_state.fields[1][1].max_length, 255) + self.assertEqual(author_state.fields[2][1].null, False) + self.assertEqual(author_state.fields[3][1].null, True) + self.assertEqual(author_state.options, {"unique_together": set(("name", "bio"))}) + self.assertEqual(author_state.bases, (models.Model, )) + + self.assertEqual(book_state.app_label, "migrations") + self.assertEqual(book_state.name, "Book") + self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author"]) + self.assertEqual(book_state.fields[1][1].max_length, 1000) + self.assertEqual(book_state.fields[2][1].null, False) + self.assertEqual(book_state.options, {"verbose_name": "tome", "db_table": "test_tome"}) + self.assertEqual(book_state.bases, (models.Model, )) + + def test_render(self): + """ + Tests rendering a ProjectState into an AppCache. 
+ """ + project_state = ProjectState() + project_state.add_model_state(ModelState( + "migrations", + "Tag", + [ + ("id", models.AutoField(primary_key=True)), + ("name", models.CharField(max_length=100)), + ("hidden", models.BooleanField()), + ], + {}, + None, + )) + + new_app_cache = project_state.render() + self.assertEqual(new_app_cache.get_model("migrations", "Tag")._meta.get_field_by_name("name")[0].max_length, 100) + self.assertEqual(new_app_cache.get_model("migrations", "Tag")._meta.get_field_by_name("hidden")[0].null, False) diff --git a/tests/migrations/test_writer.py b/tests/migrations/test_writer.py new file mode 100644 index 0000000000..22925fee9b --- /dev/null +++ b/tests/migrations/test_writer.py @@ -0,0 +1,84 @@ +# encoding: utf8 +import datetime +from django.utils import six +from django.test import TestCase +from django.db.migrations.writer import MigrationWriter +from django.db import models, migrations + + +class WriterTests(TestCase): + """ + Tests the migration writer (makes migration files from Migration instances) + """ + + def safe_exec(self, string, value=None): + l = {} + try: + exec(string, globals(), l) + except Exception as e: + if value: + self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) + else: + self.fail("Could not exec %r: %s" % (string.strip(), e)) + return l + + def serialize_round_trip(self, value): + string, imports = MigrationWriter.serialize(value) + return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] + + def assertSerializedEqual(self, value): + self.assertEqual(self.serialize_round_trip(value), value) + + def assertSerializedIs(self, value): + self.assertIs(self.serialize_round_trip(value), value) + + def assertSerializedFieldEqual(self, value): + new_value = self.serialize_round_trip(value) + self.assertEqual(value.__class__, new_value.__class__) + self.assertEqual(value.max_length, new_value.max_length) + self.assertEqual(value.null, new_value.null) + self.assertEqual(value.unique, new_value.unique) + + def test_serialize(self): + """ + Tests various different forms of the serializer. + This does not care about formatting, just that the parsed result is + correct, so we always exec() the result and check that. + """ + # Basic values + self.assertSerializedEqual(1) + self.assertSerializedEqual(None) + self.assertSerializedEqual("foobar") + self.assertSerializedEqual(u"föobár") + self.assertSerializedEqual({1: 2}) + self.assertSerializedEqual(["a", 2, True, None]) + self.assertSerializedEqual(set([2, 3, "eighty"])) + self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) + # Datetime stuff + self.assertSerializedEqual(datetime.datetime.utcnow()) + self.assertSerializedEqual(datetime.datetime.utcnow) + self.assertSerializedEqual(datetime.date.today()) + self.assertSerializedEqual(datetime.date.today) + # Django fields + self.assertSerializedFieldEqual(models.CharField(max_length=255)) + self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) + + def test_simple_migration(self): + """ + Tests serializing a simple migration. + """ + migration = type("Migration", (migrations.Migration,), { + "operations": [ + migrations.DeleteModel("MyModel"), + migrations.AddField("OtherModel", "field_name", models.DateTimeField(default=datetime.datetime.utcnow)) + ], + "dependencies": [("testapp", "some_other_one")], + }) + writer = MigrationWriter(migration) + output = writer.as_string() + # It should NOT be unicode. 
+ self.assertIsInstance(output, six.binary_type, "Migration as_string returned unicode") + # We don't test the output formatting - that's too fragile. + # Just make sure it runs for now, and that things look alright. + result = self.safe_exec(output) + self.assertIn("Migration", result) diff --git a/tests/multiple_database/tests.py b/tests/multiple_database/tests.py index 949679418c..629cb1237c 100644 --- a/tests/multiple_database/tests.py +++ b/tests/multiple_database/tests.py @@ -933,7 +933,7 @@ class TestRouter(object): def allow_relation(self, obj1, obj2, **hints): return obj1._state.db in ('default', 'other') and obj2._state.db in ('default', 'other') - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): return True class AuthRouter(object): @@ -960,7 +960,7 @@ class AuthRouter(object): return True return None - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): "Make sure the auth app only appears on the 'other' db" if db == 'other': return model._meta.app_label == 'auth' @@ -1022,30 +1022,30 @@ class RouterTestCase(TestCase): def test_syncdb_selection(self): "Synchronization behavior is predictable" - self.assertTrue(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertTrue(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) - self.assertTrue(router.allow_syncdb('other', User)) - self.assertTrue(router.allow_syncdb('other', Book)) + self.assertTrue(router.allow_migrate('other', User)) + self.assertTrue(router.allow_migrate('other', Book)) # Add the auth router to the chain. # TestRouter is a universal synchronizer, so it should have no effect. router.routers = [TestRouter(), AuthRouter()] - self.assertTrue(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertTrue(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) - self.assertTrue(router.allow_syncdb('other', User)) - self.assertTrue(router.allow_syncdb('other', Book)) + self.assertTrue(router.allow_migrate('other', User)) + self.assertTrue(router.allow_migrate('other', Book)) # Now check what happens if the router order is the other way around router.routers = [AuthRouter(), TestRouter()] - self.assertFalse(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertFalse(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) - self.assertTrue(router.allow_syncdb('other', User)) - self.assertFalse(router.allow_syncdb('other', Book)) + self.assertTrue(router.allow_migrate('other', User)) + self.assertFalse(router.allow_migrate('other', Book)) def test_partial_router(self): "A router can choose to implement a subset of methods" @@ -1062,8 +1062,8 @@ class RouterTestCase(TestCase): self.assertTrue(router.allow_relation(dive, dive)) - self.assertTrue(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertTrue(router.allow_migrate('default', User)) + self.assertTrue(router.allow_migrate('default', Book)) router.routers = [WriteRouter(), AuthRouter(), TestRouter()] @@ -1075,8 +1075,8 @@ class RouterTestCase(TestCase): self.assertTrue(router.allow_relation(dive, dive)) - self.assertFalse(router.allow_syncdb('default', User)) - self.assertTrue(router.allow_syncdb('default', Book)) + self.assertFalse(router.allow_migrate('default', User)) + 
self.assertTrue(router.allow_migrate('default', Book)) def test_database_routing(self): @@ -1607,12 +1607,12 @@ class AuthTestCase(TestCase): self.assertEqual(User.objects.using('other').count(), 1) def test_dumpdata(self): - "Check that dumpdata honors allow_syncdb restrictions on the router" + "Check that dumpdata honors allow_migrate restrictions on the router" User.objects.create_user('alice', 'alice@example.com') User.objects.db_manager('default').create_user('bob', 'bob@example.com') # Check that dumping the default database doesn't try to include auth - # because allow_syncdb prohibits auth on default + # because allow_migrate prohibits auth on default new_io = StringIO() management.call_command('dumpdata', 'auth', format='json', database='default', stdout=new_io) command_output = new_io.getvalue().strip() @@ -1625,10 +1625,10 @@ class AuthTestCase(TestCase): self.assertTrue('"email": "alice@example.com"' in command_output) class AntiPetRouter(object): - # A router that only expresses an opinion on syncdb, + # A router that only expresses an opinion on migrate, # passing pets to the 'other' database - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): "Make sure the auth app only appears on the 'other' db" if db == 'other': return model._meta.object_name == 'Pet' @@ -1917,7 +1917,7 @@ class RouterModelArgumentTestCase(TestCase): class SyncOnlyDefaultDatabaseRouter(object): - def allow_syncdb(self, db, model): + def allow_migrate(self, db, model): return db == DEFAULT_DB_ALIAS diff --git a/tests/schema/__init__.py b/tests/schema/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/schema/models.py b/tests/schema/models.py new file mode 100644 index 0000000000..dc717ec105 --- /dev/null +++ b/tests/schema/models.py @@ -0,0 +1,97 @@ +from django.db import models +from django.db.models.loading import BaseAppCache + +# Because we want to test creation and deletion of these as separate things, +# these models are all inserted into a separate AppCache so the main test +# runner doesn't syncdb them. 
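+#
+# A minimal sketch of the pattern (the Meta attribute is the real hook;
+# the names here are illustrative only):
+#
+#     isolated_cache = BaseAppCache()
+#
+#     class Example(models.Model):
+#         class Meta:
+#             app_cache = isolated_cache  # keeps the model out of the
+#                                         # global cache, and so out of syncdb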
+ +new_app_cache = BaseAppCache() + + +class Author(models.Model): + name = models.CharField(max_length=255) + height = models.PositiveIntegerField(null=True, blank=True) + + class Meta: + app_cache = new_app_cache + + +class AuthorWithM2M(models.Model): + name = models.CharField(max_length=255) + + class Meta: + app_cache = new_app_cache + + +class Book(models.Model): + author = models.ForeignKey(Author) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + # tags = models.ManyToManyField("Tag", related_name="books") + + class Meta: + app_cache = new_app_cache + + +class BookWithM2M(models.Model): + author = models.ForeignKey(Author) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + tags = models.ManyToManyField("TagM2MTest", related_name="books") + + class Meta: + app_cache = new_app_cache + + +class BookWithSlug(models.Model): + author = models.ForeignKey(Author) + title = models.CharField(max_length=100, db_index=True) + pub_date = models.DateTimeField() + slug = models.CharField(max_length=20, unique=True) + + class Meta: + app_cache = new_app_cache + db_table = "schema_book" + + +class Tag(models.Model): + title = models.CharField(max_length=255) + slug = models.SlugField(unique=True) + + class Meta: + app_cache = new_app_cache + + +class TagM2MTest(models.Model): + title = models.CharField(max_length=255) + slug = models.SlugField(unique=True) + + class Meta: + app_cache = new_app_cache + + +class TagIndexed(models.Model): + title = models.CharField(max_length=255) + slug = models.SlugField(unique=True) + + class Meta: + app_cache = new_app_cache + index_together = [["slug", "title"]] + + +class TagUniqueRename(models.Model): + title = models.CharField(max_length=255) + slug2 = models.SlugField(unique=True) + + class Meta: + app_cache = new_app_cache + db_table = "schema_tag" + + +class UniqueTest(models.Model): + year = models.IntegerField() + slug = models.SlugField(unique=False) + + class Meta: + app_cache = new_app_cache + unique_together = ["year", "slug"] diff --git a/tests/schema/tests.py b/tests/schema/tests.py new file mode 100644 index 0000000000..c3764979d6 --- /dev/null +++ b/tests/schema/tests.py @@ -0,0 +1,650 @@ +from __future__ import absolute_import +import datetime +from django.test import TransactionTestCase +from django.utils.unittest import skipUnless +from django.db import connection, DatabaseError, IntegrityError +from django.db.models.fields import IntegerField, TextField, CharField, SlugField +from django.db.models.fields.related import ManyToManyField, ForeignKey +from django.db.transaction import atomic +from .models import Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest + + +class SchemaTests(TransactionTestCase): + """ + Tests that the schema-alteration code works correctly. + + Be aware that these tests are more liable than most to false results, + as sometimes the code to check if a test has worked is almost as complex + as the code it is testing. 
+ """ + + available_apps = [] + + models = [Author, AuthorWithM2M, Book, BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest] + no_table_strings = ["no such table", "unknown table", "does not exist"] + + # Utility functions + + def tearDown(self): + # Delete any tables made for our models + self.delete_tables() + + def delete_tables(self): + "Deletes all model tables for our models for a clean test environment" + cursor = connection.cursor() + connection.disable_constraint_checking() + for model in self.models: + # Remove any M2M tables first + for field in model._meta.local_many_to_many: + with atomic(): + try: + cursor.execute(connection.schema_editor().sql_delete_table % { + "table": connection.ops.quote_name(field.rel.through._meta.db_table), + }) + except DatabaseError as e: + if any([s in str(e).lower() for s in self.no_table_strings]): + pass + else: + raise + # Then remove the main tables + with atomic(): + try: + cursor.execute(connection.schema_editor().sql_delete_table % { + "table": connection.ops.quote_name(model._meta.db_table), + }) + except DatabaseError as e: + if any([s in str(e).lower() for s in self.no_table_strings]): + pass + else: + raise + connection.enable_constraint_checking() + + def column_classes(self, model): + cursor = connection.cursor() + columns = dict( + (d[0], (connection.introspection.get_field_type(d[1], d), d)) + for d in connection.introspection.get_table_description( + cursor, + model._meta.db_table, + ) + ) + # SQLite has a different format for field_type + for name, (type, desc) in columns.items(): + if isinstance(type, tuple): + columns[name] = (type[0], desc) + # SQLite also doesn't error properly + if not columns: + raise DatabaseError("Table does not exist (empty pragma)") + return columns + + # Tests + + def test_creation_deletion(self): + """ + Tries creating a model's table, and then deleting it. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Check that it's there + list(Author.objects.all()) + # Clean up that table + with connection.schema_editor() as editor: + editor.delete_model(Author) + # Check that it's gone + self.assertRaises( + DatabaseError, + lambda: list(Author.objects.all()), + ) + + @skipUnless(connection.features.supports_foreign_keys, "No FK support") + def test_fk(self): + "Tests that creating tables out of FK order, then repointing, works" + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Book) + editor.create_model(Author) + editor.create_model(Tag) + # Check that initial tables are there + list(Author.objects.all()) + list(Book.objects.all()) + # Make sure the FK constraint is present + with self.assertRaises(IntegrityError): + Book.objects.create( + author_id = 1, + title = "Much Ado About Foreign Keys", + pub_date = datetime.datetime.now(), + ) + # Repoint the FK constraint + new_field = ForeignKey(Tag) + new_field.set_attributes_from_name("author") + with connection.schema_editor() as editor: + editor.alter_field( + Book, + Book._meta.get_field_by_name("author")[0], + new_field, + strict=True, + ) + # Make sure the new FK constraint is present + constraints = connection.introspection.get_constraints(connection.cursor(), Book._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == ["author_id"] and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) + break + else: + self.fail("No FK constraint for author_id found") + + def test_add_field(self): + """ + Tests adding fields to models + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure there's no age field + columns = self.column_classes(Author) + self.assertNotIn("age", columns) + # Alter the name field to a TextField + new_field = IntegerField(null=True) + new_field.set_attributes_from_name("age") + with connection.schema_editor() as editor: + editor.add_field( + Author, + new_field, + ) + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['age'][0], "IntegerField") + self.assertEqual(columns['age'][1][6], True) + + def test_alter(self): + """ + Tests simple altering of fields + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the field is right to begin with + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) + # Alter the name field to a TextField + new_field = TextField(null=True) + new_field.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field( + Author, + Author._meta.get_field_by_name("name")[0], + new_field, + strict=True, + ) + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "TextField") + self.assertEqual(columns['name'][1][6], True) + # Change nullability again + new_field2 = TextField(null=False) + new_field2.set_attributes_from_name("name") + with connection.schema_editor() as editor: + editor.alter_field( + Author, + new_field, + new_field2, + strict=True, + ) + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "TextField") + 
self.assertEqual(bool(columns['name'][1][6]), False) + + def test_rename(self): + """ + Tests simple altering of fields + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the field is right to begin with + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + self.assertNotIn("display_name", columns) + # Alter the name field's name + new_field = CharField(max_length=254) + new_field.set_attributes_from_name("display_name") + with connection.schema_editor() as editor: + editor.alter_field( + Author, + Author._meta.get_field_by_name("name")[0], + new_field, + strict = True, + ) + # Ensure the field is right afterwards + columns = self.column_classes(Author) + self.assertEqual(columns['display_name'][0], "CharField") + self.assertNotIn("name", columns) + + def test_m2m_create(self): + """ + Tests M2M fields on models during creation + """ + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(TagM2MTest) + editor.create_model(BookWithM2M) + # Ensure there is now an m2m table there + columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) + self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") + + def test_m2m(self): + """ + Tests adding/removing M2M fields on models + """ + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(AuthorWithM2M) + editor.create_model(TagM2MTest) + # Create an M2M field + new_field = ManyToManyField("schema.TagM2MTest", related_name="authors") + new_field.contribute_to_class(AuthorWithM2M, "tags") + try: + # Ensure there's no m2m table there + self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) + # Add the field + with connection.schema_editor() as editor: + editor.add_field( + Author, + new_field, + ) + # Ensure there is now an m2m table there + columns = self.column_classes(new_field.rel.through) + self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") + # Remove the M2M table again + with connection.schema_editor() as editor: + editor.remove_field( + Author, + new_field, + ) + # Ensure there's no m2m table there + self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through) + finally: + # Cleanup model states + AuthorWithM2M._meta.local_many_to_many.remove(new_field) + + def test_m2m_repoint(self): + """ + Tests repointing M2M fields + """ + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(BookWithM2M) + editor.create_model(TagM2MTest) + editor.create_model(UniqueTest) + # Ensure the M2M exists and points to TagM2MTest + constraints = connection.introspection.get_constraints(connection.cursor(), BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table) + if connection.features.supports_foreign_keys: + for name, details in constraints.items(): + if details['columns'] == ["tagm2mtest_id"] and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_tagm2mtest', 'id')) + break + else: + self.fail("No FK constraint for tagm2mtest_id found") + # Repoint the M2M + new_field = ManyToManyField(UniqueTest) + new_field.contribute_to_class(BookWithM2M, "uniques") + try: + with connection.schema_editor() as editor: + editor.alter_field( + Author, + BookWithM2M._meta.get_field_by_name("tags")[0], + new_field, + ) + # Ensure old M2M is gone + self.assertRaises(DatabaseError, 
self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through) + # Ensure the new M2M exists and points to UniqueTest + constraints = connection.introspection.get_constraints(connection.cursor(), new_field.rel.through._meta.db_table) + if connection.features.supports_foreign_keys: + for name, details in constraints.items(): + if details['columns'] == ["uniquetest_id"] and details['foreign_key']: + self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) + break + else: + self.fail("No FK constraint for uniquetest_id found") + finally: + # Cleanup model states + BookWithM2M._meta.local_many_to_many.remove(new_field) + del BookWithM2M._meta._m2m_cache + + @skipUnless(connection.features.supports_check_constraints, "No check constraints") + def test_check_constraints(self): + """ + Tests creating/deleting CHECK constraints + """ + # Create the tables + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the constraint exists + constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == ["height"] and details['check']: + break + else: + self.fail("No check constraint for height found") + # Alter the column to remove it + new_field = IntegerField(null=True, blank=True) + new_field.set_attributes_from_name("height") + with connection.schema_editor() as editor: + editor.alter_field( + Author, + Author._meta.get_field_by_name("height")[0], + new_field, + strict = True, + ) + constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == ["height"] and details['check']: + self.fail("Check constraint for height found") + # Alter the column to re-add it + with connection.schema_editor() as editor: + editor.alter_field( + Author, + new_field, + Author._meta.get_field_by_name("height")[0], + strict = True, + ) + constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) + for name, details in constraints.items(): + if details['columns'] == ["height"] and details['check']: + break + else: + self.fail("No check constraint for height found") + + def test_unique(self): + """ + Tests removing and adding unique constraints to a single column. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure the field is unique to begin with + Tag.objects.create(title="foo", slug="foo") + self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") + Tag.objects.all().delete() + # Alter the slug field to be non-unique + new_field = SlugField(unique=False) + new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + new_field, + strict = True, + ) + # Ensure the field is no longer unique + Tag.objects.create(title="foo", slug="foo") + Tag.objects.create(title="bar", slug="foo") + Tag.objects.all().delete() + # Alter the slug field to be unique + new_new_field = SlugField(unique=True) + new_new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field( + Tag, + new_field, + new_new_field, + strict = True, + ) + # Ensure the field is unique again + Tag.objects.create(title="foo", slug="foo") + self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") + Tag.objects.all().delete() + # Rename the field + new_field = SlugField(unique=False) + new_field.set_attributes_from_name("slug2") + with connection.schema_editor() as editor: + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + TagUniqueRename._meta.get_field_by_name("slug2")[0], + strict = True, + ) + # Ensure the field is still unique + TagUniqueRename.objects.create(title="foo", slug2="foo") + self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo") + Tag.objects.all().delete() + + def test_unique_together(self): + """ + Tests removing and adding unique_together constraints on a model. + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(UniqueTest) + # Ensure the fields are unique to begin with + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.create(year=2011, slug="foo") + UniqueTest.objects.create(year=2011, slug="bar") + self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") + UniqueTest.objects.all().delete() + # Alter the model to it's non-unique-together companion + with connection.schema_editor() as editor: + editor.alter_unique_together( + UniqueTest, + UniqueTest._meta.unique_together, + [], + ) + # Ensure the fields are no longer unique + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.create(year=2012, slug="foo") + UniqueTest.objects.all().delete() + # Alter it back + new_new_field = SlugField(unique=True) + new_new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_unique_together( + UniqueTest, + [], + UniqueTest._meta.unique_together, + ) + # Ensure the fields are unique again + UniqueTest.objects.create(year=2012, slug="foo") + self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") + UniqueTest.objects.all().delete() + + def test_index_together(self): + """ + Tests removing and adding index_together constraints on a model. 
+ """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure there's no index on the year/slug columns first + self.assertEqual( + False, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() + if c['columns'] == ["slug", "title"] + ), + ) + # Alter the model to add an index + with connection.schema_editor() as editor: + editor.alter_index_together( + Tag, + [], + [("slug", "title")], + ) + # Ensure there is now an index + self.assertEqual( + True, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() + if c['columns'] == ["slug", "title"] + ), + ) + # Alter it back + new_new_field = SlugField(unique=True) + new_new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_index_together( + Tag, + [("slug", "title")], + [], + ) + # Ensure there's no index + self.assertEqual( + False, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tag").values() + if c['columns'] == ["slug", "title"] + ), + ) + + def test_create_index_together(self): + """ + Tests creating models with index_together already defined + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(TagIndexed) + # Ensure there is an index + self.assertEqual( + True, + any( + c["index"] + for c in connection.introspection.get_constraints(connection.cursor(), "schema_tagindexed").values() + if c['columns'] == ["slug", "title"] + ), + ) + + def test_db_table(self): + """ + Tests renaming of the table + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + # Ensure the table is there to begin with + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + # Alter the table + with connection.schema_editor() as editor: + editor.alter_db_table( + Author, + "schema_author", + "schema_otherauthor", + ) + # Ensure the table is there afterwards + Author._meta.db_table = "schema_otherauthor" + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + # Alter the table again + with connection.schema_editor() as editor: + editor.alter_db_table( + Author, + "schema_otherauthor", + "schema_author", + ) + # Ensure the table is still there + Author._meta.db_table = "schema_author" + columns = self.column_classes(Author) + self.assertEqual(columns['name'][0], "CharField") + + def test_indexes(self): + """ + Tests creation/altering of indexes + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Author) + editor.create_model(Book) + # Ensure the table is there and has the right index + self.assertIn( + "title", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Alter to remove the index + new_field = CharField(max_length=100, db_index=False) + new_field.set_attributes_from_name("title") + with connection.schema_editor() as editor: + editor.alter_field( + Book, + Book._meta.get_field_by_name("title")[0], + new_field, + strict = True, + ) + # Ensure the table is there and has no index + self.assertNotIn( + "title", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Alter to re-add the index + with connection.schema_editor() as editor: + editor.alter_field( + Book, + new_field, + Book._meta.get_field_by_name("title")[0], + 
strict = True, + ) + # Ensure the table is there and has the index again + self.assertIn( + "title", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Add a unique column, verify that creates an implicit index + with connection.schema_editor() as editor: + editor.add_field( + Book, + BookWithSlug._meta.get_field_by_name("slug")[0], + ) + self.assertIn( + "slug", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + # Remove the unique, check the index goes with it + new_field2 = CharField(max_length=20, unique=False) + new_field2.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.alter_field( + BookWithSlug, + BookWithSlug._meta.get_field_by_name("slug")[0], + new_field2, + strict = True, + ) + self.assertNotIn( + "slug", + connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table), + ) + + def test_primary_key(self): + """ + Tests altering of the primary key + """ + # Create the table + with connection.schema_editor() as editor: + editor.create_model(Tag) + # Ensure the table is there and has the right PK + self.assertTrue( + connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['id']['primary_key'], + ) + # Alter to change the PK + new_field = SlugField(primary_key=True) + new_field.set_attributes_from_name("slug") + with connection.schema_editor() as editor: + editor.remove_field(Tag, Tag._meta.get_field_by_name("id")[0]) + editor.alter_field( + Tag, + Tag._meta.get_field_by_name("slug")[0], + new_field, + ) + # Ensure the PK changed + self.assertNotIn( + 'id', + connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table), + ) + self.assertTrue( + connection.introspection.get_indexes(connection.cursor(), Tag._meta.db_table)['slug']['primary_key'], + ) diff --git a/tests/utils_tests/test_functional.py b/tests/utils_tests/test_functional.py index df377c6e06..66e051033e 100644 --- a/tests/utils_tests/test_functional.py +++ b/tests/utils_tests/test_functional.py @@ -65,3 +65,15 @@ class FunctionalTestCase(unittest.TestCase): # check that it behaves like a property when there's no instance self.assertIsInstance(A.value, cached_property) + + def test_lazy_equality(self): + """ + Tests that == and != work correctly for Promises. + """ + + lazy_a = lazy(lambda: 4, int) + lazy_b = lazy(lambda: 4, int) + lazy_c = lazy(lambda: 5, int) + + self.assertEqual(lazy_a(), lazy_b()) + self.assertNotEqual(lazy_b(), lazy_c())
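+        # lazy() wraps the callable in a factory; each call above returns a
+        # Promise proxy, so the assertions exercise comparing two proxies
+        # rather than two plain ints.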