from __future__ import absolute_import

import datetime
import unittest

from django.test import TransactionTestCase
from django.db import connection, DatabaseError, IntegrityError, OperationalError
from django.db.models.fields import IntegerField, TextField, CharField, SlugField, BooleanField, BinaryField
from django.db.models.fields.related import ManyToManyField, ForeignKey
from django.db.transaction import atomic
from .models import (Author, AuthorWithM2M, Book, BookWithLongName,
    BookWithSlug, BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename,
    UniqueTest, Thing, TagThrough, BookWithM2MThrough, AuthorTag, AuthorWithM2MThrough)

class SchemaTests(TransactionTestCase):
    """
    Tests that the schema-alteration code works correctly.

    Be aware that these tests are more liable than most to false results,
    as sometimes the code to check if a test has worked is almost as complex
    as the code it is testing.
    """

    available_apps = []

    models = [
        Author, AuthorWithM2M, Book, BookWithLongName, BookWithSlug,
        BookWithM2M, Tag, TagIndexed, TagM2MTest, TagUniqueRename, UniqueTest,
        Thing, TagThrough, BookWithM2MThrough
    ]

    # Utility functions

    def tearDown(self):
        # Delete any tables made for our models
        self.delete_tables()

    def delete_tables(self):
        "Deletes all model tables for our models for a clean test environment"
        with connection.cursor() as cursor:
            connection.disable_constraint_checking()
            table_names = connection.introspection.table_names(cursor)
            for model in self.models:
                # Remove any M2M tables first
                for field in model._meta.local_many_to_many:
                    with atomic():
                        tbl = field.rel.through._meta.db_table
                        if tbl in table_names:
                            cursor.execute(connection.schema_editor().sql_delete_table % {
                                "table": connection.ops.quote_name(tbl),
                            })
                            table_names.remove(tbl)
                # Then remove the main tables
                with atomic():
                    tbl = model._meta.db_table
                    if tbl in table_names:
                        cursor.execute(connection.schema_editor().sql_delete_table % {
                            "table": connection.ops.quote_name(tbl),
                        })
                        table_names.remove(tbl)
        connection.enable_constraint_checking()

    def column_classes(self, model):
        with connection.cursor() as cursor:
            columns = dict(
                (d[0], (connection.introspection.get_field_type(d[1], d), d))
                for d in connection.introspection.get_table_description(
                    cursor,
                    model._meta.db_table,
                )
            )
        # SQLite has a different format for field_type
        for name, (type, desc) in columns.items():
            if isinstance(type, tuple):
                columns[name] = (type[0], desc)
        # SQLite also doesn't error properly
        if not columns:
            raise DatabaseError("Table does not exist (empty pragma)")
        return columns

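    # Note: column_classes() maps each column name to a tuple of
    # (introspected field type, raw description row). The description row
    # follows the PEP 249 cursor.description layout, so index 6 is the
    # null_ok flag that tests below check via columns[name][1][6].
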
    def get_indexes(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_indexes(cursor, table)

    def get_constraints(self, table):
        """
        Get the constraints on a table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

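    # get_constraints() returns a dict keyed by constraint name; each value
    # is a dict with 'columns', 'primary_key', 'unique', 'index', 'check' and
    # 'foreign_key' (a (referenced_table, referenced_column) tuple, or None),
    # which is what the FK/check/index tests below inspect.
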
    # Tests

    def test_creation_deletion(self):
        """
        Tries creating a model's table, and then deleting it.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Check that it's there
        list(Author.objects.all())
        # Clean up that table
        with connection.schema_editor() as editor:
            editor.delete_model(Author)
        # Check that it's gone
        self.assertRaises(
            DatabaseError,
            lambda: list(Author.objects.all()),
        )

    @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support")
    def test_fk(self):
        "Tests that creating tables out of FK order, then repointing, works"
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Book)
            editor.create_model(Author)
            editor.create_model(Tag)
        # Check that initial tables are there
        list(Author.objects.all())
        list(Book.objects.all())
        # Make sure the FK constraint is present
        with self.assertRaises(IntegrityError):
            Book.objects.create(
                author_id=1,
                title="Much Ado About Foreign Keys",
                pub_date=datetime.datetime.now(),
            )
        # Repoint the FK constraint
        new_field = ForeignKey(Tag)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Book,
                Book._meta.get_field_by_name("author")[0],
                new_field,
                strict=True,
            )
        # Make sure the new FK constraint is present
        constraints = self.get_constraints(Book._meta.db_table)
        for name, details in constraints.items():
            if details['columns'] == ["author_id"] and details['foreign_key']:
                self.assertEqual(details['foreign_key'], ('schema_tag', 'id'))
                break
        else:
            self.fail("No FK constraint for author_id found")

    def test_add_field(self):
        """
        Tests adding fields to models
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure there's no age field
        columns = self.column_classes(Author)
        self.assertNotIn("age", columns)
        # Add the new field
        new_field = IntegerField(null=True)
        new_field.set_attributes_from_name("age")
        with connection.schema_editor() as editor:
            editor.add_field(
                Author,
                new_field,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['age'][0], "IntegerField")
        self.assertEqual(columns['age'][1][6], True)

    def test_add_field_temp_default(self):
        """
        Tests adding fields to models with a temporary default
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure there's no age field
        columns = self.column_classes(Author)
        self.assertNotIn("age", columns)
        # Add some rows of data
        Author.objects.create(name="Andrew", height=30)
        Author.objects.create(name="Andrea")
        # Add a not-null field
        new_field = CharField(max_length=30, default="Godwin")
        new_field.set_attributes_from_name("surname")
        with connection.schema_editor() as editor:
            editor.add_field(
                Author,
                new_field,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['surname'][0], "CharField")
        self.assertEqual(columns['surname'][1][6],
                         connection.features.interprets_empty_strings_as_nulls)

    def test_add_field_temp_default_boolean(self):
        """
        Tests adding fields to models with a temporary default where
        the default is False. (#21783)
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure there's no age field
        columns = self.column_classes(Author)
        self.assertNotIn("age", columns)
        # Add some rows of data
        Author.objects.create(name="Andrew", height=30)
        Author.objects.create(name="Andrea")
        # Add a not-null field
        new_field = BooleanField(default=False)
        new_field.set_attributes_from_name("awesome")
        with connection.schema_editor() as editor:
            editor.add_field(
                Author,
                new_field,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        # BooleanFields are stored as TINYINT(1) on MySQL.
        field_type, field_info = columns['awesome']
        if connection.vendor == 'mysql':
            self.assertEqual(field_type, 'IntegerField')
            self.assertEqual(field_info.precision, 1)
        else:
            self.assertEqual(field_type, 'BooleanField')

    def test_add_field_default_transform(self):
        """
        Tests adding fields to models with a default that is not directly
        valid in the database (#22581)
        """
        class TestTransformField(IntegerField):

            # Weird field that saves the count of items in its value
            def get_default(self):
                return self.default

            def get_prep_value(self, value):
                if value is None:
                    return 0
                return len(value)

        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Add some rows of data
        Author.objects.create(name="Andrew", height=30)
        Author.objects.create(name="Andrea")
        # Add the field with a default it needs to cast (to string in this case)
        new_field = TestTransformField(default={1: 2})
        new_field.set_attributes_from_name("thing")
        with connection.schema_editor() as editor:
            editor.add_field(
                Author,
                new_field,
            )
        # Ensure the field is there
        columns = self.column_classes(Author)
        field_type, field_info = columns['thing']
        self.assertEqual(field_type, 'IntegerField')
        # Make sure the values were transformed correctly
        self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)

    def test_add_field_binary(self):
        """
        Tests binary fields get a sane default (#22851)
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Add the new field
        new_field = BinaryField(blank=True)
        new_field.set_attributes_from_name("bits")
        with connection.schema_editor() as editor:
            editor.add_field(
                Author,
                new_field,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        # MySQL annoyingly uses the same backend, so it'll come back as one of
        # these two types.
        self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))

    def test_alter(self):
        """
        Tests simple altering of fields
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")
        self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
        # Alter the name field to a TextField
        new_field = TextField(null=True)
        new_field.set_attributes_from_name("name")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Author,
                Author._meta.get_field_by_name("name")[0],
                new_field,
                strict=True,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "TextField")
        self.assertEqual(columns['name'][1][6], True)
        # Change nullability again
        new_field2 = TextField(null=False)
        new_field2.set_attributes_from_name("name")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Author,
                new_field,
                new_field2,
                strict=True,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "TextField")
        self.assertEqual(bool(columns['name'][1][6]), False)

    def test_rename(self):
        """
        Tests simple renaming of fields
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")
        self.assertNotIn("display_name", columns)
        # Alter the name field's name
        new_field = CharField(max_length=254)
        new_field.set_attributes_from_name("display_name")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Author,
                Author._meta.get_field_by_name("name")[0],
                new_field,
                strict=True,
            )
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['display_name'][0], "CharField")
        self.assertNotIn("name", columns)

    def test_m2m_create(self):
        """
        Tests M2M fields on models during creation
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(TagM2MTest)
            editor.create_model(BookWithM2M)
        # Ensure there is now an m2m table there
        columns = self.column_classes(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through)
        self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")

    def test_m2m_create_through(self):
        """
        Tests M2M fields on models during creation with through models
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(TagThrough)
            editor.create_model(TagM2MTest)
            editor.create_model(BookWithM2MThrough)
        # Ensure there is now an m2m table there
        columns = self.column_classes(TagThrough)
        self.assertEqual(columns['book_id'][0], "IntegerField")
        self.assertEqual(columns['tag_id'][0], "IntegerField")

    def test_m2m(self):
        """
        Tests adding/removing M2M fields on models
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(AuthorWithM2M)
            editor.create_model(TagM2MTest)
        # Create an M2M field
        new_field = ManyToManyField("schema.TagM2MTest", related_name="authors")
        new_field.contribute_to_class(AuthorWithM2M, "tags")
        try:
            # Ensure there's no m2m table there
            self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
            # Add the field
            with connection.schema_editor() as editor:
                editor.add_field(
                    Author,
                    new_field,
                )
            # Ensure there is now an m2m table there
            columns = self.column_classes(new_field.rel.through)
            self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")

            # "Alter" the field. This should not rename the DB table to itself.
            with connection.schema_editor() as editor:
                editor.alter_field(
                    Author,
                    new_field,
                    new_field,
                )

            # Remove the M2M table again
            with connection.schema_editor() as editor:
                editor.remove_field(
                    Author,
                    new_field,
                )
            # Ensure there's no m2m table there
            self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
        finally:
            # Cleanup model states
            AuthorWithM2M._meta.local_many_to_many.remove(new_field)

    def test_m2m_through_alter(self):
        """
        Tests altering M2Ms with explicit through models (should no-op)
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(AuthorTag)
            editor.create_model(AuthorWithM2MThrough)
            editor.create_model(TagM2MTest)
        # Ensure the m2m table is there
        self.assertEqual(len(self.column_classes(AuthorTag)), 3)
        # "Alter" the field's blankness. This should not actually do anything.
        with connection.schema_editor() as editor:
            old_field = AuthorWithM2MThrough._meta.get_field_by_name("tags")[0]
            new_field = ManyToManyField("schema.TagM2MTest", related_name="authors", through="AuthorTag")
            new_field.contribute_to_class(AuthorWithM2MThrough, "tags")
            editor.alter_field(
                Author,
                old_field,
                new_field,
            )
        # Ensure the m2m table is still there
        self.assertEqual(len(self.column_classes(AuthorTag)), 3)

    def test_m2m_repoint(self):
        """
        Tests repointing M2M fields
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithM2M)
            editor.create_model(TagM2MTest)
            editor.create_model(UniqueTest)
        # Ensure the M2M exists and points to TagM2MTest
        constraints = self.get_constraints(BookWithM2M._meta.get_field_by_name("tags")[0].rel.through._meta.db_table)
        if connection.features.supports_foreign_keys:
            for name, details in constraints.items():
                if details['columns'] == ["tagm2mtest_id"] and details['foreign_key']:
                    self.assertEqual(details['foreign_key'], ('schema_tagm2mtest', 'id'))
                    break
            else:
                self.fail("No FK constraint for tagm2mtest_id found")
        # Repoint the M2M
        new_field = ManyToManyField(UniqueTest)
        new_field.contribute_to_class(BookWithM2M, "uniques")
        try:
            with connection.schema_editor() as editor:
                editor.alter_field(
                    Author,
                    BookWithM2M._meta.get_field_by_name("tags")[0],
                    new_field,
                )
            # Ensure old M2M is gone
            self.assertRaises(DatabaseError, self.column_classes, BookWithM2M._meta.get_field_by_name("tags")[0].rel.through)
            # Ensure the new M2M exists and points to UniqueTest
            constraints = self.get_constraints(new_field.rel.through._meta.db_table)
            if connection.features.supports_foreign_keys:
                for name, details in constraints.items():
                    if details['columns'] == ["uniquetest_id"] and details['foreign_key']:
                        self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id'))
                        break
                else:
                    self.fail("No FK constraint for uniquetest_id found")
        finally:
            # Cleanup through table separately
            with connection.schema_editor() as editor:
                editor.remove_field(BookWithM2M, BookWithM2M._meta.get_field_by_name("uniques")[0])
            # Cleanup model states
            BookWithM2M._meta.local_many_to_many.remove(new_field)
            del BookWithM2M._meta._m2m_cache

    @unittest.skipUnless(connection.features.supports_column_check_constraints, "No check constraints")
    def test_check_constraints(self):
        """
        Tests creating/deleting CHECK constraints
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the constraint exists
        constraints = self.get_constraints(Author._meta.db_table)
        for name, details in constraints.items():
            if details['columns'] == ["height"] and details['check']:
                break
        else:
            self.fail("No check constraint for height found")
        # Alter the column to remove it
        new_field = IntegerField(null=True, blank=True)
        new_field.set_attributes_from_name("height")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Author,
                Author._meta.get_field_by_name("height")[0],
                new_field,
                strict=True,
            )
        constraints = self.get_constraints(Author._meta.db_table)
        for name, details in constraints.items():
            if details['columns'] == ["height"] and details['check']:
                self.fail("Check constraint for height found")
        # Alter the column to re-add it
        with connection.schema_editor() as editor:
            editor.alter_field(
                Author,
                new_field,
                Author._meta.get_field_by_name("height")[0],
                strict=True,
            )
        constraints = self.get_constraints(Author._meta.db_table)
        for name, details in constraints.items():
            if details['columns'] == ["height"] and details['check']:
                break
        else:
            self.fail("No check constraint for height found")

    def test_unique(self):
        """
        Tests removing and adding unique constraints to a single column.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
        # Ensure the field is unique to begin with
        Tag.objects.create(title="foo", slug="foo")
        self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
        Tag.objects.all().delete()
        # Alter the slug field to be non-unique
        new_field = SlugField(unique=False)
        new_field.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Tag,
                Tag._meta.get_field_by_name("slug")[0],
                new_field,
                strict=True,
            )
        # Ensure the field is no longer unique
        Tag.objects.create(title="foo", slug="foo")
        Tag.objects.create(title="bar", slug="foo")
        Tag.objects.all().delete()
        # Alter the slug field to be unique
        new_new_field = SlugField(unique=True)
        new_new_field.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Tag,
                new_field,
                new_new_field,
                strict=True,
            )
        # Ensure the field is unique again
        Tag.objects.create(title="foo", slug="foo")
        self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
        Tag.objects.all().delete()
        # Rename the field
        new_field = SlugField(unique=False)
        new_field.set_attributes_from_name("slug2")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Tag,
                Tag._meta.get_field_by_name("slug")[0],
                TagUniqueRename._meta.get_field_by_name("slug2")[0],
                strict=True,
            )
        # Ensure the field is still unique
        TagUniqueRename.objects.create(title="foo", slug2="foo")
        self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo")
        Tag.objects.all().delete()

    def test_unique_together(self):
        """
        Tests removing and adding unique_together constraints on a model.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(UniqueTest)
        # Ensure the fields are unique to begin with
        UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.create(year=2011, slug="foo")
        UniqueTest.objects.create(year=2011, slug="bar")
        self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
        UniqueTest.objects.all().delete()
        # Alter the model to its non-unique-together companion
        with connection.schema_editor() as editor:
            editor.alter_unique_together(
                UniqueTest,
                UniqueTest._meta.unique_together,
                [],
            )
        # Ensure the fields are no longer unique
        UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.all().delete()
        # Alter it back
        new_new_field = SlugField(unique=True)
        new_new_field.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_unique_together(
                UniqueTest,
                [],
                UniqueTest._meta.unique_together,
            )
        # Ensure the fields are unique again
        UniqueTest.objects.create(year=2012, slug="foo")
        self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
        UniqueTest.objects.all().delete()

    def test_index_together(self):
        """
        Tests removing and adding index_together constraints on a model.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
        # Ensure there's no index on the year/slug columns first
        self.assertEqual(
            False,
            any(
                c["index"]
                for c in self.get_constraints("schema_tag").values()
                if c['columns'] == ["slug", "title"]
            ),
        )
        # Alter the model to add an index
        with connection.schema_editor() as editor:
            editor.alter_index_together(
                Tag,
                [],
                [("slug", "title")],
            )
        # Ensure there is now an index
        self.assertEqual(
            True,
            any(
                c["index"]
                for c in self.get_constraints("schema_tag").values()
                if c['columns'] == ["slug", "title"]
            ),
        )
        # Alter it back
        new_new_field = SlugField(unique=True)
        new_new_field.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_index_together(
                Tag,
                [("slug", "title")],
                [],
            )
        # Ensure there's no index
        self.assertEqual(
            False,
            any(
                c["index"]
                for c in self.get_constraints("schema_tag").values()
                if c['columns'] == ["slug", "title"]
            ),
        )

    def test_create_index_together(self):
        """
        Tests creating models with index_together already defined
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(TagIndexed)
        # Ensure there is an index
        self.assertEqual(
            True,
            any(
                c["index"]
                for c in self.get_constraints("schema_tagindexed").values()
                if c['columns'] == ["slug", "title"]
            ),
        )

    def test_db_table(self):
        """
        Tests renaming of the table
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the table is there to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")
        # Alter the table
        with connection.schema_editor() as editor:
            editor.alter_db_table(
                Author,
                "schema_author",
                "schema_otherauthor",
            )
        # Ensure the table is there afterwards
        Author._meta.db_table = "schema_otherauthor"
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")
        # Alter the table again
        with connection.schema_editor() as editor:
            editor.alter_db_table(
                Author,
                "schema_otherauthor",
                "schema_author",
            )
        # Ensure the table is still there
        Author._meta.db_table = "schema_author"
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")

    def test_indexes(self):
        """
        Tests creation/altering of indexes
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Ensure the table is there and has the right index
        self.assertIn(
            "title",
            self.get_indexes(Book._meta.db_table),
        )
        # Alter to remove the index
        new_field = CharField(max_length=100, db_index=False)
        new_field.set_attributes_from_name("title")
        with connection.schema_editor() as editor:
            editor.alter_field(
                Book,
                Book._meta.get_field_by_name("title")[0],
                new_field,
                strict=True,
            )
        # Ensure the table is there and has no index
        self.assertNotIn(
            "title",
            self.get_indexes(Book._meta.db_table),
        )
        # Alter to re-add the index
        with connection.schema_editor() as editor:
            editor.alter_field(
                Book,
                new_field,
                Book._meta.get_field_by_name("title")[0],
                strict=True,
            )
        # Ensure the table is there and has the index again
        self.assertIn(
            "title",
            self.get_indexes(Book._meta.db_table),
        )
        # Add a unique column, verify that creates an implicit index
        with connection.schema_editor() as editor:
            editor.add_field(
                Book,
                BookWithSlug._meta.get_field_by_name("slug")[0],
            )
        self.assertIn(
            "slug",
            self.get_indexes(Book._meta.db_table),
        )
        # Remove the unique, check the index goes with it
        new_field2 = CharField(max_length=20, unique=False)
        new_field2.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_field(
                BookWithSlug,
                BookWithSlug._meta.get_field_by_name("slug")[0],
                new_field2,
                strict=True,
            )
        self.assertNotIn(
            "slug",
            self.get_indexes(Book._meta.db_table),
        )

    def test_primary_key(self):
        """
        Tests altering of the primary key
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
        # Ensure the table is there and has the right PK
        self.assertTrue(
            self.get_indexes(Tag._meta.db_table)['id']['primary_key'],
        )
        # Alter to change the PK
        new_field = SlugField(primary_key=True)
        new_field.set_attributes_from_name("slug")
        new_field.model = Tag
        with connection.schema_editor() as editor:
            editor.remove_field(Tag, Tag._meta.get_field_by_name("id")[0])
            editor.alter_field(
                Tag,
                Tag._meta.get_field_by_name("slug")[0],
                new_field,
            )
        # Ensure the PK changed
        self.assertNotIn(
            'id',
            self.get_indexes(Tag._meta.db_table),
        )
        self.assertTrue(
            self.get_indexes(Tag._meta.db_table)['slug']['primary_key'],
        )

    def test_context_manager_exit(self):
        """
        Ensures transaction is correctly closed when an error occurs
        inside a SchemaEditor context.
        """
        class SomeError(Exception):
            pass

        try:
            with connection.schema_editor():
                raise SomeError
        except SomeError:
            self.assertFalse(connection.in_atomic_block)

    def test_foreign_key_index_long_names_regression(self):
        """
        Regression test for #21497. Only affects databases that support
        foreign keys.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithLongName)
        # Find the properly shortened column name
        column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id")
        column_name = column_name[1:-1].lower()  # unquote, and, for Oracle, un-upcase
        # Ensure the table is there and has an index on the column
        self.assertIn(
            column_name,
            self.get_indexes(BookWithLongName._meta.db_table),
        )

    def test_creation_deletion_reserved_names(self):
        """
        Tries creating a model's table, and then deleting it when it has a
        SQL reserved name.
        """
        # Create the table
        with connection.schema_editor() as editor:
            try:
                editor.create_model(Thing)
            except OperationalError as e:
                self.fail("Errors when applying initial migration for a model "
                          "with a table named after a SQL reserved word: %s" % e)
        # Check that it's there
        list(Thing.objects.all())
        # Clean up that table
        with connection.schema_editor() as editor:
            editor.delete_model(Thing)
        # Check that it's gone
        self.assertRaises(
            DatabaseError,
            lambda: list(Thing.objects.all()),
        )