mirror of https://github.com/django/django.git
db_index alteration mostly working
This commit is contained in:
parent d3d1e59921
commit d865503389
@@ -104,8 +104,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints (unique, pk, fk, check) across one or more columns.
-        Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool, 'foreign_key': None|(tbl, col)}}
+        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
         """
         constraints = {}
         # Get the actual constraint names and columns
@@ -124,6 +123,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                     'columns': set(),
                     'primary_key': False,
                     'unique': False,
+                    'index': False,
+                    'check': False,
                     'foreign_key': (ref_table, ref_column) if ref_column else None,
                 }
             constraints[constraint]['columns'].add(column)
@@ -142,5 +143,19 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 constraints[constraint]['unique'] = True
             elif kind.lower() == "unique":
                 constraints[constraint]['unique'] = True
+        # Now add in the indexes
+        cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
+        for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]:
+            if index not in constraints:
+                constraints[index] = {
+                    'columns': set(),
+                    'primary_key': False,
+                    'unique': False,
+                    'index': True,
+                    'check': False,
+                    'foreign_key': None,
+                }
+            constraints[index]['index'] = True
+            constraints[index]['columns'].add(column)
         # Return
         return constraints
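For reference, a minimal sketch (not part of this commit) of how the richer dictionary returned by the MySQL get_constraints() can be consumed. It assumes a configured Django connection whose backend provides this method; the table name "myapp_book" is hypothetical.

# Illustrative only: list plain (non-unique, non-PK) indexes for a table.
from django.db import connection

def plain_index_names(table_name):
    cursor = connection.cursor()
    constraints = connection.introspection.get_constraints(cursor, table_name)
    # Each value now carries 'columns', 'primary_key', 'unique', 'index',
    # 'check' and 'foreign_key' keys.
    return [
        name for name, info in constraints.items()
        if info['index'] and not info['unique'] and not info['primary_key']
    ]

print(plain_index_names("myapp_book"))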
@@ -15,7 +15,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
     sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
     sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
 
-    sql_delete_index = "DROP INDEX %(name)s ON %(table_name)s"
+    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
 
     sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
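The placeholder rename matters because the base schema editor fills sql_delete_index with a dict keyed by "table" and "name" (see the alter_field hunk further down); with the old %(table_name)s placeholder the substitution would raise KeyError. A standalone sketch with made-up values:

# Sketch of the %-formatting contract; the table and index names are hypothetical.
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
params = {"table": "`schema_book`", "name": "schema_book_title_uniq"}

print(sql_delete_index % params)
# DROP INDEX schema_book_title_uniq ON `schema_book`

# The old template used %(table_name)s, which is not a key in params,
# so the same substitution would raise KeyError: 'table_name'.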
@@ -91,32 +91,87 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints (unique, pk, fk, check) across one or more columns.
-        Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}}
+        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
         """
         constraints = {}
-        # Loop over the constraint tables, collecting things as constraints
-        ifsc_tables = ["constraint_column_usage", "key_column_usage"]
-        for ifsc_table in ifsc_tables:
-            cursor.execute("""
-                SELECT kc.constraint_name, kc.column_name, c.constraint_type
-                FROM information_schema.%s AS kc
-                JOIN information_schema.table_constraints AS c ON
-                    kc.table_schema = c.table_schema AND
-                    kc.table_name = c.table_name AND
-                    kc.constraint_name = c.constraint_name
-                WHERE
-                    kc.table_schema = %%s AND
-                    kc.table_name = %%s
-            """ % ifsc_table, ["public", table_name])
-            for constraint, column, kind in cursor.fetchall():
-                # If we're the first column, make the record
-                if constraint not in constraints:
-                    constraints[constraint] = {
-                        "columns": set(),
-                        "primary_key": kind.lower() == "primary key",
-                        "unique": kind.lower() in ["primary key", "unique"],
-                    }
-                # Record the details
-                constraints[constraint]['columns'].add(column)
+        # Loop over the key table, collecting things as constraints
+        # This will get PKs, FKs, and uniques, but not CHECK
+        cursor.execute("""
+            SELECT
+                kc.constraint_name,
+                kc.column_name,
+                c.constraint_type,
+                array(SELECT table_name::text || '.' || column_name::text FROM information_schema.constraint_column_usage WHERE constraint_name = kc.constraint_name)
+            FROM information_schema.key_column_usage AS kc
+            JOIN information_schema.table_constraints AS c ON
+                kc.table_schema = c.table_schema AND
+                kc.table_name = c.table_name AND
+                kc.constraint_name = c.constraint_name
+            WHERE
+                kc.table_schema = %s AND
+                kc.table_name = %s
+        """, ["public", table_name])
+        for constraint, column, kind, used_cols in cursor.fetchall():
+            # If we're the first column, make the record
+            if constraint not in constraints:
+                constraints[constraint] = {
+                    "columns": set(),
+                    "primary_key": kind.lower() == "primary key",
+                    "unique": kind.lower() in ["primary key", "unique"],
+                    "foreign_key": set([tuple(x.split(".", 1)) for x in used_cols]) if kind.lower() == "foreign key" else None,
+                    "check": False,
+                    "index": False,
+                }
+            # Record the details
+            constraints[constraint]['columns'].add(column)
+        # Now get CHECK constraint columns
+        cursor.execute("""
+            SELECT kc.constraint_name, kc.column_name
+            FROM information_schema.constraint_column_usage AS kc
+            JOIN information_schema.table_constraints AS c ON
+                kc.table_schema = c.table_schema AND
+                kc.table_name = c.table_name AND
+                kc.constraint_name = c.constraint_name
+            WHERE
+                c.constraint_type = 'CHECK' AND
+                kc.table_schema = %s AND
+                kc.table_name = %s
+        """, ["public", table_name])
+        for constraint, column in cursor.fetchall():
+            # If we're the first column, make the record
+            if constraint not in constraints:
+                constraints[constraint] = {
+                    "columns": set(),
+                    "primary_key": False,
+                    "unique": False,
+                    "foreign_key": False,
+                    "check": True,
+                    "index": False,
+                }
+            # Record the details
+            constraints[constraint]['columns'].add(column)
+        # Now get indexes
+        cursor.execute("""
+            SELECT c2.relname, attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
+            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
+                pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
+            WHERE c.oid = idx.indrelid
+                AND idx.indexrelid = c2.oid
+                AND attr.attrelid = c.oid
+                AND attr.attnum = idx.indkey[0]
+                AND c.relname = %s
+        """, [table_name])
+        for index, column, coli, unique, primary in cursor.fetchall():
+            # If we're the first column, make the record
+            if index not in constraints:
+                constraints[index] = {
+                    "columns": set(),
+                    "primary_key": False,
+                    "unique": False,
+                    "foreign_key": False,
+                    "check": False,
+                    "index": True,
+                }
+            # Record the details
+            constraints[index]['columns'].add(column)
         return constraints
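A quick illustration (not from the commit) of how the array(...) column feeds the foreign_key value: each element is "table.column" text, split once on the dot. The sample data here is made up.

# used_cols stands in for what the array(...) subquery returns for a FK row.
used_cols = ["schema_author.id"]
kind = "FOREIGN KEY"

foreign_key = (
    set([tuple(x.split(".", 1)) for x in used_cols])
    if kind.lower() == "foreign key" else None
)
print(foreign_key)  # {('schema_author', 'id')}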
@@ -54,7 +54,7 @@ class BaseDatabaseSchemaEditor(object):
     sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
     sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
 
-    sql_create_index = "CREATE %(unique)s INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;"
+    sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;"
     sql_delete_index = "DROP INDEX %(name)s"
 
     sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
@@ -181,7 +181,6 @@ class BaseDatabaseSchemaEditor(object):
         if field.db_index:
             self.deferred_sql.append(
                 self.sql_create_index % {
-                    "unique": "",
                     "name": self._create_index_name(model, [field.column], suffix=""),
                     "table": self.quote_name(model._meta.db_table),
                     "columns": self.quote_name(field.column),
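With %(unique)s gone from the template, the dict built by create_model (and by the new index branch in alter_field below) no longer needs a "unique" key. A hedged sketch of the rendered statement, with hypothetical names of the sort _create_index_name() might produce:

# Illustrative rendering of the simplified template; names are made up.
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;"
print(sql_create_index % {
    "name": "schema_book_title_idx",
    "table": '"schema_book"',
    "columns": '"title"',
    "extra": "",
})
# CREATE INDEX schema_book_title_idx ON "schema_book" ("title");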
@@ -350,12 +349,13 @@ class BaseDatabaseSchemaEditor(object):
             }
         self.execute(sql)
 
-    def alter_field(self, model, old_field, new_field):
+    def alter_field(self, model, old_field, new_field, strict=False):
         """
         Allows a field's type, uniqueness, nullability, default, column,
         constraints etc. to be modified.
         Requires a copy of the old field as well so we can only perform
         changes that are required.
+        If strict is true, raises errors if the old column does not match old_field precisely.
         """
         # Ensure this field is even column-based
         old_type = old_field.db_type(connection=self.connection)
@@ -372,18 +372,36 @@ class BaseDatabaseSchemaEditor(object):
         if old_field.unique and not new_field.unique:
             # Find the unique constraint for this field
             constraint_names = self._constraint_names(model, [old_field.column], unique=True)
-            if len(constraint_names) != 1:
+            if strict and len(constraint_names) != 1:
                 raise ValueError("Found wrong number (%s) of constraints for %s.%s" % (
                     len(constraint_names),
                     model._meta.db_table,
                     old_field.column,
                 ))
-            self.execute(
-                self.sql_delete_unique % {
-                    "table": self.quote_name(model._meta.db_table),
-                    "name": constraint_names[0],
-                },
-            )
+            for constraint_name in constraint_names:
+                self.execute(
+                    self.sql_delete_unique % {
+                        "table": self.quote_name(model._meta.db_table),
+                        "name": constraint_name,
+                    },
+                )
+        # Removed an index?
+        if old_field.db_index and not new_field.db_index and not old_field.unique and not new_field.unique:
+            # Find the index for this field
+            index_names = self._constraint_names(model, [old_field.column], index=True)
+            if strict and len(index_names) != 1:
+                raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
+                    len(index_names),
+                    model._meta.db_table,
+                    old_field.column,
+                ))
+            for index_name in index_names:
+                self.execute(
+                    self.sql_delete_index % {
+                        "table": self.quote_name(model._meta.db_table),
+                        "name": index_name,
+                    }
+                )
         # Have they renamed the column?
         if old_field.column != new_field.column:
             self.execute(self.sql_rename_column % {
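The guard on the new branch only fires when db_index drops from True to False and neither side is unique; unique columns are already covered by the unique-constraint branch above. A small check of that condition, constructing fields directly rather than pulling them from a model:

# Minimal check of the condition guarding the index drop (illustrative only).
from django.db.models import CharField

old_field = CharField(max_length=100, db_index=True)
new_field = CharField(max_length=100, db_index=False)

should_drop_index = (
    old_field.db_index and not new_field.db_index
    and not old_field.unique and not new_field.unique
)
print(should_drop_index)  # True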
@@ -463,6 +481,16 @@ class BaseDatabaseSchemaEditor(object):
                     "columns": self.quote_name(new_field.column),
                 }
             )
+        # Added an index?
+        if not old_field.db_index and new_field.db_index and not old_field.unique and not new_field.unique:
+            self.execute(
+                self.sql_create_index % {
+                    "table": self.quote_name(model._meta.db_table),
+                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
+                    "columns": self.quote_name(new_field.column),
+                    "extra": "",
+                }
+            )
 
     def _type_for_alter(self, field):
         """
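Taken together with the removal branch, toggling db_index on an existing column now looks roughly like the test_indexes test added at the end of this diff; a condensed sketch, assuming the schema test models (Book) and a connection whose backend supports schema_editor():

# Condensed from the test added below; Book comes from the schema test app.
from django.db import connection
from django.db.models import CharField

new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")

editor = connection.schema_editor()
editor.start()
editor.alter_field(
    Book,                                      # model with title db_index=True
    Book._meta.get_field_by_name("title")[0],  # old field definition
    new_field,                                 # same column, no index
    strict=True,                               # raise if introspection disagrees
)
editor.commit()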
@@ -490,7 +518,7 @@ class BaseDatabaseSchemaEditor(object):
             index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part)
         return index_name
 
-    def _constraint_names(self, model, column_names, unique=None, primary_key=None):
+    def _constraint_names(self, model, column_names, unique=None, primary_key=None, index=None):
         "Returns all constraint names matching the columns and conditions"
         column_names = set(column_names)
         constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table)
@@ -501,5 +529,7 @@ class BaseDatabaseSchemaEditor(object):
                     continue
                 if primary_key is not None and infodict['primary_key'] != primary_key:
                     continue
+                if index is not None and infodict['index'] != index:
+                    continue
                 result.append(name)
         return result
@@ -21,7 +21,7 @@ class AuthorWithM2M(models.Model):
 
 class Book(models.Model):
     author = models.ForeignKey(Author)
-    title = models.CharField(max_length=100)
+    title = models.CharField(max_length=100, db_index=True)
     pub_date = models.DateTimeField()
     #tags = models.ManyToManyField("Tag", related_name="books")
@@ -179,7 +179,8 @@ class SchemaTests(TestCase):
             Author,
             Author._meta.get_field_by_name("name")[0],
             new_field,
-        )
+            strict=True,
+        )
         editor.commit()
         # Ensure the field is right afterwards
         columns = self.column_classes(Author)
@@ -208,6 +209,7 @@ class SchemaTests(TestCase):
             Author,
             Author._meta.get_field_by_name("name")[0],
             new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is right afterwards
@@ -276,6 +278,7 @@ class SchemaTests(TestCase):
             Tag,
             Tag._meta.get_field_by_name("slug")[0],
             new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is no longer unique
@@ -291,6 +294,7 @@ class SchemaTests(TestCase):
             Tag,
             new_field,
             new_new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is unique again
@@ -306,6 +310,7 @@ class SchemaTests(TestCase):
             Tag,
             Tag._meta.get_field_by_name("slug")[0],
             TagUniqueRename._meta.get_field_by_name("slug2")[0],
+            strict=True,
         )
         editor.commit()
         # Ensure the field is still unique
@@ -395,3 +400,81 @@ class SchemaTests(TestCase):
         Author._meta.db_table = "schema_author"
         columns = self.column_classes(Author)
         self.assertEqual(columns['name'][0], "CharField")
+
+    def test_indexes(self):
+        """
+        Tests creation/altering of indexes
+        """
+        # Create the table
+        editor = connection.schema_editor()
+        editor.start()
+        editor.create_model(Author)
+        editor.create_model(Book)
+        editor.commit()
+        # Ensure the table is there and has the right index
+        self.assertIn(
+            "title",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Alter to remove the index
+        new_field = CharField(max_length=100, db_index=False)
+        new_field.set_attributes_from_name("title")
+        editor = connection.schema_editor()
+        editor.start()
+        editor.alter_field(
+            Book,
+            Book._meta.get_field_by_name("title")[0],
+            new_field,
+            strict=True,
+        )
+        editor.commit()
+        # Ensure the table is there and has no index
+        self.assertNotIn(
+            "title",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Alter to re-add the index
+        editor = connection.schema_editor()
+        editor.start()
+        editor.alter_field(
+            Book,
+            new_field,
+            Book._meta.get_field_by_name("title")[0],
+            strict=True,
+        )
+        editor.commit()
+        # Ensure the table is there and has the index again
+        self.assertIn(
+            "title",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Add a unique column, verify that creates an implicit index
+        new_field = CharField(max_length=20, unique=True)
+        new_field.set_attributes_from_name("slug")
+        editor = connection.schema_editor()
+        editor.start()
+        editor.create_field(
+            Book,
+            new_field,
+        )
+        editor.commit()
+        self.assertIn(
+            "slug",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Remove the unique, check the index goes with it
+        new_field2 = CharField(max_length=20, unique=False)
+        new_field2.set_attributes_from_name("slug")
+        editor = connection.schema_editor()
+        editor.start()
+        editor.alter_field(
+            Book,
+            new_field,
+            new_field2,
+            strict=True,
+        )
+        editor.commit()
+        self.assertNotIn(
+            "slug",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )