db_index alteration mostly working

Andrew Godwin 2012-08-30 23:11:56 +01:00
parent d3d1e59921
commit d865503389
6 changed files with 225 additions and 42 deletions
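In short: the schema editor can now add and drop plain (non-unique) indexes when a field's db_index flag changes, and alter_field grows a strict flag that turns introspection mismatches into errors. A minimal sketch of the call pattern this enables, using the Book test model defined further down (the editor lifecycle mirrors the new test_indexes test):

    from django.db import connection
    from django.db.models import CharField

    # Book is the schema-test model below (its title field has db_index=True).
    new_field = CharField(max_length=100, db_index=False)
    new_field.set_attributes_from_name("title")

    editor = connection.schema_editor()
    editor.start()
    editor.alter_field(
        Book,
        Book._meta.get_field_by_name("title")[0],  # old field, db_index=True
        new_field,                                 # new field, db_index=False
        strict=True,  # raise if introspection finds an unexpected number of indexes
    )
    editor.commit()  # the editor drops the existing index on title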

View File

@@ -104,8 +104,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints (unique, pk, fk, check) across one or more columns.
-        Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool, 'foreign_key': None|(tbl, col)}}
+        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
         """
         constraints = {}
         # Get the actual constraint names and columns
@@ -124,6 +123,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                     'columns': set(),
                     'primary_key': False,
                     'unique': False,
+                    'index': False,
+                    'check': False,
                     'foreign_key': (ref_table, ref_column) if ref_column else None,
                 }
             constraints[constraint]['columns'].add(column)
@@ -142,5 +143,19 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 constraints[constraint]['unique'] = True
             elif kind.lower() == "unique":
                 constraints[constraint]['unique'] = True
+        # Now add in the indexes
+        cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
+        for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]:
+            if index not in constraints:
+                constraints[index] = {
+                    'columns': set(),
+                    'primary_key': False,
+                    'unique': False,
+                    'index': True,
+                    'check': False,
+                    'foreign_key': None,
+                }
+            constraints[index]['index'] = True
+            constraints[index]['columns'].add(column)
         # Return
         return constraints
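For reference, a record produced by the SHOW INDEX loop above (MySQL backend) would look roughly like this; a sketch only, and the index name is hypothetical (real names come from _create_index_name):

    constraints = {
        "schema_book_title_idx": {   # hypothetical index name
            "columns": {"title"},
            "primary_key": False,
            "unique": False,
            "index": True,
            "check": False,
            "foreign_key": None,
        },
    }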

View File

@@ -15,7 +15,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
     sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
     sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
-    sql_delete_index = "DROP INDEX %(name)s ON %(table_name)s"
+    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
     sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
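The placeholder rename matters because the base editor's new "Removed an index?" branch (later in this commit) fills the template with a "table" key; with %(table_name)s the interpolation would raise a KeyError. A quick sketch of the interpolation, with hypothetical identifiers:

    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"

    print(sql_delete_index % {
        "name": "`schema_book_title_idx`",  # hypothetical index name
        "table": "`schema_book`",
    })
    # -> DROP INDEX `schema_book_title_idx` ON `schema_book`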

View File

@@ -91,32 +91,87 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
     def get_constraints(self, cursor, table_name):
         """
-        Retrieves any constraints (unique, pk, fk, check) across one or more columns.
-        Returns {'cnname': {'columns': set(columns), 'primary_key': bool, 'unique': bool}}
+        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
         """
         constraints = {}
-        # Loop over the constraint tables, collecting things as constraints
-        ifsc_tables = ["constraint_column_usage", "key_column_usage"]
-        for ifsc_table in ifsc_tables:
-            cursor.execute("""
-                SELECT kc.constraint_name, kc.column_name, c.constraint_type
-                FROM information_schema.%s AS kc
-                JOIN information_schema.table_constraints AS c ON
-                    kc.table_schema = c.table_schema AND
-                    kc.table_name = c.table_name AND
-                    kc.constraint_name = c.constraint_name
-                WHERE
-                    kc.table_schema = %%s AND
-                    kc.table_name = %%s
-            """ % ifsc_table, ["public", table_name])
-            for constraint, column, kind in cursor.fetchall():
-                # If we're the first column, make the record
-                if constraint not in constraints:
-                    constraints[constraint] = {
-                        "columns": set(),
-                        "primary_key": kind.lower() == "primary key",
-                        "unique": kind.lower() in ["primary key", "unique"],
-                    }
-                # Record the details
-                constraints[constraint]['columns'].add(column)
+        # Loop over the key table, collecting things as constraints
+        # This will get PKs, FKs, and uniques, but not CHECK
+        cursor.execute("""
+            SELECT
+                kc.constraint_name,
+                kc.column_name,
+                c.constraint_type,
+                array(SELECT table_name::text || '.' || column_name::text FROM information_schema.constraint_column_usage WHERE constraint_name = kc.constraint_name)
+            FROM information_schema.key_column_usage AS kc
+            JOIN information_schema.table_constraints AS c ON
+                kc.table_schema = c.table_schema AND
+                kc.table_name = c.table_name AND
+                kc.constraint_name = c.constraint_name
+            WHERE
+                kc.table_schema = %s AND
+                kc.table_name = %s
+        """, ["public", table_name])
+        for constraint, column, kind, used_cols in cursor.fetchall():
+            # If we're the first column, make the record
+            if constraint not in constraints:
+                constraints[constraint] = {
+                    "columns": set(),
+                    "primary_key": kind.lower() == "primary key",
+                    "unique": kind.lower() in ["primary key", "unique"],
+                    "foreign_key": set([tuple(x.split(".", 1)) for x in used_cols]) if kind.lower() == "foreign key" else None,
+                    "check": False,
+                    "index": False,
+                }
+            # Record the details
+            constraints[constraint]['columns'].add(column)
+        # Now get CHECK constraint columns
+        cursor.execute("""
+            SELECT kc.constraint_name, kc.column_name
+            FROM information_schema.constraint_column_usage AS kc
+            JOIN information_schema.table_constraints AS c ON
+                kc.table_schema = c.table_schema AND
+                kc.table_name = c.table_name AND
+                kc.constraint_name = c.constraint_name
+            WHERE
+                c.constraint_type = 'CHECK' AND
+                kc.table_schema = %s AND
+                kc.table_name = %s
+        """, ["public", table_name])
+        for constraint, column in cursor.fetchall():
+            # If we're the first column, make the record
+            if constraint not in constraints:
+                constraints[constraint] = {
+                    "columns": set(),
+                    "primary_key": False,
+                    "unique": False,
+                    "foreign_key": False,
+                    "check": True,
+                    "index": False,
+                }
+            # Record the details
+            constraints[constraint]['columns'].add(column)
+        # Now get indexes
+        cursor.execute("""
+            SELECT c2.relname, attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
+            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
+                pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
+            WHERE c.oid = idx.indrelid
+                AND idx.indexrelid = c2.oid
+                AND attr.attrelid = c.oid
+                AND attr.attnum = idx.indkey[0]
+                AND c.relname = %s
+        """, [table_name])
+        for index, column, coli, unique, primary in cursor.fetchall():
+            # If we're the first column, make the record
+            if index not in constraints:
+                constraints[index] = {
+                    "columns": set(),
+                    "primary_key": False,
+                    "unique": False,
+                    "foreign_key": False,
+                    "check": False,
+                    "index": True,
+                }
+            # Record the details
+            constraints[index]['columns'].add(column)
         return constraints
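Unlike the MySQL version, the PostgreSQL introspection records foreign keys as a set of (table, column) pairs split out of the array(...) subselect. Sketched against the test models (Book.author pointing at Author, table schema_author), a FK record would look roughly like this; the constraint name is hypothetical:

    constraints["schema_book_author_id_fkey"] = {   # hypothetical constraint name
        "columns": {"author_id"},
        "primary_key": False,
        "unique": False,
        "foreign_key": {("schema_author", "id")},   # split from "table.column" strings
        "check": False,
        "index": False,
    }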

View File

@@ -54,7 +54,7 @@ class BaseDatabaseSchemaEditor(object):
     sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
     sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
-    sql_create_index = "CREATE %(unique)s INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;"
+    sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;"
     sql_delete_index = "DROP INDEX %(name)s"
     sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
@@ -181,7 +181,6 @@ class BaseDatabaseSchemaEditor(object):
             if field.db_index:
                 self.deferred_sql.append(
                     self.sql_create_index % {
-                        "unique": "",
                         "name": self._create_index_name(model, [field.column], suffix=""),
                         "table": self.quote_name(model._meta.db_table),
                         "columns": self.quote_name(field.column),
@@ -350,12 +349,13 @@ class BaseDatabaseSchemaEditor(object):
             }
         self.execute(sql)
 
-    def alter_field(self, model, old_field, new_field):
+    def alter_field(self, model, old_field, new_field, strict=False):
         """
         Allows a field's type, uniqueness, nullability, default, column,
         constraints etc. to be modified.
         Requires a copy of the old field as well so we can only perform
         changes that are required.
+        If strict is true, raises errors if the old column does not match old_field precisely.
         """
         # Ensure this field is even column-based
         old_type = old_field.db_type(connection=self.connection)
@@ -372,18 +372,36 @@ class BaseDatabaseSchemaEditor(object):
         if old_field.unique and not new_field.unique:
             # Find the unique constraint for this field
             constraint_names = self._constraint_names(model, [old_field.column], unique=True)
-            if len(constraint_names) != 1:
+            if strict and len(constraint_names) != 1:
                 raise ValueError("Found wrong number (%s) of constraints for %s.%s" % (
                     len(constraint_names),
                     model._meta.db_table,
                     old_field.column,
                 ))
-            self.execute(
-                self.sql_delete_unique % {
-                    "table": self.quote_name(model._meta.db_table),
-                    "name": constraint_names[0],
-                },
-            )
+            for constraint_name in constraint_names:
+                self.execute(
+                    self.sql_delete_unique % {
+                        "table": self.quote_name(model._meta.db_table),
+                        "name": constraint_name,
+                    },
+                )
+        # Removed an index?
+        if old_field.db_index and not new_field.db_index and not old_field.unique and not new_field.unique:
+            # Find the index for this field
+            index_names = self._constraint_names(model, [old_field.column], index=True)
+            if strict and len(index_names) != 1:
+                raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
+                    len(index_names),
+                    model._meta.db_table,
+                    old_field.column,
+                ))
+            for index_name in index_names:
+                self.execute(
+                    self.sql_delete_index % {
+                        "table": self.quote_name(model._meta.db_table),
+                        "name": index_name,
+                    }
+                )
         # Have they renamed the column?
         if old_field.column != new_field.column:
             self.execute(self.sql_rename_column % {
@@ -463,6 +481,16 @@ class BaseDatabaseSchemaEditor(object):
                     "columns": self.quote_name(new_field.column),
                 }
             )
+        # Added an index?
+        if not old_field.db_index and new_field.db_index and not old_field.unique and not new_field.unique:
+            self.execute(
+                self.sql_create_index % {
+                    "table": self.quote_name(model._meta.db_table),
+                    "name": self._create_index_name(model, [new_field.column], suffix="_uniq"),
+                    "columns": self.quote_name(new_field.column),
+                    "extra": "",
+                }
+            )
 
     def _type_for_alter(self, field):
         """
@@ -490,7 +518,7 @@ class BaseDatabaseSchemaEditor(object):
             index_name = '%s%s' % (table_name[:(self.connection.features.max_index_name_length - len(part))], part)
         return index_name
 
-    def _constraint_names(self, model, column_names, unique=None, primary_key=None):
+    def _constraint_names(self, model, column_names, unique=None, primary_key=None, index=None):
         "Returns all constraint names matching the columns and conditions"
         column_names = set(column_names)
         constraints = self.connection.introspection.get_constraints(self.connection.cursor(), model._meta.db_table)
@@ -501,5 +529,7 @@ class BaseDatabaseSchemaEditor(object):
                 continue
            if primary_key is not None and infodict['primary_key'] != unique:
                 continue
+            if index is not None and infodict['index'] != index:
+                continue
             result.append(name)
         return result
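Tying the templates to the new alter_field branches: when db_index flips, the editor looks the index up via _constraint_names(..., index=True) and then interpolates sql_delete_index or sql_create_index. A sketch of the SQL this yields with the base templates above (index name hypothetical; the MySQL editor overrides sql_delete_index as shown earlier):

    sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s;"
    sql_delete_index = "DROP INDEX %(name)s"

    # db_index True -> False: the "Removed an index?" branch
    print(sql_delete_index % {"name": "schema_book_title_idx"})
    # -> DROP INDEX schema_book_title_idx

    # db_index False -> True: the "Added an index?" branch
    print(sql_create_index % {
        "name": "schema_book_title_idx",
        "table": '"schema_book"',
        "columns": '"title"',
        "extra": "",
    })
    # -> CREATE INDEX schema_book_title_idx ON "schema_book" ("title");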

View File

@@ -21,7 +21,7 @@ class AuthorWithM2M(models.Model):
 class Book(models.Model):
     author = models.ForeignKey(Author)
-    title = models.CharField(max_length=100)
+    title = models.CharField(max_length=100, db_index=True)
     pub_date = models.DateTimeField()
     #tags = models.ManyToManyField("Tag", related_name="books")

View File

@@ -179,6 +179,7 @@ class SchemaTests(TestCase):
             Author,
             Author._meta.get_field_by_name("name")[0],
             new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is right afterwards
@@ -208,6 +209,7 @@ class SchemaTests(TestCase):
             Author,
             Author._meta.get_field_by_name("name")[0],
             new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is right afterwards
@@ -276,6 +278,7 @@ class SchemaTests(TestCase):
             Tag,
             Tag._meta.get_field_by_name("slug")[0],
             new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is no longer unique
@@ -291,6 +294,7 @@ class SchemaTests(TestCase):
             Tag,
             new_field,
             new_new_field,
+            strict=True,
         )
         editor.commit()
         # Ensure the field is unique again
@@ -306,6 +310,7 @@ class SchemaTests(TestCase):
             Tag,
             Tag._meta.get_field_by_name("slug")[0],
             TagUniqueRename._meta.get_field_by_name("slug2")[0],
+            strict=True,
         )
         editor.commit()
         # Ensure the field is still unique
@@ -395,3 +400,81 @@ class SchemaTests(TestCase):
         Author._meta.db_table = "schema_author"
         columns = self.column_classes(Author)
         self.assertEqual(columns['name'][0], "CharField")
+
+    def test_indexes(self):
+        """
+        Tests creation/altering of indexes
+        """
+        # Create the table
+        editor = connection.schema_editor()
+        editor.start()
+        editor.create_model(Author)
+        editor.create_model(Book)
+        editor.commit()
+        # Ensure the table is there and has the right index
+        self.assertIn(
+            "title",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Alter to remove the index
+        new_field = CharField(max_length=100, db_index=False)
+        new_field.set_attributes_from_name("title")
+        editor = connection.schema_editor()
+        editor.start()
+        editor.alter_field(
+            Book,
+            Book._meta.get_field_by_name("title")[0],
+            new_field,
+            strict=True,
+        )
+        editor.commit()
+        # Ensure the table is there and has no index
+        self.assertNotIn(
+            "title",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Alter to re-add the index
+        editor = connection.schema_editor()
+        editor.start()
+        editor.alter_field(
+            Book,
+            new_field,
+            Book._meta.get_field_by_name("title")[0],
+            strict=True,
+        )
+        editor.commit()
+        # Ensure the table is there and has the index again
+        self.assertIn(
+            "title",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Add a unique column, verify that creates an implicit index
+        new_field = CharField(max_length=20, unique=True)
+        new_field.set_attributes_from_name("slug")
+        editor = connection.schema_editor()
+        editor.start()
+        editor.create_field(
+            Book,
+            new_field,
+        )
+        editor.commit()
+        self.assertIn(
+            "slug",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )
+        # Remove the unique, check the index goes with it
+        new_field2 = CharField(max_length=20, unique=False)
+        new_field2.set_attributes_from_name("slug")
+        editor = connection.schema_editor()
+        editor.start()
+        editor.alter_field(
+            Book,
+            new_field,
+            new_field2,
+            strict=True,
+        )
+        editor.commit()
+        self.assertNotIn(
+            "slug",
+            connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table),
+        )