Fixed #23954 -- Added special text/varchar PostgreSQL indexes in migrations
Thanks adityagupta104 for the report and Tim Graham for the review.
parent ce2eff7e48
commit 8d7a48027e
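For orientation, here is a minimal sketch (not part of the commit) of a model whose fields would trigger the new behaviour on PostgreSQL: any indexed or unique varchar/text column now gets a second "_like" index using the matching *_pattern_ops operator class, so LIKE lookups outside the C locale can use an index. App label, model name and field sizes below are made up.

    # Hypothetical model, for illustration only.
    from django.db import models

    class Tag(models.Model):
        name = models.CharField(max_length=100, db_index=True)  # varchar column -> extra varchar_pattern_ops index
        notes = models.TextField(db_index=True)                 # text column -> extra text_pattern_ops index

        class Meta:
            app_label = 'shop'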
@@ -6,12 +6,34 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
     sql_create_sequence = "CREATE SEQUENCE %(sequence)s"
     sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
     sql_set_sequence_max = "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s"
+    sql_create_varchar_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s varchar_pattern_ops)%(extra)s"
+    sql_create_text_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s text_pattern_ops)%(extra)s"

     def quote_value(self, value):
         # Inner import so backend fails nicely if it's not present
         import psycopg2
         return psycopg2.extensions.adapt(value)

+    def _model_indexes_sql(self, model):
+        output = super(DatabaseSchemaEditor, self)._model_indexes_sql(model)
+        if not model._meta.managed or model._meta.proxy or model._meta.swapped:
+            return output
+
+        for field in model._meta.local_fields:
+            db_type = field.db_type(connection=self.connection)
+            if db_type is not None and (field.db_index or field.unique):
+                # Fields with database column types of `varchar` and `text` need
+                # a second index that specifies their operator class, which is
+                # needed when performing correct LIKE queries outside the
+                # C locale. See #12234.
+                if db_type.startswith('varchar'):
+                    output.append(self._create_index_sql(
+                        model, [field], suffix='_like', sql=self.sql_create_varchar_index))
+                elif db_type.startswith('text'):
+                    output.append(self._create_index_sql(
+                        model, [field], suffix='_like', sql=self.sql_create_text_index))
+        return output
+
     def _alter_column_type_sql(self, table, column, type):
         """
         Makes ALTER TYPE with SERIAL make sense.
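As a rough illustration, rendering the new varchar template by hand for the hypothetical Tag model above produces SQL of the following shape (the index name and quoting are made up; the real name comes from _create_index_name() and includes a hash suffix):

    # Sketch only: manual substitution into the new template.
    sql_create_varchar_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s varchar_pattern_ops)%(extra)s"
    print(sql_create_varchar_index % {
        "name": '"shop_tag_name_like"',   # hypothetical generated index name
        "table": '"shop_tag"',
        "columns": '"name"',
        "extra": "",
    })
    # CREATE INDEX "shop_tag_name_like" ON "shop_tag" ("name" varchar_pattern_ops)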
@@ -226,9 +226,6 @@ class BaseDatabaseSchemaEditor(object):
             if col_type_suffix:
                 definition += " %s" % col_type_suffix
             params.extend(extra_params)
-            # Indexes
-            if field.db_index and not field.unique:
-                self.deferred_sql.append(self._create_index_sql(model, [field], suffix=""))
             # FK
             if field.rel and field.db_constraint:
                 to_table = field.rel.to._meta.db_table
@@ -262,10 +259,10 @@ class BaseDatabaseSchemaEditor(object):
             "definition": ", ".join(column_sqls)
         }
         self.execute(sql, params)
-        # Add any index_togethers
-        for field_names in model._meta.index_together:
-            fields = [model._meta.get_field_by_name(field)[0] for field in field_names]
-            self.execute(self._create_index_sql(model, fields, suffix="_idx"))
+        # Add any field index and index_together's (deferred as SQLite3 _remake_table needs it)
+        self.deferred_sql.extend(self._model_indexes_sql(model))
         # Make M2M tables
         for field in model._meta.local_many_to_many:
             if field.rel.through._meta.auto_created:
@@ -501,18 +498,12 @@ class BaseDatabaseSchemaEditor(object):
                 rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
                 for fk_name in rel_fk_names:
                     self.execute(self._delete_constraint_sql(self.sql_delete_fk, rel.model, fk_name))
-        # Removed an index?
+        # Removed an index? (no strict check, as multiple indexes are possible)
         if (old_field.db_index and not new_field.db_index and
                 not old_field.unique and not
                 (not new_field.unique and old_field.unique)):
             # Find the index for this field
             index_names = self._constraint_names(model, [old_field.column], index=True)
-            if strict and len(index_names) != 1:
-                raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
-                    len(index_names),
-                    model._meta.db_table,
-                    old_field.column,
-                ))
             for index_name in index_names:
                 self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
         # Change check constraints?
@@ -802,15 +793,47 @@ class BaseDatabaseSchemaEditor(object):
             index_name = "D%s" % index_name[:-1]
         return index_name

-    def _create_index_sql(self, model, fields, suffix=""):
+    def _create_index_sql(self, model, fields, suffix="", sql=None):
+        """
+        Return the SQL statement to create the index for one or several fields.
+        `sql` can be specified if the syntax differs from the standard (GIS
+        indexes, ...).
+        """
+        if len(fields) == 1 and fields[0].db_tablespace:
+            tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
+        elif model._meta.db_tablespace:
+            tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
+        else:
+            tablespace_sql = ""
+        if tablespace_sql:
+            tablespace_sql = " " + tablespace_sql
+
         columns = [field.column for field in fields]
-        return self.sql_create_index % {
+        sql_create_index = sql or self.sql_create_index
+        return sql_create_index % {
             "table": self.quote_name(model._meta.db_table),
             "name": self.quote_name(self._create_index_name(model, columns, suffix=suffix)),
             "columns": ", ".join(self.quote_name(column) for column in columns),
-            "extra": "",
+            "extra": tablespace_sql,
         }

+    def _model_indexes_sql(self, model):
+        """
+        Return all index SQL statements (field indexes, index_together) for the
+        specified model, as a list.
+        """
+        if not model._meta.managed or model._meta.proxy or model._meta.swapped:
+            return []
+        output = []
+        for field in model._meta.local_fields:
+            if field.db_index and not field.unique:
+                output.append(self._create_index_sql(model, [field], suffix=""))
+
+        for field_names in model._meta.index_together:
+            fields = [model._meta.get_field_by_name(field)[0] for field in field_names]
+            output.append(self._create_index_sql(model, fields, suffix="_idx"))
+        return output
+
     def _create_fk_sql(self, model, field, suffix):
         from_table = model._meta.db_table
         from_column = field.column
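The new `sql` argument of `_create_index_sql()` is the hook the PostgreSQL editor above relies on: a backend keeps the generic naming, quoting and tablespace handling and only swaps in its own statement template. A sketch of how a third-party backend might use it follows; the class, the hash-index template and the import path are assumptions based on the 1.7-era layout, not part of this commit.

    # Illustrative subclass only.
    from django.db.backends.schema import BaseDatabaseSchemaEditor  # assumed 1.7-era import path

    class HashIndexSchemaEditor(BaseDatabaseSchemaEditor):
        # Hypothetical backend-specific template passed through the new `sql` argument.
        sql_create_hash_index = "CREATE INDEX %(name)s ON %(table)s USING hash (%(columns)s)%(extra)s"

        def _model_indexes_sql(self, model):
            output = super(HashIndexSchemaEditor, self)._model_indexes_sql(model)
            for field in model._meta.local_fields:
                if field.db_index and not field.unique:
                    # Reuses the generic naming/quoting; only the statement template differs.
                    output.append(self._create_index_sql(
                        model, [field], suffix='_hash', sql=self.sql_create_hash_index))
            return output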
@@ -102,6 +102,7 @@ Bugfixes
 * Fixed ``runserver`` crash when socket error message contained Unicode
   characters (:ticket:`23946`).

 * Fixed serialization of ``type`` when adding a ``deconstruct()`` method
   (:ticket:`23950`).

@@ -111,3 +112,7 @@ Bugfixes
 * Fixed a crash when adding ``blank=True`` to ``TextField()`` on MySQL
   (:ticket:`23920`).

+* Fixed index creation by the migration infrastructure, particularly when
+  dealing with PostgreSQL specific {text|varchar}_pattern_ops indexes
+  (:ticket:`23954`).
@@ -7,7 +7,10 @@ from django.test import TestCase
 from .models import Article, IndexTogetherSingleList


-class IndexesTests(TestCase):
+class CreationIndexesTests(TestCase):
+    """
+    Test index handling by the to-be-deprecated connection.creation interface.
+    """
     def test_index_together(self):
         index_sql = connection.creation.sql_indexes_for_model(Article, no_style())
         self.assertEqual(len(index_sql), 1)
@@ -36,3 +39,37 @@ class IndexesTests(TestCase):
         """Test indexes are not created for related objects"""
         index_sql = connection.creation.sql_indexes_for_model(Article, no_style())
         self.assertEqual(len(index_sql), 1)

+
+class SchemaIndexesTests(TestCase):
+    """
+    Test index handling by the db.backends.schema infrastructure.
+    """
+    def test_index_together(self):
+        index_sql = connection.schema_editor()._model_indexes_sql(Article)
+        self.assertEqual(len(index_sql), 1)
+
+    def test_index_together_single_list(self):
+        # Test for using index_together with a single list (#22172)
+        index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
+        self.assertEqual(len(index_sql), 1)
+
+    @skipUnless(connection.vendor == 'postgresql',
+                "This is a postgresql-specific issue")
+    def test_postgresql_text_indexes(self):
+        """Test creation of PostgreSQL-specific text indexes (#12234)"""
+        from .models import IndexedArticle
+        index_sql = connection.schema_editor()._model_indexes_sql(IndexedArticle)
+        self.assertEqual(len(index_sql), 5)
+        self.assertIn('("headline" varchar_pattern_ops)', index_sql[2])
+        self.assertIn('("body" text_pattern_ops)', index_sql[3])
+        # unique=True and db_index=True should only create the varchar-specific
+        # index (#19441).
+        self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
+
+    @skipUnless(connection.vendor == 'postgresql',
+                "This is a postgresql-specific issue")
+    def test_postgresql_virtual_relation_indexes(self):
+        """Test indexes are not created for related objects"""
+        index_sql = connection.schema_editor()._model_indexes_sql(Article)
+        self.assertEqual(len(index_sql), 1)
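test_postgresql_text_indexes imports an IndexedArticle model that is not shown in this diff. For the assertions above to hold, the model would need to look roughly like the sketch below; the field names match the test, everything else (field sizes, ordering) is an assumption.

    # Sketch of the IndexedArticle model implied by the assertions above; not part of this diff.
    from django.db import models

    class IndexedArticle(models.Model):
        headline = models.CharField(max_length=100, db_index=True)  # regular index + varchar_pattern_ops index
        body = models.TextField(db_index=True)                      # regular index + text_pattern_ops index
        slug = models.CharField(max_length=40, unique=True)         # unique: only the extra varchar_pattern_ops index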