Refs #20888 -- Added index order introspection.
parent 5eab1f6f83
commit f842d1011c
@@ -160,6 +160,9 @@ class BaseDatabaseFeatures(object):
     # Can the backend introspect a TimeField, instead of a DateTimeField?
     can_introspect_time_field = True
 
+    # Can the backend introspect the column order (ASC/DESC) for indexes?
+    supports_index_column_ordering = True
+
     # Support for the DISTINCT ON clause
     can_distinct_on_fields = False
 
@@ -24,6 +24,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     can_introspect_autofield = True
     can_introspect_binary_field = False
     can_introspect_small_integer_field = True
+    supports_index_column_ordering = False
     supports_timezones = False
     requires_explicit_null_ordering_when_grouping = True
     allows_auto_pk_0 = False

@@ -259,7 +259,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         cursor.execute("""
             SELECT
                 index_name,
-                LOWER(column_name)
+                LOWER(column_name), descend
             FROM
                 user_ind_columns cols
             WHERE
@@ -271,11 +271,12 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 )
             ORDER BY cols.column_position
         """, [table_name])
-        for constraint, column in cursor.fetchall():
+        for constraint, column, order in cursor.fetchall():
             # If we're the first column, make the record
             if constraint not in constraints:
                 constraints[constraint] = {
                     "columns": [],
+                    "orders": [],
                     "primary_key": False,
                     "unique": False,
                     "foreign_key": None,
@@ -284,4 +285,5 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 }
             # Record the details
             constraints[constraint]['columns'].append(column)
+            constraints[constraint]['orders'].append(order)
         return constraints

@@ -211,23 +211,36 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         # Now get indexes
         cursor.execute("""
             SELECT
-                c2.relname,
-                ARRAY(
-                    SELECT (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = i AND attrelid = c.oid)
-                    FROM unnest(idx.indkey) i
-                ),
-                idx.indisunique,
-                idx.indisprimary
-            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
-                pg_catalog.pg_index idx
-            WHERE c.oid = idx.indrelid
-                AND idx.indexrelid = c2.oid
-                AND c.relname = %s
+                indexname, array_agg(attname), indisunique, indisprimary,
+                array_agg(ordering)
+            FROM (
+                SELECT
+                    c2.relname as indexname, idx.*, attr.attname,
+                    CASE
+                        WHEN am.amcanorder THEN
+                            CASE (option & 1)
+                                WHEN 1 THEN 'DESC' ELSE 'ASC'
+                            END
+                    END as ordering
+                FROM (
+                    SELECT
+                        *, unnest(i.indkey) as key, unnest(i.indoption) as option
+                    FROM pg_index i
+                ) idx, pg_class c, pg_class c2, pg_am am, pg_attribute attr
+                WHERE c.oid=idx.indrelid
+                    AND idx.indexrelid=c2.oid
+                    AND attr.attrelid=c.oid
+                    AND attr.attnum=idx.key
+                    AND c2.relam=am.oid
+                    AND c.relname = %s
+            ) s2
+            GROUP BY indexname, indisunique, indisprimary;
         """, [table_name])
-        for index, columns, unique, primary in cursor.fetchall():
+        for index, columns, unique, primary, orders in cursor.fetchall():
             if index not in constraints:
                 constraints[index] = {
-                    "columns": list(columns),
+                    "columns": columns,
+                    "orders": orders,
                     "primary_key": primary,
                     "unique": unique,
                     "foreign_key": None,

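The ordering in the PostgreSQL query above comes from pg_index.indoption: for access methods that can order (pg_am.amcanorder), the low-order bit of each per-column option word marks a descending column, which is what CASE (option & 1) tests. A minimal sketch of that bit test, with an illustrative flag value (not part of the patch):

    # Sketch of the indoption bit test used in the query above.
    # In PostgreSQL, INDOPTION_DESC is the low-order bit of each per-column
    # flag word in pg_index.indoption; the sample value 3 is only illustrative.
    INDOPTION_DESC = 0x0001

    def column_ordering(option_flags):
        """Map a pg_index.indoption flag word to 'ASC' or 'DESC'."""
        return 'DESC' if option_flags & INDOPTION_DESC else 'ASC'

    assert column_ordering(0) == 'ASC'   # plain ascending column
    assert column_ordering(3) == 'DESC'  # DESC (and NULLS FIRST) column
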
@@ -45,6 +45,10 @@ class DatabaseFeatures(BaseDatabaseFeatures):
     def uses_savepoints(self):
         return Database.sqlite_version_info >= (3, 6, 8)
 
+    @cached_property
+    def supports_index_column_ordering(self):
+        return Database.sqlite_version_info >= (3, 3, 0)
+
     @cached_property
     def can_release_savepoints(self):
         return self.uses_savepoints

@@ -255,6 +255,18 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                     "index": True,
                 }
                 constraints[index]['columns'].append(column)
+            # Add column orders for indexes
+            if constraints[index]['index'] and not constraints[index]['unique']:
+                cursor.execute(
+                    "SELECT sql FROM sqlite_master "
+                    "WHERE type='index' AND name=%s" % self.connection.ops.quote_name(index)
+                )
+                orders = []
+                # There would be only 1 row to loop over
+                for sql, in cursor.fetchall():
+                    order_info = sql.split('(')[-1].split(')')[0].split(',')
+                    orders = ['DESC' if info.endswith('DESC') else 'ASC' for info in order_info]
+                constraints[index]['orders'] = orders
         # Get the PK
         pk_column = self.get_primary_key_column(cursor, table_name)
         if pk_column:

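SQLite exposes no catalog column for index direction, so the code added above re-parses the original CREATE INDEX statement stored in sqlite_master. A standalone sketch of that string handling, using a made-up index definition:

    # Sketch of the sqlite_master parsing above; the SQL text is a made-up sample.
    sql = 'CREATE INDEX "idx_sample" ON "article" ("headline" DESC, "pub_date")'

    # Take the column list after the last '(' and before the following ')',
    # then split it on commas and check each piece for a trailing DESC.
    order_info = sql.split('(')[-1].split(')')[0].split(',')
    orders = ['DESC' if info.endswith('DESC') else 'ASC' for info in order_info]

    assert orders == ['DESC', 'ASC']
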
@@ -311,6 +311,13 @@ Database backend API
 * To enable ``FOR UPDATE SKIP LOCKED`` support, set
   ``DatabaseFeatures.has_select_for_update_skip_locked = True``.
 
+* The new ``DatabaseFeatures.supports_index_column_ordering`` attribute
+  specifies if a database allows defining ordering for columns in indexes. The
+  default value is ``True`` and the ``DatabaseIntrospection.get_constraints()``
+  method should include an ``'orders'`` key in each of the returned
+  dictionaries with a list of ``'ASC'`` and/or ``'DESC'`` values corresponding
+  to the ordering of each column in the index.
+
 Dropped support for PostgreSQL 9.2 and PostGIS 2.0
 --------------------------------------------------

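With the new key in place, a single get_constraints() entry for an ordered index would look roughly like the following; the constraint name, the column values, and the exact set of other keys are illustrative, not taken from the patch:

    # Illustrative shape of one get_constraints() entry once 'orders' is reported.
    {
        'article_headline_idx': {           # made-up constraint name
            'columns': ['headline', 'pub_date'],
            'orders': ['ASC', 'DESC'],      # one 'ASC'/'DESC' per column, in column order
            'primary_key': False,
            'unique': False,
            'foreign_key': None,
            'check': False,
            'index': True,
        },
    }
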
@@ -182,6 +182,26 @@ class IntrospectionTests(TransactionTestCase):
         self.assertNotIn('first_name', indexes)
         self.assertIn('id', indexes)
 
+    @skipUnlessDBFeature('supports_index_column_ordering')
+    def test_get_constraints_indexes_orders(self):
+        """
+        Indexes have the 'orders' key with a list of 'ASC'/'DESC' values.
+        """
+        with connection.cursor() as cursor:
+            constraints = connection.introspection.get_constraints(cursor, Article._meta.db_table)
+        indexes_verified = 0
+        expected_columns = [
+            ['reporter_id'],
+            ['headline', 'pub_date'],
+            ['response_to_id'],
+        ]
+        for key, val in constraints.items():
+            if val['index'] and not (val['primary_key'] or val['unique']):
+                self.assertIn(val['columns'], expected_columns)
+                self.assertEqual(val['orders'], ['ASC'] * len(val['columns']))
+                indexes_verified += 1
+        self.assertEqual(indexes_verified, 3)
+
 
 def datatype(dbtype, description):
     """Helper to convert a data type into a string."""