2012-06-08 00:08:47 +08:00
|
|
|
from __future__ import unicode_literals
|
2015-01-28 20:35:27 +08:00
|
|
|
|
2014-11-15 01:45:31 +08:00
|
|
|
from collections import namedtuple
|
2012-06-08 00:08:47 +08:00
|
|
|
|
2015-01-13 04:20:40 +08:00
|
|
|
from django.db.backends.base.introspection import (
|
|
|
|
BaseDatabaseIntrospection, FieldInfo, TableInfo,
|
|
|
|
)
|
2013-04-02 01:51:53 +08:00
|
|
|
from django.utils.encoding import force_text
|
2007-08-20 09:03:33 +08:00
|
|
|
|
2014-11-15 01:45:31 +08:00
|
|
|
# Deliberately shadow the imported base FieldInfo: this backend needs the
# column default as well, so get_table_description() can detect serial
# ("nextval(...)") columns in get_field_type().
FieldInfo = namedtuple('FieldInfo', FieldInfo._fields + ('default',))
|
|
|
|
|
|
|
|
|
2011-04-02 16:39:08 +08:00
|
|
|
class DatabaseIntrospection(BaseDatabaseIntrospection):
    """Database introspection for the PostgreSQL backend."""

    # Maps type codes to Django Field types.
    # Keys are the type codes reported in cursor.description
    # (presumably PostgreSQL pg_type OIDs — confirm against the driver).
    data_types_reverse = {
        16: 'BooleanField',
        17: 'BinaryField',
        20: 'BigIntegerField',
        21: 'SmallIntegerField',
        23: 'IntegerField',
        25: 'TextField',
        700: 'FloatField',
        701: 'FloatField',
        869: 'GenericIPAddressField',
        1042: 'CharField',  # blank-padded
        1043: 'CharField',
        1082: 'DateField',
        1083: 'TimeField',
        1114: 'DateTimeField',
        1184: 'DateTimeField',
        1266: 'TimeField',
        1700: 'DecimalField',
    }

    # Table names excluded from get_table_list(); empty here, may be
    # overridden by subclasses (e.g. GIS backends hiding metadata tables).
    ignored_tables = []

    # Per-index rows for a table: first indexed column name, the raw
    # indkey vector, and the unique/primary flags.  Only indkey[0] is
    # joined, so multi-column indexes surface just their first column.
    _get_indexes_query = """
        SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
        FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
            pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
        WHERE c.oid = idx.indrelid
            AND idx.indexrelid = c2.oid
            AND attr.attrelid = c.oid
            AND attr.attnum = idx.indkey[0]
            AND c.relname = %s"""
|
|
|
|
|
2014-11-15 01:45:31 +08:00
|
|
|
def get_field_type(self, data_type, description):
|
|
|
|
field_type = super(DatabaseIntrospection, self).get_field_type(data_type, description)
|
2015-07-02 16:43:15 +08:00
|
|
|
if description.default and 'nextval' in description.default:
|
|
|
|
if field_type == 'IntegerField':
|
|
|
|
return 'AutoField'
|
|
|
|
elif field_type == 'BigIntegerField':
|
|
|
|
return 'BigAutoField'
|
2014-11-15 01:45:31 +08:00
|
|
|
return field_type
|
|
|
|
|
2011-04-02 16:39:08 +08:00
|
|
|
def get_table_list(self, cursor):
|
2014-09-21 03:34:23 +08:00
|
|
|
"""
|
|
|
|
Returns a list of table and view names in the current database.
|
|
|
|
"""
|
2011-04-02 16:39:08 +08:00
|
|
|
cursor.execute("""
|
2014-09-21 03:34:23 +08:00
|
|
|
SELECT c.relname, c.relkind
|
2011-04-02 16:39:08 +08:00
|
|
|
FROM pg_catalog.pg_class c
|
|
|
|
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
|
2014-09-21 03:34:23 +08:00
|
|
|
WHERE c.relkind IN ('r', 'v')
|
2011-04-02 16:39:08 +08:00
|
|
|
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
|
|
|
|
AND pg_catalog.pg_table_is_visible(c.oid)""")
|
2014-09-24 18:31:42 +08:00
|
|
|
return [TableInfo(row[0], {'r': 't', 'v': 'v'}.get(row[1]))
|
2014-09-21 03:34:23 +08:00
|
|
|
for row in cursor.fetchall()
|
|
|
|
if row[0] not in self.ignored_tables]
|
2011-04-02 16:39:08 +08:00
|
|
|
|
|
|
|
def get_table_description(self, cursor, table_name):
|
|
|
|
"Returns a description of the table, with the DB-API cursor.description interface."
|
2012-02-12 04:05:50 +08:00
|
|
|
# As cursor.description does not return reliably the nullable property,
|
|
|
|
# we have to query the information_schema (#7783)
|
|
|
|
cursor.execute("""
|
2014-11-15 01:45:31 +08:00
|
|
|
SELECT column_name, is_nullable, column_default
|
2012-02-12 04:05:50 +08:00
|
|
|
FROM information_schema.columns
|
|
|
|
WHERE table_name = %s""", [table_name])
|
2014-12-07 05:00:09 +08:00
|
|
|
field_map = {line[0]: line[1:] for line in cursor.fetchall()}
|
2011-04-02 16:39:08 +08:00
|
|
|
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
|
2016-04-04 08:37:32 +08:00
|
|
|
return [
|
|
|
|
FieldInfo(*(
|
|
|
|
(force_text(line[0]),) +
|
|
|
|
line[1:6] +
|
|
|
|
(field_map[force_text(line[0])][0] == 'YES', field_map[force_text(line[0])][1])
|
|
|
|
)) for line in cursor.description
|
|
|
|
]
|
2006-05-18 11:36:58 +08:00
|
|
|
|
2008-08-11 20:11:25 +08:00
|
|
|
def get_relations(self, cursor, table_name):
|
|
|
|
"""
|
2015-01-11 03:27:30 +08:00
|
|
|
Returns a dictionary of {field_name: (field_name_other_table, other_table)}
|
|
|
|
representing all relationships to the given table.
|
2008-08-11 20:11:25 +08:00
|
|
|
"""
|
|
|
|
cursor.execute("""
|
2015-01-11 03:27:30 +08:00
|
|
|
SELECT c2.relname, a1.attname, a2.attname
|
|
|
|
FROM pg_constraint con
|
|
|
|
LEFT JOIN pg_class c1 ON con.conrelid = c1.oid
|
|
|
|
LEFT JOIN pg_class c2 ON con.confrelid = c2.oid
|
|
|
|
LEFT JOIN pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1]
|
|
|
|
LEFT JOIN pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1]
|
|
|
|
WHERE c1.relname = %s
|
2008-08-11 20:11:25 +08:00
|
|
|
AND con.contype = 'f'""", [table_name])
|
|
|
|
relations = {}
|
|
|
|
for row in cursor.fetchall():
|
2015-01-11 03:27:30 +08:00
|
|
|
relations[row[1]] = (row[2], row[0])
|
2008-08-11 20:11:25 +08:00
|
|
|
return relations
|
2011-04-02 16:39:08 +08:00
|
|
|
|
2013-01-13 04:46:08 +08:00
|
|
|
def get_key_columns(self, cursor, table_name):
|
|
|
|
key_columns = []
|
|
|
|
cursor.execute("""
|
|
|
|
SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column
|
|
|
|
FROM information_schema.constraint_column_usage ccu
|
|
|
|
LEFT JOIN information_schema.key_column_usage kcu
|
|
|
|
ON ccu.constraint_catalog = kcu.constraint_catalog
|
|
|
|
AND ccu.constraint_schema = kcu.constraint_schema
|
|
|
|
AND ccu.constraint_name = kcu.constraint_name
|
|
|
|
LEFT JOIN information_schema.table_constraints tc
|
|
|
|
ON ccu.constraint_catalog = tc.constraint_catalog
|
|
|
|
AND ccu.constraint_schema = tc.constraint_schema
|
|
|
|
AND ccu.constraint_name = tc.constraint_name
|
2013-07-08 08:39:54 +08:00
|
|
|
WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY'""", [table_name])
|
2013-01-13 04:46:08 +08:00
|
|
|
key_columns.extend(cursor.fetchall())
|
|
|
|
return key_columns
|
|
|
|
|
2011-04-02 16:39:08 +08:00
|
|
|
def get_indexes(self, cursor, table_name):
|
|
|
|
# This query retrieves each index on the given table, including the
|
|
|
|
# first associated field name
|
2015-06-19 23:46:03 +08:00
|
|
|
cursor.execute(self._get_indexes_query, [table_name])
|
2011-04-02 16:39:08 +08:00
|
|
|
indexes = {}
|
|
|
|
for row in cursor.fetchall():
|
|
|
|
# row[1] (idx.indkey) is stored in the DB as an array. It comes out as
|
|
|
|
# a string of space-separated integers. This designates the field
|
|
|
|
# indexes (1-based) of the fields that have indexes on the table.
|
|
|
|
# Here, we skip any indexes across multiple fields.
|
|
|
|
if ' ' in row[1]:
|
|
|
|
continue
|
2012-09-18 06:45:00 +08:00
|
|
|
if row[0] not in indexes:
|
|
|
|
indexes[row[0]] = {'primary_key': False, 'unique': False}
|
|
|
|
# It's possible to have the unique and PK constraints in separate indexes.
|
|
|
|
if row[3]:
|
|
|
|
indexes[row[0]]['primary_key'] = True
|
|
|
|
if row[2]:
|
|
|
|
indexes[row[0]]['unique'] = True
|
2011-04-02 16:39:08 +08:00
|
|
|
return indexes
|
2012-08-02 22:08:39 +08:00
|
|
|
|
|
|
|
    def get_constraints(self, cursor, table_name):
        """
        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.

        Returns {constraint_name: {'columns': [...], 'primary_key': bool,
        'unique': bool, 'foreign_key': (table, column) or None,
        'check': bool, 'index': bool}}.  Only the "public" schema is
        inspected.
        """
        constraints = {}
        # Loop over the key table, collecting things as constraints
        # This will get PKs, FKs, and uniques, but not CHECK
        cursor.execute("""
            SELECT
                kc.constraint_name,
                kc.column_name,
                c.constraint_type,
                array(SELECT table_name::text || '.' || column_name::text
                      FROM information_schema.constraint_column_usage
                      WHERE constraint_name = kc.constraint_name)
            FROM information_schema.key_column_usage AS kc
            JOIN information_schema.table_constraints AS c ON
                kc.table_schema = c.table_schema AND
                kc.table_name = c.table_name AND
                kc.constraint_name = c.constraint_name
            WHERE
                kc.table_schema = %s AND
                kc.table_name = %s
            ORDER BY kc.ordinal_position ASC
        """, ["public", table_name])
        for constraint, column, kind, used_cols in cursor.fetchall():
            # If we're the first column, make the record
            if constraint not in constraints:
                constraints[constraint] = {
                    "columns": [],
                    "primary_key": kind.lower() == "primary key",
                    "unique": kind.lower() in ["primary key", "unique"],
                    # used_cols holds "table.column" strings for the
                    # referenced side; only the first entry is used.
                    "foreign_key": tuple(used_cols[0].split(".", 1)) if kind.lower() == "foreign key" else None,
                    "check": False,
                    "index": False,
                }
            # Record the details
            # Rows arrive ordered by ordinal_position, so columns append
            # in constraint order.
            constraints[constraint]['columns'].append(column)
        # Now get CHECK constraint columns
        cursor.execute("""
            SELECT kc.constraint_name, kc.column_name
            FROM information_schema.constraint_column_usage AS kc
            JOIN information_schema.table_constraints AS c ON
                kc.table_schema = c.table_schema AND
                kc.table_name = c.table_name AND
                kc.constraint_name = c.constraint_name
            WHERE
                c.constraint_type = 'CHECK' AND
                kc.table_schema = %s AND
                kc.table_name = %s
        """, ["public", table_name])
        for constraint, column in cursor.fetchall():
            # If we're the first column, make the record
            if constraint not in constraints:
                constraints[constraint] = {
                    "columns": [],
                    "primary_key": False,
                    "unique": False,
                    "foreign_key": None,
                    "check": True,
                    "index": False,
                }
            # Record the details
            constraints[constraint]['columns'].append(column)
        # Now get indexes
        cursor.execute("""
            SELECT
                c2.relname,
                ARRAY(
                    SELECT (SELECT attname FROM pg_catalog.pg_attribute WHERE attnum = i AND attrelid = c.oid)
                    FROM unnest(idx.indkey) i
                ),
                idx.indisunique,
                idx.indisprimary
            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
                pg_catalog.pg_index idx
            WHERE c.oid = idx.indrelid
                AND idx.indexrelid = c2.oid
                AND c.relname = %s
        """, [table_name])
        for index, columns, unique, primary in cursor.fetchall():
            # Indexes backing constraints found above keep their existing
            # record (with index=False); only standalone indexes are added.
            if index not in constraints:
                constraints[index] = {
                    "columns": list(columns),
                    "primary_key": primary,
                    "unique": unique,
                    "foreign_key": None,
                    "check": False,
                    "index": True,
                }
        return constraints
|