2015-01-13 04:20:40 +08:00
|
|
|
from django.db.backends.base.introspection import (
|
|
|
|
BaseDatabaseIntrospection, FieldInfo, TableInfo,
|
|
|
|
)
|
2017-02-16 02:33:55 +08:00
|
|
|
from django.db.models.indexes import Index
|
2007-08-20 09:03:33 +08:00
|
|
|
|
2014-11-15 01:45:31 +08:00
|
|
|
|
2011-04-02 16:39:08 +08:00
|
|
|
class DatabaseIntrospection(BaseDatabaseIntrospection):
    """Introspect a PostgreSQL database: tables, columns, keys, constraints."""

    # Maps PostgreSQL type OIDs to Django Field class names.
    data_types_reverse = {
        16: 'BooleanField',
        17: 'BinaryField',
        20: 'BigIntegerField',
        21: 'SmallIntegerField',
        23: 'IntegerField',
        25: 'TextField',
        700: 'FloatField',
        701: 'FloatField',
        869: 'GenericIPAddressField',
        1042: 'CharField',  # blank-padded
        1043: 'CharField',
        1082: 'DateField',
        1083: 'TimeField',
        1114: 'DateTimeField',
        1184: 'DateTimeField',
        1266: 'TimeField',
        1700: 'DecimalField',
        2950: 'UUIDField',
    }

    # Table names that get_table_list() should never report.
    ignored_tables = []

    def get_field_type(self, data_type, description):
        """
        Return the Django field name for an OID, promoting serial columns
        (those whose default calls nextval()) to the auto-field variants.
        """
        field_type = super().get_field_type(data_type, description)
        if description.default and 'nextval' in description.default:
            if field_type == 'IntegerField':
                return 'AutoField'
            if field_type == 'BigIntegerField':
                return 'BigAutoField'
        return field_type

    def get_table_list(self, cursor):
        """Return a list of table and view names in the current database."""
        cursor.execute("""
            SELECT c.relname, c.relkind
            FROM pg_catalog.pg_class c
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
            WHERE c.relkind IN ('r', 'v')
                AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
                AND pg_catalog.pg_table_is_visible(c.oid)""")
        # relkind 'r' is an ordinary table, 'v' a view; map to TableInfo codes.
        kind_map = {'r': 't', 'v': 'v'}
        return [
            TableInfo(name, kind_map.get(relkind))
            for name, relkind in cursor.fetchall()
            if name not in self.ignored_tables
        ]

    def get_table_description(self, cursor, table_name):
        """
        Return a description of the table with the DB-API cursor.description
        interface.
        """
        # cursor.description does not reliably report nullability, so fetch
        # it (and the column default) from information_schema instead (#7783).
        cursor.execute("""
            SELECT column_name, is_nullable, column_default
            FROM information_schema.columns
            WHERE table_name = %s""", [table_name])
        field_map = {row[0]: row[1:] for row in cursor.fetchall()}
        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
        description = []
        for column in cursor.description:
            is_nullable, default = field_map[column.name]
            description.append(FieldInfo(*column[0:6], is_nullable == 'YES', default))
        return description

    def get_sequences(self, cursor, table_name, table_fields=()):
        """
        Return [{'name': ..., 'table': ..., 'column': ...}, ...] for every
        sequence owned by (or defaulted into) a column of table_name.
        """
        cursor.execute("""
            SELECT s.relname as sequence_name, col.attname
            FROM pg_class s
                JOIN pg_namespace sn ON sn.oid = s.relnamespace
                JOIN pg_depend d ON d.refobjid = s.oid AND d.refclassid='pg_class'::regclass
                JOIN pg_attrdef ad ON ad.oid = d.objid AND d.classid = 'pg_attrdef'::regclass
                JOIN pg_attribute col ON col.attrelid = ad.adrelid AND col.attnum = ad.adnum
                JOIN pg_class tbl ON tbl.oid = ad.adrelid
                JOIN pg_namespace n ON n.oid = tbl.relnamespace
            WHERE s.relkind = 'S'
              AND d.deptype in ('a', 'n')
              AND n.nspname = 'public'
              AND tbl.relname = %s
        """, [table_name])
        return [
            {'name': sequence, 'table': table_name, 'column': column}
            for sequence, column in cursor.fetchall()
        ]

    def get_relations(self, cursor, table_name):
        """
        Return a dictionary of {field_name: (field_name_other_table, other_table)}
        representing all relationships to the given table.
        """
        # Only single-column foreign keys are considered (conkey[1]/confkey[1]).
        cursor.execute("""
            SELECT c2.relname, a1.attname, a2.attname
            FROM pg_constraint con
            LEFT JOIN pg_class c1 ON con.conrelid = c1.oid
            LEFT JOIN pg_class c2 ON con.confrelid = c2.oid
            LEFT JOIN pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1]
            LEFT JOIN pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1]
            WHERE c1.relname = %s
                AND con.contype = 'f'""", [table_name])
        return {
            local_column: (remote_column, remote_table)
            for remote_table, local_column, remote_column in cursor.fetchall()
        }

    def get_key_columns(self, cursor, table_name):
        """
        Return (column, referenced_table, referenced_column) tuples for every
        foreign-key column of table_name, via information_schema.
        """
        cursor.execute("""
            SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column
            FROM information_schema.constraint_column_usage ccu
            LEFT JOIN information_schema.key_column_usage kcu
                ON ccu.constraint_catalog = kcu.constraint_catalog
                    AND ccu.constraint_schema = kcu.constraint_schema
                    AND ccu.constraint_name = kcu.constraint_name
            LEFT JOIN information_schema.table_constraints tc
                ON ccu.constraint_catalog = tc.constraint_catalog
                    AND ccu.constraint_schema = tc.constraint_schema
                    AND ccu.constraint_name = tc.constraint_name
            WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY'""", [table_name])
        return list(cursor.fetchall())

    def get_constraints(self, cursor, table_name):
        """
        Retrieve any constraints or keys (unique, pk, fk, check, index) across
        one or more columns. Also retrieve the definition of expression-based
        indexes.
        """
        constraints = {}
        # Loop over the key table, collecting things as constraints. The column
        # array must return column names in the same order in which they were
        # created.
        cursor.execute("""
            SELECT
                c.conname,
                array(
                    SELECT attname
                    FROM unnest(c.conkey) WITH ORDINALITY cols(colid, arridx)
                    JOIN pg_attribute AS ca ON cols.colid = ca.attnum
                    WHERE ca.attrelid = c.conrelid
                    ORDER BY cols.arridx
                ),
                c.contype,
                (SELECT fkc.relname || '.' || fka.attname
                 FROM pg_attribute AS fka
                 JOIN pg_class AS fkc ON fka.attrelid = fkc.oid
                 WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]),
                cl.reloptions
            FROM pg_constraint AS c
            JOIN pg_class AS cl ON c.conrelid = cl.oid
            JOIN pg_namespace AS ns ON cl.relnamespace = ns.oid
            WHERE ns.nspname = %s AND cl.relname = %s
        """, ["public", table_name])
        for constraint, columns, kind, used_cols, options in cursor.fetchall():
            constraints[constraint] = {
                "columns": columns,
                "primary_key": kind == "p",
                "unique": kind in ["p", "u"],
                # used_cols is "table.column" for FKs; split it back apart.
                "foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None,
                "check": kind == "c",
                "index": False,
                "definition": None,
                "options": options,
            }
        # Now get indexes.
        cursor.execute("""
            SELECT
                indexname, array_agg(attname ORDER BY arridx), indisunique, indisprimary,
                array_agg(ordering ORDER BY arridx), amname, exprdef, s2.attoptions
            FROM (
                SELECT
                    c2.relname as indexname, idx.*, attr.attname, am.amname,
                    CASE
                        WHEN idx.indexprs IS NOT NULL THEN
                            pg_get_indexdef(idx.indexrelid)
                    END AS exprdef,
                    CASE am.amname
                        WHEN 'btree' THEN
                            CASE (option & 1)
                                WHEN 1 THEN 'DESC' ELSE 'ASC'
                            END
                    END as ordering,
                    c2.reloptions as attoptions
                FROM (
                    SELECT *
                    FROM pg_index i, unnest(i.indkey, i.indoption) WITH ORDINALITY koi(key, option, arridx)
                ) idx
                LEFT JOIN pg_class c ON idx.indrelid = c.oid
                LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid
                LEFT JOIN pg_am am ON c2.relam = am.oid
                LEFT JOIN pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key
                WHERE c.relname = %s
            ) s2
            GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions;
        """, [table_name])
        for index, columns, unique, primary, orders, type_, definition, options in cursor.fetchall():
            # Constraints (pk/unique) already appear above; only add plain indexes.
            if index not in constraints:
                constraints[index] = {
                    "columns": columns if columns != [None] else [],
                    "orders": orders if orders != [None] else [],
                    "primary_key": primary,
                    "unique": unique,
                    "foreign_key": None,
                    "check": False,
                    "index": True,
                    "type": Index.suffix if type_ == 'btree' else type_,
                    "definition": definition,
                    "options": options,
                }
        return constraints