import re
from collections import namedtuple

import sqlparse

from django.db.backends.base.introspection import (
    BaseDatabaseIntrospection, FieldInfo as BaseFieldInfo, TableInfo,
)
from django.db.models import Index
from django.utils.regex_helper import _lazy_re_compile

FieldInfo = namedtuple('FieldInfo', BaseFieldInfo._fields + ('pk', 'has_json_constraint'))

field_size_re = _lazy_re_compile(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$')


def get_field_size(name):
    """Extract the size number from a "varchar(11)" type name."""
    m = field_size_re.search(name)
    return int(m[1]) if m else None
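

# For example (values follow directly from field_size_re):
#
#   >>> get_field_size('varchar(32)')
#   32
#   >>> get_field_size('text') is None
#   True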
|
2012-08-31 01:36:05 +08:00
|
|
|
|
|
|
|
|
2006-05-02 09:31:56 +08:00
|
|
|
# This light wrapper "fakes" a dictionary interface, because some SQLite data
|
|
|
|
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
|
|
|
|
# as a simple dictionary lookup.
|
2017-01-19 15:39:46 +08:00
|
|
|
class FlexibleFieldLookupDict:
|
2008-08-11 20:11:25 +08:00
|
|
|
# Maps SQL types to Django Field types. Some of the SQL types have multiple
|
|
|
|
# entries here because SQLite allows for anything and doesn't normalize the
|
|
|
|
# field type; it uses whatever was given.
|
|
|
|
base_data_types_reverse = {
|
|
|
|
'bool': 'BooleanField',
|
|
|
|
'boolean': 'BooleanField',
|
|
|
|
'smallint': 'SmallIntegerField',
|
2008-08-26 16:58:07 +08:00
|
|
|
'smallint unsigned': 'PositiveSmallIntegerField',
|
2008-08-11 20:11:25 +08:00
|
|
|
'smallinteger': 'SmallIntegerField',
|
|
|
|
'int': 'IntegerField',
|
|
|
|
'integer': 'IntegerField',
|
2009-12-17 23:10:38 +08:00
|
|
|
'bigint': 'BigIntegerField',
|
2008-08-26 16:58:07 +08:00
|
|
|
'integer unsigned': 'PositiveIntegerField',
|
2019-10-16 20:32:12 +08:00
|
|
|
'bigint unsigned': 'PositiveBigIntegerField',
|
2008-08-26 16:58:07 +08:00
|
|
|
'decimal': 'DecimalField',
|
|
|
|
'real': 'FloatField',
|
2008-08-11 20:11:25 +08:00
|
|
|
'text': 'TextField',
|
|
|
|
'char': 'CharField',
|
2019-01-19 06:44:09 +08:00
|
|
|
'varchar': 'CharField',
|
2012-12-18 05:35:35 +08:00
|
|
|
'blob': 'BinaryField',
|
2008-08-11 20:11:25 +08:00
|
|
|
'date': 'DateField',
|
|
|
|
'datetime': 'DateTimeField',
|
|
|
|
'time': 'TimeField',
|
|
|
|
}
|
|
|
|
|
2006-05-02 09:31:56 +08:00
|
|
|
def __getitem__(self, key):
|
2019-01-19 06:44:09 +08:00
|
|
|
key = key.lower().split('(', 1)[0].strip()
|
|
|
|
return self.base_data_types_reverse[key]
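
    # For example, case and any size modifier are stripped before the lookup,
    # so parameterized and bare type names resolve to the same field type:
    #
    #   >>> FlexibleFieldLookupDict()['varchar(30)']
    #   'CharField'
    #   >>> FlexibleFieldLookupDict()['BOOL']
    #   'BooleanField'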


class DatabaseIntrospection(BaseDatabaseIntrospection):
    data_types_reverse = FlexibleFieldLookupDict()

    def get_field_type(self, data_type, description):
        field_type = super().get_field_type(data_type, description)
        if description.pk and field_type in {'BigIntegerField', 'IntegerField', 'SmallIntegerField'}:
            # No support for BigAutoField or SmallAutoField as SQLite treats
            # all integer primary keys as signed 64-bit integers.
            return 'AutoField'
        if description.has_json_constraint:
            return 'JSONField'
        return field_type
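
    # For example, an 'integer' column with description.pk=True maps to
    # 'AutoField' rather than 'IntegerField', and a 'text' column with
    # description.has_json_constraint=True maps to 'JSONField'.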

    def get_table_list(self, cursor):
        """Return a list of table and view names in the current database."""
        # Skip the sqlite_sequence system table used for autoincrement key
        # generation.
        cursor.execute("""
            SELECT name, type FROM sqlite_master
            WHERE type in ('table', 'view') AND NOT name='sqlite_sequence'
            ORDER BY name""")
        return [TableInfo(row[0], row[1][0]) for row in cursor.fetchall()]
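
    # For example, a database with a hypothetical "book" table and an
    # "author_detail" view yields [TableInfo(name='author_detail', type='v'),
    # TableInfo(name='book', type='t')]; row[1][0] keeps just the first
    # letter of sqlite_master's type column.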

    def get_table_description(self, cursor, table_name):
        """
        Return a description of the table with the DB-API cursor.description
        interface.
        """
        cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(table_name))
        table_info = cursor.fetchall()
        json_columns = set()
        if self.connection.features.can_introspect_json_field:
            for line in table_info:
                column = line[1]
                json_constraint_sql = '%%json_valid("%s")%%' % column
                has_json_constraint = cursor.execute("""
                    SELECT sql
                    FROM sqlite_master
                    WHERE
                        type = 'table' AND
                        name = %s AND
                        sql LIKE %s
                """, [table_name, json_constraint_sql]).fetchone()
                if has_json_constraint:
                    json_columns.add(column)
        return [
            FieldInfo(
                name, data_type, None, get_field_size(data_type), None, None,
                not notnull, default, pk == 1, name in json_columns
            )
            for cid, name, data_type, notnull, default, pk in table_info
        ]
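
    # Each PRAGMA table_info() row is (cid, name, type, notnull, dflt_value,
    # pk), so a row like (0, 'id', 'integer', 1, None, 1) becomes a FieldInfo
    # with null_ok=False, pk=True, and has_json_constraint=False.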

    def get_sequences(self, cursor, table_name, table_fields=()):
        pk_col = self.get_primary_key_column(cursor, table_name)
        return [{'table': table_name, 'column': pk_col}]
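
    # For example, a hypothetical "book" table with primary key "id" yields
    # [{'table': 'book', 'column': 'id'}]; SQLite emulates sequences through
    # its sqlite_sequence system table, one row per AUTOINCREMENT table.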

    def get_relations(self, cursor, table_name):
        """
        Return a dictionary of {field_name: (field_name_other_table, other_table)}
        representing all relationships to the given table.
        """
        # Dictionary of relations to return
        relations = {}

        # Schema for this table
        cursor.execute(
            "SELECT sql, type FROM sqlite_master "
            "WHERE tbl_name = %s AND type IN ('table', 'view')",
            [table_name]
        )
        create_sql, table_type = cursor.fetchone()
        if table_type == 'view':
            # Views don't contain REFERENCES clauses, so there are no
            # relations to return.
            return relations
        results = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')]

        # Walk through and look for references to other tables. SQLite doesn't
        # really have enforced references, but since it echoes out the SQL used
        # to create the table we can look for REFERENCES statements used there.
        for field_desc in results.split(','):
            field_desc = field_desc.strip()
            if field_desc.startswith("UNIQUE"):
                continue

            m = re.search(r'references (\S*) ?\(["|]?(.*)["|]?\)', field_desc, re.I)
            if not m:
                continue
            table, column = [s.strip('"') for s in m.groups()]

            if field_desc.startswith("FOREIGN KEY"):
                # Find name of the target FK field
                m = re.match(r'FOREIGN KEY\s*\(([^\)]*)\).*', field_desc, re.I)
                field_name = m[1].strip('"')
            else:
                field_name = field_desc.split()[0].strip('"')

            cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
            result = cursor.fetchall()[0]
            other_table_results = result[0].strip()
            li, ri = other_table_results.index('('), other_table_results.rindex(')')
            other_table_results = other_table_results[li + 1:ri]

            for other_desc in other_table_results.split(','):
                other_desc = other_desc.strip()
                if other_desc.startswith('UNIQUE'):
                    continue

                other_name = other_desc.split(' ', 1)[0].strip('"')
                if other_name == column:
                    relations[field_name] = (other_name, table)
                    break

        return relations
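
    # For example, a hypothetical table created with 'CREATE TABLE "book"
    # ("id" integer PRIMARY KEY, "author_id" integer REFERENCES "author"
    # ("id"))' yields {'author_id': ('id', 'author')}.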

    def get_key_columns(self, cursor, table_name):
        """
        Return a list of (column_name, referenced_table_name, referenced_column_name)
        for all key columns in given table.
        """
        key_columns = []

        # Schema for this table
        cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
        results = cursor.fetchone()[0].strip()
        results = results[results.index('(') + 1:results.rindex(')')]

        # Walk through and look for references to other tables. SQLite doesn't
        # really have enforced references, but since it echoes out the SQL used
        # to create the table we can look for REFERENCES statements used there.
        for field_index, field_desc in enumerate(results.split(',')):
            field_desc = field_desc.strip()
            if field_desc.startswith("UNIQUE"):
                continue

            m = re.search(r'"(.*)".*references (.*) \(["|](.*)["|]\)', field_desc, re.I)
            if not m:
                continue

            # This will append (column_name, referenced_table_name, referenced_column_name) to key_columns
            key_columns.append(tuple(s.strip('"') for s in m.groups()))

        return key_columns
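
    # For the hypothetical "book" table above, this returns
    # [('author_id', 'author', 'id')].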

    def get_primary_key_column(self, cursor, table_name):
        """Return the column name of the primary key for the given table."""
        # Don't use PRAGMA because that causes issues with some transactions
        cursor.execute(
            "SELECT sql, type FROM sqlite_master "
            "WHERE tbl_name = %s AND type IN ('table', 'view')",
            [table_name]
        )
        row = cursor.fetchone()
        if row is None:
            raise ValueError("Table %s does not exist" % table_name)
        create_sql, table_type = row
        if table_type == 'view':
            # Views don't have a primary key.
            return None
        fields_sql = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')]
        for field_desc in fields_sql.split(','):
            field_desc = field_desc.strip()
            m = re.match(r'(?:(?:["`\[])(.*)(?:["`\]])|(\w+)).*PRIMARY KEY.*', field_desc)
            if m:
                return m[1] if m[1] else m[2]
        return None
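
    # For example, '"id" integer NOT NULL PRIMARY KEY AUTOINCREMENT' matches
    # the quoted alternative of the regex (m[1] == 'id'), while an unquoted
    # 'id integer PRIMARY KEY' matches the \w+ alternative (m[2] == 'id').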

    def _get_foreign_key_constraints(self, cursor, table_name):
        constraints = {}
        cursor.execute('PRAGMA foreign_key_list(%s)' % self.connection.ops.quote_name(table_name))
        for row in cursor.fetchall():
            # Remaining on_update/on_delete/match values are of no interest.
            id_, _, table, from_, to = row[:5]
            constraints['fk_%d' % id_] = {
                'columns': [from_],
                'primary_key': False,
                'unique': False,
                'foreign_key': (table, to),
                'check': False,
                'index': False,
            }
        return constraints
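
    # Each PRAGMA foreign_key_list() row is (id, seq, table, from, to,
    # on_update, on_delete, match), so a row like (0, 0, 'author',
    # 'author_id', 'id', ...) yields 'fk_0' with columns=['author_id'] and
    # foreign_key=('author', 'id').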

    def _parse_column_or_constraint_definition(self, tokens, columns):
        token = None
        is_constraint_definition = None
        field_name = None
        constraint_name = None
        unique = False
        unique_columns = []
        check = False
        check_columns = []
        braces_deep = 0
        for token in tokens:
            if token.match(sqlparse.tokens.Punctuation, '('):
                braces_deep += 1
            elif token.match(sqlparse.tokens.Punctuation, ')'):
                braces_deep -= 1
                if braces_deep < 0:
                    # End of columns and constraints for table definition.
                    break
            elif braces_deep == 0 and token.match(sqlparse.tokens.Punctuation, ','):
                # End of current column or constraint definition.
                break
            # Detect column or constraint definition by first token.
            if is_constraint_definition is None:
                is_constraint_definition = token.match(sqlparse.tokens.Keyword, 'CONSTRAINT')
                if is_constraint_definition:
                    continue
            if is_constraint_definition:
                # Detect constraint name by second token.
                if constraint_name is None:
                    if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
                        constraint_name = token.value
                    elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
                        constraint_name = token.value[1:-1]
                # Start constraint columns parsing after UNIQUE keyword.
                if token.match(sqlparse.tokens.Keyword, 'UNIQUE'):
                    unique = True
                    unique_braces_deep = braces_deep
                elif unique:
                    if unique_braces_deep == braces_deep:
                        if unique_columns:
                            # Stop constraint parsing.
                            unique = False
                        continue
                    if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
                        unique_columns.append(token.value)
                    elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
                        unique_columns.append(token.value[1:-1])
            else:
                # Detect field name by first token.
                if field_name is None:
                    if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
                        field_name = token.value
                    elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
                        field_name = token.value[1:-1]
                if token.match(sqlparse.tokens.Keyword, 'UNIQUE'):
                    unique_columns = [field_name]
            # Start constraint columns parsing after CHECK keyword.
            if token.match(sqlparse.tokens.Keyword, 'CHECK'):
                check = True
                check_braces_deep = braces_deep
            elif check:
                if check_braces_deep == braces_deep:
                    if check_columns:
                        # Stop constraint parsing.
                        check = False
                    continue
                if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
                    if token.value in columns:
                        check_columns.append(token.value)
                elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
                    if token.value[1:-1] in columns:
                        check_columns.append(token.value[1:-1])
        unique_constraint = {
            'unique': True,
            'columns': unique_columns,
            'primary_key': False,
            'foreign_key': None,
            'check': False,
            'index': False,
        } if unique_columns else None
        check_constraint = {
            'check': True,
            'columns': check_columns,
            'primary_key': False,
            'unique': False,
            'foreign_key': None,
            'index': False,
        } if check_columns else None
        return constraint_name, unique_constraint, check_constraint, token
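
    # For example, fed the tokens of 'CONSTRAINT "price_gte_0" CHECK
    # ("price" >= 0),' with 'price' in columns, this returns the constraint
    # name 'price_gte_0', no unique constraint, a check constraint on
    # ['price'], and the trailing ',' token.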

    def _parse_table_constraints(self, sql, columns):
        # Check constraint parsing is based on the SQLite syntax diagram.
        # https://www.sqlite.org/syntaxdiagrams.html#table-constraint
        statement = sqlparse.parse(sql)[0]
        constraints = {}
        unnamed_constraints_index = 0
        tokens = (token for token in statement.flatten() if not token.is_whitespace)
        # Go to columns and constraint definition
        for token in tokens:
            if token.match(sqlparse.tokens.Punctuation, '('):
                break
        # Parse columns and constraint definition
        while True:
            constraint_name, unique, check, end_token = self._parse_column_or_constraint_definition(tokens, columns)
            if unique:
                if constraint_name:
                    constraints[constraint_name] = unique
                else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = unique
            if check:
                if constraint_name:
                    constraints[constraint_name] = check
                else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = check
            if end_token.match(sqlparse.tokens.Punctuation, ')'):
                break
        return constraints
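
    # For example, parsing 'CREATE TABLE "x" ("price" integer CHECK ("price"
    # >= 0), CONSTRAINT "x_a_b_uniq" UNIQUE ("a", "b"))' with columns
    # {'price', 'a', 'b'} yields '__unnamed_constraint_1__' (check on
    # ['price']) and 'x_a_b_uniq' (unique on ['a', 'b']).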

    def get_constraints(self, cursor, table_name):
        """
        Retrieve any constraints or keys (unique, pk, fk, check, index) across
        one or more columns.
        """
        constraints = {}
        # Find inline check constraints.
        try:
            table_schema = cursor.execute(
                "SELECT sql FROM sqlite_master WHERE type='table' AND name=%s" % (
                    self.connection.ops.quote_name(table_name),
                )
            ).fetchone()[0]
        except TypeError:
            # table_name is a view.
            pass
        else:
            columns = {info.name for info in self.get_table_description(cursor, table_name)}
            constraints.update(self._parse_table_constraints(table_schema, columns))

        # Get the index info
        cursor.execute("PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name))
        for row in cursor.fetchall():
            # SQLite 3.8.9+ returns 5 columns; older versions return only 3.
            # Discard the last 2 columns if present.
            number, index, unique = row[:3]
            cursor.execute(
                "SELECT sql FROM sqlite_master "
                "WHERE type='index' AND name=%s" % self.connection.ops.quote_name(index)
            )
            # There's at most one row.
            sql, = cursor.fetchone() or (None,)
            # Inline constraints are already detected in
            # _parse_table_constraints(). The reasons to avoid fetching inline
            # constraints from `PRAGMA index_list` are:
            # - Inline constraints can have a different name and information
            #   than what `PRAGMA index_list` gives.
            # - Not all inline constraints may appear in `PRAGMA index_list`.
            if not sql:
                # An inline constraint
                continue
            # Get the index info for that index
            cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index))
            for index_rank, column_rank, column in cursor.fetchall():
                if index not in constraints:
                    constraints[index] = {
                        "columns": [],
                        "primary_key": False,
                        "unique": bool(unique),
                        "foreign_key": None,
                        "check": False,
                        "index": True,
                    }
                constraints[index]['columns'].append(column)
            # Add type and column orders for indexes
            if constraints[index]['index'] and not constraints[index]['unique']:
                # SQLite doesn't support any index type other than b-tree
                constraints[index]['type'] = Index.suffix
                order_info = sql.split('(')[-1].split(')')[0].split(',')
                orders = ['DESC' if info.endswith('DESC') else 'ASC' for info in order_info]
                constraints[index]['orders'] = orders
        # Get the PK
        pk_column = self.get_primary_key_column(cursor, table_name)
        if pk_column:
            # SQLite doesn't actually give a name to the PK constraint,
            # so we invent one. This is fine, as the SQLite backend never
            # deletes PK constraints by name, as you can't delete constraints
            # in SQLite; we remake the table with a new PK instead.
            constraints["__primary__"] = {
                "columns": [pk_column],
                "primary_key": True,
                "unique": False,  # It's not actually a unique constraint.
                "foreign_key": None,
                "check": False,
                "index": False,
            }
        constraints.update(self._get_foreign_key_constraints(cursor, table_name))
        return constraints
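
    # For the hypothetical "book" table above, the result includes
    # '__primary__' (columns=['id'], primary_key=True) and 'fk_0'
    # (columns=['author_id'], foreign_key=('author', 'id')), plus an entry
    # per named index.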