2009-12-22 23:18:51 +08:00
|
|
|
import keyword
|
2012-08-24 04:50:25 +08:00
|
|
|
import re
|
2015-01-28 20:35:27 +08:00
|
|
|
from collections import OrderedDict
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2014-06-18 07:07:54 +08:00
|
|
|
from django.core.management.base import BaseCommand, CommandError
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db import DEFAULT_DB_ALIAS, connections
|
2016-10-06 18:02:48 +08:00
|
|
|
from django.db.models.constants import LOOKUP_SEP
|
2016-02-25 03:16:49 +08:00
|
|
|
from django.utils.encoding import force_text
|
2013-01-28 17:21:07 +08:00
|
|
|
|
2007-08-16 14:06:55 +08:00
|
|
|
|
2014-06-18 07:07:54 +08:00
|
|
|
class Command(BaseCommand):
    # Generates Django model code from the tables of an existing database.
    help = "Introspects the database tables in the given database and outputs a Django model module."

    # Skip system checks: this command runs against arbitrary databases that
    # need not match the project's installed models.
    requires_system_checks = False

    # Module path used in the generated "from ... import models" line
    # (see handle_inspection); a subclass can point this elsewhere.
    db_module = 'django.db'
|
|
|
|
|
2014-06-07 04:39:33 +08:00
|
|
|
def add_arguments(self, parser):
|
2016-03-29 06:33:29 +08:00
|
|
|
parser.add_argument(
|
|
|
|
'table', action='store', nargs='*', type=str,
|
|
|
|
help='Selects what tables or views should be introspected.',
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
'--database', action='store', dest='database', default=DEFAULT_DB_ALIAS,
|
|
|
|
help='Nominates a database to introspect. Defaults to using the "default" database.',
|
|
|
|
)
|
2014-06-07 04:39:33 +08:00
|
|
|
|
2014-06-18 07:07:54 +08:00
|
|
|
def handle(self, **options):
|
2007-08-16 14:06:55 +08:00
|
|
|
try:
|
2009-12-22 23:18:51 +08:00
|
|
|
for line in self.handle_inspection(options):
|
2011-01-24 22:58:05 +08:00
|
|
|
self.stdout.write("%s\n" % line)
|
2007-08-16 14:06:55 +08:00
|
|
|
except NotImplementedError:
|
|
|
|
raise CommandError("Database inspection isn't supported for the currently selected database backend.")
|
|
|
|
|
2009-12-22 23:18:51 +08:00
|
|
|
    def handle_inspection(self, options):
        """
        Generate, line by line, the source of a models module describing the
        tables of the database selected by options['database'].
        """
        connection = connections[options['database']]
        # 'table_name_filter' is a stealth option
        table_name_filter = options.get('table_name_filter')

        def table2model(table_name):
            # Turn a table name into a CamelCase model class name by
            # title-casing and stripping non-alphanumeric characters.
            return re.sub(r'[^a-zA-Z0-9]', '', table_name.title())

        def strip_prefix(s):
            # Drop a leading Python 2 unicode marker from a repr() string.
            return s[1:] if s.startswith("u'") else s

        with connection.cursor() as cursor:
            # Header comments for the generated module.
            yield "# This is an auto-generated Django model module."
            yield "# You'll have to do the following manually to clean this up:"
            yield "# * Rearrange models' order"
            yield "# * Make sure each model has one field with primary_key=True"
            yield "# * Make sure each ForeignKey has `on_delete` set to the desired behavior."
            yield (
                "# * Remove `managed = False` lines if you wish to allow "
                "Django to create, modify, and delete the table"
            )
            yield "# Feel free to rename the models, but don't rename db_table values or field names."
            yield 'from %s import models' % self.db_module
            # Model names emitted so far; used to decide whether a FK target
            # can be referenced directly or needs a string (forward) reference.
            known_models = []
            # Explicit table arguments win; otherwise introspect everything.
            tables_to_introspect = options['table'] or connection.introspection.table_names(cursor)

            for table_name in tables_to_introspect:
                if table_name_filter is not None and callable(table_name_filter):
                    if not table_name_filter(table_name):
                        continue
                try:
                    # Relations and constraints are optional backend features;
                    # fall back to empty when not implemented.
                    try:
                        relations = connection.introspection.get_relations(cursor, table_name)
                    except NotImplementedError:
                        relations = {}
                    try:
                        constraints = connection.introspection.get_constraints(cursor, table_name)
                    except NotImplementedError:
                        constraints = {}
                    primary_key_column = connection.introspection.get_primary_key_column(cursor, table_name)
                    # Columns covered by a single-column unique constraint.
                    unique_columns = [
                        c['columns'][0] for c in constraints.values()
                        if c['unique'] and len(c['columns']) == 1
                    ]
                    table_description = connection.introspection.get_table_description(cursor, table_name)
                except Exception as e:
                    # Emit the failure as comments and move on to the next table.
                    yield "# Unable to inspect table '%s'" % table_name
                    yield "# The error was: %s" % force_text(e)
                    continue

                yield ''
                yield ''
                yield 'class %s(models.Model):' % table2model(table_name)
                known_models.append(table2model(table_name))
                used_column_names = []  # Holds column names used in the table so far
                column_to_field_name = {}  # Maps column names to names of model fields
                for row in table_description:
                    comment_notes = []  # Holds Field notes, to be displayed in a Python comment.
                    extra_params = OrderedDict()  # Holds Field parameters such as 'db_column'.
                    column_name = row[0]
                    is_relation = column_name in relations

                    att_name, params, notes = self.normalize_col_name(
                        column_name, used_column_names, is_relation)
                    extra_params.update(params)
                    comment_notes.extend(notes)

                    used_column_names.append(att_name)
                    column_to_field_name[column_name] = att_name

                    # Add primary_key and unique, if necessary.
                    if column_name == primary_key_column:
                        extra_params['primary_key'] = True
                    elif column_name in unique_columns:
                        extra_params['unique'] = True

                    if is_relation:
                        # Self-referential FKs use the 'self' shorthand.
                        rel_to = (
                            "self" if relations[column_name][1] == table_name
                            else table2model(relations[column_name][1])
                        )
                        if rel_to in known_models:
                            field_type = 'ForeignKey(%s' % rel_to
                        else:
                            field_type = "ForeignKey('%s'" % rel_to
                    else:
                        # Calling `get_field_type` to get the field type string and any
                        # additional parameters and notes.
                        field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
                        extra_params.update(field_params)
                        comment_notes.extend(field_notes)

                        field_type += '('

                    # Don't output 'id = meta.AutoField(primary_key=True)', because
                    # that's assumed if it doesn't exist.
                    if att_name == 'id' and extra_params == {'primary_key': True}:
                        if field_type == 'AutoField(':
                            continue
                        elif field_type == 'IntegerField(' and not connection.features.can_introspect_autofield:
                            comment_notes.append('AutoField?')

                    # Add 'null' and 'blank', if the 'null_ok' flag was present in the
                    # table description.
                    if row[6]:  # If it's NULL...
                        if field_type == 'BooleanField(':
                            field_type = 'NullBooleanField('
                        else:
                            extra_params['blank'] = True
                            extra_params['null'] = True

                    field_desc = '%s = %s%s' % (
                        att_name,
                        # Custom fields will have a dotted path
                        '' if '.' in field_type else 'models.',
                        field_type,
                    )
                    if field_type.startswith('ForeignKey('):
                        field_desc += ', models.DO_NOTHING'

                    if extra_params:
                        if not field_desc.endswith('('):
                            field_desc += ', '
                        field_desc += ', '.join(
                            '%s=%s' % (k, strip_prefix(repr(v)))
                            for k, v in extra_params.items())
                    field_desc += ')'
                    if comment_notes:
                        field_desc += '  # ' + ' '.join(comment_notes)
                    yield '    %s' % field_desc
                # Close the model with its inner Meta class.
                for meta_line in self.get_meta(table_name, constraints, column_to_field_name):
                    yield meta_line
|
2010-01-19 05:02:47 +08:00
|
|
|
|
2012-08-24 03:07:56 +08:00
|
|
|
def normalize_col_name(self, col_name, used_column_names, is_relation):
|
|
|
|
"""
|
|
|
|
Modify the column name to make it Python-compatible as a field name
|
|
|
|
"""
|
|
|
|
field_params = {}
|
|
|
|
field_notes = []
|
|
|
|
|
|
|
|
new_name = col_name.lower()
|
|
|
|
if new_name != col_name:
|
|
|
|
field_notes.append('Field name made lowercase.')
|
|
|
|
|
|
|
|
if is_relation:
|
|
|
|
if new_name.endswith('_id'):
|
|
|
|
new_name = new_name[:-3]
|
|
|
|
else:
|
|
|
|
field_params['db_column'] = col_name
|
|
|
|
|
2012-08-24 04:50:25 +08:00
|
|
|
new_name, num_repl = re.subn(r'\W', '_', new_name)
|
|
|
|
if num_repl > 0:
|
|
|
|
field_notes.append('Field renamed to remove unsuitable characters.')
|
2012-08-24 03:07:56 +08:00
|
|
|
|
2016-10-06 18:02:48 +08:00
|
|
|
if new_name.find(LOOKUP_SEP) >= 0:
|
|
|
|
while new_name.find(LOOKUP_SEP) >= 0:
|
|
|
|
new_name = new_name.replace(LOOKUP_SEP, '_')
|
|
|
|
if col_name.lower().find(LOOKUP_SEP) >= 0:
|
2012-08-24 04:50:25 +08:00
|
|
|
# Only add the comment if the double underscore was in the original name
|
|
|
|
field_notes.append("Field renamed because it contained more than one '_' in a row.")
|
2012-08-24 03:07:56 +08:00
|
|
|
|
|
|
|
if new_name.startswith('_'):
|
|
|
|
new_name = 'field%s' % new_name
|
|
|
|
field_notes.append("Field renamed because it started with '_'.")
|
|
|
|
|
|
|
|
if new_name.endswith('_'):
|
|
|
|
new_name = '%sfield' % new_name
|
|
|
|
field_notes.append("Field renamed because it ended with '_'.")
|
|
|
|
|
|
|
|
if keyword.iskeyword(new_name):
|
|
|
|
new_name += '_field'
|
|
|
|
field_notes.append('Field renamed because it was a Python reserved word.')
|
|
|
|
|
|
|
|
if new_name[0].isdigit():
|
|
|
|
new_name = 'number_%s' % new_name
|
|
|
|
field_notes.append("Field renamed because it wasn't a valid Python identifier.")
|
|
|
|
|
|
|
|
if new_name in used_column_names:
|
|
|
|
num = 0
|
|
|
|
while '%s_%d' % (new_name, num) in used_column_names:
|
|
|
|
num += 1
|
|
|
|
new_name = '%s_%d' % (new_name, num)
|
|
|
|
field_notes.append('Field renamed because of name conflict.')
|
|
|
|
|
|
|
|
if col_name != new_name and field_notes:
|
|
|
|
field_params['db_column'] = col_name
|
|
|
|
|
|
|
|
return new_name, field_params, field_notes
|
|
|
|
|
2010-01-19 05:02:47 +08:00
|
|
|
def get_field_type(self, connection, table_name, row):
|
|
|
|
"""
|
|
|
|
Given the database connection, the table name, and the cursor row
|
|
|
|
description, this routine will return the given field type name, as
|
|
|
|
well as any additional keyword parameters and notes for the field.
|
|
|
|
"""
|
2013-08-03 13:41:15 +08:00
|
|
|
field_params = OrderedDict()
|
2010-01-19 05:02:47 +08:00
|
|
|
field_notes = []
|
|
|
|
|
|
|
|
try:
|
|
|
|
field_type = connection.introspection.get_field_type(row[1], row)
|
|
|
|
except KeyError:
|
|
|
|
field_type = 'TextField'
|
|
|
|
field_notes.append('This field type is a guess.')
|
|
|
|
|
2014-07-06 02:28:30 +08:00
|
|
|
# This is a hook for data_types_reverse to return a tuple of
|
2010-01-19 05:02:47 +08:00
|
|
|
# (field_type, field_params_dict).
|
|
|
|
if type(field_type) is tuple:
|
|
|
|
field_type, new_params = field_type
|
|
|
|
field_params.update(new_params)
|
|
|
|
|
|
|
|
# Add max_length for all CharFields.
|
|
|
|
if field_type == 'CharField' and row[3]:
|
2013-02-01 03:34:36 +08:00
|
|
|
field_params['max_length'] = int(row[3])
|
2010-01-19 05:02:47 +08:00
|
|
|
|
|
|
|
if field_type == 'DecimalField':
|
2013-04-02 00:32:57 +08:00
|
|
|
if row[4] is None or row[5] is None:
|
|
|
|
field_notes.append(
|
|
|
|
'max_digits and decimal_places have been guessed, as this '
|
|
|
|
'database handles decimal fields as float')
|
|
|
|
field_params['max_digits'] = row[4] if row[4] is not None else 10
|
|
|
|
field_params['decimal_places'] = row[5] if row[5] is not None else 5
|
|
|
|
else:
|
|
|
|
field_params['max_digits'] = row[4]
|
|
|
|
field_params['decimal_places'] = row[5]
|
2010-01-19 05:02:47 +08:00
|
|
|
|
|
|
|
return field_type, field_params, field_notes
|
|
|
|
|
2015-11-07 00:29:23 +08:00
|
|
|
def get_meta(self, table_name, constraints, column_to_field_name):
|
2010-01-19 05:02:47 +08:00
|
|
|
"""
|
|
|
|
Return a sequence comprising the lines of code necessary
|
|
|
|
to construct the inner Meta class for the model corresponding
|
|
|
|
to the given database table name.
|
|
|
|
"""
|
2014-07-15 04:42:05 +08:00
|
|
|
unique_together = []
|
|
|
|
for index, params in constraints.items():
|
|
|
|
if params['unique']:
|
|
|
|
columns = params['columns']
|
|
|
|
if len(columns) > 1:
|
|
|
|
# we do not want to include the u"" or u'' prefix
|
|
|
|
# so we build the string rather than interpolate the tuple
|
2015-11-07 00:29:23 +08:00
|
|
|
tup = '(' + ', '.join("'%s'" % column_to_field_name[c] for c in columns) + ')'
|
2014-07-15 04:42:05 +08:00
|
|
|
unique_together.append(tup)
|
|
|
|
meta = ["",
|
2014-01-16 07:01:30 +08:00
|
|
|
" class Meta:",
|
2013-02-03 08:08:45 +08:00
|
|
|
" managed = False",
|
2014-01-16 07:01:30 +08:00
|
|
|
" db_table = '%s'" % table_name]
|
2014-07-15 04:42:05 +08:00
|
|
|
if unique_together:
|
|
|
|
tup = '(' + ', '.join(unique_together) + ',)'
|
|
|
|
meta += [" unique_together = %s" % tup]
|
|
|
|
return meta
|