2009-12-22 23:18:51 +08:00
|
|
|
import keyword
|
2012-08-24 04:50:25 +08:00
|
|
|
import re
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2014-06-18 07:07:54 +08:00
|
|
|
from django.core.management.base import BaseCommand, CommandError
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db import DEFAULT_DB_ALIAS, connections
|
2016-10-06 18:02:48 +08:00
|
|
|
from django.db.models.constants import LOOKUP_SEP
|
2013-01-28 17:21:07 +08:00
|
|
|
|
2007-08-16 14:06:55 +08:00
|
|
|
|
2014-06-18 07:07:54 +08:00
|
|
|
class Command(BaseCommand):
    # Usage summary for the command.
    help = "Introspects the database tables in the given database and outputs a Django model module."

    # Run no system checks before executing this command.
    requires_system_checks = []

    # Option accepted programmatically (e.g. via call_command()) but not
    # exposed as a command-line flag; read in handle_inspection().
    stealth_options = ('table_name_filter',)

    # Module path used in the generated "from <db_module> import models" line.
    db_module = 'django.db'
|
|
|
|
|
2014-06-07 04:39:33 +08:00
|
|
|
def add_arguments(self, parser):
|
2016-03-29 06:33:29 +08:00
|
|
|
parser.add_argument(
|
2018-07-03 05:54:57 +08:00
|
|
|
'table', nargs='*', type=str,
|
2016-03-29 06:33:29 +08:00
|
|
|
help='Selects what tables or views should be introspected.',
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
2018-07-03 05:54:57 +08:00
|
|
|
'--database', default=DEFAULT_DB_ALIAS,
|
2016-03-29 06:33:29 +08:00
|
|
|
help='Nominates a database to introspect. Defaults to using the "default" database.',
|
|
|
|
)
|
2018-09-13 04:53:24 +08:00
|
|
|
parser.add_argument(
|
|
|
|
'--include-partitions', action='store_true', help='Also output models for partition tables.',
|
|
|
|
)
|
2018-01-10 00:04:56 +08:00
|
|
|
parser.add_argument(
|
|
|
|
'--include-views', action='store_true', help='Also output models for database views.',
|
|
|
|
)
|
2014-06-07 04:39:33 +08:00
|
|
|
|
2014-06-18 07:07:54 +08:00
|
|
|
def handle(self, **options):
|
2007-08-16 14:06:55 +08:00
|
|
|
try:
|
2009-12-22 23:18:51 +08:00
|
|
|
for line in self.handle_inspection(options):
|
2020-04-25 21:51:17 +08:00
|
|
|
self.stdout.write(line)
|
2007-08-16 14:06:55 +08:00
|
|
|
except NotImplementedError:
|
|
|
|
raise CommandError("Database inspection isn't supported for the currently selected database backend.")
|
|
|
|
|
2009-12-22 23:18:51 +08:00
|
|
|
def handle_inspection(self, options):
|
2014-06-07 04:39:33 +08:00
|
|
|
connection = connections[options['database']]
|
2012-06-02 07:58:53 +08:00
|
|
|
# 'table_name_filter' is a stealth option
|
|
|
|
table_name_filter = options.get('table_name_filter')
|
2007-08-16 14:06:55 +08:00
|
|
|
|
2016-01-24 00:47:07 +08:00
|
|
|
def table2model(table_name):
|
|
|
|
return re.sub(r'[^a-zA-Z0-9]', '', table_name.title())
|
|
|
|
|
2014-01-09 23:05:15 +08:00
|
|
|
with connection.cursor() as cursor:
|
|
|
|
yield "# This is an auto-generated Django model module."
|
|
|
|
yield "# You'll have to do the following manually to clean this up:"
|
|
|
|
yield "# * Rearrange models' order"
|
|
|
|
yield "# * Make sure each model has one field with primary_key=True"
|
2019-04-20 19:26:46 +08:00
|
|
|
yield "# * Make sure each ForeignKey and OneToOneField has `on_delete` set to the desired behavior"
|
2014-09-04 20:15:09 +08:00
|
|
|
yield (
|
|
|
|
"# * Remove `managed = False` lines if you wish to allow "
|
|
|
|
"Django to create, modify, and delete the table"
|
|
|
|
)
|
2014-01-09 23:05:15 +08:00
|
|
|
yield "# Feel free to rename the models, but don't rename db_table values or field names."
|
|
|
|
yield 'from %s import models' % self.db_module
|
|
|
|
known_models = []
|
2018-01-10 00:04:56 +08:00
|
|
|
table_info = connection.introspection.get_table_list(cursor)
|
2015-11-02 07:53:43 +08:00
|
|
|
|
2018-09-13 04:53:24 +08:00
|
|
|
# Determine types of tables and/or views to be introspected.
|
|
|
|
types = {'t'}
|
|
|
|
if options['include_partitions']:
|
|
|
|
types.add('p')
|
|
|
|
if options['include_views']:
|
|
|
|
types.add('v')
|
|
|
|
|
|
|
|
for table_name in (options['table'] or sorted(info.name for info in table_info if info.type in types)):
|
2014-01-09 23:05:15 +08:00
|
|
|
if table_name_filter is not None and callable(table_name_filter):
|
|
|
|
if not table_name_filter(table_name):
|
2013-09-12 22:03:29 +08:00
|
|
|
continue
|
2016-02-25 03:16:49 +08:00
|
|
|
try:
|
|
|
|
try:
|
|
|
|
relations = connection.introspection.get_relations(cursor, table_name)
|
|
|
|
except NotImplementedError:
|
|
|
|
relations = {}
|
|
|
|
try:
|
|
|
|
constraints = connection.introspection.get_constraints(cursor, table_name)
|
|
|
|
except NotImplementedError:
|
|
|
|
constraints = {}
|
2016-08-20 18:14:02 +08:00
|
|
|
primary_key_column = connection.introspection.get_primary_key_column(cursor, table_name)
|
|
|
|
unique_columns = [
|
|
|
|
c['columns'][0] for c in constraints.values()
|
|
|
|
if c['unique'] and len(c['columns']) == 1
|
|
|
|
]
|
2016-02-25 03:16:49 +08:00
|
|
|
table_description = connection.introspection.get_table_description(cursor, table_name)
|
|
|
|
except Exception as e:
|
|
|
|
yield "# Unable to inspect table '%s'" % table_name
|
2017-03-04 22:47:49 +08:00
|
|
|
yield "# The error was: %s" % e
|
2016-02-25 03:16:49 +08:00
|
|
|
continue
|
|
|
|
|
2014-01-09 23:05:15 +08:00
|
|
|
yield ''
|
|
|
|
yield ''
|
|
|
|
yield 'class %s(models.Model):' % table2model(table_name)
|
|
|
|
known_models.append(table2model(table_name))
|
|
|
|
used_column_names = [] # Holds column names used in the table so far
|
2015-11-07 00:29:23 +08:00
|
|
|
column_to_field_name = {} # Maps column names to names of model fields
|
2016-02-25 03:16:49 +08:00
|
|
|
for row in table_description:
|
2014-01-09 23:05:15 +08:00
|
|
|
comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
|
2019-02-05 19:22:08 +08:00
|
|
|
extra_params = {} # Holds Field parameters such as 'db_column'.
|
2018-07-21 07:58:16 +08:00
|
|
|
column_name = row.name
|
2015-01-11 03:27:30 +08:00
|
|
|
is_relation = column_name in relations
|
2014-01-09 23:05:15 +08:00
|
|
|
|
|
|
|
att_name, params, notes = self.normalize_col_name(
|
|
|
|
column_name, used_column_names, is_relation)
|
|
|
|
extra_params.update(params)
|
|
|
|
comment_notes.extend(notes)
|
|
|
|
|
|
|
|
used_column_names.append(att_name)
|
2015-11-07 00:29:23 +08:00
|
|
|
column_to_field_name[column_name] = att_name
|
2014-01-09 23:05:15 +08:00
|
|
|
|
|
|
|
# Add primary_key and unique, if necessary.
|
2016-08-20 18:14:02 +08:00
|
|
|
if column_name == primary_key_column:
|
|
|
|
extra_params['primary_key'] = True
|
|
|
|
elif column_name in unique_columns:
|
|
|
|
extra_params['unique'] = True
|
2014-01-09 23:05:15 +08:00
|
|
|
|
|
|
|
if is_relation:
|
2021-10-30 23:08:06 +08:00
|
|
|
ref_db_column, ref_db_table = relations[column_name]
|
2019-04-20 19:26:46 +08:00
|
|
|
if extra_params.pop('unique', False) or extra_params.get('primary_key'):
|
|
|
|
rel_type = 'OneToOneField'
|
|
|
|
else:
|
|
|
|
rel_type = 'ForeignKey'
|
2021-10-30 23:08:06 +08:00
|
|
|
ref_pk_column = connection.introspection.get_primary_key_column(cursor, ref_db_table)
|
|
|
|
if ref_pk_column and ref_pk_column != ref_db_column:
|
|
|
|
extra_params['to_field'] = ref_db_column
|
2015-09-12 07:33:12 +08:00
|
|
|
rel_to = (
|
2021-10-30 23:08:06 +08:00
|
|
|
'self' if ref_db_table == table_name
|
|
|
|
else table2model(ref_db_table)
|
2015-09-12 07:33:12 +08:00
|
|
|
)
|
2014-01-09 23:05:15 +08:00
|
|
|
if rel_to in known_models:
|
2019-04-20 19:26:46 +08:00
|
|
|
field_type = '%s(%s' % (rel_type, rel_to)
|
2014-01-09 23:05:15 +08:00
|
|
|
else:
|
2019-04-20 19:26:46 +08:00
|
|
|
field_type = "%s('%s'" % (rel_type, rel_to)
|
2013-02-01 03:55:00 +08:00
|
|
|
else:
|
2014-01-09 23:05:15 +08:00
|
|
|
# Calling `get_field_type` to get the field type string and any
|
2014-03-02 22:25:53 +08:00
|
|
|
# additional parameters and notes.
|
2014-01-09 23:05:15 +08:00
|
|
|
field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
|
|
|
|
extra_params.update(field_params)
|
|
|
|
comment_notes.extend(field_notes)
|
|
|
|
|
|
|
|
field_type += '('
|
|
|
|
|
|
|
|
# Don't output 'id = meta.AutoField(primary_key=True)', because
|
|
|
|
# that's assumed if it doesn't exist.
|
|
|
|
if att_name == 'id' and extra_params == {'primary_key': True}:
|
|
|
|
if field_type == 'AutoField(':
|
|
|
|
continue
|
2020-06-02 08:18:14 +08:00
|
|
|
elif field_type == connection.features.introspected_field_types['AutoField'] + '(':
|
2014-01-09 23:05:15 +08:00
|
|
|
comment_notes.append('AutoField?')
|
|
|
|
|
|
|
|
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
|
|
|
|
# table description.
|
2018-07-21 07:58:16 +08:00
|
|
|
if row.null_ok: # If it's NULL...
|
2018-03-16 22:52:04 +08:00
|
|
|
extra_params['blank'] = True
|
|
|
|
extra_params['null'] = True
|
2014-01-09 23:05:15 +08:00
|
|
|
|
|
|
|
field_desc = '%s = %s%s' % (
|
|
|
|
att_name,
|
|
|
|
# Custom fields will have a dotted path
|
|
|
|
'' if '.' in field_type else 'models.',
|
|
|
|
field_type,
|
|
|
|
)
|
2019-04-20 19:26:46 +08:00
|
|
|
if field_type.startswith(('ForeignKey(', 'OneToOneField(')):
|
2015-07-22 22:43:21 +08:00
|
|
|
field_desc += ', models.DO_NOTHING'
|
|
|
|
|
2014-01-09 23:05:15 +08:00
|
|
|
if extra_params:
|
|
|
|
if not field_desc.endswith('('):
|
|
|
|
field_desc += ', '
|
2018-07-21 06:09:34 +08:00
|
|
|
field_desc += ', '.join('%s=%r' % (k, v) for k, v in extra_params.items())
|
2014-01-09 23:05:15 +08:00
|
|
|
field_desc += ')'
|
|
|
|
if comment_notes:
|
|
|
|
field_desc += ' # ' + ' '.join(comment_notes)
|
|
|
|
yield ' %s' % field_desc
|
2018-01-10 00:04:56 +08:00
|
|
|
is_view = any(info.name == table_name and info.type == 'v' for info in table_info)
|
2018-09-13 04:53:24 +08:00
|
|
|
is_partition = any(info.name == table_name and info.type == 'p' for info in table_info)
|
2019-12-12 17:51:46 +08:00
|
|
|
yield from self.get_meta(table_name, constraints, column_to_field_name, is_view, is_partition)
|
2010-01-19 05:02:47 +08:00
|
|
|
|
2012-08-24 03:07:56 +08:00
|
|
|
def normalize_col_name(self, col_name, used_column_names, is_relation):
|
|
|
|
"""
|
|
|
|
Modify the column name to make it Python-compatible as a field name
|
|
|
|
"""
|
|
|
|
field_params = {}
|
|
|
|
field_notes = []
|
|
|
|
|
|
|
|
new_name = col_name.lower()
|
|
|
|
if new_name != col_name:
|
|
|
|
field_notes.append('Field name made lowercase.')
|
|
|
|
|
|
|
|
if is_relation:
|
|
|
|
if new_name.endswith('_id'):
|
|
|
|
new_name = new_name[:-3]
|
|
|
|
else:
|
|
|
|
field_params['db_column'] = col_name
|
|
|
|
|
2012-08-24 04:50:25 +08:00
|
|
|
new_name, num_repl = re.subn(r'\W', '_', new_name)
|
|
|
|
if num_repl > 0:
|
|
|
|
field_notes.append('Field renamed to remove unsuitable characters.')
|
2012-08-24 03:07:56 +08:00
|
|
|
|
2016-10-06 18:02:48 +08:00
|
|
|
if new_name.find(LOOKUP_SEP) >= 0:
|
|
|
|
while new_name.find(LOOKUP_SEP) >= 0:
|
|
|
|
new_name = new_name.replace(LOOKUP_SEP, '_')
|
|
|
|
if col_name.lower().find(LOOKUP_SEP) >= 0:
|
2012-08-24 04:50:25 +08:00
|
|
|
# Only add the comment if the double underscore was in the original name
|
|
|
|
field_notes.append("Field renamed because it contained more than one '_' in a row.")
|
2012-08-24 03:07:56 +08:00
|
|
|
|
|
|
|
if new_name.startswith('_'):
|
|
|
|
new_name = 'field%s' % new_name
|
|
|
|
field_notes.append("Field renamed because it started with '_'.")
|
|
|
|
|
|
|
|
if new_name.endswith('_'):
|
|
|
|
new_name = '%sfield' % new_name
|
|
|
|
field_notes.append("Field renamed because it ended with '_'.")
|
|
|
|
|
|
|
|
if keyword.iskeyword(new_name):
|
|
|
|
new_name += '_field'
|
|
|
|
field_notes.append('Field renamed because it was a Python reserved word.')
|
|
|
|
|
|
|
|
if new_name[0].isdigit():
|
|
|
|
new_name = 'number_%s' % new_name
|
|
|
|
field_notes.append("Field renamed because it wasn't a valid Python identifier.")
|
|
|
|
|
|
|
|
if new_name in used_column_names:
|
|
|
|
num = 0
|
|
|
|
while '%s_%d' % (new_name, num) in used_column_names:
|
|
|
|
num += 1
|
|
|
|
new_name = '%s_%d' % (new_name, num)
|
|
|
|
field_notes.append('Field renamed because of name conflict.')
|
|
|
|
|
|
|
|
if col_name != new_name and field_notes:
|
|
|
|
field_params['db_column'] = col_name
|
|
|
|
|
|
|
|
return new_name, field_params, field_notes
|
|
|
|
|
2010-01-19 05:02:47 +08:00
|
|
|
def get_field_type(self, connection, table_name, row):
|
|
|
|
"""
|
|
|
|
Given the database connection, the table name, and the cursor row
|
|
|
|
description, this routine will return the given field type name, as
|
|
|
|
well as any additional keyword parameters and notes for the field.
|
|
|
|
"""
|
2019-02-05 19:22:08 +08:00
|
|
|
field_params = {}
|
2010-01-19 05:02:47 +08:00
|
|
|
field_notes = []
|
|
|
|
|
|
|
|
try:
|
2018-07-21 07:58:16 +08:00
|
|
|
field_type = connection.introspection.get_field_type(row.type_code, row)
|
2010-01-19 05:02:47 +08:00
|
|
|
except KeyError:
|
|
|
|
field_type = 'TextField'
|
|
|
|
field_notes.append('This field type is a guess.')
|
|
|
|
|
|
|
|
# Add max_length for all CharFields.
|
2018-07-21 07:58:16 +08:00
|
|
|
if field_type == 'CharField' and row.internal_size:
|
|
|
|
field_params['max_length'] = int(row.internal_size)
|
2010-01-19 05:02:47 +08:00
|
|
|
|
2020-07-18 19:17:39 +08:00
|
|
|
if field_type in {'CharField', 'TextField'} and row.collation:
|
|
|
|
field_params['db_collation'] = row.collation
|
|
|
|
|
2010-01-19 05:02:47 +08:00
|
|
|
if field_type == 'DecimalField':
|
2018-07-21 07:58:16 +08:00
|
|
|
if row.precision is None or row.scale is None:
|
2013-04-02 00:32:57 +08:00
|
|
|
field_notes.append(
|
|
|
|
'max_digits and decimal_places have been guessed, as this '
|
|
|
|
'database handles decimal fields as float')
|
2018-07-21 07:58:16 +08:00
|
|
|
field_params['max_digits'] = row.precision if row.precision is not None else 10
|
|
|
|
field_params['decimal_places'] = row.scale if row.scale is not None else 5
|
2013-04-02 00:32:57 +08:00
|
|
|
else:
|
2018-07-21 07:58:16 +08:00
|
|
|
field_params['max_digits'] = row.precision
|
|
|
|
field_params['decimal_places'] = row.scale
|
2010-01-19 05:02:47 +08:00
|
|
|
|
|
|
|
return field_type, field_params, field_notes
|
|
|
|
|
2018-09-13 04:53:24 +08:00
|
|
|
def get_meta(self, table_name, constraints, column_to_field_name, is_view, is_partition):
|
2010-01-19 05:02:47 +08:00
|
|
|
"""
|
|
|
|
Return a sequence comprising the lines of code necessary
|
|
|
|
to construct the inner Meta class for the model corresponding
|
|
|
|
to the given database table name.
|
|
|
|
"""
|
2014-07-15 04:42:05 +08:00
|
|
|
unique_together = []
|
2016-12-13 19:38:09 +08:00
|
|
|
has_unsupported_constraint = False
|
2017-12-07 06:17:59 +08:00
|
|
|
for params in constraints.values():
|
2014-07-15 04:42:05 +08:00
|
|
|
if params['unique']:
|
|
|
|
columns = params['columns']
|
2016-12-13 19:38:09 +08:00
|
|
|
if None in columns:
|
|
|
|
has_unsupported_constraint = True
|
|
|
|
columns = [x for x in columns if x is not None]
|
2014-07-15 04:42:05 +08:00
|
|
|
if len(columns) > 1:
|
2018-03-21 22:37:56 +08:00
|
|
|
unique_together.append(str(tuple(column_to_field_name[c] for c in columns)))
|
2018-09-13 04:53:24 +08:00
|
|
|
if is_view:
|
|
|
|
managed_comment = " # Created from a view. Don't remove."
|
|
|
|
elif is_partition:
|
|
|
|
managed_comment = " # Created from a partition. Don't remove."
|
|
|
|
else:
|
|
|
|
managed_comment = ''
|
2016-12-13 19:38:09 +08:00
|
|
|
meta = ['']
|
|
|
|
if has_unsupported_constraint:
|
|
|
|
meta.append(' # A unique constraint could not be introspected.')
|
|
|
|
meta += [
|
|
|
|
' class Meta:',
|
|
|
|
' managed = False%s' % managed_comment,
|
|
|
|
' db_table = %r' % table_name
|
|
|
|
]
|
2014-07-15 04:42:05 +08:00
|
|
|
if unique_together:
|
|
|
|
tup = '(' + ', '.join(unique_together) + ',)'
|
|
|
|
meta += [" unique_together = %s" % tup]
|
|
|
|
return meta
|