import os
import re
from io import StringIO
from unittest import mock, skipUnless

from django.core.management import call_command
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature

from .models import PeopleMoreData, test_collation
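
# These tests drive the `inspectdb` management command through call_command()
# and assert on the model source it writes to stdout. The inspected tables are
# created from this app's models (models.py), so their names all start with
# 'inspectdb_'. For orientation, the generated source the assertions below look
# for has roughly this shape (illustrative sketch; field details vary by
# backend):
#
#     class InspectdbPeople(models.Model):
#         name = models.CharField(max_length=255)
#
#         class Meta:
#             managed = False
#             db_table = 'inspectdb_people'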


def inspectdb_tables_only(table_name):
    """
    Limit introspection to tables created for models of this app.
    Some databases such as Oracle are extremely slow at introspection.
    """
    return table_name.startswith('inspectdb_')


def inspectdb_views_only(table_name):
    return (
        table_name.startswith('inspectdb_') and
        table_name.endswith(('_materialized', '_view'))
    )


def special_table_only(table_name):
    return table_name.startswith('inspectdb_special')


class InspectDBTestCase(TestCase):
    unique_re = re.compile(r'.*unique_together = \((.+),\).*')
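
    # `table_name_filter` is accepted as a keyword argument by the command
    # class rather than exposed as a command-line flag, hence "stealth". The
    # test suite uses it to restrict introspection to its own tables.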
    def test_stealth_table_name_filter_option(self):
        out = StringIO()
        call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
        error_message = "inspectdb has examined a table that should have been filtered out."
        # contrib.contenttypes is one of the apps always installed when running
        # the Django test suite, check that one of its tables hasn't been
        # inspected
        self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message)

    def test_table_option(self):
        """
        inspectdb can inspect a subset of tables by passing the table names as
        arguments.
        """
        out = StringIO()
        call_command('inspectdb', 'inspectdb_people', stdout=out)
        output = out.getvalue()
        self.assertIn('class InspectdbPeople(models.Model):', output)
        self.assertNotIn("InspectdbPeopledata", output)
    def make_field_type_asserter(self):
        """Call inspectdb and return a function to validate a field type in its output"""
        out = StringIO()
        call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
        output = out.getvalue()

        def assertFieldType(name, definition):
            out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE)[1]
            self.assertEqual(definition, out_def)

        return assertFieldType
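
    # connection.features.introspected_field_types maps a Django field class
    # name to the field type that introspection reports for it on the current
    # backend, so the expected definitions below vary per database.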
    def test_field_types(self):
        """Test introspection of various Django field types"""
        assertFieldType = self.make_field_type_asserter()
        introspected_field_types = connection.features.introspected_field_types
        char_field_type = introspected_field_types['CharField']
        # Inspecting Oracle DB doesn't produce correct results (#19884):
        # - it reports fields as blank=True when they aren't.
        if not connection.features.interprets_empty_strings_as_nulls and char_field_type == 'CharField':
            assertFieldType('char_field', "models.CharField(max_length=10)")
            assertFieldType('null_char_field', "models.CharField(max_length=10, blank=True, null=True)")
            assertFieldType('email_field', "models.CharField(max_length=254)")
            assertFieldType('file_field', "models.CharField(max_length=100)")
            assertFieldType('file_path_field', "models.CharField(max_length=100)")
            assertFieldType('slug_field', "models.CharField(max_length=50)")
            assertFieldType('text_field', "models.TextField()")
            assertFieldType('url_field', "models.CharField(max_length=200)")
        if char_field_type == 'TextField':
            assertFieldType('char_field', 'models.TextField()')
            assertFieldType('null_char_field', 'models.TextField(blank=True, null=True)')
            assertFieldType('email_field', 'models.TextField()')
            assertFieldType('file_field', 'models.TextField()')
            assertFieldType('file_path_field', 'models.TextField()')
            assertFieldType('slug_field', 'models.TextField()')
            assertFieldType('text_field', 'models.TextField()')
            assertFieldType('url_field', 'models.TextField()')
        assertFieldType('date_field', "models.DateField()")
        assertFieldType('date_time_field', "models.DateTimeField()")
        if introspected_field_types['GenericIPAddressField'] == 'GenericIPAddressField':
            assertFieldType('gen_ip_address_field', "models.GenericIPAddressField()")
        elif not connection.features.interprets_empty_strings_as_nulls:
            assertFieldType('gen_ip_address_field', "models.CharField(max_length=39)")
        assertFieldType('time_field', 'models.%s()' % introspected_field_types['TimeField'])
        if connection.features.has_native_uuid_field:
            assertFieldType('uuid_field', "models.UUIDField()")
        elif not connection.features.interprets_empty_strings_as_nulls:
            assertFieldType('uuid_field', "models.CharField(max_length=32)")

    @skipUnlessDBFeature('can_introspect_json_field', 'supports_json_field')
    def test_json_field(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_jsonfieldcolumntype', stdout=out)
        output = out.getvalue()
        if not connection.features.interprets_empty_strings_as_nulls:
            self.assertIn('json_field = models.JSONField()', output)
        self.assertIn('null_json_field = models.JSONField(blank=True, null=True)', output)
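
    # `test_collation` (imported from .models) names a collation suitable for
    # the current backend, or is None when such collations aren't available,
    # which is what the skipUnless() guards below check.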
    @skipUnlessDBFeature('supports_collation_on_charfield')
    @skipUnless(test_collation, 'Language collations are not supported.')
    def test_char_field_db_collation(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_charfielddbcollation', stdout=out)
        output = out.getvalue()
        if not connection.features.interprets_empty_strings_as_nulls:
            self.assertIn(
                "char_field = models.CharField(max_length=10, "
                "db_collation='%s')" % test_collation,
                output,
            )
        else:
            self.assertIn(
                "char_field = models.CharField(max_length=10, "
                "db_collation='%s', blank=True, null=True)" % test_collation,
                output,
            )

    @skipUnlessDBFeature('supports_collation_on_textfield')
    @skipUnless(test_collation, 'Language collations are not supported.')
    def test_text_field_db_collation(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_textfielddbcollation', stdout=out)
        output = out.getvalue()
        if not connection.features.interprets_empty_strings_as_nulls:
            self.assertIn(
                "text_field = models.TextField(db_collation='%s')" % test_collation,
                output,
            )
        else:
            self.assertIn(
                "text_field = models.TextField(db_collation='%s', blank=True, "
                "null=True)" % test_collation,
                output,
            )

    def test_number_field_types(self):
        """Test introspection of various Django numeric field types"""
        assertFieldType = self.make_field_type_asserter()
        introspected_field_types = connection.features.introspected_field_types

        auto_field_type = connection.features.introspected_field_types['AutoField']
        if auto_field_type != 'AutoField':
            assertFieldType('id', "models.%s(primary_key=True)  # AutoField?" % auto_field_type)

        assertFieldType('big_int_field', 'models.%s()' % introspected_field_types['BigIntegerField'])

        bool_field_type = introspected_field_types['BooleanField']
        assertFieldType('bool_field', "models.{}()".format(bool_field_type))
        assertFieldType('null_bool_field', 'models.{}(blank=True, null=True)'.format(bool_field_type))

        if connection.vendor != 'sqlite':
            assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)")
        else:  # Guessed arguments on SQLite, see #5014
            assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5)  "
                            "# max_digits and decimal_places have been guessed, "
                            "as this database handles decimal fields as float")

        assertFieldType('float_field', "models.FloatField()")
        assertFieldType('int_field', 'models.%s()' % introspected_field_types['IntegerField'])
        assertFieldType('pos_int_field', 'models.%s()' % introspected_field_types['PositiveIntegerField'])
        assertFieldType('pos_big_int_field', 'models.%s()' % introspected_field_types['PositiveBigIntegerField'])
        assertFieldType('pos_small_int_field', 'models.%s()' % introspected_field_types['PositiveSmallIntegerField'])
        assertFieldType('small_int_field', 'models.%s()' % introspected_field_types['SmallIntegerField'])
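
    # Foreign key columns whose attribute name would be a Python keyword (the
    # 'from_id' column would normally become 'from') must be renamed by
    # inspectdb, keeping the original column name via db_column.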
    @skipUnlessDBFeature('can_introspect_foreign_keys')
    def test_attribute_name_not_python_keyword(self):
        out = StringIO()
        call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
        output = out.getvalue()
        error_message = "inspectdb generated an attribute name which is a Python keyword"
        # Recursive foreign keys should be set to 'self'
        self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
        self.assertNotIn(
            "from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
            output,
            msg=error_message,
        )
        # As InspectdbPeople model is defined after InspectdbMessage, it should be quoted
        self.assertIn(
            "from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, db_column='from_id')",
            output,
        )
        self.assertIn(
            'people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, primary_key=True)',
            output,
        )
        self.assertIn(
            'people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)',
            output,
        )

    def test_digits_column_name_introspection(self):
        """Introspection of column names that consist of or start with digits (#16536/#17676)"""
        char_field_type = connection.features.introspected_field_types['CharField']
        out = StringIO()
        call_command('inspectdb', 'inspectdb_digitsincolumnname', stdout=out)
        output = out.getvalue()
        error_message = "inspectdb generated a model field name which is a number"
        self.assertNotIn(' 123 = models.%s' % char_field_type, output, msg=error_message)
        self.assertIn('number_123 = models.%s' % char_field_type, output)

        error_message = "inspectdb generated a model field name which starts with a digit"
        self.assertNotIn(' 4extra = models.%s' % char_field_type, output, msg=error_message)
        self.assertIn('number_4extra = models.%s' % char_field_type, output)

        self.assertNotIn(' 45extra = models.%s' % char_field_type, output, msg=error_message)
        self.assertIn('number_45extra = models.%s' % char_field_type, output)
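
    # Characters that aren't valid in Python identifiers are expected to be
    # mangled into safe attribute names, with the original column name kept in
    # db_column whenever the two differ.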
    def test_special_column_name_introspection(self):
        """
        Introspection of column names containing special characters,
        unsuitable for Python identifiers
        """
        out = StringIO()
        call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
        output = out.getvalue()
        base_name = connection.introspection.identifier_converter('Field')
        integer_field_type = connection.features.introspected_field_types['IntegerField']
        self.assertIn("field = models.%s()" % integer_field_type, output)
        self.assertIn("field_field = models.%s(db_column='%s_')" % (integer_field_type, base_name), output)
        self.assertIn("field_field_0 = models.%s(db_column='%s__')" % (integer_field_type, base_name), output)
        self.assertIn("field_field_1 = models.%s(db_column='__field')" % integer_field_type, output)
        self.assertIn("prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output)
        self.assertIn("tamaño = models.%s()" % integer_field_type, output)

    def test_table_name_introspection(self):
        """
        Introspection of table names containing special characters,
        unsuitable for Python identifiers
        """
        out = StringIO()
        call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
        output = out.getvalue()
        self.assertIn("class InspectdbSpecialTableName(models.Model):", output)

    def test_managed_models(self):
        """By default the command generates models with `Meta.managed = False` (#14305)"""
        out = StringIO()
        call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
        output = out.getvalue()
        self.longMessage = False
        self.assertIn("        managed = False", output, msg='inspectdb should generate unmanaged models.')

    def test_unique_together_meta(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_uniquetogether', stdout=out)
        output = out.getvalue()
        self.assertIn("    unique_together = (('", output)
        unique_together_match = self.unique_re.findall(output)
        # There should be one unique_together tuple.
        self.assertEqual(len(unique_together_match), 1)
        fields = unique_together_match[0]
        # Fields with db_column = field name.
        self.assertIn("('field1', 'field2')", fields)
        # Fields from columns whose names are Python keywords.
        self.assertIn("('field1', 'field2')", fields)
        # Fields whose names normalize to the same Python field name and hence
        # are given an integer suffix.
        self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
    @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
    def test_unsupported_unique_together(self):
        """Unsupported index types (COALESCE here) are skipped."""
        with connection.cursor() as c:
            c.execute(
                'CREATE UNIQUE INDEX Findex ON %s '
                '(id, people_unique_id, COALESCE(message_id, -1))' % PeopleMoreData._meta.db_table
            )
        try:
            out = StringIO()
            call_command(
                'inspectdb',
                table_name_filter=lambda tn: tn.startswith(PeopleMoreData._meta.db_table),
                stdout=out,
            )
            output = out.getvalue()
            self.assertIn('# A unique constraint could not be introspected.', output)
            self.assertEqual(self.unique_re.findall(output), ["('id', 'people_unique')"])
        finally:
            with connection.cursor() as c:
                c.execute('DROP INDEX Findex')

    @skipUnless(connection.vendor == 'sqlite',
                "Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test")
    def test_custom_fields(self):
        """
        Introspection of columns with a custom field (#21090)
        """
        out = StringIO()
        orig_data_types_reverse = connection.introspection.data_types_reverse
        try:
            connection.introspection.data_types_reverse = {
                'text': 'myfields.TextField',
                'bigint': 'BigIntegerField',
            }
            call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
            output = out.getvalue()
            self.assertIn("text_field = myfields.TextField()", output)
            self.assertIn("big_int_field = models.BigIntegerField()", output)
        finally:
            connection.introspection.data_types_reverse = orig_data_types_reverse

    def test_introspection_errors(self):
        """
        Introspection errors should not crash the command, and the error should
        be visible in the output.
        """
        out = StringIO()
        with mock.patch('django.db.connection.introspection.get_table_list',
                        return_value=[TableInfo(name='nonexistent', type='t')]):
            call_command('inspectdb', stdout=out)
        output = out.getvalue()
        self.assertIn("# Unable to inspect table 'nonexistent'", output)
        # The error message depends on the backend
        self.assertIn("# The error was:", output)
class InspectDBTransactionalTests(TransactionTestCase):
    available_apps = ['inspectdb']

    def test_include_views(self):
        """inspectdb --include-views creates models for database views."""
        with connection.cursor() as cursor:
            cursor.execute(
                'CREATE VIEW inspectdb_people_view AS '
                'SELECT id, name FROM inspectdb_people'
            )
        out = StringIO()
        view_model = 'class InspectdbPeopleView(models.Model):'
        view_managed = 'managed = False  # Created from a view.'
        try:
            call_command(
                'inspectdb',
                table_name_filter=inspectdb_views_only,
                stdout=out,
            )
            no_views_output = out.getvalue()
            self.assertNotIn(view_model, no_views_output)
            self.assertNotIn(view_managed, no_views_output)
            call_command(
                'inspectdb',
                table_name_filter=inspectdb_views_only,
                include_views=True,
                stdout=out,
            )
            with_views_output = out.getvalue()
            self.assertIn(view_model, with_views_output)
            self.assertIn(view_managed, with_views_output)
        finally:
            with connection.cursor() as cursor:
                cursor.execute('DROP VIEW inspectdb_people_view')
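
    # Materialized views are only introspectable on backends advertising
    # can_introspect_materialized_views (e.g. PostgreSQL), so this test is
    # feature-gated rather than vendor-gated.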
    @skipUnlessDBFeature('can_introspect_materialized_views')
    def test_include_materialized_views(self):
        """inspectdb --include-views creates models for materialized views."""
        with connection.cursor() as cursor:
            cursor.execute(
                'CREATE MATERIALIZED VIEW inspectdb_people_materialized AS '
                'SELECT id, name FROM inspectdb_people'
            )
        out = StringIO()
        view_model = 'class InspectdbPeopleMaterialized(models.Model):'
        view_managed = 'managed = False  # Created from a view.'
        try:
            call_command(
                'inspectdb',
                table_name_filter=inspectdb_views_only,
                stdout=out,
            )
            no_views_output = out.getvalue()
            self.assertNotIn(view_model, no_views_output)
            self.assertNotIn(view_managed, no_views_output)
            call_command(
                'inspectdb',
                table_name_filter=inspectdb_views_only,
                include_views=True,
                stdout=out,
            )
            with_views_output = out.getvalue()
            self.assertIn(view_model, with_views_output)
            self.assertIn(view_managed, with_views_output)
        finally:
            with connection.cursor() as cursor:
                cursor.execute('DROP MATERIALIZED VIEW inspectdb_people_materialized')
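
    # Partitioned tables are PostgreSQL-specific here; include_partitions=True
    # mirrors the `python manage.py inspectdb --include-partitions` flag on the
    # command line.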
    @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
    def test_include_partitions(self):
        """inspectdb --include-partitions creates models for partitions."""
        with connection.cursor() as cursor:
            cursor.execute('''\
                CREATE TABLE inspectdb_partition_parent (name text not null)
                PARTITION BY LIST (left(upper(name), 1))
            ''')
            cursor.execute('''\
                CREATE TABLE inspectdb_partition_child
                PARTITION OF inspectdb_partition_parent
                FOR VALUES IN ('A', 'B', 'C')
            ''')
        out = StringIO()
        partition_model_parent = 'class InspectdbPartitionParent(models.Model):'
        partition_model_child = 'class InspectdbPartitionChild(models.Model):'
        partition_managed = 'managed = False  # Created from a partition.'
        try:
            call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
            no_partitions_output = out.getvalue()
            self.assertIn(partition_model_parent, no_partitions_output)
            self.assertNotIn(partition_model_child, no_partitions_output)
            self.assertNotIn(partition_managed, no_partitions_output)
            call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_partitions=True, stdout=out)
            with_partitions_output = out.getvalue()
            self.assertIn(partition_model_parent, with_partitions_output)
            self.assertIn(partition_model_child, with_partitions_output)
            self.assertIn(partition_managed, with_partitions_output)
        finally:
            with connection.cursor() as cursor:
                cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_child')
                cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_parent')
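
    # file_fdw pointed at os.devnull yields a valid but empty foreign table,
    # which is enough for inspectdb to introspect its columns.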
    @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
    def test_foreign_data_wrapper(self):
        with connection.cursor() as cursor:
            cursor.execute('CREATE EXTENSION IF NOT EXISTS file_fdw')
            cursor.execute('CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw')
            cursor.execute('''\
                CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
                    petal_length real,
                    petal_width real,
                    sepal_length real,
                    sepal_width real
                ) SERVER inspectdb_server OPTIONS (
                    filename %s
                )
            ''', [os.devnull])
        out = StringIO()
        foreign_table_model = 'class InspectdbIrisForeignTable(models.Model):'
        foreign_table_managed = 'managed = False'
        try:
            call_command(
                'inspectdb',
                table_name_filter=inspectdb_tables_only,
                stdout=out,
            )
            output = out.getvalue()
            self.assertIn(foreign_table_model, output)
            self.assertIn(foreign_table_managed, output)
        finally:
            with connection.cursor() as cursor:
                cursor.execute('DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table')
                cursor.execute('DROP SERVER IF EXISTS inspectdb_server')
                cursor.execute('DROP EXTENSION IF EXISTS file_fdw')