2017-02-12 04:37:49 +08:00
|
|
|
"""Tests related to django.db.backends that haven't been organized."""
|
2010-02-24 23:29:25 +08:00
|
|
|
import datetime
|
2011-12-16 21:40:19 +08:00
|
|
|
import threading
|
2013-07-02 04:49:07 +08:00
|
|
|
import unittest
|
2014-06-07 20:09:27 +08:00
|
|
|
import warnings
|
2010-06-21 19:48:45 +08:00
|
|
|
|
|
|
|
from django.core.management.color import no_style
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db import (
|
|
|
|
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
|
|
|
|
reset_queries, transaction,
|
|
|
|
)
|
2015-01-13 04:20:40 +08:00
|
|
|
from django.db.backends.base.base import BaseDatabaseWrapper
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db.backends.signals import connection_created
|
2017-02-12 04:37:49 +08:00
|
|
|
from django.db.backends.utils import CursorWrapper
|
2014-01-09 12:31:34 +08:00
|
|
|
from django.db.models.sql.constants import CURSOR
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.test import (
|
2017-02-12 04:37:49 +08:00
|
|
|
TestCase, TransactionTestCase, override_settings, skipIfDBFeature,
|
|
|
|
skipUnlessDBFeature,
|
2015-01-28 20:35:27 +08:00
|
|
|
)
|
2009-01-17 06:23:58 +08:00
|
|
|
|
2016-12-09 00:00:14 +08:00
|
|
|
from .models import (
|
2017-02-12 04:37:49 +08:00
|
|
|
Article, Object, ObjectReference, Person, Post, RawData, Reporter,
|
2020-12-28 13:45:48 +08:00
|
|
|
ReporterProxy, SchoolClass, SQLKeywordsModel, Square,
|
2016-12-09 00:00:14 +08:00
|
|
|
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ,
|
|
|
|
)
|
2011-10-14 02:51:33 +08:00
|
|
|
|
2010-06-21 19:48:45 +08:00
|
|
|
|
2010-02-24 23:29:25 +08:00
|
|
|
class DateQuotingTest(TestCase):

    def test_django_date_trunc(self):
        """
        The custom ``django_date_trunc`` method works even against fields
        whose names clash with strings passed to it (e.g. 'year') (#12818).
        """
        last_update = datetime.datetime(2010, 2, 20)
        SchoolClass.objects.create(year=2009, last_updated=last_update)
        truncated = SchoolClass.objects.dates('last_updated', 'year')
        self.assertEqual(list(truncated), [datetime.date(2010, 1, 1)])

    def test_django_date_extract(self):
        """
        The custom ``django_date_extract`` method works even against fields
        whose names clash with strings passed to it (e.g. 'day') (#12818).
        """
        last_update = datetime.datetime(2010, 2, 20)
        SchoolClass.objects.create(year=2009, last_updated=last_update)
        matching = SchoolClass.objects.filter(last_updated__day=20)
        self.assertEqual(len(matching), 1)
|
|
|
|
|
2011-04-22 20:14:54 +08:00
|
|
|
|
2013-01-27 00:51:44 +08:00
|
|
|
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
    """Tests for ``DatabaseOperations.last_executed_query()``.

    DEBUG=True is forced so that queries are recorded in
    ``connection.queries`` for ``test_debug_sql``.
    """

    def test_last_executed_query_without_previous_query(self):
        """
        last_executed_query should not raise an exception even if no previous
        query has been run.
        """
        with connection.cursor() as cursor:
            connection.ops.last_executed_query(cursor, '', ())

    def test_debug_sql(self):
        # Run an ORM query, then check its SQL was recorded in
        # connection.queries (available because DEBUG=True).
        list(Reporter.objects.filter(first_name="test"))
        sql = connection.queries[-1]['sql'].lower()
        self.assertIn("select", sql)
        self.assertIn(Reporter._meta.db_table, sql)

    def test_query_encoding(self):
        """last_executed_query() returns a string."""
        # Mix bytes params and a non-ASCII column alias to stress encoding.
        data = RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
        sql, params = data.query.sql_with_params()
        with data.query.get_compiler('default').execute_sql(CURSOR) as cursor:
            last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
        self.assertIsInstance(last_sql, str)

    def test_last_executed_query(self):
        # last_executed_query() interpolates all parameters; in most cases it
        # is not equal to QuerySet.query.
        for qs in (
            Article.objects.filter(pk=1),
            Article.objects.filter(pk__in=(1, 2), reporter__pk=3),
            Article.objects.filter(
                pk=1,
                reporter__pk=9,
            ).exclude(reporter__pk__in=[2, 1]),
        ):
            sql, params = qs.query.sql_with_params()
            with qs.query.get_compiler(DEFAULT_DB_ALIAS).execute_sql(CURSOR) as cursor:
                self.assertEqual(
                    cursor.db.ops.last_executed_query(cursor, sql, params),
                    str(qs.query),
                )

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_last_executed_query_dict(self):
        # Dict (pyformat) params must be interpolated just like sequences.
        square_opts = Square._meta
        sql = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (
            connection.introspection.identifier_converter(square_opts.db_table),
            connection.ops.quote_name(square_opts.get_field('root').column),
            connection.ops.quote_name(square_opts.get_field('square').column),
        )
        with connection.cursor() as cursor:
            params = {'root': 2, 'square': 4}
            cursor.execute(sql, params)
            self.assertEqual(
                cursor.db.ops.last_executed_query(cursor, sql, params),
                sql % params,
            )
|
|
|
|
|
2013-03-08 03:24:51 +08:00
|
|
|
|
2010-03-23 21:51:11 +08:00
|
|
|
class ParameterHandlingTest(TestCase):
    """The backend rejects executemany() calls with a wrong parameter count."""

    def test_bad_parameter_count(self):
        "An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
        table = connection.introspection.identifier_converter('backends_square')
        root_col = connection.ops.quote_name('root')
        square_col = connection.ops.quote_name('square')
        insert_sql = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (table, root_col, square_col)
        with connection.cursor() as cursor:
            # One parameter too many.
            with self.assertRaises(Exception):
                cursor.executemany(insert_sql, [(1, 2, 3)])
            # One parameter too few.
            with self.assertRaises(Exception):
                cursor.executemany(insert_sql, [(1,)])
|
2013-03-08 03:24:51 +08:00
|
|
|
|
2010-03-23 21:51:11 +08:00
|
|
|
|
2014-10-19 05:01:13 +08:00
|
|
|
class LongNameTest(TransactionTestCase):
    """Long primary keys and model names can result in a sequence name
    that exceeds the database limits, which will result in truncation
    on certain databases (e.g., Postgres). The backend needs to use
    the correct sequence name in last_insert_id and other places, so
    check it is. Refs #8901.
    """
    # Restrict flushing to this app's tables only (TransactionTestCase).
    available_apps = ['backends']

    def test_sequence_name_length_limits_create(self):
        """Test creation of model with long name and long pk name doesn't error. Ref #8901"""
        VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()

    def test_sequence_name_length_limits_m2m(self):
        """
        An m2m save of a model with a long name and a long m2m field name
        doesn't error (#8901).
        """
        obj = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
        rel_obj = Person.objects.create(first_name='Django', last_name='Reinhardt')
        obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)

    def test_sequence_name_length_limits_flush(self):
        """
        Sequence resetting as part of a flush with model with long name and
        long pk name doesn't error (#8901).
        """
        # A full flush is expensive to the full test, so we dig into the
        # internals to generate the likely offending SQL and run it manually

        # Some convenience aliases
        VLM = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
        VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
        tables = [
            VLM._meta.db_table,
            VLM_m2m._meta.db_table,
        ]
        sql_list = connection.ops.sql_flush(no_style(), tables, reset_sequences=True)
        connection.ops.execute_sql_flush(sql_list)
|
2010-06-21 19:48:45 +08:00
|
|
|
|
2013-03-08 03:24:51 +08:00
|
|
|
|
2010-07-30 10:42:36 +08:00
|
|
|
class SequenceResetTest(TestCase):

    def test_generic_relation(self):
        "Sequence names are correct when resetting generic relations (Ref #13941)"
        # Insert a row with an explicit, manually chosen primary key.
        Post.objects.create(id=10, name='1st post', text='hello world')

        # Ask the backend for its sequence-reset SQL and run it by hand.
        reset_statements = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [Post])
        with connection.cursor() as cursor:
            for statement in reset_statements:
                cursor.execute(statement)

        # A freshly created object must now get a PK above the manual one.
        new_post = Post.objects.create(name='New post', text='goodbye world')
        self.assertGreater(new_post.pk, 10)
|
2010-07-30 10:42:36 +08:00
|
|
|
|
2013-03-08 03:24:51 +08:00
|
|
|
|
2013-03-12 02:42:23 +08:00
|
|
|
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
    """The ``connection_created`` signal fires when a connection is opened."""

    available_apps = []

    # Unfortunately with sqlite3 the in-memory test database cannot be closed,
    # and so it cannot be re-opened during testing.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_signal(self):
        data = {}

        def receiver(sender, connection, **kwargs):
            # Record the connection the signal reported so we can compare it.
            data["connection"] = connection

        connection_created.connect(receiver)
        # Force a reconnect so the signal fires on the next cursor() call.
        connection.close()
        with connection.cursor():
            pass
        self.assertIs(data["connection"].connection, connection.connection)

        # After disconnecting the receiver, reopening must NOT fire it.
        connection_created.disconnect(receiver)
        data.clear()
        with connection.cursor():
            pass
        self.assertEqual(data, {})
|
2010-09-27 23:12:48 +08:00
|
|
|
|
|
|
|
|
2011-05-10 20:20:47 +08:00
|
|
|
class EscapingChecks(TestCase):
    """
    All tests in this test case are also run with settings.DEBUG=True in
    EscapingChecksDebug test case, to also test CursorDebugWrapper.
    """

    bare_select_suffix = connection.features.bare_select_suffix

    def test_paramless_no_escaping(self):
        # Without params, a literal '%s' must pass through unescaped.
        statement = "SELECT '%s'" + self.bare_select_suffix
        with connection.cursor() as cursor:
            cursor.execute(statement)
            first_row = cursor.fetchall()[0]
            self.assertEqual(first_row[0], '%s')

    def test_parameter_escaping(self):
        # '%%' is a literal percent sign; %s is a real placeholder.
        statement = "SELECT '%%', %s" + self.bare_select_suffix
        with connection.cursor() as cursor:
            cursor.execute(statement, ('%d',))
            first_row = cursor.fetchall()[0]
            self.assertEqual(first_row, ('%', '%d'))
|
2011-05-10 20:20:47 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2013-03-23 23:09:56 +08:00
|
|
|
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
    """Re-run all EscapingChecks tests with DEBUG=True (CursorDebugWrapper)."""
|
|
|
|
|
2011-05-10 20:20:47 +08:00
|
|
|
|
2014-10-19 05:01:13 +08:00
|
|
|
class BackendTestCase(TransactionTestCase):
    """Miscellaneous low-level cursor and connection behavior tests:
    execute()/executemany() parameter styles, Unicode handling,
    ``connection.queries`` logging, and cursor context managers.
    """

    available_apps = ['backends']

    def create_squares_with_executemany(self, args):
        # Convenience wrapper: format paramstyle, multiple rows.
        self.create_squares(args, 'format', True)

    def create_squares(self, args, paramstyle, multiple):
        """Insert Square rows via raw SQL with the given paramstyle.

        args: sequence of param tuples/dicts (or a single one when
        ``multiple`` is False).
        paramstyle: 'format' (%s) or 'pyformat' (%(name)s).
        multiple: use executemany() instead of execute().
        """
        opts = Square._meta
        tbl = connection.introspection.identifier_converter(opts.db_table)
        f1 = connection.ops.quote_name(opts.get_field('root').column)
        f2 = connection.ops.quote_name(opts.get_field('square').column)
        if paramstyle == 'format':
            query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
        elif paramstyle == 'pyformat':
            query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
        else:
            raise ValueError("unsupported paramstyle in test")
        with connection.cursor() as cursor:
            if multiple:
                cursor.executemany(query, args)
            else:
                cursor.execute(query, args)

    def test_cursor_executemany(self):
        # Test cursor.executemany #4896
        args = [(i, i ** 2) for i in range(-5, 6)]
        self.create_squares_with_executemany(args)
        self.assertEqual(Square.objects.count(), 11)
        for i in range(-5, 6):
            square = Square.objects.get(root=i)
            self.assertEqual(square.square, i ** 2)

    def test_cursor_executemany_with_empty_params_list(self):
        # Test executemany with params=[] does nothing #4765
        args = []
        self.create_squares_with_executemany(args)
        self.assertEqual(Square.objects.count(), 0)

    def test_cursor_executemany_with_iterator(self):
        # Test executemany accepts iterators #10320
        args = ((i, i ** 2) for i in range(-3, 2))
        self.create_squares_with_executemany(args)
        self.assertEqual(Square.objects.count(), 5)

        args = ((i, i ** 2) for i in range(3, 7))
        with override_settings(DEBUG=True):
            # same test for DebugCursorWrapper
            self.create_squares_with_executemany(args)
        self.assertEqual(Square.objects.count(), 9)

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_cursor_execute_with_pyformat(self):
        # Support pyformat style passing of parameters #10070
        args = {'root': 3, 'square': 9}
        self.create_squares(args, 'pyformat', multiple=False)
        self.assertEqual(Square.objects.count(), 1)

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_cursor_executemany_with_pyformat(self):
        # Support pyformat style passing of parameters #10070
        args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
        self.create_squares(args, 'pyformat', multiple=True)
        self.assertEqual(Square.objects.count(), 11)
        for i in range(-5, 6):
            square = Square.objects.get(root=i)
            self.assertEqual(square.square, i ** 2)

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_cursor_executemany_with_pyformat_iterator(self):
        # executemany() must also accept an iterator of dict params.
        args = ({'root': i, 'square': i ** 2} for i in range(-3, 2))
        self.create_squares(args, 'pyformat', multiple=True)
        self.assertEqual(Square.objects.count(), 5)

        args = ({'root': i, 'square': i ** 2} for i in range(3, 7))
        with override_settings(DEBUG=True):
            # same test for DebugCursorWrapper
            self.create_squares(args, 'pyformat', multiple=True)
        self.assertEqual(Square.objects.count(), 9)

    def test_unicode_fetches(self):
        # fetchone, fetchmany, fetchall return strings as Unicode objects.
        qn = connection.ops.quote_name
        Person(first_name="John", last_name="Doe").save()
        Person(first_name="Jane", last_name="Doe").save()
        Person(first_name="Mary", last_name="Agnelline").save()
        Person(first_name="Peter", last_name="Parker").save()
        Person(first_name="Clark", last_name="Kent").save()
        opts2 = Person._meta
        f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
        with connection.cursor() as cursor:
            cursor.execute(
                'SELECT %s, %s FROM %s ORDER BY %s' % (
                    qn(f3.column),
                    qn(f4.column),
                    connection.introspection.identifier_converter(opts2.db_table),
                    qn(f3.column),
                )
            )
            # Rows come back ordered by first_name.
            self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
            self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
            self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])

    def test_unicode_password(self):
        # Connecting with a non-ASCII password may fail with a DatabaseError,
        # but must not raise anything else (e.g. an encoding error).
        old_password = connection.settings_dict['PASSWORD']
        connection.settings_dict['PASSWORD'] = "françois"
        try:
            with connection.cursor():
                pass
        except DatabaseError:
            # As password is probably wrong, a database exception is expected
            pass
        except Exception as e:
            self.fail('Unexpected error raised with Unicode password: %s' % e)
        finally:
            connection.settings_dict['PASSWORD'] = old_password

    def test_database_operations_helper_class(self):
        # Ticket #13630
        self.assertTrue(hasattr(connection, 'ops'))
        self.assertTrue(hasattr(connection.ops, 'connection'))
        self.assertEqual(connection, connection.ops.connection)

    def test_database_operations_init(self):
        """
        DatabaseOperations initialization doesn't query the database.
        See #17656.
        """
        with self.assertNumQueries(0):
            connection.ops.__class__(connection)

    def test_cached_db_features(self):
        # These cached properties must resolve to an actual boolean.
        self.assertIn(connection.features.supports_transactions, (True, False))
        self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))

    def test_duplicate_table_error(self):
        """ Creating an existing table returns a DatabaseError """
        query = 'CREATE TABLE %s (id INTEGER);' % Article._meta.db_table
        with connection.cursor() as cursor:
            with self.assertRaises(DatabaseError):
                cursor.execute(query)

    def test_cursor_contextmanager(self):
        """
        Cursors can be used as a context manager
        """
        with connection.cursor() as cursor:
            self.assertIsInstance(cursor, CursorWrapper)
        # Both InterfaceError and ProgrammingError seem to be used when
        # accessing closed cursor (psycopg2 has InterfaceError, rest seem
        # to use ProgrammingError).
        with self.assertRaises(connection.features.closed_cursor_error_class):
            # cursor should be closed, so no queries should be possible.
            cursor.execute("SELECT 1" + connection.features.bare_select_suffix)

    @unittest.skipUnless(connection.vendor == 'postgresql',
                         "Psycopg2 specific cursor.closed attribute needed")
    def test_cursor_contextmanager_closing(self):
        # There isn't a generic way to test that cursors are closed, but
        # psycopg2 offers us a way to check that by closed attribute.
        # So, run only on psycopg2 for that reason.
        with connection.cursor() as cursor:
            self.assertIsInstance(cursor, CursorWrapper)
        self.assertTrue(cursor.closed)

    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_is_usable_after_database_disconnects(self):
        """
        is_usable() doesn't crash when the database disconnects (#21553).
        """
        # Open a connection to the database.
        with connection.cursor():
            pass
        # Emulate a connection close by the database.
        connection._close()
        # Even then is_usable() should not raise an exception.
        try:
            self.assertFalse(connection.is_usable())
        finally:
            # Clean up the mess created by connection._close(). Since the
            # connection is already closed, this crashes on some backends.
            try:
                connection.close()
            except Exception:
                pass

    @override_settings(DEBUG=True)
    def test_queries(self):
        """
        Test the documented API of connection.queries.
        """
        sql = 'SELECT 1' + connection.features.bare_select_suffix
        with connection.cursor() as cursor:
            reset_queries()
            cursor.execute(sql)
        self.assertEqual(1, len(connection.queries))
        self.assertIsInstance(connection.queries, list)
        self.assertIsInstance(connection.queries[0], dict)
        self.assertEqual(list(connection.queries[0]), ['sql', 'time'])
        self.assertEqual(connection.queries[0]['sql'], sql)

        reset_queries()
        self.assertEqual(0, len(connection.queries))

        # executemany() is logged as a single "N times" entry.
        sql = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
            connection.introspection.identifier_converter('backends_square'),
            connection.ops.quote_name('root'),
            connection.ops.quote_name('square'),
        ))
        with connection.cursor() as cursor:
            cursor.executemany(sql, [(1, 1), (2, 4)])
        self.assertEqual(1, len(connection.queries))
        self.assertIsInstance(connection.queries, list)
        self.assertIsInstance(connection.queries[0], dict)
        self.assertEqual(list(connection.queries[0]), ['sql', 'time'])
        self.assertEqual(connection.queries[0]['sql'], '2 times: %s' % sql)

    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    @override_settings(DEBUG=True)
    def test_queries_limit(self):
        """
        The backend doesn't store an unlimited number of queries (#12581).
        """
        old_queries_limit = BaseDatabaseWrapper.queries_limit
        BaseDatabaseWrapper.queries_limit = 3
        new_connection = connection.copy()

        # Initialize the connection and clear initialization statements.
        with new_connection.cursor():
            pass
        new_connection.queries_log.clear()

        try:
            with new_connection.cursor() as cursor:
                cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
                cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)

            # Two queries, under the limit of three: no warning expected.
            with warnings.catch_warnings(record=True) as w:
                self.assertEqual(2, len(new_connection.queries))
                self.assertEqual(0, len(w))

            with new_connection.cursor() as cursor:
                cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
                cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)

            # Four queries exceed the limit of three: warn and truncate.
            msg = "Limit for query logging exceeded, only the last 3 queries will be returned."
            with self.assertWarnsMessage(UserWarning, msg):
                self.assertEqual(3, len(new_connection.queries))

        finally:
            BaseDatabaseWrapper.queries_limit = old_queries_limit
            new_connection.close()

    def test_timezone_none_use_tz_false(self):
        # init_connection_state() must tolerate TIME_ZONE=None with USE_TZ off.
        connection.ensure_connection()
        with self.settings(TIME_ZONE=None, USE_TZ=False):
            connection.init_connection_state()
|
|
|
|
|
2014-04-10 04:41:33 +08:00
|
|
|
|
2018-12-23 07:14:47 +08:00
|
|
|
# These tests aren't conditional because it would require differentiating
# between MySQL+InnoDB and MySQL+MYISAM (something we currently can't do).
class FkConstraintsTests(TransactionTestCase):
    """Foreign-key constraint enforcement and the constraint-checking
    enable/disable APIs on the connection.
    """

    available_apps = ['backends']

    def setUp(self):
        # Create a Reporter.
        self.r = Reporter.objects.create(first_name='John', last_name='Smith')

    def test_integrity_checks_on_creation(self):
        """
        Try to create a model instance that violates a FK constraint. If it
        fails it should fail with IntegrityError.
        """
        # reporter_id=30 points at a nonexistent Reporter row.
        a1 = Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
        try:
            a1.save()
        except IntegrityError:
            pass
        else:
            self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks we make sure
        # constraints are also enforced for proxy Refs #17519
        a2 = Article(
            headline='This is another test', reporter=self.r,
            pub_date=datetime.datetime(2012, 8, 3),
            reporter_proxy_id=30,
        )
        with self.assertRaises(IntegrityError):
            a2.save()

    def test_integrity_checks_on_update(self):
        """
        Try to update a model instance introducing a FK constraint violation.
        If it fails it should fail with IntegrityError.
        """
        # Create an Article.
        Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
        # Retrieve it from the DB
        a1 = Article.objects.get(headline="Test article")
        a1.reporter_id = 30
        try:
            a1.save()
        except IntegrityError:
            pass
        else:
            self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks we make sure
        # constraints are also enforced for proxy Refs #17519
        # Create another article
        r_proxy = ReporterProxy.objects.get(pk=self.r.pk)
        Article.objects.create(
            headline='Another article',
            pub_date=datetime.datetime(1988, 5, 15),
            reporter=self.r, reporter_proxy=r_proxy,
        )
        # Retrieve the second article from the DB
        a2 = Article.objects.get(headline='Another article')
        a2.reporter_proxy_id = 30
        with self.assertRaises(IntegrityError):
            a2.save()

    def test_disable_constraint_checks_manually(self):
        """
        When constraint checks are disabled, should be able to write bad data
        without IntegrityErrors.
        """
        with transaction.atomic():
            # Create an Article.
            Article.objects.create(
                headline="Test article",
                pub_date=datetime.datetime(2010, 9, 4),
                reporter=self.r,
            )
            # Retrieve it from the DB
            a = Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                connection.disable_constraint_checking()
                a.save()
                connection.enable_constraint_checking()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            # Roll back so the bad row never persists past this test.
            transaction.set_rollback(True)

    def test_disable_constraint_checks_context_manager(self):
        """
        When constraint checks are disabled (using context manager), should be
        able to write bad data without IntegrityErrors.
        """
        with transaction.atomic():
            # Create an Article.
            Article.objects.create(
                headline="Test article",
                pub_date=datetime.datetime(2010, 9, 4),
                reporter=self.r,
            )
            # Retrieve it from the DB
            a = Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                with connection.constraint_checks_disabled():
                    a.save()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            # Roll back so the bad row never persists past this test.
            transaction.set_rollback(True)

    def test_check_constraints(self):
        """
        Constraint checks should raise an IntegrityError when bad data is in the DB.
        """
        with transaction.atomic():
            # Create an Article.
            Article.objects.create(
                headline="Test article",
                pub_date=datetime.datetime(2010, 9, 4),
                reporter=self.r,
            )
            # Retrieve it from the DB
            a = Article.objects.get(headline="Test article")
            a.reporter_id = 30
            with connection.constraint_checks_disabled():
                a.save()
            with self.assertRaises(IntegrityError):
                connection.check_constraints()
            # Roll back so the bad row never persists past this test.
            transaction.set_rollback(True)
|
2011-12-16 21:40:19 +08:00
|
|
|
|
2020-12-28 13:45:48 +08:00
|
|
|
def test_check_constraints_sql_keywords(self):
|
|
|
|
with transaction.atomic():
|
|
|
|
obj = SQLKeywordsModel.objects.create(reporter=self.r)
|
|
|
|
obj.refresh_from_db()
|
|
|
|
obj.reporter_id = 30
|
|
|
|
with connection.constraint_checks_disabled():
|
|
|
|
obj.save()
|
|
|
|
with self.assertRaises(IntegrityError):
|
|
|
|
connection.check_constraints(table_names=['order'])
|
|
|
|
transaction.set_rollback(True)
|
|
|
|
|
2011-12-16 21:40:19 +08:00
|
|
|
|
2014-10-19 05:01:13 +08:00
|
|
|
class ThreadTests(TransactionTestCase):
    """
    Behavior of database connections when used from multiple threads (#17258).
    """

    available_apps = ['backends']

    def test_default_connection_thread_local(self):
        """
        The default connection (i.e. django.db.connection) is different for
        each thread (#17258).
        """
        # Map connections by id because connections with identical aliases
        # have the same hash.
        connections_dict = {}
        # Opening (and immediately closing) a cursor forces the main thread's
        # connection to actually connect.
        with connection.cursor():
            pass
        connections_dict[id(connection)] = connection

        def runner():
            # Passing django.db.connection between threads doesn't work while
            # connections[DEFAULT_DB_ALIAS] does.
            from django.db import connections
            # NOTE: this local deliberately shadows the module-level
            # `connection` within this thread.
            connection = connections[DEFAULT_DB_ALIAS]
            # Allow thread sharing so the connection can be closed by the
            # main thread.
            connection.inc_thread_sharing()
            with connection.cursor():
                pass
            connections_dict[id(connection)] = connection
        try:
            for x in range(2):
                t = threading.Thread(target=runner)
                t.start()
                t.join()
            # Each created connection got different inner connection.
            # 3 = main thread + the 2 runner threads.
            self.assertEqual(len({conn.connection for conn in connections_dict.values()}), 3)
        finally:
            # Finish by closing the connections opened by the other threads
            # (the connection opened in the main thread will automatically be
            # closed on teardown).
            for conn in connections_dict.values():
                if conn is not connection and conn.allow_thread_sharing:
                    conn.close()
                    conn.dec_thread_sharing()

    def test_connections_thread_local(self):
        """
        The connections are different for each thread (#17258).
        """
        # Map connections by id because connections with identical aliases
        # have the same hash.
        connections_dict = {}
        for conn in connections.all():
            connections_dict[id(conn)] = conn

        def runner():
            from django.db import connections
            for conn in connections.all():
                # Allow thread sharing so the connection can be closed by the
                # main thread.
                conn.inc_thread_sharing()
                connections_dict[id(conn)] = conn
        try:
            num_new_threads = 2
            for x in range(num_new_threads):
                t = threading.Thread(target=runner)
                t.start()
                t.join()
            # One distinct connection object per alias per thread
            # (main thread + each runner thread).
            self.assertEqual(
                len(connections_dict),
                len(connections.all()) * (num_new_threads + 1),
            )
        finally:
            # Finish by closing the connections opened by the other threads
            # (the connection opened in the main thread will automatically be
            # closed on teardown).
            for conn in connections_dict.values():
                if conn is not connection and conn.allow_thread_sharing:
                    conn.close()
                    conn.dec_thread_sharing()

    def test_pass_connection_between_threads(self):
        """
        A connection can be passed from one thread to the other (#17258).
        """
        Person.objects.create(first_name="John", last_name="Doe")

        def do_thread():
            # Run a query in a new thread using the main thread's connection;
            # any exception is collected in `exceptions` for inspection.
            def runner(main_thread_connection):
                from django.db import connections
                connections['default'] = main_thread_connection
                try:
                    Person.objects.get(first_name="John", last_name="Doe")
                except Exception as e:
                    exceptions.append(e)
            t = threading.Thread(target=runner, args=[connections['default']])
            t.start()
            t.join()

        # Without touching thread sharing, which should be False by default.
        exceptions = []
        do_thread()
        # Forbidden!
        self.assertIsInstance(exceptions[0], DatabaseError)
        connections['default'].close()

        # After calling inc_thread_sharing() on the connection.
        connections['default'].inc_thread_sharing()
        try:
            exceptions = []
            do_thread()
            # All good
            self.assertEqual(exceptions, [])
        finally:
            connections['default'].dec_thread_sharing()

    def test_closing_non_shared_connections(self):
        """
        A connection that is not explicitly shareable cannot be closed by
        another thread (#17258).
        """
        # First, without explicitly enabling the connection for sharing.
        exceptions = set()

        def runner1():
            # runner2 (a third thread) tries to close runner1's connection.
            def runner2(other_thread_connection):
                try:
                    other_thread_connection.close()
                except DatabaseError as e:
                    exceptions.add(e)
            t2 = threading.Thread(target=runner2, args=[connections['default']])
            t2.start()
            t2.join()
        t1 = threading.Thread(target=runner1)
        t1.start()
        t1.join()
        # The exception was raised
        self.assertEqual(len(exceptions), 1)

        # Then, with explicitly enabling the connection for sharing.
        exceptions = set()

        def runner1():
            def runner2(other_thread_connection):
                try:
                    other_thread_connection.close()
                except DatabaseError as e:
                    exceptions.add(e)
            # Enable thread sharing
            connections['default'].inc_thread_sharing()
            try:
                t2 = threading.Thread(target=runner2, args=[connections['default']])
                t2.start()
                t2.join()
            finally:
                connections['default'].dec_thread_sharing()
        t1 = threading.Thread(target=runner1)
        t1.start()
        t1.join()
        # No exception was raised
        self.assertEqual(len(exceptions), 0)

    def test_thread_sharing_count(self):
        """
        Thread sharing is reference-counted: allow_thread_sharing stays True
        until every inc_thread_sharing() is matched by a dec_thread_sharing(),
        and decrementing below zero raises RuntimeError.
        """
        self.assertIs(connection.allow_thread_sharing, False)
        connection.inc_thread_sharing()
        self.assertIs(connection.allow_thread_sharing, True)
        connection.inc_thread_sharing()
        self.assertIs(connection.allow_thread_sharing, True)
        connection.dec_thread_sharing()
        self.assertIs(connection.allow_thread_sharing, True)
        connection.dec_thread_sharing()
        self.assertIs(connection.allow_thread_sharing, False)
        msg = 'Cannot decrement the thread sharing count below zero.'
        with self.assertRaisesMessage(RuntimeError, msg):
            connection.dec_thread_sharing()
2012-04-25 02:03:14 +08:00
|
|
|
class MySQLPKZeroTests(TestCase):
    """
    Zero as id for AutoField should raise exception in MySQL, because MySQL
    does not allow zero for autoincrement primary key if the
    NO_AUTO_VALUE_ON_ZERO SQL mode is not enabled.
    """
    @skipIfDBFeature('allows_auto_pk_0')
    def test_zero_as_autoval(self):
        # Creating a row with an explicit zero pk must be rejected on
        # backends where zero isn't a usable autoincrement value.
        self.assertRaises(ValueError, Square.objects.create, id=0, root=0, square=1)
|
2014-10-19 05:01:13 +08:00
|
|
|
class DBConstraintTestCase(TestCase):
    """
    FK fields without a database-level constraint: rows referencing
    nonexistent targets can be created, and following such a reference raises
    DoesNotExist.
    """

    def test_can_reference_existent(self):
        target = Object.objects.create()
        reference = ObjectReference.objects.create(obj=target)
        self.assertEqual(reference.obj, target)

        # The reference also resolves correctly after a round-trip to the DB.
        reference = ObjectReference.objects.get(obj=target)
        self.assertEqual(reference.obj, target)

    def test_can_reference_non_existent(self):
        self.assertFalse(Object.objects.filter(id=12345).exists())
        # A dangling FK value can be written and read back.
        reference = ObjectReference.objects.create(obj_id=12345)
        refetched = ObjectReference.objects.get(obj_id=12345)
        self.assertEqual(reference, refetched)

        # Following the dangling reference raises DoesNotExist.
        with self.assertRaises(Object.DoesNotExist):
            reference.obj

    def test_many_to_many(self):
        first = Object.objects.create()
        first.related_objects.create()
        self.assertEqual(Object.objects.count(), 2)
        self.assertEqual(first.related_objects.count(), 1)

        # Insert a dangling M2M row directly via the intermediary model; it
        # is stored but doesn't surface through the related manager.
        through = Object._meta.get_field("related_objects").remote_field.through
        through.objects.create(from_object_id=first.id, to_object_id=12345)
        self.assertEqual(first.related_objects.count(), 1)
        self.assertEqual(through.objects.count(), 2)