Fixed #12581 -- Prevented unlimited memory consumption when running background tasks with DEBUG=True. Thanks Rob, Alex, Baptiste, and others.
parent e2112edd9a · commit cfcca7ccce
@@ -52,7 +52,7 @@ connection = DefaultConnectionProxy()
 
 # Register an event to reset saved queries when a Django request is started.
 def reset_queries(**kwargs):
     for conn in connections.all():
-        conn.queries = []
+        conn.queries_log.clear()
 signals.request_started.connect(reset_queries)
 
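For reference, the fix swaps rebinding (``conn.queries = []``) for emptying the log in place. Since ``queries`` becomes a read-only property below, clearing the underlying deque is the only way to reset the log; a minimal standalone sketch of the difference:

    from collections import deque

    log = deque(maxlen=3)
    log.extend(['q1', 'q2', 'q3'])

    # Rebinding the name would create a new object and leave other references
    # stale; clear() empties the one deque that cursors keep appending to.
    log.clear()
    assert len(log) == 0
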
@@ -1,3 +1,4 @@
+from collections import deque
 import datetime
 import time
 import warnings
 
@@ -30,17 +31,21 @@ class BaseDatabaseWrapper(object):
     ops = None
     vendor = 'unknown'
 
+    queries_limit = 9000
+
     def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS,
                  allow_thread_sharing=False):
         # Connection related attributes.
         # The underlying database connection.
         self.connection = None
-        self.queries = []
         # `settings_dict` should be a dictionary containing keys such as
         # NAME, USER, etc. It's called `settings_dict` instead of `settings`
         # to disambiguate it from Django settings modules.
         self.settings_dict = settings_dict
         self.alias = alias
+        # Query logging in debug mode.
+        self.use_debug_cursor = None
+        self.queries_log = deque(maxlen=self.queries_limit)
 
         # Transaction related attributes.
         # Tracks if the connection is in autocommit mode. Per PEP 249, by
 
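The bounded log leans on the standard-library contract of ``collections.deque``: with ``maxlen`` set, appending to a full deque silently discards entries from the opposite end, so memory stays bounded no matter how many queries run. A small illustration of that behavior:

    from collections import deque

    log = deque(maxlen=3)
    for i in range(5):
        log.append(i)

    # Only the three most recent entries survive; the oldest were dropped.
    assert list(log) == [2, 3, 4]
    assert log.maxlen == 3
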
@@ -79,6 +84,14 @@ class BaseDatabaseWrapper(object):
     def __hash__(self):
         return hash(self.alias)
 
+    @property
+    def queries(self):
+        if len(self.queries_log) == self.queries_log.maxlen:
+            warnings.warn(
+                "Limit for query logging exceeded, only the last {} queries "
+                "will be returned.".format(self.queries_log.maxlen))
+        return list(self.queries_log)
+
     ##### Backend-specific methods for creating connections and cursors #####
 
     def get_connection_params(self):
 
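Because the property defines no setter, ``connection.queries`` is now read-only and returns a fresh list copy on every access; a short sketch of the resulting behavior (assuming a configured connection):

    from django.db import connection

    queries = connection.queries    # a copy, built as list(self.queries_log)
    queries.append('junk')          # mutating the copy does not touch the log

    try:
        connection.queries = []     # property has no setter
    except AttributeError:
        pass                        # use connection.queries_log.clear() instead
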
@@ -429,7 +442,7 @@ class BaseDatabaseWrapper(object):
 
     def make_debug_cursor(self, cursor):
         """
-        Creates a cursor that logs all queries in self.queries.
+        Creates a cursor that logs all queries in self.queries_log.
         """
         return utils.CursorDebugWrapper(cursor, self)
 
@@ -80,7 +80,7 @@ class CursorDebugWrapper(CursorWrapper):
             stop = time()
             duration = stop - start
             sql = self.db.ops.last_executed_query(self.cursor, sql, params)
-            self.db.queries.append({
+            self.db.queries_log.append({
                 'sql': sql,
                 'time': "%.3f" % duration,
             })
 
@@ -99,7 +99,7 @@ class CursorDebugWrapper(CursorWrapper):
                 times = len(param_list)
             except TypeError:  # param_list could be an iterator
                 times = '?'
-            self.db.queries.append({
+            self.db.queries_log.append({
                 'sql': '%s times: %s' % (times, sql),
                 'time': "%.3f" % duration,
             })
 
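For illustration, each logged entry is a plain two-key dict, so existing code that reads ``connection.queries`` keeps working unchanged; an entry looks roughly like:

    {
        'sql': "SELECT ...",   # as reconstructed by last_executed_query()
        'time': "0.002",       # wall-clock duration, formatted with "%.3f"
    }
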
@@ -509,7 +509,7 @@ class CaptureQueriesContext(object):
     def __enter__(self):
         self.use_debug_cursor = self.connection.use_debug_cursor
         self.connection.use_debug_cursor = True
-        self.initial_queries = len(self.connection.queries)
+        self.initial_queries = len(self.connection.queries_log)
         self.final_queries = None
         request_started.disconnect(reset_queries)
         return self
 
@@ -519,7 +519,7 @@ class CaptureQueriesContext(object):
         request_started.connect(reset_queries)
         if exc_type is not None:
             return
-        self.final_queries = len(self.connection.queries)
+        self.final_queries = len(self.connection.queries_log)
 
 
 class IgnoreDeprecationWarningsMixin(object):
 
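Counting against ``queries_log`` directly keeps the context manager immune to the new warning in the ``queries`` property. ``CaptureQueriesContext`` (the helper behind ``assertNumQueries``) is also usable on its own; a minimal usage sketch, assuming a configured default connection:

    from django.db import connection
    from django.test.utils import CaptureQueriesContext

    with CaptureQueriesContext(connection) as ctx:
        with connection.cursor() as cursor:
            cursor.execute("SELECT 1")

    # captured_queries holds the {'sql': ..., 'time': ...} dicts for the block.
    print(len(ctx.captured_queries))
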
@@ -32,6 +32,12 @@ same interface on each member of the ``connections`` dictionary::
 
     >>> from django.db import connections
     >>> connections['my_db_alias'].queries
 
+If you need to clear the query list manually at any point in your functions,
+just call ``reset_queries()``, like this::
+
+    from django.db import reset_queries
+    reset_queries()
+
 Can I use Django with a pre-existing database?
 ----------------------------------------------
 
@@ -76,22 +82,3 @@ type, create an initial data file and put something like this in it::
 As explained in the :ref:`SQL initial data file <initial-sql>` documentation,
 this SQL file can contain arbitrary SQL, so you can make any sorts of changes
 you need to make.
-
-Why is Django leaking memory?
------------------------------
-
-Django isn't known to leak memory. If you find your Django processes are
-allocating more and more memory, with no sign of releasing it, check to make
-sure your :setting:`DEBUG` setting is set to ``False``. If :setting:`DEBUG`
-is ``True``, then Django saves a copy of every SQL statement it has executed.
-
-(The queries are saved in ``django.db.connection.queries``. See
-:ref:`faq-see-raw-sql-queries`.)
-
-To fix the problem, set :setting:`DEBUG` to ``False``.
-
-If you need to clear the query list manually at any point in your functions,
-just call ``reset_queries()``, like this::
-
-    from django import db
-    db.reset_queries()
 
|
@ -170,7 +170,8 @@ Management Commands
|
|||
Models
|
||||
^^^^^^
|
||||
|
||||
* ...
|
||||
* Django now logs at most 9000 queries in ``connections.queries``, in order
|
||||
to prevent excessive memory usage in long-running processes in debug mode.
|
||||
|
||||
Signals
|
||||
^^^^^^^
|
||||
|
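The cap is a class attribute rather than a setting. As an unofficial, hedged sketch (this mirrors what the new regression test below does, but it is not a documented API), a project needing a deeper log in debug mode could adjust it before connections are created:

    from django.db.backends import BaseDatabaseWrapper

    # Assumption: this must run before any connection object is instantiated,
    # since the deque's maxlen is fixed in BaseDatabaseWrapper.__init__().
    BaseDatabaseWrapper.queries_limit = 20000
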
@@ -263,6 +264,8 @@ Now, an error will be raised to prevent data loss::
 Miscellaneous
 ~~~~~~~~~~~~~
 
+* ``connections.queries`` is now a read-only attribute.
+
 * ``URLField.to_python`` no longer adds a trailing slash to pathless URLs.
 
 * ``django.contrib.gis`` dropped support for GEOS 3.1 and GDAL 1.6.
 
@@ -8,11 +8,13 @@ from decimal import Decimal
 import re
 import threading
 import unittest
+import warnings
 
 from django.conf import settings
 from django.core.management.color import no_style
 from django.db import (connection, connections, DEFAULT_DB_ALIAS,
-    DatabaseError, IntegrityError, transaction)
+    DatabaseError, IntegrityError, reset_queries, transaction)
+from django.db.backends import BaseDatabaseWrapper
 from django.db.backends.signals import connection_created
 from django.db.backends.postgresql_psycopg2 import version as pg_version
 from django.db.backends.utils import format_number, CursorWrapper
 
@@ -630,7 +632,7 @@ class BackendTestCase(TestCase):
         # to use ProgrammingError).
         with self.assertRaises(connection.features.closed_cursor_error_class):
             # cursor should be closed, so no queries should be possible.
-            cursor.execute("select 1")
+            cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
 
     @unittest.skipUnless(connection.vendor == 'postgresql',
                          "Psycopg2 specific cursor.closed attribute needed")
 
@@ -666,6 +668,62 @@ class BackendTestCase(TestCase):
         except Exception:
             pass
 
+    @override_settings(DEBUG=True)
+    def test_queries(self):
+        """
+        Test the documented API of connection.queries.
+        """
+        reset_queries()
+
+        with connection.cursor() as cursor:
+            cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
+        self.assertEqual(1, len(connection.queries))
+
+        self.assertIsInstance(connection.queries, list)
+        self.assertIsInstance(connection.queries[0], dict)
+        self.assertItemsEqual(connection.queries[0].keys(), ['sql', 'time'])
+
+        reset_queries()
+        self.assertEqual(0, len(connection.queries))
+
+    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
+    @skipUnlessDBFeature('test_db_allows_multiple_connections')
+    @override_settings(DEBUG=True)
+    def test_queries_limit(self):
+        """
+        Test that the backend doesn't store an unlimited number of queries.
+
+        Regression for #12581.
+        """
+        old_queries_limit = BaseDatabaseWrapper.queries_limit
+        BaseDatabaseWrapper.queries_limit = 3
+        new_connections = ConnectionHandler(settings.DATABASES)
+        new_connection = new_connections[DEFAULT_DB_ALIAS]
+
+        try:
+            with new_connection.cursor() as cursor:
+                cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
+                cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
+
+            with warnings.catch_warnings(record=True) as w:
+                self.assertEqual(2, len(new_connection.queries))
+                self.assertEqual(0, len(w))
+
+            with new_connection.cursor() as cursor:
+                cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
+                cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
+
+            with warnings.catch_warnings(record=True) as w:
+                self.assertEqual(3, len(new_connection.queries))
+                self.assertEqual(1, len(w))
+                self.assertEqual(str(w[0].message), "Limit for query logging "
+                    "exceeded, only the last 3 queries will be returned.")
+
+        finally:
+            BaseDatabaseWrapper.queries_limit = old_queries_limit
+            new_connection.close()
+
 
 # We don't make these tests conditional because that means we would need to
 # check and differentiate between:
 
@@ -91,7 +91,7 @@ class BulkCreateTests(TestCase):
 
     def test_large_batch(self):
         with override_settings(DEBUG=True):
-            connection.queries = []
+            connection.queries_log.clear()
             TwoFields.objects.bulk_create([
                 TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
             ])
 
@@ -112,7 +112,7 @@ class BulkCreateTests(TestCase):
     @skipUnlessDBFeature('has_bulk_insert')
     def test_large_batch_efficiency(self):
         with override_settings(DEBUG=True):
-            connection.queries = []
+            connection.queries_log.clear()
             TwoFields.objects.bulk_create([
                 TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
             ])
 
@@ -124,7 +124,7 @@ class BulkCreateTests(TestCase):
         mixed together with objects without PK set.
         """
         with override_settings(DEBUG=True):
-            connection.queries = []
+            connection.queries_log.clear()
             TwoFields.objects.bulk_create([
                 TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
                 for i in range(100000, 101000)])
 
@@ -142,7 +142,7 @@ class BulkCreateTests(TestCase):
         mixed together with objects without PK set.
         """
         with override_settings(DEBUG=True):
-            connection.queries = []
+            connection.queries_log.clear()
             TwoFields.objects.bulk_create([
                 TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
                 for i in range(100000, 101000)])