Fixed #9479 -- Corrected an edge case in bulk queryset deletion that could cause an infinite loop when using MySQL InnoDB.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@10913 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Russell Keith-Magee 2009-06-03 13:23:19 +00:00
parent be907bcdcb
commit 2416e5fefe
4 changed files with 78 additions and 2 deletions

View File

@@ -355,10 +355,11 @@ class QuerySet(object):
         # Delete objects in chunks to prevent the list of related objects from
         # becoming too long.
+        seen_objs = None
         while 1:
             # Collect all the objects to be deleted in this chunk, and all the
             # objects that are related to the objects that are to be deleted.
-            seen_objs = CollectedObjects()
+            seen_objs = CollectedObjects(seen_objs)
             for object in del_query[:CHUNK_SIZE]:
                 object._collect_sub_objects(seen_objs)
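
For context: the surrounding delete() loop only stops once a pass collects nothing new, so before this change a row that the current transaction could still see, but that another transaction had already deleted, kept being re-collected on every pass under MySQL InnoDB. Seeding each pass with the previous pass's collection breaks that cycle. A minimal, self-contained sketch of the corrected loop shape (plain Python, not Django code; delete_in_chunks, fetch_visible_pks and delete_pks are hypothetical names):

CHUNK_SIZE = 100

def delete_in_chunks(fetch_visible_pks, delete_pks):
    # fetch_visible_pks() returns up to CHUNK_SIZE primary keys the current
    # transaction can still see; delete_pks(pks) issues the actual DELETE,
    # which may affect zero rows for pks another transaction already removed.
    blocked = set()                      # pks collected in earlier passes
    while True:
        chunk = [pk for pk in fetch_visible_pks() if pk not in blocked]
        if not chunk:                    # nothing new collected: stop
            break
        delete_pks(chunk)
        blocked.update(chunk)            # carried forward, like CollectedObjects(seen_objs)

# Toy run: the visible set never shrinks, mimicking a stale snapshot, yet the
# loop still terminates because collected pks are never re-collected.
stale_snapshot = [1, 2, 3]
issued = []
delete_in_chunks(lambda: stale_snapshot[:CHUNK_SIZE], issued.extend)
print(issued)   # [1, 2, 3]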

View File

@@ -32,11 +32,21 @@ class CollectedObjects(object):
     This is used for the database object deletion routines so that we can
     calculate the 'leaf' objects which should be deleted first.
+
+    previously_seen is an optional argument. It must be a CollectedObjects
+    instance itself; any previously_seen collected object will be blocked from
+    being added to this instance.
     """

-    def __init__(self):
+    def __init__(self, previously_seen=None):
         self.data = {}
         self.children = {}
+        if previously_seen:
+            self.blocked = previously_seen.blocked
+            for cls, seen in previously_seen.data.items():
+                self.blocked.setdefault(cls, SortedDict()).update(seen)
+        else:
+            self.blocked = {}

     def add(self, model, pk, obj, parent_model, nullable=False):
         """
@@ -53,6 +63,9 @@ class CollectedObjects(object):
         Returns True if the item already existed in the structure and
         False otherwise.
         """
+        if pk in self.blocked.get(model, {}):
+            return True
+
         d = self.data.setdefault(model, SortedDict())
         retval = pk in d
         d[pk] = obj
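
A stripped-down illustration of the blocking behaviour added above, assuming plain dicts in place of SortedDict and the hypothetical name SeenObjects; only the pieces relevant to this change are kept:

class SeenObjects(object):
    def __init__(self, previously_seen=None):
        self.data = {}
        self.blocked = {}
        if previously_seen is not None:
            # Everything an earlier pass collected becomes off limits here.
            self.blocked = previously_seen.blocked
            for cls, seen in previously_seen.data.items():
                self.blocked.setdefault(cls, {}).update(seen)

    def add(self, model, pk, obj):
        # Report blocked keys as "already seen" so callers skip them.
        if pk in self.blocked.get(model, {}):
            return True
        d = self.data.setdefault(model, {})
        existed = pk in d
        d[pk] = obj
        return existed

# The first pass collects pk 1; the second pass, seeded with the first,
# refuses to collect it again and so ends up with nothing left to delete.
first = SeenObjects()
first.add('book', 1, object())            # False: newly collected
second = SeenObjects(first)
print(second.add('book', 1, object()))    # True: blocked, not re-collected
print(second.data)                        # {}: nothing new to delete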

View File

@@ -0,0 +1 @@
+
View File

@@ -0,0 +1,61 @@
+from django.conf import settings
+from django.db import models, backend, connection, transaction
+from django.db.models import sql, query
+from django.test import TransactionTestCase
+
+class Book(models.Model):
+    pagecount = models.IntegerField()
+
+# Can't run this test under SQLite, because you can't
+# get two connections to an in-memory database.
+if settings.DATABASE_ENGINE != 'sqlite3':
+    class DeleteLockingTest(TransactionTestCase):
+        def setUp(self):
+            # Create a second connection to the database
+            self.conn2 = backend.DatabaseWrapper({
+                'DATABASE_HOST': settings.DATABASE_HOST,
+                'DATABASE_NAME': settings.DATABASE_NAME,
+                'DATABASE_OPTIONS': settings.DATABASE_OPTIONS,
+                'DATABASE_PASSWORD': settings.DATABASE_PASSWORD,
+                'DATABASE_PORT': settings.DATABASE_PORT,
+                'DATABASE_USER': settings.DATABASE_USER,
+                'TIME_ZONE': settings.TIME_ZONE,
+            })
+
+            # Put both DB connections into managed transaction mode
+            transaction.enter_transaction_management()
+            transaction.managed(True)
+            self.conn2._enter_transaction_management(True)
+
+        def tearDown(self):
+            # Close down the second connection.
+            transaction.leave_transaction_management()
+            self.conn2.close()
+
+        def test_concurrent_delete(self):
+            "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
+            # Create some dummy data
+            b1 = Book(id=1, pagecount=100)
+            b2 = Book(id=2, pagecount=200)
+            b3 = Book(id=3, pagecount=300)
+            b1.save()
+            b2.save()
+            b3.save()
+
+            transaction.commit()
+            self.assertEquals(3, Book.objects.count())
+
+            # Delete something using connection 2.
+            cursor2 = self.conn2.cursor()
+            cursor2.execute('DELETE from delete_regress_book WHERE id=1')
+            self.conn2._commit();
+
+            # Now perform a queryset delete that covers the object
+            # deleted in connection 2. This causes an infinite loop
+            # under MySQL InnoDB unless we keep track of already
+            # deleted objects.
+            Book.objects.filter(pagecount__lt=250).delete()
+            transaction.commit()
+            self.assertEquals(1, Book.objects.count())