diff --git a/django/db/models/query.py b/django/db/models/query.py
index 363a105056..846f931e82 100644
--- a/django/db/models/query.py
+++ b/django/db/models/query.py
@@ -388,13 +388,14 @@ class QuerySet(object):
         # Delete objects in chunks to prevent the list of related objects from
         # becoming too long.
         seen_objs = None
+        del_itr = iter(del_query)
         while 1:
             # Collect a chunk of objects to be deleted, and then all the
             # objects that are related to the objects that are to be deleted.
             # The chunking *isn't* done by slicing the del_query because we
             # need to maintain the query cache on del_query (see #12328)
             seen_objs = CollectedObjects(seen_objs)
-            for i, obj in izip(xrange(CHUNK_SIZE), del_query):
+            for i, obj in izip(xrange(CHUNK_SIZE), del_itr):
                 obj._collect_sub_objects(seen_objs)
 
             if not seen_objs:
diff --git a/tests/regressiontests/delete_regress/tests.py b/tests/regressiontests/delete_regress/tests.py
new file mode 100644
index 0000000000..da1b89eda8
--- /dev/null
+++ b/tests/regressiontests/delete_regress/tests.py
@@ -0,0 +1,13 @@
+import datetime
+
+from django.test import TestCase
+
+from models import Book
+
+class LargeDeleteTests(TestCase):
+    def test_large_deletes(self):
+        "Regression for #13309 -- if the number of objects > chunk size, deletion still occurs"
+        for x in range(300):
+            track = Book.objects.create(pagecount=x+100)
+        Book.objects.all().delete()
+        self.assertEquals(Book.objects.count(), 0)
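For readers outside the Django codebase, the essence of the fix is the difference between re-iterating an iterable on every pass and draining a single shared iterator. Each pass of the old loop zipped against `del_query` itself, which starts iteration over again (a QuerySet replays its result cache), so the same first chunk was collected on every pass; the patch creates one iterator up front so each pass resumes where the previous one stopped. Below is a minimal standalone sketch of both patterns in plain Python; `CHUNK_SIZE`, `chunks_broken`, and `chunks_fixed` are hypothetical names for illustration, not part of the patch.

```python
CHUNK_SIZE = 100  # illustrative stand-in for django.db.models.query.CHUNK_SIZE

def chunks_broken(seq):
    # Pre-patch pattern: zip() calls iter(seq) afresh on every pass, so for a
    # list (or a QuerySet replaying its result cache) each pass restarts at
    # the first element, and the same chunk is produced forever once
    # len(seq) > CHUNK_SIZE.
    while True:
        chunk = [obj for _, obj in zip(range(CHUNK_SIZE), seq)]
        if not chunk:
            break
        yield chunk

def chunks_fixed(seq):
    # Post-patch pattern: a single iterator is created once (the patch's
    # `del_itr = iter(del_query)`) and shared across passes, so zip() drains
    # it incrementally and the loop terminates when it is exhausted.
    itr = iter(seq)
    while True:
        chunk = [obj for _, obj in zip(range(CHUNK_SIZE), itr)]
        if not chunk:
            break
        yield chunk

# 300 items, as in the regression test above: three chunks, then termination.
# (chunks_broken on the same input would yield the first chunk indefinitely.)
assert sum(1 for _ in chunks_fixed(list(range(300)))) == 3
```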