2013-07-30 01:19:04 +08:00
|
|
|
from __future__ import unicode_literals
|
2011-10-14 02:04:12 +08:00
|
|
|
|
2014-08-11 17:47:37 +08:00
|
|
|
from math import ceil
|
|
|
|
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.db import IntegrityError, connection, models
|
2014-08-11 17:47:37 +08:00
|
|
|
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
|
2014-12-13 21:04:36 +08:00
|
|
|
from django.utils.six.moves import range
|
2010-09-13 04:03:28 +08:00
|
|
|
|
2015-01-28 20:35:27 +08:00
|
|
|
from .models import (
|
|
|
|
A, M, MR, R, S, T, Avatar, Base, Child, HiddenUser, HiddenUserProfile,
|
|
|
|
M2MFrom, M2MTo, MRNull, Parent, RChild, User, create_a, get_default_r,
|
|
|
|
)
|
2010-09-13 04:03:28 +08:00
|
|
|
|
|
|
|
|
2010-11-10 00:46:42 +08:00
|
|
|
class OnDeleteTests(TestCase):
    """
    Exercise each ``on_delete`` behavior (CASCADE, SET_NULL, SET_DEFAULT,
    SET(...), PROTECT, DO_NOTHING) declared on the ``A`` model's relations
    to ``R``, including the variants routed through model inheritance and
    one-to-one fields.
    """

    def setUp(self):
        # Target R instance used by the SET_DEFAULT / SET(...) fields.
        self.DEFAULT = get_default_r()

    def test_auto(self):
        a = create_a('auto')
        a.auto.delete()
        self.assertFalse(A.objects.filter(name='auto').exists())

    def test_auto_nullable(self):
        a = create_a('auto_nullable')
        a.auto_nullable.delete()
        self.assertFalse(A.objects.filter(name='auto_nullable').exists())

    def test_setvalue(self):
        a = create_a('setvalue')
        a.setvalue.delete()
        a = A.objects.get(pk=a.pk)
        self.assertEqual(self.DEFAULT, a.setvalue)

    def test_setnull(self):
        a = create_a('setnull')
        a.setnull.delete()
        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.setnull)

    def test_setdefault(self):
        a = create_a('setdefault')
        a.setdefault.delete()
        a = A.objects.get(pk=a.pk)
        self.assertEqual(self.DEFAULT, a.setdefault)

    def test_setdefault_none(self):
        a = create_a('setdefault_none')
        a.setdefault_none.delete()
        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.setdefault_none)

    def test_cascade(self):
        a = create_a('cascade')
        a.cascade.delete()
        self.assertFalse(A.objects.filter(name='cascade').exists())

    def test_cascade_nullable(self):
        a = create_a('cascade_nullable')
        a.cascade_nullable.delete()
        self.assertFalse(A.objects.filter(name='cascade_nullable').exists())

    def test_protect(self):
        a = create_a('protect')
        self.assertRaises(IntegrityError, a.protect.delete)

    def test_do_nothing(self):
        # Testing DO_NOTHING is a bit harder: It would raise IntegrityError
        # for a normal model, so we connect to pre_delete and set the fk to a
        # known value.
        replacement_r = R.objects.create()

        def check_do_nothing(sender, **kwargs):
            obj = kwargs['instance']
            obj.donothing_set.update(donothing=replacement_r)

        models.signals.pre_delete.connect(check_do_nothing)
        try:
            a = create_a('do_nothing')
            a.donothing.delete()
        finally:
            # Disconnect even if an assertion or the delete fails, so the
            # handler can't leak into other tests.
            models.signals.pre_delete.disconnect(check_do_nothing)
        a = A.objects.get(pk=a.pk)
        self.assertEqual(replacement_r, a.donothing)

    def test_do_nothing_qscount(self):
        """
        Test that a models.DO_NOTHING relation doesn't trigger a query.
        """
        b = Base.objects.create()
        with self.assertNumQueries(1):
            # RelToBase should not be queried.
            b.delete()
        self.assertEqual(Base.objects.count(), 0)

    def test_inheritance_cascade_up(self):
        # Deleting the child removes the implicit parent row too.
        child = RChild.objects.create()
        child.delete()
        self.assertFalse(R.objects.filter(pk=child.pk).exists())

    def test_inheritance_cascade_down(self):
        # Deleting the parent removes the child row too.
        child = RChild.objects.create()
        parent = child.r_ptr
        parent.delete()
        self.assertFalse(RChild.objects.filter(pk=child.pk).exists())

    def test_cascade_from_child(self):
        a = create_a('child')
        a.child.delete()
        self.assertFalse(A.objects.filter(name='child').exists())
        self.assertFalse(R.objects.filter(pk=a.child_id).exists())

    def test_cascade_from_parent(self):
        a = create_a('child')
        R.objects.get(pk=a.child_id).delete()
        self.assertFalse(A.objects.filter(name='child').exists())
        self.assertFalse(RChild.objects.filter(pk=a.child_id).exists())

    def test_setnull_from_child(self):
        a = create_a('child_setnull')
        a.child_setnull.delete()
        self.assertFalse(R.objects.filter(pk=a.child_setnull_id).exists())

        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.child_setnull)

    def test_setnull_from_parent(self):
        a = create_a('child_setnull')
        R.objects.get(pk=a.child_setnull_id).delete()
        self.assertFalse(RChild.objects.filter(pk=a.child_setnull_id).exists())

        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.child_setnull)

    def test_o2o_setnull(self):
        a = create_a('o2o_setnull')
        a.o2o_setnull.delete()
        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.o2o_setnull)
|
|
|
|
|
|
|
|
|
|
|
|
class DeletionTests(TestCase):
    """
    General deletion behavior: m2m through-table cleanup, batched bulk
    deletes, signal ordering, constraint-check deferring, and query counts
    for large delete operations.
    """

    def test_m2m(self):
        # Deleting either end of an explicit through model removes the
        # through rows.
        m = M.objects.create()
        r = R.objects.create()
        MR.objects.create(m=m, r=r)
        r.delete()
        self.assertFalse(MR.objects.exists())

        r = R.objects.create()
        MR.objects.create(m=m, r=r)
        m.delete()
        self.assertFalse(MR.objects.exists())

        # The same holds for the auto-created through model.
        m = M.objects.create()
        r = R.objects.create()
        m.m2m.add(r)
        r.delete()
        through = M._meta.get_field('m2m').rel.through
        self.assertFalse(through.objects.exists())

        r = R.objects.create()
        m.m2m.add(r)
        m.delete()
        self.assertFalse(through.objects.exists())

        # With a nullable through model the through row survives the delete,
        # but the relation no longer yields the removed object.
        m = M.objects.create()
        r = R.objects.create()
        MRNull.objects.create(m=m, r=r)
        r.delete()
        self.assertTrue(MRNull.objects.exists())
        self.assertFalse(m.m2m_through_null.exists())

    def test_bulk(self):
        s = S.objects.create(r=R.objects.create())
        for i in range(2 * GET_ITERATOR_CHUNK_SIZE):
            T.objects.create(s=s)
        # 1 (select related `T` instances)
        # + 1 (select related `U` instances)
        # + 2 (delete `T` instances in batches)
        # + 1 (delete `s`)
        self.assertNumQueries(5, s.delete)
        self.assertFalse(S.objects.exists())

    def test_instance_update(self):
        deleted = []
        related_setnull_sets = []

        def pre_delete(sender, **kwargs):
            obj = kwargs['instance']
            deleted.append(obj)
            if isinstance(obj, R):
                related_setnull_sets.append(list(a.pk for a in obj.setnull_set.all()))

        models.signals.pre_delete.connect(pre_delete)
        try:
            a = create_a('update_setnull')
            a.setnull.delete()

            a = create_a('update_cascade')
            a.cascade.delete()
        finally:
            # Always disconnect so a failed delete can't leak the handler
            # into other tests.
            models.signals.pre_delete.disconnect(pre_delete)

        # Deleted instances must have their in-memory pk reset to None.
        for obj in deleted:
            self.assertIsNone(obj.pk)

        # SET_NULL must be reflected on the related rows in the database.
        for pk_list in related_setnull_sets:
            for a in A.objects.filter(id__in=pk_list):
                self.assertIsNone(a.setnull)

    def test_deletion_order(self):
        pre_delete_order = []
        post_delete_order = []

        # Each logger records the signal it is actually connected to (the
        # lists were previously swapped between the two handlers, which made
        # the assertions below read backwards).
        def log_pre_delete(sender, **kwargs):
            pre_delete_order.append((sender, kwargs['instance'].pk))

        def log_post_delete(sender, **kwargs):
            post_delete_order.append((sender, kwargs['instance'].pk))

        models.signals.pre_delete.connect(log_pre_delete)
        models.signals.post_delete.connect(log_post_delete)
        try:
            r = R.objects.create(pk=1)
            s1 = S.objects.create(pk=1, r=r)
            s2 = S.objects.create(pk=2, r=r)
            T.objects.create(pk=1, s=s1)
            T.objects.create(pk=2, s=s2)
            r.delete()
            # Dependent objects (T) go first, the root (R) last.
            self.assertEqual(
                pre_delete_order, [(T, 1), (T, 2), (S, 1), (S, 2), (R, 1)]
            )
            self.assertEqual(
                post_delete_order, [(T, 2), (T, 1), (S, 2), (S, 1), (R, 1)]
            )
        finally:
            models.signals.pre_delete.disconnect(log_pre_delete)
            models.signals.post_delete.disconnect(log_post_delete)

    def test_relational_post_delete_signals_happen_before_parent_object(self):
        deletions = []

        def log_post_delete(instance, **kwargs):
            # The parent R row must still exist when S's post_delete fires.
            self.assertTrue(R.objects.filter(pk=instance.r_id))
            self.assertIs(type(instance), S)
            deletions.append(instance.id)

        r = R.objects.create(pk=1)
        S.objects.create(pk=1, r=r)

        models.signals.post_delete.connect(log_post_delete, sender=S)

        try:
            r.delete()
        finally:
            # The sender must match the one used at connect time; Signal
            # keys receivers on (receiver, sender), so disconnecting without
            # it would silently leave the receiver attached.
            models.signals.post_delete.disconnect(log_post_delete, sender=S)

        self.assertEqual(len(deletions), 1)
        self.assertEqual(deletions[0], 1)

    @skipUnlessDBFeature("can_defer_constraint_checks")
    def test_can_defer_constraint_checks(self):
        u = User.objects.create(
            avatar=Avatar.objects.create()
        )
        a = Avatar.objects.get(pk=u.avatar_id)
        # 1 query to find the users for the avatar.
        # 1 query to delete the user
        # 1 query to delete the avatar
        # The important thing is that when we can defer constraint checks there
        # is no need to do an UPDATE on User.avatar to null it out.

        # Attach a signal to make sure we will not do fast_deletes.
        calls = []

        def noop(*args, **kwargs):
            calls.append('')
        models.signals.post_delete.connect(noop, sender=User)
        try:
            self.assertNumQueries(3, a.delete)
            self.assertFalse(User.objects.exists())
            self.assertFalse(Avatar.objects.exists())
            self.assertEqual(len(calls), 1)
        finally:
            models.signals.post_delete.disconnect(noop, sender=User)

    @skipIfDBFeature("can_defer_constraint_checks")
    def test_cannot_defer_constraint_checks(self):
        u = User.objects.create(
            avatar=Avatar.objects.create()
        )
        # Attach a signal to make sure we will not do fast_deletes.
        calls = []

        def noop(*args, **kwargs):
            calls.append('')
        models.signals.post_delete.connect(noop, sender=User)

        try:
            a = Avatar.objects.get(pk=u.avatar_id)
            # The below doesn't make sense... Why do we need to null out
            # user.avatar if we are going to delete the user immediately after it,
            # and there are no more cascades.
            # 1 query to find the users for the avatar.
            # 1 query to delete the user
            # 1 query to null out user.avatar, because we can't defer the constraint
            # 1 query to delete the avatar
            self.assertNumQueries(4, a.delete)
            self.assertFalse(User.objects.exists())
            self.assertFalse(Avatar.objects.exists())
            self.assertEqual(len(calls), 1)
        finally:
            models.signals.post_delete.disconnect(noop, sender=User)

    def test_hidden_related(self):
        r = R.objects.create()
        h = HiddenUser.objects.create(r=r)
        HiddenUserProfile.objects.create(user=h)

        r.delete()
        self.assertEqual(HiddenUserProfile.objects.count(), 0)

    def test_large_delete(self):
        TEST_SIZE = 2000
        objs = [Avatar() for i in range(0, TEST_SIZE)]
        Avatar.objects.bulk_create(objs)
        # Calculate the number of queries needed.
        batch_size = connection.ops.bulk_batch_size(['pk'], objs)
        # The related fetches are done in batches.
        batches = int(ceil(float(len(objs)) / batch_size))
        # One query for Avatar.objects.all() and then one related fast delete for
        # each batch.
        fetches_to_mem = 1 + batches
        # The Avatar objects are going to be deleted in batches of GET_ITERATOR_CHUNK_SIZE
        queries = fetches_to_mem + TEST_SIZE // GET_ITERATOR_CHUNK_SIZE
        self.assertNumQueries(queries, Avatar.objects.all().delete)
        self.assertFalse(Avatar.objects.exists())

    def test_large_delete_related(self):
        TEST_SIZE = 2000
        s = S.objects.create(r=R.objects.create())
        for i in range(TEST_SIZE):
            T.objects.create(s=s)

        batch_size = max(connection.ops.bulk_batch_size(['pk'], range(TEST_SIZE)), 1)

        # ceil(TEST_SIZE / batch_size) (select related `T` instances)
        # + 1 (select related `U` instances)
        # + ceil(TEST_SIZE / GET_ITERATOR_CHUNK_SIZE) (delete `T` instances in batches)
        # + 1 (delete `s`)
        # NOTE: use true division before ceil(); ceil() of an already-floored
        # `//` quotient is a no-op and would undercount on backends whose
        # batch size doesn't divide TEST_SIZE evenly.
        expected_num_queries = (int(ceil(float(TEST_SIZE) / batch_size)) +
                                int(ceil(float(TEST_SIZE) / GET_ITERATOR_CHUNK_SIZE)) + 2)

        self.assertNumQueries(expected_num_queries, s.delete)
        self.assertFalse(S.objects.exists())
        self.assertFalse(T.objects.exists())
|
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2012-09-20 23:51:30 +08:00
|
|
|
class FastDeleteTests(TestCase):
    """
    Query counts for deletions that can take the "fast delete" path: a single
    DELETE issued without fetching the related instances into memory.
    """

    def test_fast_delete_fk(self):
        u = User.objects.create(
            avatar=Avatar.objects.create()
        )
        a = Avatar.objects.get(pk=u.avatar_id)
        # 1 query to fast-delete the user
        # 1 query to delete the avatar
        self.assertNumQueries(2, a.delete)
        self.assertFalse(User.objects.exists())
        self.assertFalse(Avatar.objects.exists())

    def test_fast_delete_m2m(self):
        t = M2MTo.objects.create()
        f = M2MFrom.objects.create()
        f.m2m.add(t)
        # 1 to delete f, 1 to fast-delete m2m for f
        self.assertNumQueries(2, f.delete)

    def test_fast_delete_revm2m(self):
        t = M2MTo.objects.create()
        f = M2MFrom.objects.create()
        f.m2m.add(t)
        # Delete the *reverse* side of the relation. (This previously deleted
        # `f`, which merely duplicated test_fast_delete_m2m and left the
        # reverse accessor untested.)
        # 1 to delete t, 1 to fast-delete t's m_set
        self.assertNumQueries(2, t.delete)

    def test_fast_delete_qs(self):
        u1 = User.objects.create()
        u2 = User.objects.create()
        self.assertNumQueries(1, User.objects.filter(pk=u1.pk).delete)
        self.assertEqual(User.objects.count(), 1)
        self.assertTrue(User.objects.filter(pk=u2.pk).exists())

    def test_fast_delete_joined_qs(self):
        a = Avatar.objects.create(desc='a')
        User.objects.create(avatar=a)
        u2 = User.objects.create()
        # Backends that can self-select in UPDATE/DELETE need only 1 query.
        expected_queries = 1 if connection.features.update_can_self_select else 2
        self.assertNumQueries(expected_queries,
                              User.objects.filter(avatar__desc='a').delete)
        self.assertEqual(User.objects.count(), 1)
        self.assertTrue(User.objects.filter(pk=u2.pk).exists())

    def test_fast_delete_inheritance(self):
        c = Child.objects.create()
        p = Parent.objects.create()
        # 1 for self, 1 for parent
        # However, this doesn't work as child.parent access creates a query,
        # and this means we will be generating extra queries (a lot for large
        # querysets). This is not a fast-delete problem.
        # self.assertNumQueries(2, c.delete)
        c.delete()
        self.assertFalse(Child.objects.exists())
        self.assertEqual(Parent.objects.count(), 1)
        self.assertEqual(Parent.objects.filter(pk=p.pk).count(), 1)
        # 1 for self delete, 1 for fast delete of empty "child" qs.
        self.assertNumQueries(2, p.delete)
        self.assertFalse(Parent.objects.exists())
        # 1 for self delete, 1 for fast delete of empty "child" qs.
        c = Child.objects.create()
        p = c.parent_ptr
        self.assertNumQueries(2, p.delete)
        self.assertFalse(Parent.objects.exists())
        self.assertFalse(Child.objects.exists())

    def test_fast_delete_large_batch(self):
        User.objects.bulk_create(User() for i in range(0, 2000))
        # No problems here - we aren't going to cascade, so we will fast
        # delete the objects in a single query.
        self.assertNumQueries(1, User.objects.all().delete)
        a = Avatar.objects.create(desc='a')
        User.objects.bulk_create(User(avatar=a) for i in range(0, 2000))
        # We don't hit parameter amount limits for a, so just one query for
        # that + fast delete of the related objs.
        self.assertNumQueries(2, a.delete)
        self.assertEqual(User.objects.count(), 0)
|