Fixed #12953 -- Ensured that deletion cascades through generic relations. Also cleaned up the special-casing of generic relations in the deleted-objects discovery process. Thanks to carljm for the report and patch.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@12790 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Russell Keith-Magee 2010-03-15 13:15:01 +00:00
parent d8c9d21c33
commit 2d57300f52
6 changed files with 168 additions and 125 deletions
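
For orientation before the diffs: a minimal sketch of the behaviour this commit guarantees, using the Person/Award/AwardNote models added to the regression tests below (illustration only; the import path is assumed from the test layout and is not part of the diff):

    from regressiontests.delete_regress.models import Person, Award, AwardNote

    person = Person.objects.create(name='Nelson Mandela')
    award = Award.objects.create(name='Nobel', content_object=person)
    AwardNote.objects.create(note='a peace prize', award=award)

    person.delete()  # now cascades through the GenericRelation to Award...
    assert Award.objects.count() == 0
    assert AwardNote.objects.count() == 0  # ...and on through the FK to Award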

django/contrib/admin/util.py

@@ -108,29 +108,6 @@ def get_deleted_objects(objs, opts, user, admin_site, levels_to_root=4):
         # TODO using a private model API!
         obj._collect_sub_objects(collector)
 
-        # TODO This next bit is needed only because GenericRelations are
-        # cascade-deleted way down in the internals in
-        # DeleteQuery.delete_batch_related, instead of being found by
-        # _collect_sub_objects. Refs #12593.
-        from django.contrib.contenttypes import generic
-        for f in obj._meta.many_to_many:
-            if isinstance(f, generic.GenericRelation):
-                rel_manager = f.value_from_object(obj)
-                for related in rel_manager.all():
-                    # There's a weirdness here in the case that the
-                    # generic-related object also has FKs pointing to it
-                    # from elsewhere. DeleteQuery does not follow those
-                    # FKs or delete any such objects explicitly (which is
-                    # probably a bug). Some databases may cascade those
-                    # deletes themselves, and some won't. So do we report
-                    # those objects as to-be-deleted? No right answer; for
-                    # now we opt to report only on objects that Django
-                    # will explicitly delete, at risk that some further
-                    # objects will be silently deleted by a
-                    # referential-integrity-maintaining database.
-                    collector.add(related.__class__, related.pk, related,
-                                  obj.__class__, obj)
 
     perms_needed = set()
     to_delete = collector.nested(_format_callback,
@@ -188,6 +165,10 @@ class NestedObjects(object):
         """
         model, pk = type(obj), obj._get_pk_val()
 
+        # auto-created M2M models don't interest us
+        if model._meta.auto_created:
+            return True
+
         key = model, pk
         if key in self.seen:

django/db/models/base.py

@@ -556,7 +556,7 @@ class Model(object):
         for related in self._meta.get_all_related_objects():
             rel_opts_name = related.get_accessor_name()
-            if isinstance(related.field.rel, OneToOneRel):
+            if not related.field.rel.multiple:
                 try:
                     sub_obj = getattr(self, rel_opts_name)
                 except ObjectDoesNotExist:
@@ -582,6 +582,30 @@
             for sub_obj in delete_qs:
                 sub_obj._collect_sub_objects(seen_objs, self, related.field.null)
 
+        for related in self._meta.get_all_related_many_to_many_objects():
+            if related.field.rel.through:
+                opts = related.field.rel.through._meta
+                reverse_field_name = related.field.m2m_reverse_field_name()
+                nullable = opts.get_field(reverse_field_name).null
+                filters = {reverse_field_name: self}
+                for sub_obj in related.field.rel.through._base_manager.filter(**filters):
+                    sub_obj._collect_sub_objects(seen_objs, self, nullable)
+
+        for f in self._meta.many_to_many:
+            if f.rel.through:
+                opts = f.rel.through._meta
+                field_name = f.m2m_field_name()
+                nullable = opts.get_field(field_name).null
+                filters = {field_name: self}
+                for sub_obj in f.rel.through._base_manager.filter(**filters):
+                    sub_obj._collect_sub_objects(seen_objs, self, nullable)
+            else:
+                # m2m-ish but with no through table? GenericRelation: cascade delete
+                for sub_obj in f.value_from_object(self).all():
+                    # Generic relations are not enforced by db constraints, so we
+                    # can set nullable=True; collection order does not matter
+                    sub_obj._collect_sub_objects(seen_objs, self, True)
+
         # Handle any ancestors (for the model-inheritance case). We do this by
         # traversing to the most remote parent classes -- those with no parents
         # themselves -- and then adding those instances to the collection. That
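
To make the new branches concrete, here is roughly what they compute for the explicit-through models added in the tests below (a sketch under assumed imports, not the committed code):

    from regressiontests.delete_regress.models import Toy, PlayedWith

    paints = Toy.objects.create(name='Paints')

    # Reverse-M2M branch: Child.toys goes through PlayedWith, so for Toy
    # related.field.m2m_reverse_field_name() is 'toy', and the collector
    # recurses into each through-row:
    for sub_obj in PlayedWith._base_manager.filter(toy=paints):
        # in the real code: sub_obj._collect_sub_objects(seen_objs, paints, nullable)
        pass

    # GenericRelation branch: a GenericRelation has f.rel.through == None, so
    # its related objects come straight off the descriptor, e.g. for Person:
    # f.value_from_object(person).all() is person.awards.all()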

django/db/models/query.py

@@ -1278,8 +1278,6 @@ def delete_objects(seen_objs, using):
             signals.pre_delete.send(sender=cls, instance=instance)
 
         pk_list = [pk for pk,instance in items]
-        del_query = sql.DeleteQuery(cls)
-        del_query.delete_batch_related(pk_list, using=using)
 
         update_query = sql.UpdateQuery(cls)
         for field, model in cls._meta.get_fields_with_model():
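
With delete_batch_related() gone (next file), the surviving flow here is easier to state. A simplified schematic of delete_objects(), reusing the names visible in the context lines above (not the full committed body):

    # per model class, dependencies first:
    pk_list = [pk for pk, instance in items]

    update_query = sql.UpdateQuery(cls)
    for field, model in cls._meta.get_fields_with_model():
        if field.rel and field.null and field.rel.to in seen_objs:
            # null out nullable FKs before the batched DELETEs run
            update_query.clear_related(field, pk_list, using=using)

    # later, per class in dependency order:
    sql.DeleteQuery(cls).delete_batch(pk_list, using=using)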

django/db/models/sql/subqueries.py

@@ -26,52 +26,9 @@ class DeleteQuery(Query):
         self.where = where
         self.get_compiler(using).execute_sql(None)
 
-    def delete_batch_related(self, pk_list, using):
-        """
-        Set up and execute delete queries for all the objects related to the
-        primary key values in pk_list. To delete the objects themselves, use
-        the delete_batch() method.
-
-        More than one physical query may be executed if there are a
-        lot of values in pk_list.
-        """
-        from django.contrib.contenttypes import generic
-        cls = self.model
-        for related in cls._meta.get_all_related_many_to_many_objects():
-            if not isinstance(related.field, generic.GenericRelation):
-                for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
-                    where = self.where_class()
-                    where.add((Constraint(None,
-                            related.field.m2m_reverse_name(), related.field),
-                            'in',
-                            pk_list[offset : offset+GET_ITERATOR_CHUNK_SIZE]),
-                            AND)
-                    self.do_query(related.field.m2m_db_table(), where, using=using)
-
-        for f in cls._meta.many_to_many:
-            w1 = self.where_class()
-            db_prep_value = None
-            if isinstance(f, generic.GenericRelation):
-                from django.contrib.contenttypes.models import ContentType
-                ct_field = f.rel.to._meta.get_field(f.content_type_field_name)
-                w1.add((Constraint(None, ct_field.column, ct_field), 'exact',
-                        ContentType.objects.get_for_model(cls).id), AND)
-                id_field = f.rel.to._meta.get_field(f.object_id_field_name)
-                db_prep_value = id_field.get_db_prep_value
-            for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
-                where = self.where_class()
-                where.add((Constraint(None, f.m2m_column_name(), f), 'in',
-                        map(db_prep_value,
-                            pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE])),
-                        AND)
-                if w1:
-                    where.add(w1, AND)
-                self.do_query(f.m2m_db_table(), where, using=using)
-
     def delete_batch(self, pk_list, using):
         """
-        Set up and execute delete queries for all the objects in pk_list. This
-        should be called after delete_batch_related(), if necessary.
+        Set up and execute delete queries for all the objects in pk_list.
 
         More than one physical query may be executed if there are a
         lot of values in pk_list.
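
The chunking idiom the removed method used, and which delete_batch() retains, slices pk_list so that no single DELETE gets an unbounded IN (...) clause. A self-contained sketch (the chunk size value is assumed for illustration):

    GET_ITERATOR_CHUNK_SIZE = 100  # assumed value, for illustration

    def batched(pk_list, size=GET_ITERATOR_CHUNK_SIZE):
        for offset in range(0, len(pk_list), size):
            yield pk_list[offset:offset + size]

    # e.g. one DELETE ... WHERE pk IN (<chunk>) per chunk:
    for chunk in batched(list(range(250))):
        print(len(chunk))  # 100, 100, 50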

tests/regressiontests/delete_regress/models.py

@@ -1,62 +1,37 @@
-from django.conf import settings
-from django.db import models, backend, connection, transaction, DEFAULT_DB_ALIAS
-from django.db.models import sql, query
-from django.test import TransactionTestCase
+from django.db import models
+from django.contrib.contenttypes import generic
+from django.contrib.contenttypes.models import ContentType
+
+class Award(models.Model):
+    name = models.CharField(max_length=25)
+    object_id = models.PositiveIntegerField()
+    content_type = models.ForeignKey(ContentType)
+    content_object = generic.GenericForeignKey()
+
+class AwardNote(models.Model):
+    award = models.ForeignKey(Award)
+    note = models.CharField(max_length=100)
+
+class Person(models.Model):
+    name = models.CharField(max_length=25)
+    awards = generic.GenericRelation(Award)
 
 class Book(models.Model):
     pagecount = models.IntegerField()
 
-# Can't run this test under SQLite, because you can't
-# get two connections to an in-memory database.
-if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.sqlite3':
-    class DeleteLockingTest(TransactionTestCase):
-        def setUp(self):
-            # Create a second connection to the database
-            conn_settings = settings.DATABASES[DEFAULT_DB_ALIAS]
-            self.conn2 = backend.DatabaseWrapper({
-                'HOST': conn_settings['HOST'],
-                'NAME': conn_settings['NAME'],
-                'OPTIONS': conn_settings['OPTIONS'],
-                'PASSWORD': conn_settings['PASSWORD'],
-                'PORT': conn_settings['PORT'],
-                'USER': conn_settings['USER'],
-                'TIME_ZONE': settings.TIME_ZONE,
-            })
-
-            # Put both DB connections into managed transaction mode
-            transaction.enter_transaction_management()
-            transaction.managed(True)
-            self.conn2._enter_transaction_management(True)
-
-        def tearDown(self):
-            # Close down the second connection.
-            transaction.leave_transaction_management()
-            self.conn2.close()
-
-        def test_concurrent_delete(self):
-            "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
-
-            # Create some dummy data
-            b1 = Book(id=1, pagecount=100)
-            b2 = Book(id=2, pagecount=200)
-            b3 = Book(id=3, pagecount=300)
-            b1.save()
-            b2.save()
-            b3.save()
-
-            transaction.commit()
-            self.assertEquals(3, Book.objects.count())
-
-            # Delete something using connection 2.
-            cursor2 = self.conn2.cursor()
-            cursor2.execute('DELETE from delete_regress_book WHERE id=1')
-            self.conn2._commit()
-
-            # Now perform a queryset delete that covers the object
-            # deleted in connection 2. This causes an infinite loop
-            # under MySQL InnoDB unless we keep track of already
-            # deleted objects.
-            Book.objects.filter(pagecount__lt=250).delete()
-            transaction.commit()
-            self.assertEquals(1, Book.objects.count())
+class Toy(models.Model):
+    name = models.CharField(max_length=50)
+
+class Child(models.Model):
+    name = models.CharField(max_length=50)
+    toys = models.ManyToManyField(Toy, through='PlayedWith')
+
+class PlayedWith(models.Model):
+    child = models.ForeignKey(Child)
+    toy = models.ForeignKey(Toy)
+    date = models.DateField()
+
+class PlayedWithNote(models.Model):
+    played = models.ForeignKey(PlayedWith)
+    note = models.TextField()
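
For reference, a user-level check of the cascade chain these models set up, mirroring the new test in the next file (sketch; import path assumed):

    import datetime
    from regressiontests.delete_regress.models import (Child, Toy, PlayedWith,
        PlayedWithNote)

    juan = Child.objects.create(name='Juan')
    paints = Toy.objects.create(name='Paints')
    played = PlayedWith.objects.create(child=juan, toy=paints,
                                       date=datetime.date.today())
    PlayedWithNote.objects.create(played=played, note='a budding artist')

    paints.delete()  # cascades to the explicit through model...
    assert PlayedWith.objects.count() == 0
    assert PlayedWithNote.objects.count() == 0  # ...and to FKs pointing at it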

tests/regressiontests/delete_regress/tests.py

@@ -0,0 +1,108 @@
+import datetime
+
+from django.conf import settings
+from django.db import backend, connection, transaction, DEFAULT_DB_ALIAS
+from django.test import TestCase, TransactionTestCase
+
+from models import Book, Award, AwardNote, Person, Child, Toy, PlayedWith, PlayedWithNote
+
+# Can't run this test under SQLite, because you can't
+# get two connections to an in-memory database.
+if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.sqlite3':
+    class DeleteLockingTest(TransactionTestCase):
+        def setUp(self):
+            # Create a second connection to the default database
+            conn_settings = settings.DATABASES[DEFAULT_DB_ALIAS]
+            self.conn2 = backend.DatabaseWrapper({
+                'HOST': conn_settings['HOST'],
+                'NAME': conn_settings['NAME'],
+                'OPTIONS': conn_settings['OPTIONS'],
+                'PASSWORD': conn_settings['PASSWORD'],
+                'PORT': conn_settings['PORT'],
+                'USER': conn_settings['USER'],
+                'TIME_ZONE': settings.TIME_ZONE,
+            })
+
+            # Put both DB connections into managed transaction mode
+            transaction.enter_transaction_management()
+            transaction.managed(True)
+            self.conn2._enter_transaction_management(True)
+
+        def tearDown(self):
+            # Close down the second connection.
+            transaction.leave_transaction_management()
+            self.conn2.close()
+
+        def test_concurrent_delete(self):
+            "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
+
+            # Create some dummy data
+            b1 = Book(id=1, pagecount=100)
+            b2 = Book(id=2, pagecount=200)
+            b3 = Book(id=3, pagecount=300)
+            b1.save()
+            b2.save()
+            b3.save()
+
+            transaction.commit()
+            self.assertEquals(3, Book.objects.count())
+
+            # Delete something using connection 2.
+            cursor2 = self.conn2.cursor()
+            cursor2.execute('DELETE from delete_regress_book WHERE id=1')
+            self.conn2._commit()
+
+            # Now perform a queryset delete that covers the object
+            # deleted in connection 2. This causes an infinite loop
+            # under MySQL InnoDB unless we keep track of already
+            # deleted objects.
+            Book.objects.filter(pagecount__lt=250).delete()
+            transaction.commit()
+            self.assertEquals(1, Book.objects.count())
+
+class DeleteCascadeTests(TestCase):
+    def test_generic_relation_cascade(self):
+        """
+        Test that Django cascades deletes through generic-related
+        objects to their reverse relations.
+
+        This might falsely succeed if the database cascades deletes
+        itself immediately; the postgresql_psycopg2 backend does not
+        give such a false success because ForeignKeys are created with
+        DEFERRABLE INITIALLY DEFERRED, so its internal cascade is
+        delayed until transaction commit.
+        """
+        person = Person.objects.create(name='Nelson Mandela')
+        award = Award.objects.create(name='Nobel', content_object=person)
+        note = AwardNote.objects.create(note='a peace prize',
+                                        award=award)
+        self.assertEquals(AwardNote.objects.count(), 1)
+        person.delete()
+        self.assertEquals(Award.objects.count(), 0)
+        # first two asserts are just sanity checks, this is the kicker:
+        self.assertEquals(AwardNote.objects.count(), 0)
+
+    def test_fk_to_m2m_through(self):
+        """
+        Test that if an M2M relationship has an explicitly-specified
+        through model, and some other model has an FK to that through
+        model, deletion is cascaded from one of the participants in
+        the M2M, to the through model, to its related model.
+
+        Like the above test, this could in theory falsely succeed if
+        the DB cascades deletes itself immediately.
+        """
+        juan = Child.objects.create(name='Juan')
+        paints = Toy.objects.create(name='Paints')
+        played = PlayedWith.objects.create(child=juan, toy=paints,
+                                           date=datetime.date.today())
+        note = PlayedWithNote.objects.create(played=played,
+                                             note='the next Jackson Pollock')
+        self.assertEquals(PlayedWithNote.objects.count(), 1)
+        paints.delete()
+        self.assertEquals(PlayedWith.objects.count(), 0)
+        # first two asserts just sanity checks, this is the kicker:
+        self.assertEquals(PlayedWithNote.objects.count(), 0)