import datetime
import pickle

import django
from django.db import models
from django.test import TestCase

from .models import Container, Event, Group, Happening, M2MModel, MyEvent
class PickleabilityTestCase(TestCase):
    """QuerySets, models, managers, and related classes are picklable."""

    @classmethod
    def setUpTestData(cls):
        Happening.objects.create()  # make sure the defaults are working (#20158)

    def assert_pickles(self, qs):
        """Assert that pickling and unpickling *qs* round-trips its results."""
        self.assertEqual(list(pickle.loads(pickle.dumps(qs))), list(qs))

    def test_related_field(self):
        g = Group.objects.create(name="Ponies Who Own Maybachs")
        self.assert_pickles(Event.objects.filter(group=g.id))

    def test_datetime_callable_default_all(self):
        self.assert_pickles(Happening.objects.all())

    def test_datetime_callable_default_filter(self):
        self.assert_pickles(Happening.objects.filter(when=datetime.datetime.now()))

    def test_string_as_default(self):
        self.assert_pickles(Happening.objects.filter(name="test"))

    def test_standalone_method_as_default(self):
        self.assert_pickles(Happening.objects.filter(number1=1))

    def test_staticmethod_as_default(self):
        self.assert_pickles(Happening.objects.filter(number2=1))

    def test_filter_reverse_fk(self):
        self.assert_pickles(Group.objects.filter(event=1))

    def test_doesnotexist_exception(self):
        # Ticket #17776
        original = Event.DoesNotExist("Doesn't exist")
        unpickled = pickle.loads(pickle.dumps(original))

        # Exceptions are not equal to equivalent instances of themselves, so
        # can't just use assertEqual(original, unpickled)
        self.assertEqual(original.__class__, unpickled.__class__)
        self.assertEqual(original.args, unpickled.args)

    def test_doesnotexist_class(self):
        klass = Event.DoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_multipleobjectsreturned_class(self):
        klass = Event.MultipleObjectsReturned
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_forward_relatedobjectdoesnotexist_class(self):
        # ForwardManyToOneDescriptor
        klass = Event.group.RelatedObjectDoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)
        # ForwardOneToOneDescriptor
        klass = Happening.event.RelatedObjectDoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_reverse_one_to_one_relatedobjectdoesnotexist_class(self):
        klass = Event.happening.RelatedObjectDoesNotExist
        self.assertIs(pickle.loads(pickle.dumps(klass)), klass)

    def test_manager_pickle(self):
        pickle.loads(pickle.dumps(Happening.objects))

    def test_model_pickle(self):
        """
        A model not defined on module level is picklable.
        """
        original = Container.SomeModel(pk=1)
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)
        # Also, deferred dynamic model works
        Container.SomeModel.objects.create(somefield=1)
        original = Container.SomeModel.objects.defer('somefield')[0]
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)
        self.assertEqual(original.somefield, reloaded.somefield)

    def test_model_pickle_m2m(self):
        """
        Test intentionally the automatically created through model.
        """
        m1 = M2MModel.objects.create()
        g1 = Group.objects.create(name='foof')
        m1.groups.add(g1)
        m2m_through = M2MModel._meta.get_field('groups').remote_field.through
        original = m2m_through.objects.get()
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)

    def test_model_pickle_dynamic(self):
        class Meta:
            proxy = True
        dynclass = type("DynamicEventSubclass", (Event,), {'Meta': Meta, '__module__': Event.__module__})
        original = dynclass(pk=1)
        dumped = pickle.dumps(original)
        reloaded = pickle.loads(dumped)
        self.assertEqual(original, reloaded)
        self.assertIs(reloaded.__class__, dynclass)

    def test_specialized_queryset(self):
        self.assert_pickles(Happening.objects.values('name'))
        self.assert_pickles(Happening.objects.values('name').dates('when', 'year'))
        # With related field (#14515)
        self.assert_pickles(
            Event.objects.select_related('group').order_by('title').values_list('title', 'group__name')
        )

    def test_pickle_prefetch_related_idempotence(self):
        g = Group.objects.create(name='foo')
        groups = Group.objects.prefetch_related('event_set')

        # First pickling
        groups = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups, [g])

        # Second pickling
        groups = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups, [g])

    def test_pickle_prefetch_queryset_usable_outside_of_prefetch(self):
        # Prefetch shouldn't affect the fetch-on-pickle behavior of the
        # queryset passed to it.
        Group.objects.create(name='foo')
        events = Event.objects.order_by('id')
        Group.objects.prefetch_related(models.Prefetch('event_set', queryset=events))
        with self.assertNumQueries(1):
            events2 = pickle.loads(pickle.dumps(events))
        with self.assertNumQueries(0):
            list(events2)

    def test_pickle_prefetch_queryset_still_usable(self):
        g = Group.objects.create(name='foo')
        groups = Group.objects.prefetch_related(
            models.Prefetch('event_set', queryset=Event.objects.order_by('id'))
        )
        groups2 = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups2.filter(id__gte=0), [g])

    def test_pickle_prefetch_queryset_not_evaluated(self):
        Group.objects.create(name='foo')
        groups = Group.objects.prefetch_related(
            models.Prefetch('event_set', queryset=Event.objects.order_by('id'))
        )
        list(groups)  # evaluate QuerySet
        with self.assertNumQueries(0):
            pickle.loads(pickle.dumps(groups))

    def test_pickle_prefetch_related_with_m2m_and_objects_deletion(self):
        """
        #24831 -- Cached properties on ManyToOneRel created in QuerySet.delete()
        caused subsequent QuerySet pickling to fail.
        """
        g = Group.objects.create(name='foo')
        m2m = M2MModel.objects.create()
        m2m.groups.add(g)
        Group.objects.all().delete()

        m2ms = M2MModel.objects.prefetch_related('groups')
        m2ms = pickle.loads(pickle.dumps(m2ms))
        self.assertSequenceEqual(m2ms, [m2m])

    def test_pickle_exists_queryset_still_usable(self):
        group = Group.objects.create(name='group')
        Event.objects.create(title='event', group=group)
        groups = Group.objects.annotate(
            has_event=models.Exists(
                Event.objects.filter(group_id=models.OuterRef('id')),
            ),
        )
        groups2 = pickle.loads(pickle.dumps(groups))
        self.assertSequenceEqual(groups2.filter(has_event=True), [group])

    def test_pickle_exists_queryset_not_evaluated(self):
        group = Group.objects.create(name='group')
        Event.objects.create(title='event', group=group)
        groups = Group.objects.annotate(
            has_event=models.Exists(
                Event.objects.filter(group_id=models.OuterRef('id')),
            ),
        )
        list(groups)  # evaluate QuerySet.
        with self.assertNumQueries(0):
            self.assert_pickles(groups)

    def test_pickle_exists_kwargs_queryset_not_evaluated(self):
        group = Group.objects.create(name='group')
        Event.objects.create(title='event', group=group)
        groups = Group.objects.annotate(
            has_event=models.Exists(
                queryset=Event.objects.filter(group_id=models.OuterRef('id')),
            ),
        )
        list(groups)  # evaluate QuerySet.
        with self.assertNumQueries(0):
            self.assert_pickles(groups)

    def test_pickle_subquery_queryset_not_evaluated(self):
        group = Group.objects.create(name='group')
        Event.objects.create(title='event', group=group)
        groups = Group.objects.annotate(
            event_title=models.Subquery(
                Event.objects.filter(group_id=models.OuterRef('id')).values('title'),
            ),
        )
        list(groups)  # evaluate QuerySet.
        with self.assertNumQueries(0):
            self.assert_pickles(groups)

    def test_annotation_with_callable_default(self):
        # Happening.when has a callable default of datetime.datetime.now.
        qs = Happening.objects.annotate(latest_time=models.Max('when'))
        self.assert_pickles(qs)

    def test_filter_deferred(self):
        qs = Happening.objects.all()
        qs._defer_next_filter = True
        qs = qs.filter(id=0)
        self.assert_pickles(qs)

    def test_missing_django_version_unpickling(self):
        """
        #21430 -- Verifies a warning is raised for querysets that are
        unpickled without a Django version
        """
        qs = Group.missing_django_version_objects.all()
        msg = "Pickled queryset instance's Django version is not specified."
        with self.assertRaisesMessage(RuntimeWarning, msg):
            pickle.loads(pickle.dumps(qs))

    def test_unsupported_unpickle(self):
        """
        #21430 -- Verifies a warning is raised for querysets that are
        unpickled with a different Django version than the current
        """
        qs = Group.previous_django_version_objects.all()
        msg = (
            "Pickled queryset instance's Django version 1.0 does not match "
            "the current version %s." % django.__version__
        )
        with self.assertRaisesMessage(RuntimeWarning, msg):
            pickle.loads(pickle.dumps(qs))

    def test_order_by_model_with_abstract_inheritance_and_meta_ordering(self):
        group = Group.objects.create(name='test')
        event = MyEvent.objects.create(title='test event', group=group)
        event.edition_set.create()
        self.assert_pickles(event.edition_set.order_by('event'))
|
class InLookupTests(TestCase):
    """Pickling querysets that use __in lookups with inner querysets."""

    @classmethod
    def setUpTestData(cls):
        for i in range(1, 3):
            group = Group.objects.create(name='Group {}'.format(i))
        cls.e1 = Event.objects.create(title='Event 1', group=group)

    def test_in_lookup_queryset_evaluation(self):
        """
        Neither pickling nor unpickling a QuerySet.query with an __in=inner_qs
        lookup should evaluate inner_qs.
        """
        events = Event.objects.filter(group__in=Group.objects.all())

        with self.assertNumQueries(0):
            dumped = pickle.dumps(events.query)

        with self.assertNumQueries(0):
            reloaded = pickle.loads(dumped)
            reloaded_events = Event.objects.none()
            reloaded_events.query = reloaded

        self.assertSequenceEqual(reloaded_events, [self.e1])

    def test_in_lookup_query_evaluation(self):
        events = Event.objects.filter(group__in=Group.objects.values('id').query)

        with self.assertNumQueries(0):
            dumped = pickle.dumps(events.query)

        with self.assertNumQueries(0):
            reloaded = pickle.loads(dumped)
            reloaded_events = Event.objects.none()
            reloaded_events.query = reloaded

        self.assertSequenceEqual(reloaded_events, [self.e1])