# Unittests for fixtures.
import json
import os
import re
from io import StringIO
from pathlib import Path

from django.core import management, serializers
from django.core.exceptions import ImproperlyConfigured
from django.core.serializers.base import DeserializationError
from django.db import IntegrityError, transaction
from django.db.models import signals
from django.test import (
    TestCase,
    TransactionTestCase,
    override_settings,
    skipIfDBFeature,
    skipUnlessDBFeature,
)

from .models import (
    Absolute,
    Animal,
    Article,
    Book,
    Child,
    Circle1,
    Circle2,
    Circle3,
    ExternalDependency,
    M2MCircular1ThroughAB,
    M2MCircular1ThroughBC,
    M2MCircular1ThroughCA,
    M2MCircular2ThroughAB,
    M2MComplexA,
    M2MComplexB,
    M2MComplexCircular1A,
    M2MComplexCircular1B,
    M2MComplexCircular1C,
    M2MComplexCircular2A,
    M2MComplexCircular2B,
    M2MSimpleA,
    M2MSimpleB,
    M2MSimpleCircularA,
    M2MSimpleCircularB,
    M2MThroughAB,
    NaturalKeyWithFKDependency,
    NKChild,
    Parent,
    Person,
    RefToNKChild,
    Store,
    Stuff,
    Thingy,
    Widget,
)

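# Directory containing this test module; used below to chdir in
# test_relative_path() and to build FIXTURE_DIRS paths for the
# fixtures_1/fixtures_2 test directories.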
_cur_dir = os.path.dirname(os.path.abspath(__file__))


class TestFixtures(TestCase):
    def animal_pre_save_check(self, signal, sender, instance, **kwargs):
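        # Record the value and Python type of count and weight as seen by the
        # pre_save signal, so test_field_value_coerce() can assert that the
        # deserializer, not the database layer, performed the type coercion.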
        self.pre_save_checks.append(
            (
                "Count = %s (%s)" % (instance.count, type(instance.count)),
                "Weight = %s (%s)" % (instance.weight, type(instance.weight)),
            )
        )

    def test_duplicate_pk(self):
        """
        This is a regression test for ticket #3790.
        """
        # Load a fixture that uses PK=1
        management.call_command(
            "loaddata",
            "sequence",
            verbosity=0,
        )

        # Create a new animal. Without a sequence reset, this new object
        # will take a PK of 1 (on Postgres), and the save will fail.

        animal = Animal(
            name="Platypus",
            latin_name="Ornithorhynchus anatinus",
            count=2,
            weight=2.2,
        )
        animal.save()
        self.assertGreater(animal.id, 1)

    def test_loaddata_not_found_fields_not_ignore(self):
        """
        Test for ticket #9279 -- Error is raised for entries in
        the serialized data for fields that have been removed
        from the database when not ignored.
        """
        with self.assertRaises(DeserializationError):
            management.call_command(
                "loaddata",
                "sequence_extra",
                verbosity=0,
            )

    def test_loaddata_not_found_fields_ignore(self):
        """
        Test for ticket #9279 -- Ignores entries in
        the serialized data for fields that have been removed
        from the database.
        """
        management.call_command(
            "loaddata",
            "sequence_extra",
            ignore=True,
            verbosity=0,
        )
        self.assertEqual(Animal.specimens.all()[0].name, "Lion")

    def test_loaddata_not_found_fields_ignore_xml(self):
        """
        Test for ticket #19998 -- Ignore entries in the XML serialized data
        for fields that have been removed from the model definition.
        """
        management.call_command(
            "loaddata",
            "sequence_extra_xml",
            ignore=True,
            verbosity=0,
        )
        self.assertEqual(Animal.specimens.all()[0].name, "Wolf")

    @skipIfDBFeature("interprets_empty_strings_as_nulls")
    def test_pretty_print_xml(self):
        """
        Regression test for ticket #4558 -- pretty printing of XML fixtures
        doesn't affect parsing of None values.
        """
        # Load a pretty-printed XML fixture with Nulls.
        management.call_command(
            "loaddata",
            "pretty.xml",
            verbosity=0,
        )
        self.assertIsNone(Stuff.objects.all()[0].name)
        self.assertIsNone(Stuff.objects.all()[0].owner)

    @skipUnlessDBFeature("interprets_empty_strings_as_nulls")
    def test_pretty_print_xml_empty_strings(self):
        """
        Regression test for ticket #4558 -- pretty printing of XML fixtures
        doesn't affect parsing of None values.
        """
        # Load a pretty-printed XML fixture with Nulls.
        management.call_command(
            "loaddata",
            "pretty.xml",
            verbosity=0,
        )
        self.assertEqual(Stuff.objects.all()[0].name, "")
        self.assertIsNone(Stuff.objects.all()[0].owner)

    def test_absolute_path(self):
        """
        Regression test for ticket #6436 --
        os.path.join will throw away the initial parts of a path if it
        encounters an absolute path.
        This means that if a fixture is specified as an absolute path,
        we need to make sure we don't discover the absolute path in every
        fixture directory.
        """
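        # For example, os.path.join("/some/fixture/dir", "/abs/absolute.json")
        # returns "/abs/absolute.json", so joining each fixture directory with
        # an absolute fixture path would resolve to the same file repeatedly.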
        load_absolute_path = os.path.join(
            os.path.dirname(__file__), "fixtures", "absolute.json"
        )
        management.call_command(
            "loaddata",
            load_absolute_path,
            verbosity=0,
        )
        self.assertEqual(Absolute.objects.count(), 1)

    def test_relative_path(self, path=["fixtures", "absolute.json"]):
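        # Temporarily chdir into this test directory so the relative fixture
        # path resolves from here. test_relative_path_in_fixture_dirs() below
        # reuses this helper with a path relative to a FIXTURE_DIRS entry.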
        relative_path = os.path.join(*path)
        cwd = os.getcwd()
        try:
            os.chdir(_cur_dir)
            management.call_command(
                "loaddata",
                relative_path,
                verbosity=0,
            )
        finally:
            os.chdir(cwd)
        self.assertEqual(Absolute.objects.count(), 1)

    @override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, "fixtures_1")])
    def test_relative_path_in_fixture_dirs(self):
        self.test_relative_path(path=["inner", "absolute.json"])

    def test_path_containing_dots(self):
        management.call_command(
            "loaddata",
            "path.containing.dots.json",
            verbosity=0,
        )
        self.assertEqual(Absolute.objects.count(), 1)

    def test_unknown_format(self):
        """
        Test for ticket #4371 -- Loading data of an unknown format should fail
        Validate that error conditions are caught correctly
        """
        msg = (
            "Problem installing fixture 'bad_fix.ture1': unkn is not a known "
            "serialization format."
        )
        with self.assertRaisesMessage(management.CommandError, msg):
            management.call_command(
                "loaddata",
                "bad_fix.ture1.unkn",
                verbosity=0,
            )

    @override_settings(SERIALIZATION_MODULES={"unkn": "unexistent.path"})
    def test_unimportable_serializer(self):
        """
        Failing serializer import raises the proper error
        """
        with self.assertRaisesMessage(ImportError, "No module named 'unexistent'"):
            management.call_command(
                "loaddata",
                "bad_fix.ture1.unkn",
                verbosity=0,
            )

    def test_invalid_data(self):
        """
        Test for ticket #4371 -- Loading a fixture file with invalid data
        using explicit filename.
        Test for ticket #18213 -- warning conditions are caught correctly
        """
        msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)"
        with self.assertWarnsMessage(RuntimeWarning, msg):
            management.call_command(
                "loaddata",
                "bad_fixture2.xml",
                verbosity=0,
            )

    def test_invalid_data_no_ext(self):
        """
        Test for ticket #4371 -- Loading a fixture file with invalid data
        without file extension.
        Test for ticket #18213 -- warning conditions are caught correctly
        """
        msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)"
        with self.assertWarnsMessage(RuntimeWarning, msg):
            management.call_command(
                "loaddata",
                "bad_fixture2",
                verbosity=0,
            )

    def test_empty(self):
        """
        Test for ticket #18213 -- Loading a fixture file with no data outputs
        a warning.
        Previously, an empty fixture raised an exception; see ticket #4371.
        """
        msg = "No fixture data found for 'empty'. (File format may be invalid.)"
        with self.assertWarnsMessage(RuntimeWarning, msg):
            management.call_command(
                "loaddata",
                "empty",
                verbosity=0,
            )

    def test_error_message(self):
        """
        Regression for #9011 - error message is correct.
        Change from error to warning for ticket #18213.
        """
        msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)"
        with self.assertWarnsMessage(RuntimeWarning, msg):
            management.call_command(
                "loaddata",
                "bad_fixture2",
                "animal",
                verbosity=0,
            )

    def test_pg_sequence_resetting_checks(self):
        """
        Test for ticket #7565 -- PostgreSQL sequence resetting checks shouldn't
        ascend to parent models when inheritance is used
        (since they are treated individually).
        """
        management.call_command(
            "loaddata",
            "model-inheritance.json",
            verbosity=0,
        )
        self.assertEqual(Parent.objects.all()[0].id, 1)
        self.assertEqual(Child.objects.all()[0].id, 1)

    def test_close_connection_after_loaddata(self):
        """
        Test for ticket #7572 -- MySQL has a problem if the same connection is
        used to create tables, load data, and then query over that data.
        To compensate, we close the connection after running loaddata.
        This ensures that a new connection is opened when test queries are
        issued.
        """
        management.call_command(
            "loaddata",
            "big-fixture.json",
            verbosity=0,
        )
        articles = Article.objects.exclude(id=9)
        self.assertEqual(
            list(articles.values_list("id", flat=True)), [1, 2, 3, 4, 5, 6, 7, 8]
        )
        # Just for good measure, run the same query again.
        # Under the influence of ticket #7572, this will
        # give a different result to the previous call.
        self.assertEqual(
            list(articles.values_list("id", flat=True)), [1, 2, 3, 4, 5, 6, 7, 8]
        )

    def test_field_value_coerce(self):
        """
        Test for tickets #8298, #9942 - Field values should be coerced into the
        correct type by the deserializer, not as part of the database write.
        """
        self.pre_save_checks = []
        signals.pre_save.connect(self.animal_pre_save_check)
        try:
            management.call_command(
                "loaddata",
                "animal.xml",
                verbosity=0,
            )
            self.assertEqual(
                self.pre_save_checks,
                [("Count = 42 (<class 'int'>)", "Weight = 1.2 (<class 'float'>)")],
            )
        finally:
            signals.pre_save.disconnect(self.animal_pre_save_check)

    def test_dumpdata_uses_default_manager(self):
        """
        Regression for #11286
        Dumpdata honors the default manager. Dump the current contents of
        the database as a JSON fixture
        """
        management.call_command(
            "loaddata",
            "animal.xml",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "sequence.json",
            verbosity=0,
        )
        animal = Animal(
            name="Platypus",
            latin_name="Ornithorhynchus anatinus",
            count=2,
            weight=2.2,
        )
        animal.save()

        out = StringIO()
        management.call_command(
            "dumpdata",
            "fixtures_regress.animal",
            format="json",
            stdout=out,
        )

        # Output order isn't guaranteed, so check for parts
        data = out.getvalue()

        # Get rid of artifacts like '000000002' to eliminate the differences
        # between different Python versions.
        data = re.sub("0{6,}[0-9]", "", data)

        animals_data = sorted(
            [
                {
                    "pk": 1,
                    "model": "fixtures_regress.animal",
                    "fields": {
                        "count": 3,
                        "weight": 1.2,
                        "name": "Lion",
                        "latin_name": "Panthera leo",
                    },
                },
                {
                    "pk": 10,
                    "model": "fixtures_regress.animal",
                    "fields": {
                        "count": 42,
                        "weight": 1.2,
                        "name": "Emu",
                        "latin_name": "Dromaius novaehollandiae",
                    },
                },
                {
                    "pk": animal.pk,
                    "model": "fixtures_regress.animal",
                    "fields": {
                        "count": 2,
                        "weight": 2.2,
                        "name": "Platypus",
                        "latin_name": "Ornithorhynchus anatinus",
                    },
                },
            ],
            key=lambda x: x["pk"],
        )

        data = sorted(json.loads(data), key=lambda x: x["pk"])

        self.maxDiff = 1024
        self.assertEqual(data, animals_data)

    def test_proxy_model_included(self):
        """
        Regression for #11428 - Proxy models aren't included when you dumpdata
        """
        out = StringIO()
        # Create an instance of the concrete class
        widget = Widget.objects.create(name="grommet")
        management.call_command(
            "dumpdata",
            "fixtures_regress.widget",
            "fixtures_regress.widgetproxy",
            format="json",
            stdout=out,
        )
        self.assertJSONEqual(
            out.getvalue(),
            '[{"pk": %d, "model": "fixtures_regress.widget", '
            '"fields": {"name": "grommet"}}]' % widget.pk,
        )

    @skipUnlessDBFeature("supports_forward_references")
    def test_loaddata_works_when_fixture_has_forward_refs(self):
        """
        Forward references cause fixtures not to load in MySQL (InnoDB).
        """
        management.call_command(
            "loaddata",
            "forward_ref.json",
            verbosity=0,
        )
        self.assertEqual(Book.objects.all()[0].id, 1)
        self.assertEqual(Person.objects.all()[0].id, 4)

    def test_loaddata_raises_error_when_fixture_has_invalid_foreign_key(self):
        """
        Data with nonexistent child key references raises error.
        """
        with self.assertRaisesMessage(IntegrityError, "Problem installing fixture"):
            management.call_command(
                "loaddata",
                "forward_ref_bad_data.json",
                verbosity=0,
            )

    @skipUnlessDBFeature("supports_forward_references")
    @override_settings(
        FIXTURE_DIRS=[
            os.path.join(_cur_dir, "fixtures_1"),
            os.path.join(_cur_dir, "fixtures_2"),
        ]
    )
    def test_loaddata_forward_refs_split_fixtures(self):
        """
        Regression for #17530 - should be able to cope with forward references
        when the fixtures are not in the same files or directories.
        """
        management.call_command(
            "loaddata",
            "forward_ref_1.json",
            "forward_ref_2.json",
            verbosity=0,
        )
        self.assertEqual(Book.objects.all()[0].id, 1)
        self.assertEqual(Person.objects.all()[0].id, 4)

    def test_loaddata_no_fixture_specified(self):
        """
        An error is quickly reported when no fixtures are provided on the
        command line.
        """
        msg = (
            "No database fixture specified. Please provide the path of at least one "
            "fixture in the command line."
        )
        with self.assertRaisesMessage(management.CommandError, msg):
            management.call_command(
                "loaddata",
                verbosity=0,
            )

    def test_ticket_20820(self):
        """
        Regression for ticket #20820 -- loaddata on a model that inherits
        from a model with a M2M shouldn't blow up.
        """
        management.call_command(
            "loaddata",
            "special-article.json",
            verbosity=0,
        )

    def test_ticket_22421(self):
        """
        Regression for ticket #22421 -- loaddata on a model that inherits from
        a grand-parent model with a M2M but via an abstract parent shouldn't
        blow up.
        """
        management.call_command(
            "loaddata",
            "feature.json",
            verbosity=0,
        )

    def test_loaddata_with_m2m_to_self(self):
        """
        Regression test for ticket #17946.
        """
        management.call_command(
            "loaddata",
            "m2mtoself.json",
            verbosity=0,
        )

    @override_settings(
        FIXTURE_DIRS=[
            os.path.join(_cur_dir, "fixtures_1"),
            os.path.join(_cur_dir, "fixtures_1"),
        ]
    )
    def test_fixture_dirs_with_duplicates(self):
        """
        settings.FIXTURE_DIRS cannot contain duplicates in order to avoid
        repeated fixture loading.
        """
        with self.assertRaisesMessage(
            ImproperlyConfigured, "settings.FIXTURE_DIRS contains duplicates."
        ):
            management.call_command("loaddata", "absolute.json", verbosity=0)

    @override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, "fixtures")])
    def test_fixture_dirs_with_default_fixture_path(self):
        """
        settings.FIXTURE_DIRS cannot contain a default fixtures directory
        for an application (app/fixtures) in order to avoid repeated fixture
        loading.
        """
        msg = (
            "'%s' is a default fixture directory for the '%s' app "
            "and cannot be listed in settings.FIXTURE_DIRS."
            % (os.path.join(_cur_dir, "fixtures"), "fixtures_regress")
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            management.call_command("loaddata", "absolute.json", verbosity=0)

    @override_settings(
        FIXTURE_DIRS=[
            os.path.join(_cur_dir, "fixtures_1"),
            os.path.join(_cur_dir, "fixtures_2"),
        ]
    )
    def test_loaddata_with_valid_fixture_dirs(self):
        management.call_command(
            "loaddata",
            "absolute.json",
            verbosity=0,
        )

    @override_settings(FIXTURE_DIRS=[Path(_cur_dir) / "fixtures_1"])
    def test_fixtures_dir_pathlib(self):
        management.call_command("loaddata", "inner/absolute.json", verbosity=0)
        self.assertQuerysetEqual(Absolute.objects.all(), [1], transform=lambda o: o.pk)


class NaturalKeyFixtureTests(TestCase):
    def test_nk_deserialize(self):
        """
        Test for ticket #13030 - Python based parser version
        natural keys deserialize with fk to inheriting model
        """
        management.call_command(
            "loaddata",
            "model-inheritance.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "nk-inheritance.json",
            verbosity=0,
        )
        self.assertEqual(NKChild.objects.get(pk=1).data, "apple")

        self.assertEqual(RefToNKChild.objects.get(pk=1).nk_fk.data, "apple")

    def test_nk_deserialize_xml(self):
        """
        Test for ticket #13030 - XML version
        natural keys deserialize with fk to inheriting model
        """
        management.call_command(
            "loaddata",
            "model-inheritance.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "nk-inheritance.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "nk-inheritance2.xml",
            verbosity=0,
        )
        self.assertEqual(NKChild.objects.get(pk=2).data, "banana")
        self.assertEqual(RefToNKChild.objects.get(pk=2).nk_fk.data, "apple")

    def test_nk_on_serialize(self):
        """
        Natural key requirements are taken into account when serializing models.
        """
        management.call_command(
            "loaddata",
            "forward_ref_lookup.json",
            verbosity=0,
        )

        out = StringIO()
        management.call_command(
            "dumpdata",
            "fixtures_regress.book",
            "fixtures_regress.person",
            "fixtures_regress.store",
            verbosity=0,
            format="json",
            use_natural_foreign_keys=True,
            use_natural_primary_keys=True,
            stdout=out,
        )
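        # With natural primary keys, the Store and Person entries are emitted
        # without a "pk"; Book keeps its numeric pk but refers to its author
        # and stores by natural key.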
        self.assertJSONEqual(
            out.getvalue(),
            """
            [{"fields": {"main": null, "name": "Amazon"},
            "model": "fixtures_regress.store"},
            {"fields": {"main": null, "name": "Borders"},
            "model": "fixtures_regress.store"},
            {"fields": {"name": "Neal Stephenson"}, "model": "fixtures_regress.person"},
            {"pk": 1, "model": "fixtures_regress.book",
            "fields": {"stores": [["Amazon"], ["Borders"]],
            "name": "Cryptonomicon", "author": ["Neal Stephenson"]}}]
            """,
        )

    def test_dependency_sorting(self):
        """
        It doesn't matter what order you mention the models, Store *must* be
        serialized before Person, and both must be serialized before Book.
        """
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Book, Person, Store])]
        )
        self.assertEqual(sorted_deps, [Store, Person, Book])

    def test_dependency_sorting_2(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Book, Store, Person])]
        )
        self.assertEqual(sorted_deps, [Store, Person, Book])

    def test_dependency_sorting_3(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Store, Book, Person])]
        )
        self.assertEqual(sorted_deps, [Store, Person, Book])

    def test_dependency_sorting_4(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Store, Person, Book])]
        )
        self.assertEqual(sorted_deps, [Store, Person, Book])

    def test_dependency_sorting_5(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Person, Book, Store])]
        )
        self.assertEqual(sorted_deps, [Store, Person, Book])

    def test_dependency_sorting_6(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Person, Store, Book])]
        )
        self.assertEqual(sorted_deps, [Store, Person, Book])

    def test_dependency_sorting_dangling(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Person, Circle1, Store, Book])]
        )
        self.assertEqual(sorted_deps, [Circle1, Store, Person, Book])

    def test_dependency_sorting_tight_circular(self):
        with self.assertRaisesMessage(
            RuntimeError,
            "Can't resolve dependencies for fixtures_regress.Circle1, "
            "fixtures_regress.Circle2 in serialized app list.",
        ):
            serializers.sort_dependencies(
                [("fixtures_regress", [Person, Circle2, Circle1, Store, Book])]
            )

    def test_dependency_sorting_tight_circular_2(self):
        with self.assertRaisesMessage(
            RuntimeError,
            "Can't resolve dependencies for fixtures_regress.Circle1, "
            "fixtures_regress.Circle2 in serialized app list.",
        ):
            serializers.sort_dependencies(
                [("fixtures_regress", [Circle1, Book, Circle2])]
            )

    def test_dependency_self_referential(self):
        with self.assertRaisesMessage(
            RuntimeError,
            "Can't resolve dependencies for fixtures_regress.Circle3 in "
            "serialized app list.",
        ):
            serializers.sort_dependencies([("fixtures_regress", [Book, Circle3])])

    def test_dependency_sorting_long(self):
        with self.assertRaisesMessage(
            RuntimeError,
            "Can't resolve dependencies for fixtures_regress.Circle1, "
            "fixtures_regress.Circle2, fixtures_regress.Circle3 in serialized "
            "app list.",
        ):
            serializers.sort_dependencies(
                [("fixtures_regress", [Person, Circle2, Circle1, Circle3, Store, Book])]
            )

    def test_dependency_sorting_normal(self):
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [Person, ExternalDependency, Book])]
        )
        self.assertEqual(sorted_deps, [Person, Book, ExternalDependency])

    def test_normal_pk(self):
        """
        Normal primary keys work on a model with natural key capabilities.
        """
        management.call_command(
            "loaddata",
            "non_natural_1.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "forward_ref_lookup.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "non_natural_2.xml",
            verbosity=0,
        )
        books = Book.objects.all()
        self.assertQuerysetEqual(
            books,
            [
                "<Book: Cryptonomicon by Neal Stephenson (available at Amazon, "
                "Borders)>",
                "<Book: Ender's Game by Orson Scott Card (available at Collins "
                "Bookstore)>",
                "<Book: Permutation City by Greg Egan (available at Angus and "
                "Robertson)>",
            ],
            transform=repr,
        )


class NaturalKeyFixtureOnOtherDatabaseTests(TestCase):
    databases = {"other"}

    def test_natural_key_dependencies(self):
        """
        Natural keys with foreign keys in dependencies work in a multiple
        database setup.
        """
        management.call_command(
            "loaddata",
            "nk_with_foreign_key.json",
            database="other",
            verbosity=0,
        )
        obj = NaturalKeyWithFKDependency.objects.using("other").get()
        self.assertEqual(obj.name, "The Lord of the Rings")
        self.assertEqual(obj.author.name, "J.R.R. Tolkien")


class M2MNaturalKeyFixtureTests(TestCase):
    """Tests for ticket #14426."""

    def test_dependency_sorting_m2m_simple(self):
        """
        M2M relations without explicit through models SHOULD count as dependencies

        Regression test for bugs that could be caused by flawed fixes to
        #14226, namely if M2M checks are removed from sort_dependencies
        altogether.
        """
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [M2MSimpleA, M2MSimpleB])]
        )
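        # M2MSimpleA's plain M2M to M2MSimpleB counts as a dependency, so
        # M2MSimpleB must be serialized first.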
        self.assertEqual(sorted_deps, [M2MSimpleB, M2MSimpleA])

    def test_dependency_sorting_m2m_simple_circular(self):
        """
        Resolving circular M2M relations without explicit through models should
        fail loudly
        """
        with self.assertRaisesMessage(
            RuntimeError,
            "Can't resolve dependencies for fixtures_regress.M2MSimpleCircularA, "
            "fixtures_regress.M2MSimpleCircularB in serialized app list.",
        ):
            serializers.sort_dependencies(
                [("fixtures_regress", [M2MSimpleCircularA, M2MSimpleCircularB])]
            )

    def test_dependency_sorting_m2m_complex(self):
        """
        M2M relations with explicit through models should NOT count as
        dependencies. The through model itself will have dependencies, though.
        """
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [M2MComplexA, M2MComplexB, M2MThroughAB])]
        )
        # Order between M2MComplexA and M2MComplexB doesn't matter. The through
        # model has dependencies to them though, so it should come last.
        self.assertEqual(sorted_deps[-1], M2MThroughAB)

    def test_dependency_sorting_m2m_complex_circular_1(self):
        """
        Circular M2M relations with explicit through models should be serializable
        """
        A, B, C, AtoB, BtoC, CtoA = (
            M2MComplexCircular1A,
            M2MComplexCircular1B,
            M2MComplexCircular1C,
            M2MCircular1ThroughAB,
            M2MCircular1ThroughBC,
            M2MCircular1ThroughCA,
        )
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [A, B, C, AtoB, BtoC, CtoA])]
        )
        # The dependency sorting should not result in an error, and the
        # through model should have dependencies to the other models and as
        # such come last in the list.
        self.assertEqual(sorted_deps[:3], [A, B, C])
        self.assertEqual(sorted_deps[3:], [AtoB, BtoC, CtoA])

    def test_dependency_sorting_m2m_complex_circular_2(self):
        """
        Circular M2M relations with explicit through models should be serializable
        This tests the circularity with explicit natural_key.dependencies
        """
        sorted_deps = serializers.sort_dependencies(
            [
                (
                    "fixtures_regress",
                    [M2MComplexCircular2A, M2MComplexCircular2B, M2MCircular2ThroughAB],
                )
            ]
        )
        self.assertEqual(sorted_deps[:2], [M2MComplexCircular2A, M2MComplexCircular2B])
        self.assertEqual(sorted_deps[2:], [M2MCircular2ThroughAB])

    def test_dump_and_load_m2m_simple(self):
        """
        Test serializing and deserializing back models with simple M2M relations
        """
        a = M2MSimpleA.objects.create(data="a")
        b1 = M2MSimpleB.objects.create(data="b1")
        b2 = M2MSimpleB.objects.create(data="b2")
        a.b_set.add(b1)
        a.b_set.add(b2)

        out = StringIO()
        management.call_command(
            "dumpdata",
            "fixtures_regress.M2MSimpleA",
            "fixtures_regress.M2MSimpleB",
            use_natural_foreign_keys=True,
            stdout=out,
        )

        for model in [M2MSimpleA, M2MSimpleB]:
            model.objects.all().delete()

        objects = serializers.deserialize("json", out.getvalue())
        for obj in objects:
            obj.save()

        new_a = M2MSimpleA.objects.get_by_natural_key("a")
        self.assertCountEqual(new_a.b_set.all(), [b1, b2])


class TestTicket11101(TransactionTestCase):

    available_apps = ["fixtures_regress"]

    @skipUnlessDBFeature("supports_transactions")
    def test_ticket_11101(self):
        """Fixtures can be rolled back (ticket #11101)."""
        with transaction.atomic():
            management.call_command(
                "loaddata",
                "thingy.json",
                verbosity=0,
            )
            self.assertEqual(Thingy.objects.count(), 1)
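            # Mark the atomic block for rollback; on exit, the objects loaded
            # from the fixture should be gone again.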
            transaction.set_rollback(True)
        self.assertEqual(Thingy.objects.count(), 0)


class TestLoadFixtureFromOtherAppDirectory(TestCase):
    """
    #23612 -- fixtures path should be normalized to allow referencing relative
    paths on Windows.
    """

    current_dir = os.path.abspath(os.path.dirname(__file__))
    # relative_prefix is something like tests/fixtures_regress or
    # fixtures_regress depending on how runtests.py is invoked.
    # All path separators must be / in order to be a proper regression test on
    # Windows, so replace as appropriate.
    relative_prefix = os.path.relpath(current_dir, os.getcwd()).replace("\\", "/")
    fixtures = [relative_prefix + "/fixtures/absolute.json"]

    def test_fixtures_loaded(self):
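        # absolute.json is loaded automatically from the `fixtures` class
        # attribute above; this only checks that the relative, normalized path
        # was found and the data made it into the database.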
        count = Absolute.objects.count()
        self.assertGreater(count, 0, "Fixtures not loaded properly.")