import datetime
import pickle
from io import StringIO
from operator import attrgetter
from unittest.mock import Mock

from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core import management
from django.db import DEFAULT_DB_ALIAS, connections, router, transaction
from django.db.models import signals
from django.db.utils import ConnectionRouter
from django.test import SimpleTestCase, TestCase, override_settings

from .models import Book, Person, Pet, Review, UserProfile
from .routers import AuthRouter, TestRouter, WriteRouter


class QueryTestCase(TestCase):
    databases = {'default', 'other'}
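    # The ``databases`` attribute opts this TestCase into both aliases;
    # Django's test framework blocks queries against databases not listed here.
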
    def test_db_selection(self):
        "Querysets will use the default database by default"
        self.assertEqual(Book.objects.db, DEFAULT_DB_ALIAS)
        self.assertEqual(Book.objects.all().db, DEFAULT_DB_ALIAS)

        self.assertEqual(Book.objects.using('other').db, 'other')

        self.assertEqual(Book.objects.db_manager('other').db, 'other')
        self.assertEqual(Book.objects.db_manager('other').all().db, 'other')

    def test_default_creation(self):
        "Objects created on the default database don't leak onto other databases"
        # Create a book on the default database using create()
        Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))

        # Create a book on the default database using a save
        dive = Book()
        dive.title = "Dive into Python"
        dive.published = datetime.date(2009, 5, 4)
        dive.save()

        # Book exists on the default database, but not on the other database
        try:
            Book.objects.get(title="Pro Django")
            Book.objects.using('default').get(title="Pro Django")
        except Book.DoesNotExist:
            self.fail('"Pro Django" should exist on default database')

        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('other').get(title="Pro Django")

        try:
            Book.objects.get(title="Dive into Python")
            Book.objects.using('default').get(title="Dive into Python")
        except Book.DoesNotExist:
            self.fail('"Dive into Python" should exist on default database')

        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('other').get(title="Dive into Python")

    def test_other_creation(self):
        "Objects created on another database don't leak onto the default database"
        # Create a book on the second database
        Book.objects.using('other').create(title="Pro Django",
                                           published=datetime.date(2008, 12, 16))

        # Create a book on the second database using a save
        dive = Book()
        dive.title = "Dive into Python"
        dive.published = datetime.date(2009, 5, 4)
        dive.save(using='other')

        # Book exists on the other database, but not on the default database
        try:
            Book.objects.using('other').get(title="Pro Django")
        except Book.DoesNotExist:
            self.fail('"Pro Django" should exist on other database')

        with self.assertRaises(Book.DoesNotExist):
            Book.objects.get(title="Pro Django")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('default').get(title="Pro Django")

        try:
            Book.objects.using('other').get(title="Dive into Python")
        except Book.DoesNotExist:
            self.fail('"Dive into Python" should exist on other database')

        with self.assertRaises(Book.DoesNotExist):
            Book.objects.get(title="Dive into Python")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('default').get(title="Dive into Python")

    def test_refresh(self):
        dive = Book(title="Dive into Python", published=datetime.date(2009, 5, 4))
        dive.save(using='other')
        dive2 = Book.objects.using('other').get()
        dive2.title = "Dive into Python (on default)"
        dive2.save(using='default')
        dive.refresh_from_db()
        self.assertEqual(dive.title, "Dive into Python")
        dive.refresh_from_db(using='default')
        self.assertEqual(dive.title, "Dive into Python (on default)")
        self.assertEqual(dive._state.db, "default")

    def test_refresh_router_instance_hint(self):
        router = Mock()
        router.db_for_read.return_value = None
        book = Book.objects.create(title='Dive Into Python', published=datetime.date(1957, 10, 12))
        with self.settings(DATABASE_ROUTERS=[router]):
            book.refresh_from_db()
        router.db_for_read.assert_called_once_with(Book, instance=book)
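        # The instance hint allows a router to pin the read to the instance's
        # own database; a hypothetical router could do, for example:
        #   def db_for_read(self, model, **hints):
        #       instance = hints.get('instance')
        #       return instance._state.db if instance is not None else None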
    def test_basic_queries(self):
        "Queries are constrained to a single database"
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))

        dive = Book.objects.using('other').get(published=datetime.date(2009, 5, 4))
        self.assertEqual(dive.title, "Dive into Python")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('default').get(published=datetime.date(2009, 5, 4))

        dive = Book.objects.using('other').get(title__icontains="dive")
        self.assertEqual(dive.title, "Dive into Python")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('default').get(title__icontains="dive")

        dive = Book.objects.using('other').get(title__iexact="dive INTO python")
        self.assertEqual(dive.title, "Dive into Python")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('default').get(title__iexact="dive INTO python")

        dive = Book.objects.using('other').get(published__year=2009)
        self.assertEqual(dive.title, "Dive into Python")
        self.assertEqual(dive.published, datetime.date(2009, 5, 4))
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using('default').get(published__year=2009)

        years = Book.objects.using('other').dates('published', 'year')
        self.assertEqual([o.year for o in years], [2009])
        years = Book.objects.using('default').dates('published', 'year')
        self.assertEqual([o.year for o in years], [])

        months = Book.objects.using('other').dates('published', 'month')
        self.assertEqual([o.month for o in months], [5])
        months = Book.objects.using('default').dates('published', 'month')
        self.assertEqual([o.month for o in months], [])

    def test_m2m_separation(self):
        "M2M fields are constrained to a single database"
        # Create a book and author on the default database
        pro = Book.objects.create(title="Pro Django",
                                  published=datetime.date(2008, 12, 16))

        marty = Person.objects.create(name="Marty Alchin")

        # Create a book and author on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))

        mark = Person.objects.using('other').create(name="Mark Pilgrim")

        # Save the author relations
        pro.authors.set([marty])
        dive.authors.set([mark])

        # Inspect the m2m tables directly.
        # There should be 1 entry in each database
        self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
        self.assertEqual(Book.authors.through.objects.using('other').count(), 1)

        # Queries work across m2m joins
        self.assertEqual(
            list(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
            ['Pro Django']
        )
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
            []
        )

        self.assertEqual(
            list(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
            []
        )
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
            ['Dive into Python']
        )

        # Reget the objects to clear caches
        dive = Book.objects.using('other').get(title="Dive into Python")
        mark = Person.objects.using('other').get(name="Mark Pilgrim")

        # Retrieve related object by descriptor. Related objects should be database-bound
        self.assertEqual(list(dive.authors.all().values_list('name', flat=True)), ['Mark Pilgrim'])

        self.assertEqual(list(mark.book_set.all().values_list('title', flat=True)), ['Dive into Python'])

    def test_m2m_forward_operations(self):
        "M2M forward manipulations are all constrained to a single DB"
        # Create a book and author on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        mark = Person.objects.using('other').create(name="Mark Pilgrim")

        # Save the author relations
        dive.authors.set([mark])

        # Add a second author
        john = Person.objects.using('other').create(name="John Smith")
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
            []
        )

        dive.authors.add(john)
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
            ['Dive into Python']
        )
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
            ['Dive into Python']
        )

        # Remove the second author
        dive.authors.remove(john)
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
            ['Dive into Python']
        )
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
            []
        )

        # Clear all authors
        dive.authors.clear()
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
            []
        )
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
            []
        )

        # Create an author through the m2m interface
        dive.authors.create(name='Jane Brown')
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
            []
        )
        self.assertEqual(
            list(Book.objects.using('other').filter(authors__name='Jane Brown').values_list('title', flat=True)),
            ['Dive into Python']
        )

    def test_m2m_reverse_operations(self):
        "M2M reverse manipulations are all constrained to a single DB"
        # Create a book and author on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        mark = Person.objects.using('other').create(name="Mark Pilgrim")

        # Save the author relations
        dive.authors.set([mark])

        # Create a second book on the other database
        grease = Book.objects.using('other').create(title="Greasemonkey Hacks", published=datetime.date(2005, 11, 1))

        # Add a book to the m2m
        mark.book_set.add(grease)
        self.assertEqual(
            list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
            ['Mark Pilgrim']
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
            ),
            ['Mark Pilgrim']
        )

        # Remove a book from the m2m
        mark.book_set.remove(grease)
        self.assertEqual(
            list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
            ['Mark Pilgrim']
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
            ),
            []
        )

        # Clear the books associated with mark
        mark.book_set.clear()
        self.assertEqual(
            list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
            []
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)
            ),
            []
        )

        # Create a book through the m2m interface
        mark.book_set.create(title="Dive into HTML5", published=datetime.date(2020, 1, 1))
        self.assertEqual(
            list(Person.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
            []
        )
        self.assertEqual(
            list(Person.objects.using('other').filter(book__title='Dive into HTML5').values_list('name', flat=True)),
            ['Mark Pilgrim']
        )

    def test_m2m_cross_database_protection(self):
        "Operations that involve sharing M2M objects across databases raise an error"
        # Create a book and author on the default database
        pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))

        marty = Person.objects.create(name="Marty Alchin")

        # Create a book and author on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))

        mark = Person.objects.using('other').create(name="Mark Pilgrim")
        # Set a foreign key set with an object from a different database
        msg = (
            'Cannot assign "<Person: Marty Alchin>": the current database '
            'router prevents this relation.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='default'):
                marty.edited.set([pro, dive])

        # Add to an m2m with an object from a different database
        msg = (
            'Cannot add "<Book: Dive into Python>": instance is on '
            'database "default", value is on database "other"'
        )
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='default'):
                marty.book_set.add(dive)

        # Set an m2m with an object from a different database
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='default'):
                marty.book_set.set([pro, dive])

        # Add to a reverse m2m with an object from a different database
        msg = (
            'Cannot add "<Person: Marty Alchin>": instance is on '
            'database "other", value is on database "default"'
        )
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='other'):
                dive.authors.add(marty)

        # Set a reverse m2m with an object from a different database
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='other'):
                dive.authors.set([mark, marty])

    def test_m2m_deletion(self):
        "Cascaded deletions of m2m relations issue queries on the right database"
        # Create a book and author on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        mark = Person.objects.using('other').create(name="Mark Pilgrim")
        dive.authors.set([mark])

        # Check the initial state
        self.assertEqual(Person.objects.using('default').count(), 0)
        self.assertEqual(Book.objects.using('default').count(), 0)
        self.assertEqual(Book.authors.through.objects.using('default').count(), 0)

        self.assertEqual(Person.objects.using('other').count(), 1)
        self.assertEqual(Book.objects.using('other').count(), 1)
        self.assertEqual(Book.authors.through.objects.using('other').count(), 1)

        # Delete the object on the other database
        dive.delete(using='other')

        self.assertEqual(Person.objects.using('default').count(), 0)
        self.assertEqual(Book.objects.using('default').count(), 0)
        self.assertEqual(Book.authors.through.objects.using('default').count(), 0)

        # The person still exists ...
        self.assertEqual(Person.objects.using('other').count(), 1)
        # ... but the book has been deleted
        self.assertEqual(Book.objects.using('other').count(), 0)
        # ... and the relationship object has also been deleted.
        self.assertEqual(Book.authors.through.objects.using('other').count(), 0)

        # Now try deletion in the reverse direction. Set up the relation again
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        dive.authors.set([mark])

        # Check the initial state
        self.assertEqual(Person.objects.using('default').count(), 0)
        self.assertEqual(Book.objects.using('default').count(), 0)
        self.assertEqual(Book.authors.through.objects.using('default').count(), 0)

        self.assertEqual(Person.objects.using('other').count(), 1)
        self.assertEqual(Book.objects.using('other').count(), 1)
        self.assertEqual(Book.authors.through.objects.using('other').count(), 1)

        # Delete the object on the other database
        mark.delete(using='other')

        self.assertEqual(Person.objects.using('default').count(), 0)
        self.assertEqual(Book.objects.using('default').count(), 0)
        self.assertEqual(Book.authors.through.objects.using('default').count(), 0)

        # The person has been deleted ...
        self.assertEqual(Person.objects.using('other').count(), 0)
        # ... but the book still exists
        self.assertEqual(Book.objects.using('other').count(), 1)
        # ... and the relationship object has been deleted.
        self.assertEqual(Book.authors.through.objects.using('other').count(), 0)

    def test_foreign_key_separation(self):
        "FK fields are constrained to a single database"
        # Create a book and author on the default database
        pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))

        george = Person.objects.create(name="George Vilches")

        # Create a book and author on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        chris = Person.objects.using('other').create(name="Chris Mills")

        # Save the editor relations
        pro.editor = george
        pro.save()

        dive.editor = chris
        dive.save()

        pro = Book.objects.using('default').get(title="Pro Django")
        self.assertEqual(pro.editor.name, "George Vilches")

        dive = Book.objects.using('other').get(title="Dive into Python")
        self.assertEqual(dive.editor.name, "Chris Mills")

        # Queries work across foreign key joins
        self.assertEqual(
            list(Person.objects.using('default').filter(edited__title='Pro Django').values_list('name', flat=True)),
            ['George Vilches']
        )
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Pro Django').values_list('name', flat=True)),
            []
        )

        self.assertEqual(
            list(
                Person.objects.using('default').filter(edited__title='Dive into Python').values_list('name', flat=True)
            ),
            []
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
            ),
            ['Chris Mills']
        )

        # Reget the objects to clear caches
        chris = Person.objects.using('other').get(name="Chris Mills")
        dive = Book.objects.using('other').get(title="Dive into Python")

        # Retrieve related object by descriptor. Related objects should be database-bound
        self.assertEqual(list(chris.edited.values_list('title', flat=True)), ['Dive into Python'])

    def test_foreign_key_reverse_operations(self):
        "FK reverse manipulations are all constrained to a single DB"
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        chris = Person.objects.using('other').create(name="Chris Mills")

        # Save the editor relation
        dive.editor = chris
        dive.save()

        # Add a second book edited by chris
        html5 = Book.objects.using('other').create(title="Dive into HTML5", published=datetime.date(2010, 3, 15))
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
            []
        )

        chris.edited.add(html5)
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
            ['Chris Mills']
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
            ),
            ['Chris Mills']
        )

        # Remove the second edited book
        chris.edited.remove(html5)
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
            []
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
            ),
            ['Chris Mills']
        )

        # Clear all edited books
        chris.edited.clear()
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
            []
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
            ),
            []
        )

        # Create a book through the reverse FK interface
        chris.edited.create(title='Dive into Water', published=datetime.date(2010, 3, 15))
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Dive into HTML5').values_list('name', flat=True)),
            []
        )
        self.assertEqual(
            list(Person.objects.using('other').filter(edited__title='Dive into Water').values_list('name', flat=True)),
            ['Chris Mills']
        )
        self.assertEqual(
            list(
                Person.objects.using('other').filter(edited__title='Dive into Python').values_list('name', flat=True)
            ),
            []
        )

    def test_foreign_key_cross_database_protection(self):
        "Operations that involve sharing FK objects across databases raise an error"
        # Create a book and author on the default database
        pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
        marty = Person.objects.create(name="Marty Alchin")

        # Create a book on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))

        # Set a foreign key with an object from a different database
        msg = (
            'Cannot assign "<Person: Marty Alchin>": the current database '
            'router prevents this relation.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            dive.editor = marty

        # Set a foreign key set with an object from a different database
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='default'):
                marty.edited.set([pro, dive])

        # Add to a foreign key set with an object from a different database
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='default'):
                marty.edited.add(dive)

    def test_foreign_key_deletion(self):
        "Cascaded deletions of Foreign Key relations issue queries on the right database"
        mark = Person.objects.using('other').create(name="Mark Pilgrim")
        Pet.objects.using('other').create(name="Fido", owner=mark)

        # Check the initial state
        self.assertEqual(Person.objects.using('default').count(), 0)
        self.assertEqual(Pet.objects.using('default').count(), 0)

        self.assertEqual(Person.objects.using('other').count(), 1)
        self.assertEqual(Pet.objects.using('other').count(), 1)

        # Delete the person object, which will cascade onto the pet
        mark.delete(using='other')

        self.assertEqual(Person.objects.using('default').count(), 0)
        self.assertEqual(Pet.objects.using('default').count(), 0)

        # Both the pet and the person have been deleted from the right database
        self.assertEqual(Person.objects.using('other').count(), 0)
        self.assertEqual(Pet.objects.using('other').count(), 0)

    def test_foreign_key_validation(self):
        "ForeignKey.validate() uses the correct database"
        mickey = Person.objects.using('other').create(name="Mickey")
        pluto = Pet.objects.using('other').create(name="Pluto", owner=mickey)
        self.assertIsNone(pluto.full_clean())

    # Any router that accesses `model` in db_for_read() works here.
    @override_settings(DATABASE_ROUTERS=[AuthRouter()])
    def test_foreign_key_validation_with_router(self):
        """
        ForeignKey.validate() passes `model` to db_for_read() even if
        model_instance=None.
        """
        mickey = Person.objects.create(name="Mickey")
        owner_field = Pet._meta.get_field('owner')
        self.assertEqual(owner_field.clean(mickey.pk, None), mickey.pk)

    def test_o2o_separation(self):
        "OneToOne fields are constrained to a single database"
        # Create a user and profile on the default database
        alice = User.objects.db_manager('default').create_user('alice', 'alice@example.com')
        alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')

        # Create a user and profile on the other database
        bob = User.objects.db_manager('other').create_user('bob', 'bob@example.com')
        bob_profile = UserProfile.objects.using('other').create(user=bob, flavor='crunchy frog')

        # Retrieve related objects; queries should be database constrained
        alice = User.objects.using('default').get(username="alice")
        self.assertEqual(alice.userprofile.flavor, "chocolate")

        bob = User.objects.using('other').get(username="bob")
        self.assertEqual(bob.userprofile.flavor, "crunchy frog")

        # Queries work across joins
        self.assertEqual(
            list(
                User.objects.using('default')
                .filter(userprofile__flavor='chocolate').values_list('username', flat=True)
            ),
            ['alice']
        )
        self.assertEqual(
            list(
                User.objects.using('other')
                .filter(userprofile__flavor='chocolate').values_list('username', flat=True)
            ),
            []
        )

        self.assertEqual(
            list(
                User.objects.using('default')
                .filter(userprofile__flavor='crunchy frog').values_list('username', flat=True)
            ),
            []
        )
        self.assertEqual(
            list(
                User.objects.using('other')
                .filter(userprofile__flavor='crunchy frog').values_list('username', flat=True)
            ),
            ['bob']
        )

        # Reget the objects to clear caches
        alice_profile = UserProfile.objects.using('default').get(flavor='chocolate')
        bob_profile = UserProfile.objects.using('other').get(flavor='crunchy frog')

        # Retrieve related object by descriptor. Related objects should be database-bound
        self.assertEqual(alice_profile.user.username, 'alice')
        self.assertEqual(bob_profile.user.username, 'bob')

    def test_o2o_cross_database_protection(self):
        "Operations that involve sharing one-to-one objects across databases raise an error"
        # Create a user and profile on the default database
        alice = User.objects.db_manager('default').create_user('alice', 'alice@example.com')

        # Create a user and profile on the other database
        bob = User.objects.db_manager('other').create_user('bob', 'bob@example.com')

        # Set a one-to-one relation with an object from a different database
        alice_profile = UserProfile.objects.using('default').create(user=alice, flavor='chocolate')
        msg = (
            'Cannot assign "%r": the current database router prevents this '
            'relation.' % alice_profile
        )
        with self.assertRaisesMessage(ValueError, msg):
            bob.userprofile = alice_profile

        # BUT! if you assign a FK object when the base object hasn't
        # been saved yet, you implicitly assign the database for the
        # base object.
        bob_profile = UserProfile.objects.using('other').create(user=bob, flavor='crunchy frog')

        new_bob_profile = UserProfile(flavor="spring surprise")

        # assigning a profile requires an explicit pk as the object isn't saved
        charlie = User(pk=51, username='charlie', email='charlie@example.com')
        charlie.set_unusable_password()

        # initially, no db assigned
        self.assertIsNone(new_bob_profile._state.db)
        self.assertIsNone(charlie._state.db)

        # old object comes from 'other', so the new object is set to use 'other'...
        new_bob_profile.user = bob
        charlie.userprofile = bob_profile
        self.assertEqual(new_bob_profile._state.db, 'other')
        self.assertEqual(charlie._state.db, 'other')

        # ... but it isn't saved yet
        self.assertEqual(list(User.objects.using('other').values_list('username', flat=True)), ['bob'])
        self.assertEqual(list(UserProfile.objects.using('other').values_list('flavor', flat=True)), ['crunchy frog'])

        # When saved (no using required), the new objects go to 'other'
        charlie.save()
        bob_profile.save()
        new_bob_profile.save()
        self.assertEqual(list(User.objects.using('default').values_list('username', flat=True)), ['alice'])
        self.assertEqual(list(User.objects.using('other').values_list('username', flat=True)), ['bob', 'charlie'])
        self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)), ['chocolate'])
        self.assertEqual(
            list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
            ['crunchy frog', 'spring surprise']
        )

        # This also works if you assign the O2O relation in the constructor
        denise = User.objects.db_manager('other').create_user('denise', 'denise@example.com')
        denise_profile = UserProfile(flavor="tofu", user=denise)

        self.assertEqual(denise_profile._state.db, 'other')
        # ... but it isn't saved yet
        self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)), ['chocolate'])
        self.assertEqual(
            list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
            ['crunchy frog', 'spring surprise']
        )

        # When saved, the new profile goes to 'other'
        denise_profile.save()
        self.assertEqual(list(UserProfile.objects.using('default').values_list('flavor', flat=True)), ['chocolate'])
        self.assertEqual(
            list(UserProfile.objects.using('other').values_list('flavor', flat=True)),
            ['crunchy frog', 'spring surprise', 'tofu']
        )

    def test_generic_key_separation(self):
        "Generic fields are constrained to a single database"
        # Create a book and a review on the default database
        pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
        review1 = Review.objects.create(source="Python Monthly", content_object=pro)

        # Create a book and a review on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))

        review2 = Review.objects.using('other').create(source="Python Weekly", content_object=dive)

        review1 = Review.objects.using('default').get(source="Python Monthly")
        self.assertEqual(review1.content_object.title, "Pro Django")

        review2 = Review.objects.using('other').get(source="Python Weekly")
        self.assertEqual(review2.content_object.title, "Dive into Python")

        # Reget the objects to clear caches
        dive = Book.objects.using('other').get(title="Dive into Python")

        # Retrieve related object by descriptor. Related objects should be database-bound
        self.assertEqual(list(dive.reviews.all().values_list('source', flat=True)), ['Python Weekly'])

    def test_generic_key_reverse_operations(self):
        "Generic reverse manipulations are all constrained to a single DB"
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        temp = Book.objects.using('other').create(title="Temp", published=datetime.date(2009, 5, 4))
        review1 = Review.objects.using('other').create(source="Python Weekly", content_object=dive)
        review2 = Review.objects.using('other').create(source="Python Monthly", content_object=temp)

        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
            []
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            ['Python Weekly']
        )

        # Add a second review
        dive.reviews.add(review2)
        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
            []
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            ['Python Monthly', 'Python Weekly']
        )

        # Remove the first review
        dive.reviews.remove(review1)
        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
            []
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            ['Python Monthly']
        )

        # Clear all reviews
        dive.reviews.clear()
        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
            []
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            []
        )

        # Create a review through the generic interface
        dive.reviews.create(source='Python Daily')
        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=dive.pk).values_list('source', flat=True)),
            []
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            ['Python Daily']
        )

    def test_generic_key_cross_database_protection(self):
        "Operations that involve sharing generic key objects across databases raise an error"
        # Create a book and a review on the default database
        pro = Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
        review1 = Review.objects.create(source="Python Monthly", content_object=pro)

        # Create a book and a review on the other database
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))

        Review.objects.using('other').create(source="Python Weekly", content_object=dive)

        # Set a foreign key with an object from a different database
        msg = (
            'Cannot assign "<ContentType: multiple_database | book>": the '
            'current database router prevents this relation.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            review1.content_object = dive

        # Add to a foreign key set with an object from a different database
        msg = (
            "<Review: Python Monthly> instance isn't saved. "
            "Use bulk=False or save the object first."
        )
        with self.assertRaisesMessage(ValueError, msg):
            with transaction.atomic(using='other'):
                dive.reviews.add(review1)

        # BUT! if you assign a FK object when the base object hasn't
        # been saved yet, you implicitly assign the database for the
        # base object.
        review3 = Review(source="Python Daily")
        # initially, no db assigned
        self.assertIsNone(review3._state.db)

        # Dive comes from 'other', so review3 is set to use 'other'...
        review3.content_object = dive
        self.assertEqual(review3._state.db, 'other')
        # ... but it isn't saved yet
        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=pro.pk).values_list('source', flat=True)),
            ['Python Monthly']
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            ['Python Weekly']
        )

        # When saved, review3 goes to 'other'
        review3.save()
        self.assertEqual(
            list(Review.objects.using('default').filter(object_id=pro.pk).values_list('source', flat=True)),
            ['Python Monthly']
        )
        self.assertEqual(
            list(Review.objects.using('other').filter(object_id=dive.pk).values_list('source', flat=True)),
            ['Python Daily', 'Python Weekly']
        )

    def test_generic_key_deletion(self):
        "Cascaded deletions of Generic Key relations issue queries on the right database"
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        Review.objects.using('other').create(source="Python Weekly", content_object=dive)

        # Check the initial state
        self.assertEqual(Book.objects.using('default').count(), 0)
        self.assertEqual(Review.objects.using('default').count(), 0)

        self.assertEqual(Book.objects.using('other').count(), 1)
        self.assertEqual(Review.objects.using('other').count(), 1)

        # Delete the Book object, which will cascade onto the review
        dive.delete(using='other')

        self.assertEqual(Book.objects.using('default').count(), 0)
        self.assertEqual(Review.objects.using('default').count(), 0)

        # Both the book and the review have been deleted from the right database
        self.assertEqual(Book.objects.using('other').count(), 0)
        self.assertEqual(Review.objects.using('other').count(), 0)

    def test_ordering(self):
        "get_next_by_XXX commands stick to a single database"
        Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16))
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        learn = Book.objects.using('other').create(title="Learning Python", published=datetime.date(2008, 7, 16))

        self.assertEqual(learn.get_next_by_published().title, "Dive into Python")
        self.assertEqual(dive.get_previous_by_published().title, "Learning Python")

    def test_raw(self):
        "test the raw() method across databases"
        dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
        val = Book.objects.db_manager("other").raw('SELECT id FROM multiple_database_book')
        self.assertQuerysetEqual(val, [dive.pk], attrgetter("pk"))

        val = Book.objects.raw('SELECT id FROM multiple_database_book').using('other')
        self.assertQuerysetEqual(val, [dive.pk], attrgetter("pk"))

    def test_select_related(self):
        "Database assignment is retained if an object is retrieved with select_related()"
        # Create a book and author on the other database
        mark = Person.objects.using('other').create(name="Mark Pilgrim")
        Book.objects.using('other').create(
            title="Dive into Python",
            published=datetime.date(2009, 5, 4),
            editor=mark,
        )

        # Retrieve the Book, with its editor, using select_related()
        book = Book.objects.using('other').select_related('editor').get(title="Dive into Python")

        # The editor instance should have a db state
        self.assertEqual(book.editor._state.db, 'other')

    def test_subquery(self):
        """Make sure as_sql works with subqueries and primary/replica."""
        sub = Person.objects.using('other').filter(name='fff')
        qs = Book.objects.filter(editor__in=sub)

        # When you call __str__ on the query object, it doesn't know about using
        # so it falls back to the default. If the subquery explicitly uses a
        # different database, an error should be raised.
        msg = (
            "Subqueries aren't allowed across different databases. Force the "
            "inner query to be evaluated using `list(inner_query)`."
        )
        with self.assertRaisesMessage(ValueError, msg):
            str(qs.query)

        # Evaluating the query shouldn't work, either
        with self.assertRaisesMessage(ValueError, msg):
            for obj in qs:
                pass

    def test_related_manager(self):
        "Related managers return managers, not querysets"
        mark = Person.objects.using('other').create(name="Mark Pilgrim")

        # extra_arg is removed by the BookManager's implementation of
        # create(); but the BookManager's implementation won't get called
        # unless edited returns a Manager, not a queryset
        mark.book_set.create(title="Dive into Python", published=datetime.date(2009, 5, 4), extra_arg=True)
        mark.book_set.get_or_create(title="Dive into Python", published=datetime.date(2009, 5, 4), extra_arg=True)
        mark.edited.create(title="Dive into Water", published=datetime.date(2009, 5, 4), extra_arg=True)
        mark.edited.get_or_create(title="Dive into Water", published=datetime.date(2009, 5, 4), extra_arg=True)


class ConnectionRouterTestCase(SimpleTestCase):
    @override_settings(DATABASE_ROUTERS=[
        'multiple_database.tests.TestRouter',
        'multiple_database.tests.WriteRouter'])
    def test_router_init_default(self):
        connection_router = ConnectionRouter()
        self.assertEqual([r.__class__.__name__ for r in connection_router.routers], ['TestRouter', 'WriteRouter'])

    def test_router_init_arg(self):
        connection_router = ConnectionRouter([
            'multiple_database.tests.TestRouter',
            'multiple_database.tests.WriteRouter'
        ])
        self.assertEqual([r.__class__.__name__ for r in connection_router.routers], ['TestRouter', 'WriteRouter'])

        # Init with instances instead of strings
        connection_router = ConnectionRouter([TestRouter(), WriteRouter()])
        self.assertEqual([r.__class__.__name__ for r in connection_router.routers], ['TestRouter', 'WriteRouter'])


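# The router classes imported from .routers are not shown in this file. A
# rough sketch, inferred only from how the tests below exercise them (an
# assumption, not the actual implementation): TestRouter behaves like a
# primary/replica router that reads from 'other' and writes to 'default',
# roughly:
#
#   class TestRouter:
#       def db_for_read(self, model, instance=None, **hints):
#           return instance._state.db if instance else 'other'
#
#       def db_for_write(self, model, **hints):
#           return DEFAULT_DB_ALIAS
#
#       def allow_relation(self, obj1, obj2, **hints):
#           return True
#
#       def allow_migrate(self, db, app_label, **hints):
#           return True
#
# AuthRouter presumably only handles django.contrib.auth models (allowing
# them to migrate only on 'other'), and WriteRouter presumably implements
# just db_for_write(), returning a 'writer' alias (see test_partial_router).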
# Make the 'other' database appear to be a replica of the 'default'
@override_settings(DATABASE_ROUTERS=[TestRouter()])
class RouterTestCase(TestCase):
    databases = {'default', 'other'}

    def test_db_selection(self):
        "Querysets obey the router for db suggestions"
        self.assertEqual(Book.objects.db, 'other')
        self.assertEqual(Book.objects.all().db, 'other')

        self.assertEqual(Book.objects.using('default').db, 'default')

        self.assertEqual(Book.objects.db_manager('default').db, 'default')
        self.assertEqual(Book.objects.db_manager('default').all().db, 'default')

def test_migrate_selection(self):
|
2011-12-18 01:37:24 +08:00
|
|
|
"Synchronization behavior is predictable"
|
2010-01-25 20:23:30 +08:00
|
|
|
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertTrue(router.allow_migrate_model('default', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('default', Book))
|
2010-01-25 20:23:30 +08:00
|
|
|
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertTrue(router.allow_migrate_model('other', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('other', Book))
|
2010-01-25 20:23:30 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
with override_settings(DATABASE_ROUTERS=[TestRouter(), AuthRouter()]):
|
|
|
|
# Add the auth router to the chain. TestRouter is a universal
|
|
|
|
# synchronizer, so it should have no effect.
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertTrue(router.allow_migrate_model('default', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('default', Book))
|
2010-01-25 20:23:30 +08:00
|
|
|
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertTrue(router.allow_migrate_model('other', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('other', Book))
|
2010-01-25 20:23:30 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
with override_settings(DATABASE_ROUTERS=[AuthRouter(), TestRouter()]):
|
|
|
|
# Now check what happens if the router order is reversed.
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertFalse(router.allow_migrate_model('default', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('default', Book))
|
|
|
|
|
|
|
|
self.assertTrue(router.allow_migrate_model('other', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('other', Book))
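# Routers are consulted in the order given and the first non-None answer wins.
# That ordering is the point of the reversed block above: AuthRouter evidently
# confines the auth app to 'other', so once it is asked first it vetoes
# migrating User onto 'default', while Book (on which it has no opinion) still
# falls through to TestRouter. Illustrative recap, assuming the AuthRouter
# behaviour implied by these assertions:
#
#     with override_settings(DATABASE_ROUTERS=[AuthRouter(), TestRouter()]):
#         router.allow_migrate_model('default', User)  # False, AuthRouter answers first
#         router.allow_migrate_model('default', Book)  # True, AuthRouter abstains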
|
|
|
|
|
2010-01-27 15:56:53 +08:00
|
|
|
def test_partial_router(self):
|
|
|
|
"A router can choose to implement a subset of methods"
|
|
|
|
dive = Book.objects.using('other').create(title="Dive into Python",
|
|
|
|
published=datetime.date(2009, 5, 4))
|
|
|
|
|
2011-12-18 01:37:24 +08:00
|
|
|
# First check the baseline behavior.
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(router.db_for_read(User), 'other')
|
|
|
|
self.assertEqual(router.db_for_read(Book), 'other')
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(router.db_for_write(User), 'default')
|
|
|
|
self.assertEqual(router.db_for_write(Book), 'default')
|
2010-01-27 15:56:53 +08:00
|
|
|
|
|
|
|
self.assertTrue(router.allow_relation(dive, dive))
|
|
|
|
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertTrue(router.allow_migrate_model('default', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('default', Book))
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
with override_settings(DATABASE_ROUTERS=[WriteRouter(), AuthRouter(), TestRouter()]):
|
|
|
|
self.assertEqual(router.db_for_read(User), 'default')
|
|
|
|
self.assertEqual(router.db_for_read(Book), 'other')
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
self.assertEqual(router.db_for_write(User), 'writer')
|
|
|
|
self.assertEqual(router.db_for_write(Book), 'writer')
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
self.assertTrue(router.allow_relation(dive, dive))
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2015-02-19 15:27:58 +08:00
|
|
|
self.assertFalse(router.allow_migrate_model('default', User))
|
|
|
|
self.assertTrue(router.allow_migrate_model('default', Book))
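# A hedged guess at what a partial router such as WriteRouter looks like: it
# answers only db_for_write (returning 'writer', per the assertions above), so
# reads, relations and migrations fall through to the rest of the chain.
class WriteOnlySketchRouter:
    def db_for_write(self, model, **hints):
        return 'writer'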
|
2010-01-27 15:56:53 +08:00
|
|
|
|
2010-01-22 22:30:06 +08:00
|
|
|
def test_database_routing(self):
|
|
|
|
marty = Person.objects.using('default').create(name="Marty Alchin")
|
2018-03-16 17:54:34 +08:00
|
|
|
pro = Book.objects.using('default').create(
|
|
|
|
title='Pro Django',
|
|
|
|
published=datetime.date(2008, 12, 16),
|
|
|
|
editor=marty,
|
|
|
|
)
|
2015-10-09 05:17:10 +08:00
|
|
|
pro.authors.set([marty])
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Create a book on the other database
|
2016-04-08 10:04:45 +08:00
|
|
|
Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# An update query will be routed to the default database
|
|
|
|
Book.objects.filter(title='Pro Django').update(pages=200)
|
|
|
|
|
2014-03-30 02:56:08 +08:00
|
|
|
with self.assertRaises(Book.DoesNotExist):
|
2010-01-22 22:30:06 +08:00
|
|
|
# By default, the get query will be directed to 'other'
|
|
|
|
Book.objects.get(title='Pro Django')
|
|
|
|
|
|
|
|
# But the same query issued explicitly at a database will work.
|
|
|
|
pro = Book.objects.using('default').get(title='Pro Django')
|
|
|
|
|
2016-10-27 15:53:39 +08:00
|
|
|
# The update worked.
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(pro.pages, 200)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# An update query with an explicit using clause will be routed
|
|
|
|
# to the requested database.
|
|
|
|
Book.objects.using('other').filter(title='Dive into Python').update(pages=300)
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.objects.get(title='Dive into Python').pages, 300)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Related object queries stick to the same database
|
|
|
|
# as the original object, regardless of the router
|
2012-06-08 00:08:47 +08:00
|
|
|
self.assertEqual(list(pro.authors.values_list('name', flat=True)), ['Marty Alchin'])
|
|
|
|
self.assertEqual(pro.editor.name, 'Marty Alchin')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2011-08-12 22:14:15 +08:00
|
|
|
# get_or_create is a special case. The get needs to be targeted at
|
2010-01-22 22:30:06 +08:00
|
|
|
# the write database in order to avoid potential transaction
|
|
|
|
# consistency problems
|
|
|
|
book, created = Book.objects.get_or_create(title="Pro Django")
|
|
|
|
self.assertFalse(created)
|
|
|
|
|
|
|
|
book, created = Book.objects.get_or_create(title="Dive Into Python",
|
2013-10-27 03:15:03 +08:00
|
|
|
defaults={'published': datetime.date(2009, 5, 4)})
|
2010-01-22 22:30:06 +08:00
|
|
|
self.assertTrue(created)
|
|
|
|
|
|
|
|
# Check the head count of objects
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.objects.using('default').count(), 2)
|
|
|
|
self.assertEqual(Book.objects.using('other').count(), 1)
|
2010-01-22 22:30:06 +08:00
|
|
|
# If a database isn't specified, the read database is used
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.objects.count(), 1)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# A delete query will also be routed to the default database
|
|
|
|
Book.objects.filter(pages__gt=150).delete()
|
|
|
|
|
|
|
|
# The default database has lost the book.
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.objects.using('default').count(), 1)
|
|
|
|
self.assertEqual(Book.objects.using('other').count(), 1)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2015-03-15 09:25:33 +08:00
|
|
|
def test_invalid_set_foreign_key_assignment(self):
|
|
|
|
marty = Person.objects.using('default').create(name="Marty Alchin")
|
|
|
|
dive = Book.objects.using('other').create(
|
|
|
|
title="Dive into Python",
|
|
|
|
published=datetime.date(2009, 5, 4),
|
|
|
|
)
|
|
|
|
# Set a foreign key set with an object from a different database
|
|
|
|
msg = "<Book: Dive into Python> instance isn't saved. Use bulk=False or save the object first."
|
|
|
|
with self.assertRaisesMessage(ValueError, msg):
|
|
|
|
marty.edited.set([dive])
|
|
|
|
|
2010-01-22 22:30:06 +08:00
|
|
|
def test_foreign_key_cross_database_protection(self):
|
|
|
|
"Foreign keys can cross databases if they two databases have a common source"
|
|
|
|
# Create a book and author on the default database
|
|
|
|
pro = Book.objects.using('default').create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
|
|
|
|
marty = Person.objects.using('default').create(name="Marty Alchin")
|
|
|
|
|
|
|
|
# Create a book and author on the other database
|
|
|
|
dive = Book.objects.using('other').create(title="Dive into Python",
|
|
|
|
published=datetime.date(2009, 5, 4))
|
|
|
|
|
|
|
|
mark = Person.objects.using('other').create(name="Mark Pilgrim")
|
|
|
|
|
|
|
|
# Set a foreign key with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
dive.editor = marty
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments of original objects haven't changed...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ... but they will when the affected object is saved.
|
|
|
|
dive.save()
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'default')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ...and the source database now has a copy of any object saved
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.using('default').get(title='Dive into Python').delete()
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2014-05-21 03:54:56 +08:00
|
|
|
# This isn't a real primary/replica database, so restore the original from other
|
2010-01-22 22:30:06 +08:00
|
|
|
dive = Book.objects.using('other').get(title='Dive into Python')
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Set a foreign key set with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
marty.edited.set([pro, dive], bulk=False)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Assignment implies a save, so database assignments of original objects have changed...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'default')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ...and the source database now has a copy of any object saved
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.using('default').get(title='Dive into Python').delete()
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2014-05-21 03:54:56 +08:00
|
|
|
# This isn't a real primary/replica database, so restore the original from other
|
2010-01-22 22:30:06 +08:00
|
|
|
dive = Book.objects.using('other').get(title='Dive into Python')
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Add to a foreign key set with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
marty.edited.add(dive, bulk=False)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Add implies a save, so database assignments of original objects have changed...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'default')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ...and the source database now has a copy of any object saved
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.using('default').get(title='Dive into Python').delete()
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2014-05-21 03:54:56 +08:00
|
|
|
# This isn't a real primary/replica database, so restore the original from other
|
2010-01-22 22:30:06 +08:00
|
|
|
dive = Book.objects.using('other').get(title='Dive into Python')
|
|
|
|
|
|
|
|
# If you assign a FK object when the base object hasn't
|
|
|
|
# been saved yet, you implicitly assign the database for the
|
|
|
|
# base object.
|
|
|
|
chris = Person(name="Chris Mills")
|
|
|
|
html5 = Book(title="Dive into HTML5", published=datetime.date(2010, 3, 15))
|
|
|
|
# initially, no db assigned
|
2016-06-17 02:19:18 +08:00
|
|
|
self.assertIsNone(chris._state.db)
|
|
|
|
self.assertIsNone(html5._state.db)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# old object comes from 'other', so the new object is set to use the
|
|
|
|
# source of 'other'...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'other')
|
2014-05-19 16:45:55 +08:00
|
|
|
chris.save()
|
2010-01-22 22:30:06 +08:00
|
|
|
dive.editor = chris
|
|
|
|
html5.editor = mark
|
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
|
|
|
self.assertEqual(chris._state.db, 'default')
|
|
|
|
self.assertEqual(html5._state.db, 'default')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# This also works if you assign the FK in the constructor
|
|
|
|
water = Book(title="Dive into Water", published=datetime.date(2001, 1, 1), editor=mark)
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(water._state.db, 'default')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2011-10-16 19:04:34 +08:00
|
|
|
# For the remainder of this test, create a copy of 'mark' in the
|
|
|
|
# 'default' database to prevent integrity errors on backends that
|
|
|
|
# don't defer constraint checks until the end of the transaction
|
|
|
|
mark.save(using='default')
|
|
|
|
|
|
|
|
# This moved 'mark' to the 'default' database; move it back to 'other'
|
|
|
|
mark.save(using='other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
|
|
|
|
2010-03-31 20:33:10 +08:00
|
|
|
# If you create an object through a FK relation, it will be
|
|
|
|
# written to the write database, even if the original object
|
|
|
|
# was on the read database
|
|
|
|
cheesecake = mark.edited.create(title='Dive into Cheesecake', published=datetime.date(2010, 3, 15))
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(cheesecake._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
|
|
|
# Same goes for get_or_create, regardless of whether getting or creating
|
2015-09-12 07:33:12 +08:00
|
|
|
cheesecake, created = mark.edited.get_or_create(
|
|
|
|
title='Dive into Cheesecake',
|
|
|
|
published=datetime.date(2010, 3, 15),
|
|
|
|
)
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(cheesecake._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
|
|
|
puddles, created = mark.edited.get_or_create(title='Dive into Puddles', published=datetime.date(2010, 3, 15))
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(puddles._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
2010-01-22 22:30:06 +08:00
|
|
|
def test_m2m_cross_database_protection(self):
|
|
|
|
"M2M relations can cross databases if the database share a source"
|
|
|
|
# Create books and authors on the inverse of their usual databases
|
|
|
|
pro = Book.objects.using('other').create(pk=1, title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
|
|
|
|
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
|
|
|
|
|
|
|
|
dive = Book.objects.using('default').create(pk=2, title="Dive into Python",
|
|
|
|
published=datetime.date(2009, 5, 4))
|
|
|
|
|
|
|
|
mark = Person.objects.using('default').create(pk=2, name="Mark Pilgrim")
|
|
|
|
|
2010-09-13 13:08:21 +08:00
|
|
|
# Now save back onto the usual database.
|
2014-05-21 03:54:56 +08:00
|
|
|
# This simulates primary/replica - the objects exist on both databases,
|
2010-01-22 22:30:06 +08:00
|
|
|
# but the _state.db is as it is for all other tests.
|
|
|
|
pro.save(using='default')
|
|
|
|
marty.save(using='default')
|
|
|
|
dive.save(using='other')
|
|
|
|
mark.save(using='other')
|
|
|
|
|
2016-10-27 15:53:39 +08:00
|
|
|
# We have 2 of both types of object on both databases
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.objects.using('default').count(), 2)
|
|
|
|
self.assertEqual(Book.objects.using('other').count(), 2)
|
|
|
|
self.assertEqual(Person.objects.using('default').count(), 2)
|
|
|
|
self.assertEqual(Person.objects.using('other').count(), 2)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Set a m2m set with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
marty.book_set.set([pro, dive])
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments don't change
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# All m2m relations should be saved on the default database
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.authors.through.objects.using('default').count(), 2)
|
|
|
|
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Reset relations
|
|
|
|
Book.authors.through.objects.using('default').delete()
|
|
|
|
|
|
|
|
# Add to an m2m with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
marty.book_set.add(dive)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments don't change
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# All m2m relations should be saved on the default database
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
|
|
|
|
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Reset relations
|
|
|
|
Book.authors.through.objects.using('default').delete()
|
|
|
|
|
|
|
|
# Set a reverse m2m with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
dive.authors.set([mark, marty])
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments don't change
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# All m2m relations should be saved on the default database
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.authors.through.objects.using('default').count(), 2)
|
|
|
|
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Reset relations
|
|
|
|
Book.authors.through.objects.using('default').delete()
|
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.authors.through.objects.using('default').count(), 0)
|
|
|
|
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Add to a reverse m2m with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
dive.authors.add(marty)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments don't change
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty._state.db, 'default')
|
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(mark._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# All m2m relations should be saved on the default database
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(Book.authors.through.objects.using('default').count(), 1)
|
|
|
|
self.assertEqual(Book.authors.through.objects.using('other').count(), 0)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2010-03-31 20:33:10 +08:00
|
|
|
# If you create an object through a M2M relation, it will be
|
|
|
|
# written to the write database, even if the original object
|
|
|
|
# was on the read database
|
|
|
|
alice = dive.authors.create(name='Alice')
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(alice._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
|
|
|
# Same goes for get_or_create, regardless of whether getting or creating
|
|
|
|
alice, created = dive.authors.get_or_create(name='Alice')
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(alice._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
|
|
|
bob, created = dive.authors.get_or_create(name='Bob')
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(bob._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
2010-04-28 12:20:35 +08:00
|
|
|
def test_o2o_cross_database_protection(self):
|
|
|
|
"Operations that involve sharing FK objects across databases raise an error"
|
|
|
|
# Create a user and profile on the default database
|
|
|
|
alice = User.objects.db_manager('default').create_user('alice', 'alice@example.com')
|
|
|
|
|
|
|
|
# Create a user and profile on the other database
|
|
|
|
bob = User.objects.db_manager('other').create_user('bob', 'bob@example.com')
|
|
|
|
|
|
|
|
# Set a one-to-one relation with an object from a different database
|
|
|
|
alice_profile = UserProfile.objects.create(user=alice, flavor='chocolate')
|
2016-06-28 23:21:26 +08:00
|
|
|
bob.userprofile = alice_profile
|
2010-04-28 12:20:35 +08:00
|
|
|
|
|
|
|
# Database assignments of original objects haven't changed...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(alice._state.db, 'default')
|
|
|
|
self.assertEqual(alice_profile._state.db, 'default')
|
|
|
|
self.assertEqual(bob._state.db, 'other')
|
2010-04-28 12:20:35 +08:00
|
|
|
|
|
|
|
# ... but they will when the affected object is saved.
|
|
|
|
bob.save()
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(bob._state.db, 'default')
|
2010-04-28 12:20:35 +08:00
|
|
|
|
2010-01-22 22:30:06 +08:00
|
|
|
def test_generic_key_cross_database_protection(self):
|
|
|
|
"Generic Key operations can span databases if they share a source"
|
|
|
|
# Create a book and review on the default database
|
2013-11-03 12:36:09 +08:00
|
|
|
pro = Book.objects.using(
|
|
|
|
'default').create(title="Pro Django", published=datetime.date(2008, 12, 16))
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
review1 = Review.objects.using(
|
|
|
|
'default').create(source="Python Monthly", content_object=pro)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Create a book and review on the other database
|
2013-11-03 12:36:09 +08:00
|
|
|
dive = Book.objects.using(
|
|
|
|
'other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
review2 = Review.objects.using(
|
|
|
|
'other').create(source="Python Weekly", content_object=dive)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Set a generic foreign key with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
review1.content_object = dive
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments of original objects haven't changed...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(review1._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(review2._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ... but they will when the affected object is saved.
|
|
|
|
dive.save()
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(review1._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'default')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ...and the source database now has a copy of any object saved
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.using('default').get(title='Dive into Python').delete()
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2014-05-21 03:54:56 +08:00
|
|
|
# This isn't a real primary/replica database, so restore the original from other
|
2010-01-22 22:30:06 +08:00
|
|
|
dive = Book.objects.using('other').get(title='Dive into Python')
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Add to a generic foreign key set with an object from a different database
|
2016-06-28 23:21:26 +08:00
|
|
|
dive.reviews.add(review1)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Database assignments of original objects haven't changed...
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(pro._state.db, 'default')
|
|
|
|
self.assertEqual(review1._state.db, 'default')
|
|
|
|
self.assertEqual(dive._state.db, 'other')
|
|
|
|
self.assertEqual(review2._state.db, 'other')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ... but they will when the affected object is saved.
|
|
|
|
dive.save()
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(dive._state.db, 'default')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# ...and the source database now has a copy of any object saved
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.using('default').get(title='Dive into Python').delete()
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# BUT! if you assign a FK object when the base object hasn't
|
|
|
|
# been saved yet, you implicitly assign the database for the
|
|
|
|
# base object.
|
|
|
|
review3 = Review(source="Python Daily")
|
|
|
|
# initially, no db assigned
|
2016-06-17 02:19:18 +08:00
|
|
|
self.assertIsNone(review3._state.db)
|
2010-01-22 22:30:06 +08:00
|
|
|
|
|
|
|
# Dive comes from 'other', so review3 is set to use the source of 'other'...
|
|
|
|
review3.content_object = dive
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(review3._state.db, 'default')
|
2010-01-22 22:30:06 +08:00
|
|
|
|
2010-03-31 20:33:10 +08:00
|
|
|
# If you create an object through a generic relation, it will be
|
|
|
|
# written to the write database, even if the original object
|
|
|
|
# was on the read database
|
|
|
|
dive = Book.objects.using('other').get(title='Dive into Python')
|
|
|
|
nyt = dive.reviews.create(source="New York Times", content_object=dive)
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(nyt._state.db, 'default')
|
2010-03-31 20:33:10 +08:00
|
|
|
|
2010-04-16 20:29:23 +08:00
|
|
|
def test_m2m_managers(self):
|
|
|
|
"M2M relations are represented by managers, and can be controlled like managers"
|
|
|
|
pro = Book.objects.using('other').create(pk=1, title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
|
|
|
|
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
|
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(pro.authors.db, 'other')
|
|
|
|
self.assertEqual(pro.authors.db_manager('default').db, 'default')
|
|
|
|
self.assertEqual(pro.authors.db_manager('default').all().db, 'default')
|
2010-04-16 20:29:23 +08:00
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty.book_set.db, 'other')
|
|
|
|
self.assertEqual(marty.book_set.db_manager('default').db, 'default')
|
|
|
|
self.assertEqual(marty.book_set.db_manager('default').all().db, 'default')
|
2010-04-16 20:29:23 +08:00
|
|
|
|
|
|
|
def test_foreign_key_managers(self):
|
|
|
|
"FK reverse relations are represented by managers, and can be controlled like managers"
|
|
|
|
marty = Person.objects.using('other').create(pk=1, name="Marty Alchin")
|
2018-03-16 17:54:34 +08:00
|
|
|
Book.objects.using('other').create(
|
|
|
|
pk=1,
|
|
|
|
title='Pro Django',
|
|
|
|
published=datetime.date(2008, 12, 16),
|
|
|
|
editor=marty,
|
|
|
|
)
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(marty.edited.db, 'other')
|
|
|
|
self.assertEqual(marty.edited.db_manager('default').db, 'default')
|
|
|
|
self.assertEqual(marty.edited.db_manager('default').all().db, 'default')
|
2010-04-16 20:29:23 +08:00
|
|
|
|
|
|
|
def test_generic_key_managers(self):
|
|
|
|
"Generic key relations are represented by managers, and can be controlled like managers"
|
|
|
|
pro = Book.objects.using('other').create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
|
2018-03-16 17:54:34 +08:00
|
|
|
Review.objects.using('other').create(source='Python Monthly', content_object=pro)
|
2010-04-16 20:29:23 +08:00
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(pro.reviews.db, 'other')
|
|
|
|
self.assertEqual(pro.reviews.db_manager('default').db, 'default')
|
|
|
|
self.assertEqual(pro.reviews.db_manager('default').all().db, 'default')
|
2010-04-16 20:29:23 +08:00
|
|
|
|
2010-03-10 23:27:22 +08:00
|
|
|
def test_subquery(self):
|
2014-05-21 03:54:56 +08:00
|
|
|
"""Make sure as_sql works with subqueries and primary/replica."""
|
2010-03-10 23:27:22 +08:00
|
|
|
# Create a book and author on the other database
|
|
|
|
|
|
|
|
mark = Person.objects.using('other').create(name="Mark Pilgrim")
|
2018-03-16 17:54:34 +08:00
|
|
|
Book.objects.using('other').create(
|
|
|
|
title='Dive into Python',
|
|
|
|
published=datetime.date(2009, 5, 4),
|
|
|
|
editor=mark,
|
|
|
|
)
|
2010-03-10 23:27:22 +08:00
|
|
|
|
|
|
|
sub = Person.objects.filter(name='Mark Pilgrim')
|
|
|
|
qs = Book.objects.filter(editor__in=sub)
|
|
|
|
|
|
|
|
# When you call __str__ on the query object, it doesn't know about using
|
|
|
|
# so it falls back to the default. Don't let routing instructions
|
|
|
|
# force the subquery to an incompatible database.
|
|
|
|
str(qs.query)
|
|
|
|
|
|
|
|
# If you evaluate the query, it should work, running on 'other'
|
2012-06-08 00:08:47 +08:00
|
|
|
self.assertEqual(list(qs.values_list('title', flat=True)), ['Dive into Python'])
|
2010-03-10 23:27:22 +08:00
|
|
|
|
2012-08-13 03:17:54 +08:00
|
|
|
def test_deferred_models(self):
|
|
|
|
mark_def = Person.objects.using('default').create(name="Mark Pilgrim")
|
|
|
|
mark_other = Person.objects.using('other').create(name="Mark Pilgrim")
|
2018-03-16 17:54:34 +08:00
|
|
|
orig_b = Book.objects.using('other').create(
|
|
|
|
title='Dive into Python',
|
|
|
|
published=datetime.date(2009, 5, 4),
|
|
|
|
editor=mark_other,
|
|
|
|
)
|
2012-08-13 03:17:54 +08:00
|
|
|
b = Book.objects.using('other').only('title').get(pk=orig_b.pk)
|
|
|
|
self.assertEqual(b.published, datetime.date(2009, 5, 4))
|
|
|
|
b = Book.objects.using('other').only('title').get(pk=orig_b.pk)
|
|
|
|
b.editor = mark_def
|
|
|
|
b.save(using='default')
|
|
|
|
self.assertEqual(Book.objects.using('default').get(pk=b.pk).published,
|
|
|
|
datetime.date(2009, 5, 4))
|
|
|
|
|
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
@override_settings(DATABASE_ROUTERS=[AuthRouter()])
|
2010-02-22 21:09:02 +08:00
|
|
|
class AuthTestCase(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2010-02-22 21:09:02 +08:00
|
|
|
|
|
|
|
def test_auth_manager(self):
|
|
|
|
"The methods on the auth manager obey database hints"
|
|
|
|
# Create one user using default allocation policy
|
|
|
|
User.objects.create_user('alice', 'alice@example.com')
|
|
|
|
|
|
|
|
# Create another user, explicitly specifying the database
|
|
|
|
User.objects.db_manager('default').create_user('bob', 'bob@example.com')
|
|
|
|
|
|
|
|
# The first user only exists on the other database
|
|
|
|
alice = User.objects.using('other').get(username='alice')
|
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(alice.username, 'alice')
|
|
|
|
self.assertEqual(alice._state.db, 'other')
|
2010-02-22 21:09:02 +08:00
|
|
|
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(User.DoesNotExist):
|
|
|
|
User.objects.using('default').get(username='alice')
|
2010-02-22 21:09:02 +08:00
|
|
|
|
|
|
|
# The second user only exists on the default database
|
|
|
|
bob = User.objects.using('default').get(username='bob')
|
|
|
|
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(bob.username, 'bob')
|
|
|
|
self.assertEqual(bob._state.db, 'default')
|
2010-02-22 21:09:02 +08:00
|
|
|
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(User.DoesNotExist):
|
|
|
|
User.objects.using('other').get(username='bob')
|
2010-02-22 21:09:02 +08:00
|
|
|
|
|
|
|
# That is... there is one user on each database
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertEqual(User.objects.using('default').count(), 1)
|
|
|
|
self.assertEqual(User.objects.using('other').count(), 1)
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2010-04-09 21:08:08 +08:00
|
|
|
def test_dumpdata(self):
|
2016-10-27 15:53:39 +08:00
|
|
|
"dumpdata honors allow_migrate restrictions on the router"
|
2010-04-09 21:08:08 +08:00
|
|
|
User.objects.create_user('alice', 'alice@example.com')
|
|
|
|
User.objects.db_manager('default').create_user('bob', 'bob@example.com')
|
|
|
|
|
2016-10-27 15:53:39 +08:00
|
|
|
# dumping the default database doesn't try to include auth because
|
|
|
|
# allow_migrate prohibits auth on default
|
2010-06-05 13:32:05 +08:00
|
|
|
new_io = StringIO()
|
|
|
|
management.call_command('dumpdata', 'auth', format='json', database='default', stdout=new_io)
|
|
|
|
command_output = new_io.getvalue().strip()
|
|
|
|
self.assertEqual(command_output, '[]')
|
2010-04-09 21:08:08 +08:00
|
|
|
|
2016-10-27 15:53:39 +08:00
|
|
|
# dumping the other database does include auth
|
2010-06-05 13:32:05 +08:00
|
|
|
new_io = StringIO()
|
|
|
|
management.call_command('dumpdata', 'auth', format='json', database='other', stdout=new_io)
|
|
|
|
command_output = new_io.getvalue().strip()
|
2014-10-28 18:02:56 +08:00
|
|
|
self.assertIn('"email": "alice@example.com"', command_output)
|
2010-04-09 21:08:08 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class AntiPetRouter:
|
2013-07-30 19:08:59 +08:00
|
|
|
# A router that only expresses an opinion on migrate,
|
2010-08-20 21:57:24 +08:00
|
|
|
# passing pets to the 'other' database
|
|
|
|
|
2015-02-22 00:08:09 +08:00
|
|
|
def allow_migrate(self, db, app_label, model_name=None, **hints):
|
2010-08-20 21:57:24 +08:00
|
|
|
if db == 'other':
|
2015-02-19 15:27:58 +08:00
|
|
|
return model_name == 'pet'
|
2010-08-20 21:57:24 +08:00
|
|
|
else:
|
2015-02-19 15:27:58 +08:00
|
|
|
return model_name != 'pet'
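# Illustrative use, assuming AntiPetRouter is the active router (for example
# via override_settings(DATABASE_ROUTERS=[AntiPetRouter()])):
#
#     router.allow_migrate('other', 'multiple_database', model_name='pet')    # True
#     router.allow_migrate('default', 'multiple_database', model_name='pet')  # False
#
# loaddata performs this same check, which is why the pet entries in the
# fixtures below are silently skipped when loaded into 'default'.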
|
2010-01-25 20:23:30 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2009-12-22 23:18:51 +08:00
|
|
|
class FixtureTestCase(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2009-12-22 23:18:51 +08:00
|
|
|
fixtures = ['multidb-common', 'multidb']
|
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
@override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
|
2009-12-22 23:18:51 +08:00
|
|
|
def test_fixture_loading(self):
|
|
|
|
"Multi-db fixtures are loaded correctly"
|
2016-10-27 15:53:39 +08:00
|
|
|
# "Pro Django" exists on the default database, but not on other database
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.get(title="Pro Django")
|
|
|
|
Book.objects.using('default').get(title="Pro Django")
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(Book.DoesNotExist):
|
|
|
|
Book.objects.using('other').get(title="Pro Django")
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2016-10-27 15:53:39 +08:00
|
|
|
# "Dive into Python" exists on the default database, but not on other database
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.using('other').get(title="Dive into Python")
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(Book.DoesNotExist):
|
|
|
|
Book.objects.get(title="Dive into Python")
|
|
|
|
with self.assertRaises(Book.DoesNotExist):
|
|
|
|
Book.objects.using('default').get(title="Dive into Python")
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2016-10-27 15:53:39 +08:00
|
|
|
# "Definitive Guide" exists on the both databases
|
2016-06-28 23:21:26 +08:00
|
|
|
Book.objects.get(title="The Definitive Guide to Django")
|
|
|
|
Book.objects.using('default').get(title="The Definitive Guide to Django")
|
|
|
|
Book.objects.using('other').get(title="The Definitive Guide to Django")
|
2009-12-22 23:18:51 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
@override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
|
2010-08-20 21:57:24 +08:00
|
|
|
def test_pseudo_empty_fixtures(self):
|
2015-09-12 07:33:12 +08:00
|
|
|
"""
|
|
|
|
A fixture can contain entries, but lead to nothing in the database;
|
|
|
|
this shouldn't raise an error (#14068).
|
|
|
|
"""
|
2010-08-20 21:57:24 +08:00
|
|
|
new_io = StringIO()
|
|
|
|
management.call_command('loaddata', 'pets', stdout=new_io, stderr=new_io)
|
|
|
|
command_output = new_io.getvalue().strip()
|
|
|
|
# No objects will actually be loaded
|
|
|
|
self.assertEqual(command_output, "Installed 0 object(s) (of 2) from 1 fixture(s)")
|
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2009-12-22 23:18:51 +08:00
|
|
|
class PickleQuerySetTestCase(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2009-12-22 23:18:51 +08:00
|
|
|
|
|
|
|
def test_pickling(self):
|
|
|
|
for db in connections:
|
|
|
|
Book.objects.using(db).create(title='Dive into Python', published=datetime.date(2009, 5, 4))
|
|
|
|
qs = Book.objects.all()
|
|
|
|
self.assertEqual(qs.db, pickle.loads(pickle.dumps(qs)).db)
|
2010-08-07 14:27:52 +08:00
|
|
|
|
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class DatabaseReceiver:
|
2010-08-07 14:27:52 +08:00
|
|
|
"""
|
|
|
|
Used in the tests for the database argument in signals (#13552)
|
|
|
|
"""
|
|
|
|
def __call__(self, signal, sender, **kwargs):
|
|
|
|
self._database = kwargs['using']
|
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class WriteToOtherRouter:
|
2010-08-07 14:27:52 +08:00
|
|
|
"""
|
|
|
|
A router that sends all writes to the other database.
|
|
|
|
"""
|
|
|
|
def db_for_write(self, model, **hints):
|
|
|
|
return "other"
|
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2010-08-07 14:27:52 +08:00
|
|
|
class SignalTests(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2010-08-07 14:27:52 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
def override_router(self):
|
|
|
|
return override_settings(DATABASE_ROUTERS=[WriteToOtherRouter()])
|
2010-08-07 14:27:52 +08:00
|
|
|
|
|
|
|
def test_database_arg_save_and_delete(self):
|
|
|
|
"""
|
2016-10-27 15:53:39 +08:00
|
|
|
The pre/post_save and pre/post_delete signals receive the correct database argument.
|
2010-08-07 14:27:52 +08:00
|
|
|
"""
|
|
|
|
# Make some signal receivers
|
|
|
|
pre_save_receiver = DatabaseReceiver()
|
|
|
|
post_save_receiver = DatabaseReceiver()
|
|
|
|
pre_delete_receiver = DatabaseReceiver()
|
|
|
|
post_delete_receiver = DatabaseReceiver()
|
|
|
|
# Make model and connect receivers
|
|
|
|
signals.pre_save.connect(sender=Person, receiver=pre_save_receiver)
|
|
|
|
signals.post_save.connect(sender=Person, receiver=post_save_receiver)
|
|
|
|
signals.pre_delete.connect(sender=Person, receiver=pre_delete_receiver)
|
|
|
|
signals.post_delete.connect(sender=Person, receiver=post_delete_receiver)
|
|
|
|
p = Person.objects.create(name='Darth Vader')
|
|
|
|
# Save and test receivers got calls
|
|
|
|
p.save()
|
|
|
|
self.assertEqual(pre_save_receiver._database, DEFAULT_DB_ALIAS)
|
|
|
|
self.assertEqual(post_save_receiver._database, DEFAULT_DB_ALIAS)
|
|
|
|
# Delete, and test
|
|
|
|
p.delete()
|
|
|
|
self.assertEqual(pre_delete_receiver._database, DEFAULT_DB_ALIAS)
|
|
|
|
self.assertEqual(post_delete_receiver._database, DEFAULT_DB_ALIAS)
|
|
|
|
# Save again to a different database
|
|
|
|
p.save(using="other")
|
|
|
|
self.assertEqual(pre_save_receiver._database, "other")
|
|
|
|
self.assertEqual(post_save_receiver._database, "other")
|
|
|
|
# Delete, and test
|
|
|
|
p.delete(using="other")
|
|
|
|
self.assertEqual(pre_delete_receiver._database, "other")
|
|
|
|
self.assertEqual(post_delete_receiver._database, "other")
|
|
|
|
|
2013-02-09 04:12:03 +08:00
|
|
|
signals.pre_save.disconnect(sender=Person, receiver=pre_save_receiver)
|
|
|
|
signals.post_save.disconnect(sender=Person, receiver=post_save_receiver)
|
|
|
|
signals.pre_delete.disconnect(sender=Person, receiver=pre_delete_receiver)
|
|
|
|
signals.post_delete.disconnect(sender=Person, receiver=post_delete_receiver)
|
|
|
|
|
2010-08-07 14:27:52 +08:00
|
|
|
def test_database_arg_m2m(self):
|
|
|
|
"""
|
2016-10-27 15:53:39 +08:00
|
|
|
The m2m_changed signal receives the correct database argument.
|
2010-08-07 14:27:52 +08:00
|
|
|
"""
|
|
|
|
# Make a receiver
|
|
|
|
receiver = DatabaseReceiver()
|
2011-10-16 19:04:34 +08:00
|
|
|
# Connect it
|
2010-08-07 14:27:52 +08:00
|
|
|
signals.m2m_changed.connect(receiver=receiver)
|
|
|
|
|
2011-10-16 19:04:34 +08:00
|
|
|
# Create the models that will be used for the tests
|
2010-08-07 14:27:52 +08:00
|
|
|
b = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
p = Person.objects.create(name="Marty Alchin")
|
|
|
|
|
2011-10-16 19:04:34 +08:00
|
|
|
# Create a copy of the models on the 'other' database to prevent
|
|
|
|
# integrity errors on backends that don't defer constraint checks
|
2018-03-16 17:54:34 +08:00
|
|
|
Book.objects.using('other').create(pk=b.pk, title=b.title, published=b.published)
|
2011-10-16 19:04:34 +08:00
|
|
|
Person.objects.using('other').create(pk=p.pk, name=p.name)
|
|
|
|
|
2010-08-07 14:27:52 +08:00
|
|
|
# Test addition
|
|
|
|
b.authors.add(p)
|
|
|
|
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
b.authors.add(p)
|
2010-08-07 14:27:52 +08:00
|
|
|
self.assertEqual(receiver._database, "other")
|
|
|
|
|
|
|
|
# Test removal
|
|
|
|
b.authors.remove(p)
|
|
|
|
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
b.authors.remove(p)
|
2010-08-07 14:27:52 +08:00
|
|
|
self.assertEqual(receiver._database, "other")
|
|
|
|
|
|
|
|
# Test addition in reverse
|
|
|
|
p.book_set.add(b)
|
|
|
|
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
p.book_set.add(b)
|
2010-08-07 14:27:52 +08:00
|
|
|
self.assertEqual(receiver._database, "other")
|
|
|
|
|
|
|
|
# Test clearing
|
|
|
|
b.authors.clear()
|
|
|
|
self.assertEqual(receiver._database, DEFAULT_DB_ALIAS)
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
b.authors.clear()
|
2010-08-07 14:27:52 +08:00
|
|
|
self.assertEqual(receiver._database, "other")
|
2011-01-13 12:11:41 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class AttributeErrorRouter:
|
2011-01-13 12:11:41 +08:00
|
|
|
"A router to test the exception handling of ConnectionRouter"
|
|
|
|
def db_for_read(self, model, **hints):
|
|
|
|
raise AttributeError
|
|
|
|
|
|
|
|
def db_for_write(self, model, **hints):
|
|
|
|
raise AttributeError
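# Note the contrast with a partial router: ConnectionRouter skips a router
# only when the method is missing altogether; an exception raised inside an
# existing method is not swallowed, so the AttributeError above reaches the
# caller, as RouterAttributeErrorTestCase verifies. A harmless sketch:
class SilentlySkippedSketchRouter:
    # Defines none of the router methods, so it is simply skipped.
    pass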
|
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2011-01-13 12:11:41 +08:00
|
|
|
class RouterAttributeErrorTestCase(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2011-01-13 12:11:41 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
def override_router(self):
|
|
|
|
return override_settings(DATABASE_ROUTERS=[AttributeErrorRouter()])
|
2011-01-13 12:11:41 +08:00
|
|
|
|
2011-01-14 11:43:10 +08:00
|
|
|
def test_attribute_error_read(self):
|
2016-10-27 15:53:39 +08:00
|
|
|
"The AttributeError from AttributeErrorRouter bubbles up"
|
2011-01-14 11:43:10 +08:00
|
|
|
b = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(AttributeError):
|
|
|
|
Book.objects.get(pk=b.pk)
|
2011-01-14 11:43:10 +08:00
|
|
|
|
|
|
|
def test_attribute_error_save(self):
|
2016-10-27 15:53:39 +08:00
|
|
|
"The AttributeError from AttributeErrorRouter bubbles up"
|
2011-01-13 12:11:41 +08:00
|
|
|
dive = Book()
|
2013-10-23 18:09:29 +08:00
|
|
|
dive.title = "Dive into Python"
|
2011-01-13 12:11:41 +08:00
|
|
|
dive.published = datetime.date(2009, 5, 4)
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(AttributeError):
|
|
|
|
dive.save()
|
2011-01-13 12:11:41 +08:00
|
|
|
|
2011-01-14 11:43:10 +08:00
|
|
|
def test_attribute_error_delete(self):
|
2016-10-27 15:53:39 +08:00
|
|
|
"The AttributeError from AttributeErrorRouter bubbles up"
|
2011-01-14 11:43:10 +08:00
|
|
|
b = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
p = Person.objects.create(name="Marty Alchin")
|
2015-10-09 05:17:10 +08:00
|
|
|
b.authors.set([p])
|
2011-01-14 11:43:10 +08:00
|
|
|
b.editor = p
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
2016-01-17 19:26:39 +08:00
|
|
|
with self.assertRaises(AttributeError):
|
|
|
|
b.delete()
|
2011-01-14 11:43:10 +08:00
|
|
|
|
|
|
|
def test_attribute_error_m2m(self):
|
2016-10-27 15:53:39 +08:00
|
|
|
"The AttributeError from AttributeErrorRouter bubbles up"
|
2011-01-14 11:43:10 +08:00
|
|
|
b = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
p = Person.objects.create(name="Marty Alchin")
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
2015-10-09 05:17:10 +08:00
|
|
|
with self.assertRaises(AttributeError):
|
|
|
|
b.authors.set([p])
|
2011-01-14 11:43:10 +08:00
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class ModelMetaRouter:
|
2011-01-13 12:11:41 +08:00
|
|
|
"A router to ensure model arguments are real model classes"
|
|
|
|
def db_for_write(self, model, **hints):
|
|
|
|
if not hasattr(model, '_meta'):
|
|
|
|
raise ValueError
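# db_for_write() is always handed a real model class, including the
# auto-created 'through' model of an m2m field, so the hasattr(model, '_meta')
# check above should never fail. Hypothetical call for illustration (the
# 'instance' hint is what the related managers pass along):
#
#     ModelMetaRouter().db_for_write(Book.authors.through, instance=book)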
|
|
|
|
|
2013-11-03 12:36:09 +08:00
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
@override_settings(DATABASE_ROUTERS=[ModelMetaRouter()])
|
2011-01-15 08:15:39 +08:00
|
|
|
class RouterModelArgumentTestCase(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2011-01-13 12:11:41 +08:00
|
|
|
|
2011-01-15 08:15:39 +08:00
|
|
|
def test_m2m_collection(self):
|
2011-01-13 12:11:41 +08:00
|
|
|
b = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
|
|
|
|
p = Person.objects.create(name="Marty Alchin")
|
|
|
|
# test add
|
|
|
|
b.authors.add(p)
|
|
|
|
# test remove
|
|
|
|
b.authors.remove(p)
|
|
|
|
# test clear
|
|
|
|
b.authors.clear()
|
2011-01-15 08:15:39 +08:00
|
|
|
# test setattr
|
2015-10-09 05:17:10 +08:00
|
|
|
b.authors.set([p])
|
2011-01-15 08:15:39 +08:00
|
|
|
# test delete, which cascades to the M2M through table
|
|
|
|
b.delete()
|
|
|
|
|
|
|
|
def test_foreignkey_collection(self):
|
|
|
|
person = Person.objects.create(name='Bob')
|
2013-10-19 20:31:38 +08:00
|
|
|
Pet.objects.create(owner=person, name='Wart')
|
2011-01-15 08:15:39 +08:00
|
|
|
# test delete, which cascades to the related FK collection
|
|
|
|
person.delete()
|
2012-11-23 03:09:40 +08:00
|
|
|
|
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class SyncOnlyDefaultDatabaseRouter:
|
2015-02-19 15:27:58 +08:00
|
|
|
def allow_migrate(self, db, app_label, **hints):
|
2012-11-23 03:09:40 +08:00
|
|
|
return db == DEFAULT_DB_ALIAS
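# This router keys only on the database alias and ignores the app label, so
# while it is installed nothing at all is migrated to 'other'. Illustrative,
# mirroring the test below:
#
#     with override_settings(DATABASE_ROUTERS=[SyncOnlyDefaultDatabaseRouter()]):
#         management.call_command('migrate', database='other', verbosity=0, interactive=False)
#     # content types deleted from 'other' are not recreated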
|
|
|
|
|
|
|
|
|
2013-09-03 23:51:34 +08:00
|
|
|
class MigrateTestCase(TestCase):
|
2013-06-15 05:11:51 +08:00
|
|
|
|
2017-01-28 09:06:55 +08:00
|
|
|
# Limit memory usage when calling 'migrate'.
|
|
|
|
available_apps = [
|
|
|
|
'multiple_database',
|
|
|
|
'django.contrib.auth',
|
|
|
|
'django.contrib.contenttypes'
|
|
|
|
]
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2012-11-23 03:09:40 +08:00
|
|
|
|
2013-09-03 23:51:34 +08:00
|
|
|
def test_migrate_to_other_database(self):
|
|
|
|
"""Regression test for #16039: migrate with --database option."""
|
2012-11-23 17:02:18 +08:00
|
|
|
cts = ContentType.objects.using('other').filter(app_label='multiple_database')
|
|
|
|
|
|
|
|
count = cts.count()
|
2012-11-23 03:09:40 +08:00
|
|
|
self.assertGreater(count, 0)
|
|
|
|
|
2012-11-23 17:02:18 +08:00
|
|
|
cts.delete()
|
2014-12-27 02:23:38 +08:00
|
|
|
management.call_command('migrate', verbosity=0, interactive=False, database='other')
|
2012-11-23 17:02:18 +08:00
|
|
|
self.assertEqual(cts.count(), count)
|
2012-11-23 03:09:40 +08:00
|
|
|
|
2013-09-03 23:51:34 +08:00
|
|
|
def test_migrate_to_other_database_with_router(self):
|
|
|
|
"""Regression test for #16039: migrate with --database option."""
|
2012-11-23 17:02:18 +08:00
|
|
|
cts = ContentType.objects.using('other').filter(app_label='multiple_database')
|
|
|
|
|
|
|
|
cts.delete()
|
2014-11-30 01:09:40 +08:00
|
|
|
with override_settings(DATABASE_ROUTERS=[SyncOnlyDefaultDatabaseRouter()]):
|
2014-12-27 02:23:38 +08:00
|
|
|
management.call_command('migrate', verbosity=0, interactive=False, database='other')
|
2012-11-23 03:09:40 +08:00
|
|
|
|
2012-11-23 17:02:18 +08:00
|
|
|
self.assertEqual(cts.count(), 0)
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class RouterUsed(Exception):
|
|
|
|
WRITE = 'write'
|
|
|
|
|
|
|
|
def __init__(self, mode, model, hints):
|
|
|
|
self.mode = mode
|
|
|
|
self.model = model
|
|
|
|
self.hints = hints
|
|
|
|
|
|
|
|
|
|
|
|
class RouteForWriteTestCase(TestCase):
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2013-09-30 13:05:43 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class WriteCheckRouter:
|
2013-09-30 13:05:43 +08:00
|
|
|
def db_for_write(self, model, **hints):
|
|
|
|
raise RouterUsed(mode=RouterUsed.WRITE, model=model, hints=hints)
|
|
|
|
|
2014-11-30 01:09:40 +08:00
|
|
|
def override_router(self):
|
|
|
|
return override_settings(DATABASE_ROUTERS=[RouteForWriteTestCase.WriteCheckRouter()])
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_fk_delete(self):
|
|
|
|
owner = Person.objects.create(name='Someone')
|
|
|
|
pet = Pet.objects.create(name='fido', owner=owner)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
pet.owner.delete()
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Person)
|
|
|
|
self.assertEqual(e.hints, {'instance': owner})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_fk_delete(self):
|
|
|
|
owner = Person.objects.create(name='Someone')
|
|
|
|
to_del_qs = owner.pet_set.all()
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
to_del_qs.delete()
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Pet)
|
|
|
|
self.assertEqual(e.hints, {'instance': owner})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_fk_get_or_create(self):
|
|
|
|
owner = Person.objects.create(name='Someone')
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
owner.pet_set.get_or_create(name='fido')
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Pet)
|
|
|
|
self.assertEqual(e.hints, {'instance': owner})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_fk_update(self):
|
|
|
|
owner = Person.objects.create(name='Someone')
|
2013-10-19 20:31:38 +08:00
|
|
|
Pet.objects.create(name='fido', owner=owner)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
owner.pet_set.update(name='max')
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Pet)
|
|
|
|
self.assertEqual(e.hints, {'instance': owner})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_m2m_add(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book.authors.through)
|
|
|
|
self.assertEqual(e.hints, {'instance': book})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_m2m_clear(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
book.authors.clear()
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book.authors.through)
|
|
|
|
self.assertEqual(e.hints, {'instance': book})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_m2m_delete(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
book.authors.all().delete()
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Person)
|
|
|
|
self.assertEqual(e.hints, {'instance': book})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_m2m_get_or_create(self):
|
2013-10-19 20:31:38 +08:00
|
|
|
Person.objects.create(name='Someone')
|
2013-09-30 13:05:43 +08:00
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
book.authors.get_or_create(name='Someone else')
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book)
|
|
|
|
self.assertEqual(e.hints, {'instance': book})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_m2m_remove(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
book.authors.remove(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book.authors.through)
|
|
|
|
self.assertEqual(e.hints, {'instance': book})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_m2m_update(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
book.authors.all().update(name='Different')
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Person)
|
|
|
|
self.assertEqual(e.hints, {'instance': book})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_m2m_add(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
auth.book_set.add(book)
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book.authors.through)
|
|
|
|
self.assertEqual(e.hints, {'instance': auth})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_m2m_clear(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
auth.book_set.clear()
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book.authors.through)
|
|
|
|
self.assertEqual(e.hints, {'instance': auth})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_m2m_delete(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
auth.book_set.all().delete()
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book)
|
|
|
|
self.assertEqual(e.hints, {'instance': auth})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_m2m_get_or_create(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
2013-10-19 20:31:38 +08:00
|
|
|
Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
auth.book_set.get_or_create(title="New Book", published=datetime.datetime.now())
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Person)
|
|
|
|
self.assertEqual(e.hints, {'instance': auth})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_m2m_remove(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
auth.book_set.remove(book)
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book.authors.through)
|
|
|
|
self.assertEqual(e.hints, {'instance': auth})
|
2013-09-30 13:05:43 +08:00
|
|
|
|
|
|
|
def test_reverse_m2m_update(self):
|
|
|
|
auth = Person.objects.create(name='Someone')
|
|
|
|
book = Book.objects.create(title="Pro Django",
|
|
|
|
published=datetime.date(2008, 12, 16))
|
|
|
|
book.authors.add(auth)
|
2016-06-28 23:21:26 +08:00
|
|
|
with self.assertRaises(RouterUsed) as cm:
|
2014-11-30 01:09:40 +08:00
|
|
|
with self.override_router():
|
|
|
|
auth.book_set.all().update(title='Different')
|
2016-06-28 23:21:26 +08:00
|
|
|
e = cm.exception
|
|
|
|
self.assertEqual(e.mode, RouterUsed.WRITE)
|
|
|
|
self.assertEqual(e.model, Book)
|
|
|
|
self.assertEqual(e.hints, {'instance': auth})
|
2018-05-11 08:42:44 +08:00
|
|
|
|
|
|
|
|
|
|
|
class NoRelationRouter:
|
|
|
|
"""Disallow all relations."""
|
|
|
|
def allow_relation(self, obj1, obj2, **hints):
|
|
|
|
return False
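# Assigning a related object consults allow_relation() before anything is
# saved; when every installed router answers False the descriptor raises
# ValueError. Illustrative check, assuming this router is active:
#
#     router.allow_relation(Person(name='Someone'), Pet())  # False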
|
|
|
|
|
|
|
|
|
|
|
|
@override_settings(DATABASE_ROUTERS=[NoRelationRouter()])
|
2018-11-27 03:05:02 +08:00
|
|
|
class RelationAssignmentTests(SimpleTestCase):
|
2018-05-11 08:42:44 +08:00
|
|
|
"""allow_relation() is called with unsaved model instances."""
|
2018-07-12 12:12:20 +08:00
|
|
|
databases = {'default', 'other'}
|
2018-05-11 08:42:44 +08:00
|
|
|
router_prevents_msg = 'the current database router prevents this relation'
|
|
|
|
|
|
|
|
def test_foreign_key_relation(self):
|
|
|
|
person = Person(name='Someone')
|
|
|
|
pet = Pet()
|
|
|
|
with self.assertRaisesMessage(ValueError, self.router_prevents_msg):
|
|
|
|
pet.owner = person
|
|
|
|
|
|
|
|
def test_reverse_one_to_one_relation(self):
|
|
|
|
user = User(username='Someone', password='fake_hash')
|
|
|
|
profile = UserProfile()
|
|
|
|
with self.assertRaisesMessage(ValueError, self.router_prevents_msg):
|
|
|
|
user.userprofile = profile
|