import datetime
from xml.dom import minidom

from django.contrib.sites.models import Site
from django.contrib.syndication import views
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.test.utils import requires_tz_support
from django.utils import timezone
from django.utils.feedgenerator import rfc2822_date, rfc3339_date

from .models import Article, Entry

TZ = timezone.get_default_timezone()


class FeedTestCase(TestCase):

    @classmethod
    def setUpTestData(cls):
        cls.e1 = Entry.objects.create(
            title='My first entry', updated=datetime.datetime(1980, 1, 1, 12, 30),
            published=datetime.datetime(1986, 9, 25, 20, 15, 00)
        )
        cls.e2 = Entry.objects.create(
            title='My second entry', updated=datetime.datetime(2008, 1, 2, 12, 30),
            published=datetime.datetime(2006, 3, 17, 18, 0)
        )
        cls.e3 = Entry.objects.create(
            title='My third entry', updated=datetime.datetime(2008, 1, 2, 13, 30),
            published=datetime.datetime(2005, 6, 14, 10, 45)
        )
        cls.e4 = Entry.objects.create(
            title='A & B < C > D', updated=datetime.datetime(2008, 1, 3, 13, 30),
            published=datetime.datetime(2005, 11, 25, 12, 11, 23)
        )
        cls.e5 = Entry.objects.create(
            title='My last entry', updated=datetime.datetime(2013, 1, 20, 0, 0),
            published=datetime.datetime(2013, 3, 25, 20, 0)
        )
        cls.a1 = Article.objects.create(
            title='My first article',
            entry=cls.e1,
            updated=datetime.datetime(1986, 11, 21, 9, 12, 18),
            published=datetime.datetime(1986, 10, 21, 9, 12, 18),
        )

    def assertChildNodes(self, elem, expected):
        actual = {n.nodeName for n in elem.childNodes}
        expected = set(expected)
        self.assertEqual(actual, expected)

    def assertChildNodeContent(self, elem, expected):
        for k, v in expected.items():
            self.assertEqual(
                elem.getElementsByTagName(k)[0].firstChild.wholeText, v)

    def assertCategories(self, elem, expected):
        self.assertEqual(
            {i.firstChild.wholeText for i in elem.childNodes if i.nodeName == 'category'},
            set(expected)
        )


@override_settings(ROOT_URLCONF='syndication_tests.urls')
class SyndicationFeedTest(FeedTestCase):
    """
    Tests for the high-level syndication feed framework.
    """
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # This cleanup is necessary because contrib.sites cache
        # makes tests interfere with each other, see #11505
        Site.objects.clear_cache()

    def test_rss2_feed(self):
        """
        Test the structure and content of feeds generated by Rss201rev2Feed.
        """
        response = self.client.get('/syndication/rss2/')
        doc = minidom.parseString(response.content)

        # Making sure there's only 1 `rss` element and that the correct
        # RSS version was specified.
        feed_elem = doc.getElementsByTagName('rss')
        self.assertEqual(len(feed_elem), 1)
        feed = feed_elem[0]
        self.assertEqual(feed.getAttribute('version'), '2.0')
        self.assertEqual(feed.getElementsByTagName('language')[0].firstChild.nodeValue, 'en')

        # Making sure there's only one `channel` element w/in the
        # `rss` element.
        chan_elem = feed.getElementsByTagName('channel')
        self.assertEqual(len(chan_elem), 1)
        chan = chan_elem[0]

        # Find the last build date
        d = Entry.objects.latest('published').published
        last_build_date = rfc2822_date(timezone.make_aware(d, TZ))

        self.assertChildNodes(
            chan, [
                'title', 'link', 'description', 'language', 'lastBuildDate',
                'item', 'atom:link', 'ttl', 'copyright', 'category',
            ]
        )
        self.assertChildNodeContent(chan, {
            'title': 'My blog',
            'description': 'A more thorough description of my blog.',
            'link': 'http://example.com/blog/',
            'language': 'en',
            'lastBuildDate': last_build_date,
            'ttl': '600',
            'copyright': 'Copyright (c) 2007, Sally Smith',
        })
        self.assertCategories(chan, ['python', 'django'])

        # Ensure the content of the channel is correct
        self.assertChildNodeContent(chan, {
            'title': 'My blog',
            'link': 'http://example.com/blog/',
        })

        # Check feed_url is passed
        self.assertEqual(
            chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
            'http://example.com/syndication/rss2/'
        )

        # Find the pubdate of the first feed item
        d = Entry.objects.get(pk=self.e1.pk).published
        pub_date = rfc2822_date(timezone.make_aware(d, TZ))

        items = chan.getElementsByTagName('item')
        self.assertEqual(len(items), Entry.objects.count())
        self.assertChildNodeContent(items[0], {
            'title': 'My first entry',
            'description': 'Overridden description: My first entry',
            'link': 'http://example.com/blog/%s/' % self.e1.pk,
            'guid': 'http://example.com/blog/%s/' % self.e1.pk,
            'pubDate': pub_date,
            'author': 'test@example.com (Sally Smith)',
            'comments': '/blog/%s/comments' % self.e1.pk,
        })
        self.assertCategories(items[0], ['python', 'testing'])
        for item in items:
            self.assertChildNodes(item, [
                'title',
                'link',
                'description',
                'guid',
                'category',
                'pubDate',
                'author',
                'comments',
            ])
            # Assert that <guid> does not have any 'isPermaLink' attribute
            self.assertIsNone(item.getElementsByTagName(
                'guid')[0].attributes.get('isPermaLink'))

    def test_rss2_feed_guid_permalink_false(self):
        """
        Test if the 'isPermaLink' attribute of <guid> element of an item
        in the RSS feed is 'false'.
        """
        response = self.client.get(
            '/syndication/rss2/guid_ispermalink_false/')
        doc = minidom.parseString(response.content)
        chan = doc.getElementsByTagName(
            'rss')[0].getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')
        for item in items:
            self.assertEqual(
                item.getElementsByTagName('guid')[0].attributes.get(
                    'isPermaLink').value, "false")

    def test_rss2_feed_guid_permalink_true(self):
        """
        Test if the 'isPermaLink' attribute of <guid> element of an item
        in the RSS feed is 'true'.
        """
        response = self.client.get(
            '/syndication/rss2/guid_ispermalink_true/')
        doc = minidom.parseString(response.content)
        chan = doc.getElementsByTagName(
            'rss')[0].getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')
        for item in items:
            self.assertEqual(
                item.getElementsByTagName('guid')[0].attributes.get(
                    'isPermaLink').value, "true")

    def test_rss2_single_enclosure(self):
        response = self.client.get('/syndication/rss2/single-enclosure/')
        doc = minidom.parseString(response.content)
        chan = doc.getElementsByTagName('rss')[0].getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')
        for item in items:
            enclosures = item.getElementsByTagName('enclosure')
            self.assertEqual(len(enclosures), 1)

    def test_rss2_multiple_enclosures(self):
        with self.assertRaisesMessage(
            ValueError,
            "RSS feed items may only have one enclosure, see "
            "http://www.rssboard.org/rss-profile#element-channel-item-enclosure"
        ):
            self.client.get('/syndication/rss2/multiple-enclosure/')

    def test_rss091_feed(self):
        """
        Test the structure and content of feeds generated by RssUserland091Feed.
        """
        response = self.client.get('/syndication/rss091/')
        doc = minidom.parseString(response.content)

        # Making sure there's only 1 `rss` element and that the correct
        # RSS version was specified.
        feed_elem = doc.getElementsByTagName('rss')
        self.assertEqual(len(feed_elem), 1)
        feed = feed_elem[0]
        self.assertEqual(feed.getAttribute('version'), '0.91')

        # Making sure there's only one `channel` element w/in the
        # `rss` element.
        chan_elem = feed.getElementsByTagName('channel')
        self.assertEqual(len(chan_elem), 1)
        chan = chan_elem[0]
        self.assertChildNodes(
            chan, [
                'title', 'link', 'description', 'language', 'lastBuildDate',
                'item', 'atom:link', 'ttl', 'copyright', 'category',
            ]
        )

        # Ensure the content of the channel is correct
        self.assertChildNodeContent(chan, {
            'title': 'My blog',
            'link': 'http://example.com/blog/',
        })
        self.assertCategories(chan, ['python', 'django'])

        # Check feed_url is passed
        self.assertEqual(
            chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
            'http://example.com/syndication/rss091/'
        )

        items = chan.getElementsByTagName('item')
        self.assertEqual(len(items), Entry.objects.count())
        self.assertChildNodeContent(items[0], {
            'title': 'My first entry',
            'description': 'Overridden description: My first entry',
            'link': 'http://example.com/blog/%s/' % self.e1.pk,
        })
        for item in items:
            self.assertChildNodes(item, ['title', 'link', 'description'])
            self.assertCategories(item, [])

    def test_atom_feed(self):
        """
        Test the structure and content of feeds generated by Atom1Feed.
        """
        response = self.client.get('/syndication/atom/')
        feed = minidom.parseString(response.content).firstChild

        self.assertEqual(feed.nodeName, 'feed')
        self.assertEqual(feed.getAttribute('xmlns'), 'http://www.w3.org/2005/Atom')
        self.assertChildNodes(
            feed,
            ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'rights', 'category', 'author']
        )
        for link in feed.getElementsByTagName('link'):
            if link.getAttribute('rel') == 'self':
                self.assertEqual(link.getAttribute('href'), 'http://example.com/syndication/atom/')

        entries = feed.getElementsByTagName('entry')
        self.assertEqual(len(entries), Entry.objects.count())
        for entry in entries:
            self.assertChildNodes(entry, [
                'title',
                'link',
                'id',
                'summary',
                'category',
                'updated',
                'published',
                'rights',
                'author',
            ])
            summary = entry.getElementsByTagName('summary')[0]
            self.assertEqual(summary.getAttribute('type'), 'html')

    def test_atom_feed_published_and_updated_elements(self):
        """
        The published and updated elements are not
        the same and now adhere to RFC 4287.
        """
        response = self.client.get('/syndication/atom/')
        feed = minidom.parseString(response.content).firstChild
        entries = feed.getElementsByTagName('entry')

        published = entries[0].getElementsByTagName('published')[0].firstChild.wholeText
        updated = entries[0].getElementsByTagName('updated')[0].firstChild.wholeText

        self.assertNotEqual(published, updated)

    def test_atom_single_enclosure(self):
        response = self.client.get('/syndication/atom/single-enclosure/')
        feed = minidom.parseString(response.content).firstChild
        items = feed.getElementsByTagName('entry')
        for item in items:
            links = item.getElementsByTagName('link')
            links = [link for link in links if link.getAttribute('rel') == 'enclosure']
            self.assertEqual(len(links), 1)

    def test_atom_multiple_enclosures(self):
        response = self.client.get('/syndication/atom/multiple-enclosure/')
        feed = minidom.parseString(response.content).firstChild
        items = feed.getElementsByTagName('entry')
        for item in items:
            links = item.getElementsByTagName('link')
            links = [link for link in links if link.getAttribute('rel') == 'enclosure']
            self.assertEqual(len(links), 2)

    def test_latest_post_date(self):
        """
        Both the published and updated dates are
        considered when determining the latest post date.
        """
        # this feed has a `published` element with the latest date
        response = self.client.get('/syndication/atom/')
        feed = minidom.parseString(response.content).firstChild
        updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText

        d = Entry.objects.latest('published').published
        latest_published = rfc3339_date(timezone.make_aware(d, TZ))

        self.assertEqual(updated, latest_published)

        # this feed has an `updated` element with the latest date
        response = self.client.get('/syndication/latest/')
        feed = minidom.parseString(response.content).firstChild
        updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText

        d = Entry.objects.exclude(title='My last entry').latest('updated').updated
        latest_updated = rfc3339_date(timezone.make_aware(d, TZ))

        self.assertEqual(updated, latest_updated)

    def test_custom_feed_generator(self):
        response = self.client.get('/syndication/custom/')
        feed = minidom.parseString(response.content).firstChild

        self.assertEqual(feed.nodeName, 'feed')
        self.assertEqual(feed.getAttribute('django'), 'rocks')
        self.assertChildNodes(
            feed,
            ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'spam', 'rights', 'category', 'author']
        )

        entries = feed.getElementsByTagName('entry')
        self.assertEqual(len(entries), Entry.objects.count())
        for entry in entries:
            self.assertEqual(entry.getAttribute('bacon'), 'yum')
            self.assertChildNodes(entry, [
                'title',
                'link',
                'id',
                'summary',
                'ministry',
                'rights',
                'author',
                'updated',
                'published',
                'category',
            ])
            summary = entry.getElementsByTagName('summary')[0]
            self.assertEqual(summary.getAttribute('type'), 'html')

    def test_feed_generator_language_attribute(self):
        response = self.client.get('/syndication/language/')
        feed = minidom.parseString(response.content).firstChild
        self.assertEqual(feed.firstChild.getElementsByTagName('language')[0].firstChild.nodeValue, 'de')

    def test_title_escaping(self):
        """
        Titles are escaped correctly in RSS feeds.
        """
        response = self.client.get('/syndication/rss2/')
        doc = minidom.parseString(response.content)
        for item in doc.getElementsByTagName('item'):
            link = item.getElementsByTagName('link')[0]
            if link.firstChild.wholeText == 'http://example.com/blog/4/':
                title = item.getElementsByTagName('title')[0]
                self.assertEqual(title.firstChild.wholeText, 'A &amp; B &lt; C &gt; D')

    def test_naive_datetime_conversion(self):
        """
        Datetimes are correctly converted to the local time zone.
        """
        # Naive date times passed in get converted to the local time zone, so
        # check the received zone offset against the local offset.
        response = self.client.get('/syndication/naive-dates/')
        doc = minidom.parseString(response.content)
        updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText

        d = Entry.objects.latest('published').published
        latest = rfc3339_date(timezone.make_aware(d, TZ))

        self.assertEqual(updated, latest)

    def test_aware_datetime_conversion(self):
        """
        Datetimes with timezones don't get trodden on.
        """
        response = self.client.get('/syndication/aware-dates/')
        doc = minidom.parseString(response.content)
        published = doc.getElementsByTagName('published')[0].firstChild.wholeText
        self.assertEqual(published[-6:], '+00:42')

    @requires_tz_support
    def test_feed_last_modified_time_naive_date(self):
        """
        Tests the Last-Modified header with naive publication dates.
        """
        response = self.client.get('/syndication/naive-dates/')
        self.assertEqual(response.headers['Last-Modified'], 'Tue, 26 Mar 2013 01:00:00 GMT')

    def test_feed_last_modified_time(self):
        """
        Tests the Last-Modified header with aware publication dates.
        """
        response = self.client.get('/syndication/aware-dates/')
        self.assertEqual(response.headers['Last-Modified'], 'Mon, 25 Mar 2013 19:18:00 GMT')

        # No last-modified when feed has no item_pubdate
        response = self.client.get('/syndication/no_pubdate/')
        self.assertFalse(response.has_header('Last-Modified'))

    def test_feed_url(self):
        """
        The feed_url can be overridden.
        """
        response = self.client.get('/syndication/feedurl/')
        doc = minidom.parseString(response.content)
        for link in doc.getElementsByTagName('link'):
            if link.getAttribute('rel') == 'self':
                self.assertEqual(link.getAttribute('href'), 'http://example.com/customfeedurl/')

    def test_secure_urls(self):
        """
        Test URLs are prefixed with https:// when feed is requested over HTTPS.
        """
        response = self.client.get('/syndication/rss2/', **{
            'wsgi.url_scheme': 'https',
        })
        doc = minidom.parseString(response.content)
        chan = doc.getElementsByTagName('channel')[0]
        self.assertEqual(
            chan.getElementsByTagName('link')[0].firstChild.wholeText[0:5],
            'https'
        )
        atom_link = chan.getElementsByTagName('atom:link')[0]
        self.assertEqual(atom_link.getAttribute('href')[0:5], 'https')
        for link in doc.getElementsByTagName('link'):
            if link.getAttribute('rel') == 'self':
                self.assertEqual(link.getAttribute('href')[0:5], 'https')

    def test_item_link_error(self):
        """
        An ImproperlyConfigured is raised if no link could be found for the
        item(s).
        """
        msg = (
            'Give your Article class a get_absolute_url() method, or define '
            'an item_link() method in your Feed class.'
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.client.get('/syndication/articles/')

    def test_template_feed(self):
        """
        The item title and description can be overridden with templates.
        """
        response = self.client.get('/syndication/template/')
        doc = minidom.parseString(response.content)
        feed = doc.getElementsByTagName('rss')[0]
        chan = feed.getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')

        self.assertChildNodeContent(items[0], {
            'title': 'Title in your templates: My first entry\n',
            'description': 'Description in your templates: My first entry\n',
            'link': 'http://example.com/blog/%s/' % self.e1.pk,
        })

    def test_template_context_feed(self):
        """
        Custom context data can be passed to templates for title
        and description.
        """
        response = self.client.get('/syndication/template_context/')
        doc = minidom.parseString(response.content)
        feed = doc.getElementsByTagName('rss')[0]
        chan = feed.getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')

        self.assertChildNodeContent(items[0], {
            'title': 'My first entry (foo is bar)\n',
            'description': 'My first entry (foo is bar)\n',
        })

    def test_add_domain(self):
        """
        add_domain() prefixes domains onto the correct URLs.
        """
        prefix_domain_mapping = (
            (('example.com', '/foo/?arg=value'), 'http://example.com/foo/?arg=value'),
            (('example.com', '/foo/?arg=value', True), 'https://example.com/foo/?arg=value'),
            (('example.com', 'http://djangoproject.com/doc/'), 'http://djangoproject.com/doc/'),
            (('example.com', 'https://djangoproject.com/doc/'), 'https://djangoproject.com/doc/'),
            (('example.com', 'mailto:uhoh@djangoproject.com'), 'mailto:uhoh@djangoproject.com'),
            (('example.com', '//example.com/foo/?arg=value'), 'http://example.com/foo/?arg=value'),
        )
        for prefix in prefix_domain_mapping:
            with self.subTest(prefix=prefix):
                self.assertEqual(views.add_domain(*prefix[0]), prefix[1])

    def test_get_object(self):
        response = self.client.get('/syndication/rss2/articles/%s/' % self.e1.pk)
        doc = minidom.parseString(response.content)
        feed = doc.getElementsByTagName('rss')[0]
        chan = feed.getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')

        self.assertChildNodeContent(items[0], {
            'comments': '/blog/%s/article/%s/comments' % (self.e1.pk, self.a1.pk),
            'description': 'Article description: My first article',
            'link': 'http://example.com/blog/%s/article/%s/' % (self.e1.pk, self.a1.pk),
            'title': 'Title: My first article',
            'pubDate': rfc2822_date(timezone.make_aware(self.a1.published, TZ)),
        })

    def test_get_non_existent_object(self):
        response = self.client.get('/syndication/rss2/articles/0/')
        self.assertEqual(response.status_code, 404)