import platform
import unittest
from datetime import datetime, timezone
from unittest import mock

from django.test import SimpleTestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import (
    base36_to_int,
    content_disposition_header,
    escape_leading_slashes,
    http_date,
    int_to_base36,
    is_same_domain,
    parse_etags,
    parse_header_parameters,
    parse_http_date,
    quote_etag,
    url_has_allowed_host_and_scheme,
    urlencode,
    urlsafe_base64_decode,
    urlsafe_base64_encode,
)


class URLEncodeTests(SimpleTestCase):
    cannot_encode_none_msg = (
        "Cannot encode None for key 'a' in a query string. Did you mean to "
        "pass an empty string or omit the value?"
    )

    def test_tuples(self):
        self.assertEqual(urlencode((("a", 1), ("b", 2), ("c", 3))), "a=1&b=2&c=3")

    def test_dict(self):
        result = urlencode({"a": 1, "b": 2, "c": 3})
        # Dictionaries are treated as unordered.
        self.assertIn(
            result,
            [
                "a=1&b=2&c=3",
                "a=1&c=3&b=2",
                "b=2&a=1&c=3",
                "b=2&c=3&a=1",
                "c=3&a=1&b=2",
                "c=3&b=2&a=1",
            ],
        )

    def test_dict_containing_sequence_not_doseq(self):
        self.assertEqual(urlencode({"a": [1, 2]}, doseq=False), "a=%5B1%2C+2%5D")

    def test_dict_containing_tuple_not_doseq(self):
        self.assertEqual(urlencode({"a": (1, 2)}, doseq=False), "a=%281%2C+2%29")

    def test_custom_iterable_not_doseq(self):
        class IterableWithStr:
            def __str__(self):
                return "custom"

            def __iter__(self):
                yield from range(0, 3)

        self.assertEqual(urlencode({"a": IterableWithStr()}, doseq=False), "a=custom")

    def test_dict_containing_sequence_doseq(self):
        self.assertEqual(urlencode({"a": [1, 2]}, doseq=True), "a=1&a=2")

    def test_dict_containing_empty_sequence_doseq(self):
        self.assertEqual(urlencode({"a": []}, doseq=True), "")

    def test_multivaluedict(self):
        result = urlencode(
            MultiValueDict(
                {
                    "name": ["Adrian", "Simon"],
                    "position": ["Developer"],
                }
            ),
            doseq=True,
        )
        # MultiValueDicts are similarly unordered.
        self.assertIn(
            result,
            [
                "name=Adrian&name=Simon&position=Developer",
                "position=Developer&name=Adrian&name=Simon",
            ],
        )

    def test_dict_with_bytes_values(self):
        self.assertEqual(urlencode({"a": b"abc"}, doseq=True), "a=abc")

    def test_dict_with_sequence_of_bytes(self):
        self.assertEqual(
            urlencode({"a": [b"spam", b"eggs", b"bacon"]}, doseq=True),
            "a=spam&a=eggs&a=bacon",
        )

    def test_dict_with_bytearray(self):
        self.assertEqual(urlencode({"a": bytearray(range(2))}, doseq=True), "a=0&a=1")

    def test_generator(self):
        self.assertEqual(urlencode({"a": range(2)}, doseq=True), "a=0&a=1")
        self.assertEqual(urlencode({"a": range(2)}, doseq=False), "a=range%280%2C+2%29")

    def test_none(self):
        with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
            urlencode({"a": None})

    def test_none_in_sequence(self):
        with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
            urlencode({"a": [None]}, doseq=True)

    def test_none_in_generator(self):
        def gen():
            yield None

        with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
            urlencode({"a": gen()}, doseq=True)


class Base36IntTests(SimpleTestCase):
    def test_roundtrip(self):
        for n in [0, 1, 1000, 1000000]:
            self.assertEqual(n, base36_to_int(int_to_base36(n)))

    def test_negative_input(self):
        with self.assertRaisesMessage(ValueError, "Negative base36 conversion input."):
            int_to_base36(-1)

    def test_to_base36_errors(self):
        for n in ["1", "foo", {1: 2}, (1, 2, 3), 3.141]:
            with self.assertRaises(TypeError):
                int_to_base36(n)

    def test_invalid_literal(self):
        for n in ["#", " "]:
            with self.assertRaisesMessage(
                ValueError, "invalid literal for int() with base 36: '%s'" % n
            ):
                base36_to_int(n)

    def test_input_too_large(self):
        with self.assertRaisesMessage(ValueError, "Base36 input too large"):
            base36_to_int("1" * 14)

    def test_to_int_errors(self):
        for n in [123, {1: 2}, (1, 2, 3), 3.141]:
            with self.assertRaises(TypeError):
                base36_to_int(n)

    def test_values(self):
        for n, b36 in [(0, "0"), (1, "1"), (42, "16"), (818469960, "django")]:
            self.assertEqual(int_to_base36(n), b36)
            self.assertEqual(base36_to_int(b36), n)


class URLHasAllowedHostAndSchemeTests(unittest.TestCase):
    def test_bad_urls(self):
        bad_urls = (
            "http://example.com",
            "http:///example.com",
            "https://example.com",
            "ftp://example.com",
            r"\\example.com",
            r"\\\example.com",
            r"/\\/example.com",
            r"\\\example.com",
            r"\\example.com",
            r"\\//example.com",
            r"/\/example.com",
            r"\/example.com",
            r"/\example.com",
            "http:///example.com",
            r"http:/\//example.com",
            r"http:\/example.com",
            r"http:/\example.com",
            'javascript:alert("XSS")',
            "\njavascript:alert(x)",
            "java\nscript:alert(x)",
            "\x08//example.com",
            r"http://otherserver\@example.com",
            r"http:\\testserver\@example.com",
            r"http://testserver\me:pass@example.com",
            r"http://testserver\@example.com",
            r"http:\\testserver\confirm\me@example.com",
            "http:999999999",
            "ftp:9999999999",
            "\n",
            "http://[2001:cdba:0000:0000:0000:0000:3257:9652/",
            "http://2001:cdba:0000:0000:0000:0000:3257:9652]/",
        )
        for bad_url in bad_urls:
            with self.subTest(url=bad_url):
                self.assertIs(
                    url_has_allowed_host_and_scheme(
                        bad_url, allowed_hosts={"testserver", "testserver2"}
                    ),
                    False,
                )

    def test_good_urls(self):
        good_urls = (
            "/view/?param=http://example.com",
            "/view/?param=https://example.com",
            "/view?param=ftp://example.com",
            "view/?param=//example.com",
            "https://testserver/",
            "HTTPS://testserver/",
            "//testserver/",
            "http://testserver/confirm?email=me@example.com",
            "/url%20with%20spaces/",
            "path/http:2222222222",
        )
        for good_url in good_urls:
            with self.subTest(url=good_url):
                self.assertIs(
                    url_has_allowed_host_and_scheme(
                        good_url, allowed_hosts={"otherserver", "testserver"}
                    ),
                    True,
                )

    def test_basic_auth(self):
        # Valid basic auth credentials are allowed.
        self.assertIs(
            url_has_allowed_host_and_scheme(
                r"http://user:pass@testserver/", allowed_hosts={"user:pass@testserver"}
            ),
            True,
        )

    def test_no_allowed_hosts(self):
        # A path without host is allowed.
        self.assertIs(
            url_has_allowed_host_and_scheme(
                "/confirm/me@example.com", allowed_hosts=None
            ),
            True,
        )
        # Basic auth without host is not allowed.
        self.assertIs(
            url_has_allowed_host_and_scheme(
                r"http://testserver\@example.com", allowed_hosts=None
            ),
            False,
        )

    def test_allowed_hosts_str(self):
        self.assertIs(
            url_has_allowed_host_and_scheme(
                "http://good.com/good", allowed_hosts="good.com"
            ),
            True,
        )
        self.assertIs(
            url_has_allowed_host_and_scheme(
                "http://good.co/evil", allowed_hosts="good.com"
            ),
            False,
        )

    def test_secure_param_https_urls(self):
        secure_urls = (
            "https://example.com/p",
            "HTTPS://example.com/p",
            "/view/?param=http://example.com",
        )
        for url in secure_urls:
            with self.subTest(url=url):
                self.assertIs(
                    url_has_allowed_host_and_scheme(
                        url, allowed_hosts={"example.com"}, require_https=True
                    ),
                    True,
                )

    def test_secure_param_non_https_urls(self):
        insecure_urls = (
            "http://example.com/p",
            "ftp://example.com/p",
            "//example.com/p",
        )
        for url in insecure_urls:
            with self.subTest(url=url):
                self.assertIs(
                    url_has_allowed_host_and_scheme(
                        url, allowed_hosts={"example.com"}, require_https=True
                    ),
                    False,
                )


class URLSafeBase64Tests(unittest.TestCase):
    def test_roundtrip(self):
        bytestring = b"foo"
        encoded = urlsafe_base64_encode(bytestring)
        decoded = urlsafe_base64_decode(encoded)
        self.assertEqual(bytestring, decoded)


class IsSameDomainTests(unittest.TestCase):
    def test_good(self):
        for pair in (
            ("example.com", "example.com"),
            ("example.com", ".example.com"),
            ("foo.example.com", ".example.com"),
            ("example.com:8888", "example.com:8888"),
            ("example.com:8888", ".example.com:8888"),
            ("foo.example.com:8888", ".example.com:8888"),
        ):
            self.assertIs(is_same_domain(*pair), True)

    def test_bad(self):
        for pair in (
            ("example2.com", "example.com"),
            ("foo.example.com", "example.com"),
            ("example.com:9999", "example.com:8888"),
            ("foo.example.com:8888", ""),
        ):
            self.assertIs(is_same_domain(*pair), False)


class ETagProcessingTests(unittest.TestCase):
    def test_parsing(self):
        self.assertEqual(
            parse_etags(r'"" , "etag", "e\\tag", W/"weak"'),
            ['""', '"etag"', r'"e\\tag"', 'W/"weak"'],
        )
        self.assertEqual(parse_etags("*"), ["*"])

        # Ignore RFC 2616 ETags that are invalid according to RFC 9110.
        self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"'])

    def test_quoting(self):
        self.assertEqual(quote_etag("etag"), '"etag"')  # unquoted
        self.assertEqual(quote_etag('"etag"'), '"etag"')  # quoted
        self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"')  # quoted, weak


class HttpDateProcessingTests(unittest.TestCase):
    def test_http_date(self):
        t = 1167616461.0
        self.assertEqual(http_date(t), "Mon, 01 Jan 2007 01:54:21 GMT")

    def test_parsing_rfc1123(self):
        parsed = parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT")
        self.assertEqual(
            datetime.fromtimestamp(parsed, timezone.utc),
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
        )

    @unittest.skipIf(platform.architecture()[0] == "32bit", "The Year 2038 problem.")
    @mock.patch("django.utils.http.datetime.datetime")
    def test_parsing_rfc850(self, mocked_datetime):
        mocked_datetime.side_effect = datetime
        mocked_datetime.now = mock.Mock()
        now_1 = datetime(2019, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
        now_2 = datetime(2020, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
        now_3 = datetime(2048, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
        tests = (
            (
                now_1,
                "Tuesday, 31-Dec-69 08:49:37 GMT",
                datetime(2069, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
            ),
            (
                now_1,
                "Tuesday, 10-Nov-70 08:49:37 GMT",
                datetime(1970, 11, 10, 8, 49, 37, tzinfo=timezone.utc),
            ),
            (
                now_1,
                "Sunday, 06-Nov-94 08:49:37 GMT",
                datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
            ),
            (
                now_2,
                "Wednesday, 31-Dec-70 08:49:37 GMT",
                datetime(2070, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
            ),
            (
                now_2,
                "Friday, 31-Dec-71 08:49:37 GMT",
                datetime(1971, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
            ),
            (
                now_3,
                "Sunday, 31-Dec-00 08:49:37 GMT",
                datetime(2000, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
            ),
            (
                now_3,
                "Friday, 31-Dec-99 08:49:37 GMT",
                datetime(1999, 12, 31, 8, 49, 37, tzinfo=timezone.utc),
            ),
        )
        for now, rfc850str, expected_date in tests:
            with self.subTest(rfc850str=rfc850str):
                mocked_datetime.now.return_value = now
                parsed = parse_http_date(rfc850str)
                mocked_datetime.now.assert_called_once_with(tz=timezone.utc)
                self.assertEqual(
                    datetime.fromtimestamp(parsed, timezone.utc),
                    expected_date,
                )
                mocked_datetime.reset_mock()

    def test_parsing_asctime(self):
        parsed = parse_http_date("Sun Nov  6 08:49:37 1994")
        self.assertEqual(
            datetime.fromtimestamp(parsed, timezone.utc),
            datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
        )

    def test_parsing_asctime_nonascii_digits(self):
        """Non-ASCII unicode decimals raise an error."""
        with self.assertRaises(ValueError):
            parse_http_date("Sun Nov  6 08:49:37 １９９４")
        with self.assertRaises(ValueError):
            parse_http_date("Sun Nov １２ 08:49:37 1994")

    def test_parsing_year_less_than_70(self):
        parsed = parse_http_date("Sun Nov  6 08:49:37 0037")
        self.assertEqual(
            datetime.fromtimestamp(parsed, timezone.utc),
            datetime(2037, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
        )


class EscapeLeadingSlashesTests(unittest.TestCase):
    def test(self):
        tests = (
            ("//example.com", "/%2Fexample.com"),
            ("//", "/%2F"),
        )
        for url, expected in tests:
            with self.subTest(url=url):
                self.assertEqual(escape_leading_slashes(url), expected)


class ParseHeaderParameterTests(unittest.TestCase):
    def test_basic(self):
        tests = [
            ("text/plain", ("text/plain", {})),
            ("text/vnd.just.made.this.up ; ", ("text/vnd.just.made.this.up", {})),
            ("text/plain;charset=us-ascii", ("text/plain", {"charset": "us-ascii"})),
            (
                'text/plain ; charset="us-ascii"',
                ("text/plain", {"charset": "us-ascii"}),
            ),
            (
                'text/plain ; charset="us-ascii"; another=opt',
                ("text/plain", {"charset": "us-ascii", "another": "opt"}),
            ),
            (
                'attachment; filename="silly.txt"',
                ("attachment", {"filename": "silly.txt"}),
            ),
            (
                'attachment; filename="strange;name"',
                ("attachment", {"filename": "strange;name"}),
            ),
            (
                'attachment; filename="strange;name";size=123;',
                ("attachment", {"filename": "strange;name", "size": "123"}),
            ),
            (
                'form-data; name="files"; filename="fo\\"o;bar"',
                ("form-data", {"name": "files", "filename": 'fo"o;bar'}),
            ),
        ]
        for header, expected in tests:
            with self.subTest(header=header):
                self.assertEqual(parse_header_parameters(header), expected)

    def test_rfc2231_parsing(self):
        test_data = (
            (
                "Content-Type: application/x-stuff; "
                "title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A",
                "This is ***fun***",
            ),
            (
                "Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html",
                "foo-ä.html",
            ),
            (
                "Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html",
                "foo-ä.html",
            ),
        )
        for raw_line, expected_title in test_data:
            parsed = parse_header_parameters(raw_line)
            self.assertEqual(parsed[1]["title"], expected_title)

    def test_rfc2231_wrong_title(self):
        """
        Test wrongly formatted RFC 2231 headers (missing double single quotes).
        Parsing should not crash (#24209).
        """
        test_data = (
            (
                "Content-Type: application/x-stuff; "
                "title*='This%20is%20%2A%2A%2Afun%2A%2A%2A",
                "'This%20is%20%2A%2A%2Afun%2A%2A%2A",
            ),
            ("Content-Type: application/x-stuff; title*='foo.html", "'foo.html"),
            ("Content-Type: application/x-stuff; title*=bar.html", "bar.html"),
        )
        for raw_line, expected_title in test_data:
            parsed = parse_header_parameters(raw_line)
            self.assertEqual(parsed[1]["title"], expected_title)


class ContentDispositionHeaderTests(unittest.TestCase):
    def test_basic(self):
        tests = (
            ((False, None), None),
            ((False, "example"), 'inline; filename="example"'),
            ((True, None), "attachment"),
            ((True, "example"), 'attachment; filename="example"'),
            (
                (True, '"example" file\\name'),
                'attachment; filename="\\"example\\" file\\\\name"',
            ),
            ((True, "espécimen"), "attachment; filename*=utf-8''esp%C3%A9cimen"),
            (
                (True, '"espécimen" filename'),
                "attachment; filename*=utf-8''%22esp%C3%A9cimen%22%20filename",
            ),
        )

        for (is_attachment, filename), expected in tests:
            with self.subTest(is_attachment=is_attachment, filename=filename):
                self.assertEqual(
                    content_disposition_header(is_attachment, filename), expected
                )