2012-08-15 03:17:25 +08:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2013-06-22 04:59:33 +08:00
|
|
|
import base64
|
2011-03-01 22:28:06 +08:00
|
|
|
import calendar
|
|
|
|
import datetime
|
2009-03-22 15:58:29 +08:00
|
|
|
import re
|
2011-01-24 16:02:40 +08:00
|
|
|
import sys
|
2012-07-20 21:36:52 +08:00
|
|
|
|
2013-06-22 04:59:33 +08:00
|
|
|
from binascii import Error as BinasciiError
|
2011-09-10 00:18:38 +08:00
|
|
|
from email.utils import formatdate
|
2007-10-31 11:59:40 +08:00
|
|
|
|
2011-04-22 20:01:41 +08:00
|
|
|
from django.utils.datastructures import MultiValueDict
|
2012-08-30 04:40:51 +08:00
|
|
|
from django.utils.encoding import force_str, force_text
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
from django.utils.functional import allow_lazy
|
2012-07-20 20:48:51 +08:00
|
|
|
from django.utils import six
|
2013-09-06 03:38:59 +08:00
|
|
|
from django.utils.six.moves.urllib.parse import (
|
|
|
|
quote, quote_plus, unquote, unquote_plus, urlparse,
|
|
|
|
urlencode as original_urlencode)
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
|
2009-03-22 15:58:29 +08:00
|
|
|
# Matches a (possibly weak, "W/"-prefixed) quoted ETag; group 1 captures the
# contents between the double quotes with backslash escapes left intact.
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')

# Lowercase three-letter month abbreviations, indexed 0-11; used to turn the
# month name captured by the date regexes below into a month number.
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()

# Building blocks for the three date formats of HTTP RFC 2616 section 3.3.1.
__D = r'(?P<day>\d{2})'  # two-digit day
__D2 = r'(?P<day>[ \d]\d)'  # day, possibly space-padded (asctime format)
__M = r'(?P<mon>\w{3})'  # three-letter month name
__Y = r'(?P<year>\d{4})'  # four-digit year
__Y2 = r'(?P<year>\d{2})'  # two-digit year (RFC 850 format)
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'  # HH:MM:SS time
# e.g. 'Sun, 06 Nov 1994 08:49:37 GMT' -- the preferred HTTP date format.
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
# e.g. 'Sunday, 06-Nov-94 08:49:37 GMT' -- obsolete RFC 850 format.
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
# e.g. 'Sun Nov  6 08:49:37 1994' -- ANSI C asctime() format.
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
|
|
|
|
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
def urlquote(url, safe='/'):
    """
    A version of Python's urllib.quote() function that can operate on unicode
    strings. The url is first UTF-8 encoded before quoting. The returned
    string can safely be used as part of an argument to a subsequent
    iri_to_uri() call without double-quoting occurring.
    """
    quoted = quote(force_str(url), force_str(safe))
    return force_text(quoted)
urlquote = allow_lazy(urlquote, six.text_type)
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
|
|
|
|
def urlquote_plus(url, safe=''):
    """
    A version of Python's urllib.quote_plus() function that can operate on
    unicode strings. The url is first UTF-8 encoded before quoting. The
    returned string can safely be used as part of an argument to a subsequent
    iri_to_uri() call without double-quoting occurring.
    """
    quoted = quote_plus(force_str(url), force_str(safe))
    return force_text(quoted)
urlquote_plus = allow_lazy(urlquote_plus, six.text_type)
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
|
2012-01-29 17:00:12 +08:00
|
|
|
def urlunquote(quoted_url):
    """
    A wrapper for Python's urllib.unquote() function that can operate on
    the result of django.utils.http.urlquote().
    """
    unquoted = unquote(force_str(quoted_url))
    return force_text(unquoted)
urlunquote = allow_lazy(urlunquote, six.text_type)
|
2012-01-29 17:00:12 +08:00
|
|
|
|
|
|
|
def urlunquote_plus(quoted_url):
    """
    A wrapper for Python's urllib.unquote_plus() function that can operate on
    the result of django.utils.http.urlquote_plus().
    """
    unquoted = unquote_plus(force_str(quoted_url))
    return force_text(unquoted)
urlunquote_plus = allow_lazy(urlunquote_plus, six.text_type)
|
2012-01-29 17:00:12 +08:00
|
|
|
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
def urlencode(query, doseq=0):
    """
    A version of Python's urllib.urlencode() function that can operate on
    unicode strings. The parameters are first cast to UTF-8 encoded strings
    and then encoded as per normal.
    """
    # Normalize the input to an iterable of (key, value) pairs.
    if isinstance(query, MultiValueDict):
        query = query.lists()
    elif hasattr(query, 'items'):
        query = query.items()
    pairs = []
    for key, value in query:
        if isinstance(value, (list, tuple)):
            encoded = [force_str(item) for item in value]
        else:
            encoded = force_str(value)
        pairs.append((force_str(key), encoded))
    return original_urlencode(pairs, doseq)
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
|
2007-10-31 11:59:40 +08:00
|
|
|
def cookie_date(epoch_seconds=None):
    """
    Formats the time to ensure compatibility with Netscape's cookie standard.

    Accepts a floating point number expressed in seconds since the epoch, in
    UTC - such as that outputted by time.time(). If set to None, defaults to
    the current time.

    Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
    """
    stamp = formatdate(epoch_seconds)
    # formatdate() yields 'Wdy, DD Mon YYYY HH:MM:SS -0000'; splice in the
    # dashes Netscape expects and replace the numeric zone with 'GMT'.
    return '{0}-{1}-{2} GMT'.format(stamp[:7], stamp[8:11], stamp[12:25])
|
|
|
|
|
|
|
|
def http_date(epoch_seconds=None):
    """
    Formats the time to match the RFC1123 date format as specified by HTTP
    RFC2616 section 3.3.1.

    Accepts a floating point number expressed in seconds since the epoch, in
    UTC - such as that outputted by time.time(). If set to None, defaults to
    the current time.

    Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
    """
    # usegmt makes formatdate() emit the literal 'GMT' zone mandated by HTTP.
    return formatdate(timeval=epoch_seconds, usegmt=True)
|
2008-08-01 04:47:53 +08:00
|
|
|
|
2011-03-01 22:28:06 +08:00
|
|
|
def parse_http_date(date):
    """
    Parses a date format as specified by HTTP RFC2616 section 3.3.1.

    The three formats allowed by the RFC are accepted, even if only the first
    one is still in widespread use.

    Returns an integer expressed in seconds since the epoch, in UTC.

    Raises ValueError if the string matches none of the formats or contains
    an out-of-range component.
    """
    # emails.Util.parsedate does the job for RFC1123 dates; unfortunately
    # RFC2616 makes it mandatory to support RFC850 dates too. So we roll
    # our own RFC-compliant parsing.
    # Try each supported format in turn; the for/else raises only when no
    # pattern matched at all.
    for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
        m = regex.match(date)
        if m is not None:
            break
    else:
        raise ValueError("%r is not in a valid HTTP date format" % date)
    try:
        year = int(m.group('year'))
        if year < 100:
            # RFC 850 uses two-digit years: interpret 00-69 as 2000-2069
            # and 70-99 as 1970-1999.
            if year < 70:
                year += 2000
            else:
                year += 1900
        month = MONTHS.index(m.group('mon').lower()) + 1
        day = int(m.group('day'))
        hour = int(m.group('hour'))
        min = int(m.group('min'))
        sec = int(m.group('sec'))
        result = datetime.datetime(year, month, day, hour, min, sec)
        # timegm() interprets the struct_time as UTC, matching the GMT
        # timestamps the RFC mandates.
        return calendar.timegm(result.utctimetuple())
    except Exception:
        # Re-raise any parsing failure as ValueError while preserving the
        # original traceback on both Python 2 and 3.
        six.reraise(ValueError, ValueError("%r is not a valid date" % date), sys.exc_info()[2])
|
2011-03-01 22:28:06 +08:00
|
|
|
|
|
|
|
def parse_http_date_safe(date):
    """
    Behaves like parse_http_date(), but returns None on invalid input
    instead of raising an exception.
    """
    try:
        return parse_http_date(date)
    except Exception:
        return None
|
|
|
|
|
2008-08-01 04:47:53 +08:00
|
|
|
# Base 36 functions: useful for generating compact URLs
|
|
|
|
|
|
|
|
def base36_to_int(s):
    """
    Converts a base 36 string to an ``int``. Raises ``ValueError`` if the
    input won't fit into an int.
    """
    # 13 base36 digits are enough to encode any 64-bit integer; reject
    # anything longer to prevent overconsumption of server resources.
    if len(s) > 13:
        raise ValueError("Base36 input too large")
    result = int(s, 36)
    # On Python 2, also make sure the value fits into an int so we never
    # return a long (#15067). The long type was removed in Python 3.
    if six.PY2 and result > sys.maxint:
        raise ValueError("Base36 input too large")
    return result
|
2008-08-01 04:47:53 +08:00
|
|
|
|
|
|
|
def int_to_base36(i):
    """
    Converts an integer to a base36 string.

    Raises ValueError for negative (or, on Python 2, oversized) input and,
    on Python 2, TypeError for non-integer input.
    """
    digits = "0123456789abcdefghijklmnopqrstuvwxyz"
    if i < 0:
        raise ValueError("Negative base36 conversion input.")
    if six.PY2:
        # Python 3 makes these failure modes impossible or irrelevant:
        # floats raise naturally below, and the long type no longer exists.
        if not isinstance(i, six.integer_types):
            raise TypeError("Non-integer base36 conversion input.")
        if i > sys.maxint:
            raise ValueError("Base36 conversion input too large.")
    # Peel off base36 digits least-significant first, prepending each one.
    encoded = ''
    while True:
        i, remainder = divmod(i, 36)
        encoded = digits[remainder] + encoded
        if i == 0:
            break
    return encoded
|
2009-03-22 15:58:29 +08:00
|
|
|
|
2013-06-22 04:59:33 +08:00
|
|
|
def urlsafe_base64_encode(s):
    """
    Encodes a bytestring in base64 for use in URLs, stripping any trailing
    equal signs.
    """
    encoded = base64.urlsafe_b64encode(s)
    # Drop the '=' padding (and any newline) so the value is URL-friendly;
    # urlsafe_base64_decode() restores the padding before decoding.
    return encoded.rstrip(b'\n=')
|
|
|
|
|
|
|
|
def urlsafe_base64_decode(s):
    """
    Decodes a base64 encoded string, adding back any trailing equal signs
    that might have been stripped.

    Raises ValueError on malformed input.
    """
    data = s.encode('utf-8')  # base64encode should only return ASCII.
    # Restore enough '=' padding for binascii to accept the input again.
    padded = data.ljust(len(data) + len(data) % 4, b'=')
    try:
        return base64.urlsafe_b64decode(padded)
    except (LookupError, BinasciiError) as e:
        raise ValueError(e)
|
|
|
|
|
2009-03-22 15:58:29 +08:00
|
|
|
def parse_etags(etag_str):
    """
    Parses a string with one or several etags passed in If-None-Match and
    If-Match headers by the rules in RFC 2616. Returns a list of etags
    without surrounding double quotes (") and unescaped from \\<CHAR>.
    """
    matches = ETAG_MATCH.findall(etag_str)
    if not matches:
        # Malformed header: fall back to treating the whole value as one
        # opaque etag.
        return [etag_str]
    # Undo the backslash escaping inside each quoted etag.
    return [match.encode('ascii').decode('unicode_escape') for match in matches]
|
|
|
|
|
|
|
|
def quote_etag(etag):
    """
    Wraps a string in double quotes escaping contents as necessary.
    """
    # Escape backslashes first so the quote escaping is not double-escaped.
    escaped = etag.replace('\\', '\\\\').replace('"', '\\"')
    return '"%s"' % escaped
|
|
|
|
|
2012-03-30 17:20:04 +08:00
|
|
|
def same_origin(url1, url2):
    """
    Checks if two URLs are 'same-origin'
    """
    first, second = urlparse(url1), urlparse(url2)
    try:
        # Accessing .port can raise ValueError on a malformed port; treat
        # that as not-same-origin rather than propagating the error.
        return (first.scheme == second.scheme
                and first.hostname == second.hostname
                and first.port == second.port)
    except ValueError:
        return False
|
2012-11-18 05:00:53 +08:00
|
|
|
|
|
|
|
def is_safe_url(url, host=None):
    """
    Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
    a different host and uses a safe scheme).

    Always returns ``False`` on an empty url.
    """
    if url is not None:
        # Leading/trailing whitespace is stripped by browsers before
        # following a redirect, so it could otherwise hide an absolute URL
        # from the checks below.
        url = url.strip()
    if not url:
        return False
    # Chrome treats \ completely as /, so a URL such as '\\evil.com' parses
    # here as a relative path but is followed as a protocol-relative
    # redirect by the browser. Normalize before parsing.
    url = url.replace('\\', '/')
    # Chrome considers any URL with more than two leading slashes to be
    # absolute, but urlparse is not so flexible.
    if url.startswith('///'):
        return False
    url_info = urlparse(url)
    # Forbid URLs like http:///evil.com - with a scheme but without a
    # hostname - which some browsers interpret as 'http://evil.com'.
    if url_info.scheme and not url_info.netloc:
        return False
    return ((not url_info.netloc or url_info.netloc == host) and
            (not url_info.scheme or url_info.scheme in ['http', 'https']))
|