import re

from django.utils.text import compress_string
from django.utils.cache import patch_vary_headers
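
# The word boundaries keep substrings such as "gzipped" from matching, while
# tokens like "gzip" and "x-gzip" in an Accept-Encoding value do match.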
re_accepts_gzip = re.compile(r'\bgzip\b')

class GZipMiddleware(object):
    """
    This middleware compresses content if the browser allows gzip compression.
    It sets the Vary header accordingly, so that caches will base their storage
    on the Accept-Encoding header.
    """
    def process_response(self, request, response):
        # It's not worth compressing non-OK or really short responses.
        if response.status_code != 200 or len(response.content) < 200:
            return response
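
        # Record that the response varies on Accept-Encoding, so HTTP caches
        # store compressed and uncompressed variants separately.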
        patch_vary_headers(response, ('Accept-Encoding',))

        # Avoid gzipping if we've already got a content-encoding.
        if response.has_header('Content-Encoding'):
            return response

        # MSIE has issues with gzipped responses of various content types.
        if "msie" in request.META.get('HTTP_USER_AGENT', '').lower():
            ctype = response.get('Content-Type', '').lower()
            if not ctype.startswith("text/") or "javascript" in ctype:
                return response
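
        # Only compress when the client explicitly advertises gzip support.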
        ae = request.META.get('HTTP_ACCEPT_ENCODING', '')
        if not re_accepts_gzip.search(ae):
            return response
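
        # compress_string() gzips the body; Content-Length is then updated to
        # the size of the compressed payload.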
        response.content = compress_string(response.content)
        response['Content-Encoding'] = 'gzip'
        response['Content-Length'] = str(len(response.content))
        return response
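
# A minimal usage sketch: the middleware is enabled by listing its dotted path
# in the project's settings module, e.g.
#
#     MIDDLEWARE_CLASSES = (
#         'django.middleware.gzip.GZipMiddleware',
#         # ... other middleware ...
#     )
#
# Every response then passes through process_response() above and is gzipped
# when the request's Accept-Encoding header includes "gzip".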