Fixed #7635: do a better job checking for infinite loops in multi-part MIME parsing. Thanks, Mike Axiak.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@7905 bcc190cf-cafb-0310-a4f2-bffc1f526a37
This commit is contained in:
Jacob Kaplan-Moss 2008-07-12 20:43:15 +00:00
parent 8e852a92ec
commit c121ff4046
1 changed files with 23 additions and 32 deletions

View File

@ -270,24 +270,9 @@ class LazyStream(object):
self._empty = False self._empty = False
self._leftover = '' self._leftover = ''
self.length = length self.length = length
self._position = 0 self.position = 0
self._remaining = length self._remaining = length
self._unget_history = []
# These fields are to do sanity checking to make sure we don't
# have infinite loops getting/ungetting from the stream. The
# purpose overall is to raise an exception if we perform lots
# of stream get/unget gymnastics without getting
# anywhere. Naturally this is not sound, but most probably
# would indicate a bug if the exception is raised.
# largest position tells us how far this lazystream has ever
# been advanced
self._largest_position = 0
# "modifications since" will start at zero and increment every
# time the position is modified but a new largest position is
# not achieved.
self._modifications_since = 0
def tell(self): def tell(self):
return self.position return self.position
@ -329,6 +314,7 @@ class LazyStream(object):
self._leftover = '' self._leftover = ''
else: else:
output = self._producer.next() output = self._producer.next()
self._unget_history = []
self.position += len(output) self.position += len(output)
return output return output
@ -351,25 +337,30 @@ class LazyStream(object):
Future calls to read() will return those bytes first. The Future calls to read() will return those bytes first. The
stream position and thus tell() will be rewound. stream position and thus tell() will be rewound.
""" """
if not bytes:
return
self._update_unget_history(len(bytes))
self.position -= len(bytes) self.position -= len(bytes)
self._leftover = ''.join([bytes, self._leftover]) self._leftover = ''.join([bytes, self._leftover])
def _set_position(self, value): def _update_unget_history(self, num_bytes):
if value > self._largest_position: """
self._modifications_since = 0 Updates the unget history as a sanity check to see if we've pushed
self._largest_position = value back the same number of bytes in one chunk. If we keep ungetting the
else: same number of bytes many times (here, 50), we're mostly likely in an
self._modifications_since += 1 infinite loop of some sort. This is usually caused by a
if self._modifications_since > 500: maliciously-malformed MIME request.
raise SuspiciousOperation( """
"The multipart parser got stuck, which shouldn't happen with" self._unget_history = [num_bytes] + self._unget_history[:49]
" normal uploaded files. Check for malicious upload activity;" number_equal = len([current_number for current_number in self._unget_history
" if there is none, report this to the Django developers." if current_number == num_bytes])
)
self._position = value if number_equal > 40:
raise SuspiciousOperation(
position = property(lambda self: self._position, _set_position) "The multipart parser got stuck, which shouldn't happen with"
" normal uploaded files. Check for malicious upload activity;"
" if there is none, report this to the Django developers."
)
class ChunkIter(object): class ChunkIter(object):
""" """