Replaced foo.next() with next(foo).

The next() built-in function was introduced in Python 2.6 and is
compatible with Python 3, unlike the iterator .next() method it replaces.
This commit is contained in:
Claude Paroz 2012-05-10 20:14:04 +02:00
parent 1c1a229632
commit 169b1a404c
20 changed files with 73 additions and 73 deletions

View File

@ -53,7 +53,7 @@ class AdminForm(object):
except (KeyError, IndexError): except (KeyError, IndexError):
pass pass
try: try:
return iter(self.form).next() return next(iter(self.form))
except StopIteration: except StopIteration:
return None return None

View File

@ -46,9 +46,9 @@ class Point(GEOSGeometry):
cs = capi.create_cs(c_uint(1), c_uint(ndim)) cs = capi.create_cs(c_uint(1), c_uint(ndim))
i = iter(coords) i = iter(coords)
capi.cs_setx(cs, 0, i.next()) capi.cs_setx(cs, 0, next(i))
capi.cs_sety(cs, 0, i.next()) capi.cs_sety(cs, 0, next(i))
if ndim == 3: capi.cs_setz(cs, 0, i.next()) if ndim == 3: capi.cs_setz(cs, 0, next(i))
return capi.create_point(cs) return capi.create_point(cs)

View File

@ -39,7 +39,7 @@ class ROCIFField(RegexField):
key_iter = iter(key) key_iter = iter(key)
checksum = 0 checksum = 0
for digit in value[1:]: for digit in value[1:]:
checksum += int(digit) * int(key_iter.next()) checksum += int(digit) * int(next(key_iter))
checksum = checksum * 10 % 11 checksum = checksum * 10 % 11
if checksum == 10: if checksum == 10:
checksum = 0 checksum = 0
@ -79,7 +79,7 @@ class ROCNPField(RegexField):
checksum = 0 checksum = 0
value_iter = iter(value) value_iter = iter(value)
for digit in key: for digit in key:
checksum += int(digit) * int(value_iter.next()) checksum += int(digit) * int(next(value_iter))
checksum %= 11 checksum %= 11
if checksum == 10: if checksum == 10:
checksum = 1 checksum = 1

View File

@ -69,7 +69,7 @@ class Storage(object):
count = itertools.count(1) count = itertools.count(1)
while self.exists(name): while self.exists(name):
# file_ext includes the dot. # file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" % (file_root, count.next(), file_ext)) name = os.path.join(dir_name, "%s_%s%s" % (file_root, next(count), file_ext))
return name return name

View File

@ -754,7 +754,7 @@ class CursorIterator(object):
return self return self
def next(self): def next(self):
return _rowfactory(self.iter.next(), self.cursor) return _rowfactory(next(self.iter), self.cursor)
def _rowfactory(row, cursor): def _rowfactory(row, cursor):

View File

@ -128,7 +128,7 @@ class QuerySet(object):
if self._result_cache is not None: if self._result_cache is not None:
return bool(self._result_cache) return bool(self._result_cache)
try: try:
iter(self).next() next(iter(self))
except StopIteration: except StopIteration:
return False return False
return True return True
@ -877,7 +877,7 @@ class QuerySet(object):
if self._iter: if self._iter:
try: try:
for i in range(num or ITER_CHUNK_SIZE): for i in range(num or ITER_CHUNK_SIZE):
self._result_cache.append(self._iter.next()) self._result_cache.append(next(self._iter))
except StopIteration: except StopIteration:
self._iter = None self._iter = None
@ -1147,7 +1147,7 @@ class EmptyQuerySet(QuerySet):
def iterator(self): def iterator(self):
# This slightly odd construction is because we need an empty generator # This slightly odd construction is because we need an empty generator
# (it raises StopIteration immediately). # (it raises StopIteration immediately).
yield iter([]).next() yield next(iter([]))
def all(self): def all(self):
""" """

View File

@ -1082,7 +1082,7 @@ def empty_iter():
""" """
Returns an iterator containing no results. Returns an iterator containing no results.
""" """
yield iter([]).next() yield next(iter([]))
def order_modified_iter(cursor, trim, sentinel): def order_modified_iter(cursor, trim, sentinel):

View File

@ -1140,10 +1140,10 @@ class Query(object):
# join list) an outer join. # join list) an outer join.
join_it = iter(join_list) join_it = iter(join_list)
table_it = iter(self.tables) table_it = iter(self.tables)
join_it.next(), table_it.next() next(join_it), next(table_it)
unconditional = False unconditional = False
for join in join_it: for join in join_it:
table = table_it.next() table = next(table_it)
# Once we hit an outer join, all subsequent joins must # Once we hit an outer join, all subsequent joins must
# also be promoted, regardless of whether they have been # also be promoted, regardless of whether they have been
# promoted as a result of this pass through the tables. # promoted as a result of this pass through the tables.
@ -1774,7 +1774,7 @@ class Query(object):
entry_params = [] entry_params = []
pos = entry.find("%s") pos = entry.find("%s")
while pos != -1: while pos != -1:
entry_params.append(param_iter.next()) entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2) pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params) select_pairs[name] = (entry, entry_params)
# This is order preserving, since self.extra_select is a SortedDict. # This is order preserving, since self.extra_select is a SortedDict.

View File

@ -668,7 +668,7 @@ class HttpResponse(object):
return self return self
def next(self): def next(self):
chunk = self._iterator.next() chunk = next(self._iterator)
if isinstance(chunk, unicode): if isinstance(chunk, unicode):
chunk = chunk.encode(self._charset) chunk = chunk.encode(self._charset)
return str(chunk) return str(chunk)

View File

@ -291,7 +291,7 @@ class LazyStream(object):
while remaining != 0: while remaining != 0:
assert remaining > 0, 'remaining bytes to read should never go negative' assert remaining > 0, 'remaining bytes to read should never go negative'
chunk = self.next() chunk = next(self)
emitting = chunk[:remaining] emitting = chunk[:remaining]
self.unget(chunk[remaining:]) self.unget(chunk[remaining:])
@ -313,7 +313,7 @@ class LazyStream(object):
output = self._leftover output = self._leftover
self._leftover = '' self._leftover = ''
else: else:
output = self._producer.next() output = next(self._producer)
self._unget_history = [] self._unget_history = []
self.position += len(output) self.position += len(output)
return output return output
@ -410,7 +410,7 @@ class BoundaryIter(object):
before the boundary, throw away the boundary bytes themselves, and push the before the boundary, throw away the boundary bytes themselves, and push the
post-boundary bytes back on the stream. post-boundary bytes back on the stream.
The future calls to .next() after locating the boundary will raise a The future calls to next() after locating the boundary will raise a
StopIteration exception. StopIteration exception.
""" """

View File

@ -173,7 +173,7 @@ class Token(object):
sentinal = bit[2] + ')' sentinal = bit[2] + ')'
trans_bit = [bit] trans_bit = [bit]
while not bit.endswith(sentinal): while not bit.endswith(sentinal):
bit = bits.next() bit = next(bits)
trans_bit.append(bit) trans_bit.append(bit)
bit = ' '.join(trans_bit) bit = ' '.join(trans_bit)
split.append(bit) split.append(bit)

View File

@ -66,7 +66,7 @@ class CycleNode(Node):
# First time the node is rendered in template # First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars) context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self] cycle_iter = context.render_context[self]
value = cycle_iter.next().resolve(context) value = next(cycle_iter).resolve(context)
if self.variable_name: if self.variable_name:
context[self.variable_name] = value context[self.variable_name] = value
if self.silent: if self.silent:

View File

@ -165,7 +165,7 @@ class IfParser(object):
self.tokens = mapped_tokens self.tokens = mapped_tokens
self.pos = 0 self.pos = 0
self.current_token = self.next() self.current_token = next(self)
def translate_token(self, token): def translate_token(self, token):
try: try:
@ -193,11 +193,11 @@ class IfParser(object):
def expression(self, rbp=0): def expression(self, rbp=0):
t = self.current_token t = self.current_token
self.current_token = self.next() self.current_token = next(self)
left = t.nud(self) left = t.nud(self)
while rbp < self.current_token.lbp: while rbp < self.current_token.lbp:
t = self.current_token t = self.current_token
self.current_token = self.next() self.current_token = next(self)
left = t.led(left, self) left = t.led(left, self)
return left return left

View File

@ -75,7 +75,7 @@ def normalize(pattern):
# at the next character and possibly go around without consuming another # at the next character and possibly go around without consuming another
# one at the top of the loop. # one at the top of the loop.
try: try:
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
except StopIteration: except StopIteration:
return zip([u''], [[]]) return zip([u''], [[]])
@ -105,14 +105,14 @@ def normalize(pattern):
result = result[:start] + [inner] result = result[:start] + [inner]
elif ch == '[': elif ch == '[':
# Replace ranges with the first character in the range. # Replace ranges with the first character in the range.
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
result.append(ch) result.append(ch)
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
while escaped or ch != ']': while escaped or ch != ']':
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
elif ch == '(': elif ch == '(':
# Some kind of group. # Some kind of group.
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
if ch != '?' or escaped: if ch != '?' or escaped:
# A positional group # A positional group
name = "_%d" % num_args name = "_%d" % num_args
@ -120,7 +120,7 @@ def normalize(pattern):
result.append(Group(((u"%%(%s)s" % name), name))) result.append(Group(((u"%%(%s)s" % name), name)))
walk_to_end(ch, pattern_iter) walk_to_end(ch, pattern_iter)
else: else:
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
if ch in "iLmsu#": if ch in "iLmsu#":
# All of these are ignorable. Walk to the end of the # All of these are ignorable. Walk to the end of the
# group. # group.
@ -133,7 +133,7 @@ def normalize(pattern):
# we cannot reverse. # we cannot reverse.
raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch) raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
else: else:
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
if ch not in ('<', '='): if ch not in ('<', '='):
raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch) raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
# We are in a named capturing group. Extra the name and # We are in a named capturing group. Extra the name and
@ -144,10 +144,10 @@ def normalize(pattern):
else: else:
terminal_char = ')' terminal_char = ')'
name = [] name = []
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
while ch != terminal_char: while ch != terminal_char:
name.append(ch) name.append(ch)
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
param = ''.join(name) param = ''.join(name)
# Named backreferences have already consumed the # Named backreferences have already consumed the
# parenthesis. # parenthesis.
@ -183,7 +183,7 @@ def normalize(pattern):
result.append(ch) result.append(ch)
if consume_next: if consume_next:
ch, escaped = pattern_iter.next() ch, escaped = next(pattern_iter)
else: else:
consume_next = True consume_next = True
except StopIteration: except StopIteration:
@ -208,7 +208,7 @@ def next_char(input_iter):
if ch != '\\': if ch != '\\':
yield ch, False yield ch, False
continue continue
ch = input_iter.next() ch = next(input_iter)
representative = ESCAPE_MAPPINGS.get(ch, ch) representative = ESCAPE_MAPPINGS.get(ch, ch)
if representative is None: if representative is None:
continue continue
@ -245,7 +245,7 @@ def get_quantifier(ch, input_iter):
""" """
if ch in '*?+': if ch in '*?+':
try: try:
ch2, escaped = input_iter.next() ch2, escaped = next(input_iter)
except StopIteration: except StopIteration:
ch2 = None ch2 = None
if ch2 == '?': if ch2 == '?':
@ -256,14 +256,14 @@ def get_quantifier(ch, input_iter):
quant = [] quant = []
while ch != '}': while ch != '}':
ch, escaped = input_iter.next() ch, escaped = next(input_iter)
quant.append(ch) quant.append(ch)
quant = quant[:-1] quant = quant[:-1]
values = ''.join(quant).split(',') values = ''.join(quant).split(',')
# Consume the trailing '?', if necessary. # Consume the trailing '?', if necessary.
try: try:
ch, escaped = input_iter.next() ch, escaped = next(input_iter)
except StopIteration: except StopIteration:
ch = None ch = None
if ch == '?': if ch == '?':

View File

@ -26,7 +26,7 @@ def wrap(text, width):
text = force_unicode(text) text = force_unicode(text)
def _generator(): def _generator():
it = iter(text.split(' ')) it = iter(text.split(' '))
word = it.next() word = next(it)
yield word yield word
pos = len(word) - word.rfind('\n') - 1 pos = len(word) - word.rfind('\n') - 1
for word in it: for word in it:

View File

@ -569,7 +569,7 @@ A naive implementation of ``CycleNode`` might look something like this:
def __init__(self, cyclevars): def __init__(self, cyclevars):
self.cycle_iter = itertools.cycle(cyclevars) self.cycle_iter = itertools.cycle(cyclevars)
def render(self, context): def render(self, context):
return self.cycle_iter.next() return next(self.cycle_iter)
But, suppose we have two templates rendering the template snippet from above at But, suppose we have two templates rendering the template snippet from above at
the same time: the same time:
@ -603,7 +603,7 @@ Let's refactor our ``CycleNode`` implementation to use the ``render_context``:
if self not in context.render_context: if self not in context.render_context:
context.render_context[self] = itertools.cycle(self.cyclevars) context.render_context[self] = itertools.cycle(self.cyclevars)
cycle_iter = context.render_context[self] cycle_iter = context.render_context[self]
return cycle_iter.next() return next(cycle_iter)
Note that it's perfectly safe to store global information that will not change Note that it's perfectly safe to store global information that will not change
throughout the life of the ``Node`` as an attribute. In the case of throughout the life of the ``Node`` as an attribute. In the case of

View File

@ -493,23 +493,23 @@ class SerializationTests(TestCase):
data = serializers.serialize('python', [Event(dt=dt)]) data = serializers.serialize('python', [Event(dt=dt)])
self.assertEqual(data[0]['fields']['dt'], dt) self.assertEqual(data[0]['fields']['dt'], dt)
obj = serializers.deserialize('python', data).next().object obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)]) data = serializers.serialize('json', [Event(dt=dt)])
self.assertIn('"fields": {"dt": "2011-09-01T13:20:30"}', data) self.assertIn('"fields": {"dt": "2011-09-01T13:20:30"}', data)
obj = serializers.deserialize('json', data).next().object obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)]) data = serializers.serialize('xml', [Event(dt=dt)])
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30</field>', data) self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30</field>', data)
obj = serializers.deserialize('xml', data).next().object obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
if 'yaml' in serializers.get_serializer_formats(): if 'yaml' in serializers.get_serializer_formats():
data = serializers.serialize('yaml', [Event(dt=dt)]) data = serializers.serialize('yaml', [Event(dt=dt)])
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30'}", data) self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30'}", data)
obj = serializers.deserialize('yaml', data).next().object obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
def test_naive_datetime_with_microsecond(self): def test_naive_datetime_with_microsecond(self):
@ -517,23 +517,23 @@ class SerializationTests(TestCase):
data = serializers.serialize('python', [Event(dt=dt)]) data = serializers.serialize('python', [Event(dt=dt)])
self.assertEqual(data[0]['fields']['dt'], dt) self.assertEqual(data[0]['fields']['dt'], dt)
obj = serializers.deserialize('python', data).next().object obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)]) data = serializers.serialize('json', [Event(dt=dt)])
self.assertIn('"fields": {"dt": "2011-09-01T13:20:30.405"}', data) self.assertIn('"fields": {"dt": "2011-09-01T13:20:30.405"}', data)
obj = serializers.deserialize('json', data).next().object obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt.replace(microsecond=405000)) self.assertEqual(obj.dt, dt.replace(microsecond=405000))
data = serializers.serialize('xml', [Event(dt=dt)]) data = serializers.serialize('xml', [Event(dt=dt)])
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30.405060</field>', data) self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30.405060</field>', data)
obj = serializers.deserialize('xml', data).next().object obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
if 'yaml' in serializers.get_serializer_formats(): if 'yaml' in serializers.get_serializer_formats():
data = serializers.serialize('yaml', [Event(dt=dt)]) data = serializers.serialize('yaml', [Event(dt=dt)])
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30.405060'}", data) self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30.405060'}", data)
obj = serializers.deserialize('yaml', data).next().object obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
def test_aware_datetime_with_microsecond(self): def test_aware_datetime_with_microsecond(self):
@ -541,23 +541,23 @@ class SerializationTests(TestCase):
data = serializers.serialize('python', [Event(dt=dt)]) data = serializers.serialize('python', [Event(dt=dt)])
self.assertEqual(data[0]['fields']['dt'], dt) self.assertEqual(data[0]['fields']['dt'], dt)
obj = serializers.deserialize('python', data).next().object obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)]) data = serializers.serialize('json', [Event(dt=dt)])
self.assertIn('"fields": {"dt": "2011-09-01T17:20:30.405+07:00"}', data) self.assertIn('"fields": {"dt": "2011-09-01T17:20:30.405+07:00"}', data)
obj = serializers.deserialize('json', data).next().object obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt.replace(microsecond=405000)) self.assertEqual(obj.dt, dt.replace(microsecond=405000))
data = serializers.serialize('xml', [Event(dt=dt)]) data = serializers.serialize('xml', [Event(dt=dt)])
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30.405060+07:00</field>', data) self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30.405060+07:00</field>', data)
obj = serializers.deserialize('xml', data).next().object obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
if 'yaml' in serializers.get_serializer_formats(): if 'yaml' in serializers.get_serializer_formats():
data = serializers.serialize('yaml', [Event(dt=dt)]) data = serializers.serialize('yaml', [Event(dt=dt)])
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30.405060+07:00'}", data) self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30.405060+07:00'}", data)
obj = serializers.deserialize('yaml', data).next().object obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_utc(self): def test_aware_datetime_in_utc(self):
@ -565,23 +565,23 @@ class SerializationTests(TestCase):
data = serializers.serialize('python', [Event(dt=dt)]) data = serializers.serialize('python', [Event(dt=dt)])
self.assertEqual(data[0]['fields']['dt'], dt) self.assertEqual(data[0]['fields']['dt'], dt)
obj = serializers.deserialize('python', data).next().object obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)]) data = serializers.serialize('json', [Event(dt=dt)])
self.assertIn('"fields": {"dt": "2011-09-01T10:20:30Z"}', data) self.assertIn('"fields": {"dt": "2011-09-01T10:20:30Z"}', data)
obj = serializers.deserialize('json', data).next().object obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)]) data = serializers.serialize('xml', [Event(dt=dt)])
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T10:20:30+00:00</field>', data) self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T10:20:30+00:00</field>', data)
obj = serializers.deserialize('xml', data).next().object obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
if 'yaml' in serializers.get_serializer_formats(): if 'yaml' in serializers.get_serializer_formats():
data = serializers.serialize('yaml', [Event(dt=dt)]) data = serializers.serialize('yaml', [Event(dt=dt)])
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 10:20:30+00:00'}", data) self.assertIn("- fields: {dt: !!timestamp '2011-09-01 10:20:30+00:00'}", data)
obj = serializers.deserialize('yaml', data).next().object obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_local_timezone(self): def test_aware_datetime_in_local_timezone(self):
@ -589,23 +589,23 @@ class SerializationTests(TestCase):
data = serializers.serialize('python', [Event(dt=dt)]) data = serializers.serialize('python', [Event(dt=dt)])
self.assertEqual(data[0]['fields']['dt'], dt) self.assertEqual(data[0]['fields']['dt'], dt)
obj = serializers.deserialize('python', data).next().object obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)]) data = serializers.serialize('json', [Event(dt=dt)])
self.assertIn('"fields": {"dt": "2011-09-01T13:20:30+03:00"}', data) self.assertIn('"fields": {"dt": "2011-09-01T13:20:30+03:00"}', data)
obj = serializers.deserialize('json', data).next().object obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)]) data = serializers.serialize('xml', [Event(dt=dt)])
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30+03:00</field>', data) self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30+03:00</field>', data)
obj = serializers.deserialize('xml', data).next().object obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
if 'yaml' in serializers.get_serializer_formats(): if 'yaml' in serializers.get_serializer_formats():
data = serializers.serialize('yaml', [Event(dt=dt)]) data = serializers.serialize('yaml', [Event(dt=dt)])
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30+03:00'}", data) self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30+03:00'}", data)
obj = serializers.deserialize('yaml', data).next().object obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_other_timezone(self): def test_aware_datetime_in_other_timezone(self):
@ -613,23 +613,23 @@ class SerializationTests(TestCase):
data = serializers.serialize('python', [Event(dt=dt)]) data = serializers.serialize('python', [Event(dt=dt)])
self.assertEqual(data[0]['fields']['dt'], dt) self.assertEqual(data[0]['fields']['dt'], dt)
obj = serializers.deserialize('python', data).next().object obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)]) data = serializers.serialize('json', [Event(dt=dt)])
self.assertIn('"fields": {"dt": "2011-09-01T17:20:30+07:00"}', data) self.assertIn('"fields": {"dt": "2011-09-01T17:20:30+07:00"}', data)
obj = serializers.deserialize('json', data).next().object obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)]) data = serializers.serialize('xml', [Event(dt=dt)])
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30+07:00</field>', data) self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30+07:00</field>', data)
obj = serializers.deserialize('xml', data).next().object obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt) self.assertEqual(obj.dt, dt)
if 'yaml' in serializers.get_serializer_formats(): if 'yaml' in serializers.get_serializer_formats():
data = serializers.serialize('yaml', [Event(dt=dt)]) data = serializers.serialize('yaml', [Event(dt=dt)])
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30+07:00'}", data) self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30+07:00'}", data)
obj = serializers.deserialize('yaml', data).next().object obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)

View File

@ -281,7 +281,7 @@ class HttpResponseTests(unittest.TestCase):
my_iter = r.__iter__() my_iter = r.__iter__()
while True: while True:
try: try:
result.append(my_iter.next()) result.append(next(my_iter))
except StopIteration: except StopIteration:
break break
#'\xde\x9e' == unichr(1950).encode('utf-8') #'\xde\x9e' == unichr(1950).encode('utf-8')

View File

@ -1398,12 +1398,12 @@ class Queries6Tests(TestCase):
# Test that parallel iterators work. # Test that parallel iterators work.
qs = Tag.objects.all() qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs) i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(i1.next()), '<Tag: t1>') self.assertEqual(repr(next(i1)), '<Tag: t1>')
self.assertEqual(repr(i1.next()), '<Tag: t2>') self.assertEqual(repr(next(i1)), '<Tag: t2>')
self.assertEqual(repr(i2.next()), '<Tag: t1>') self.assertEqual(repr(next(i2)), '<Tag: t1>')
self.assertEqual(repr(i2.next()), '<Tag: t2>') self.assertEqual(repr(next(i2)), '<Tag: t2>')
self.assertEqual(repr(i2.next()), '<Tag: t3>') self.assertEqual(repr(next(i2)), '<Tag: t3>')
self.assertEqual(repr(i1.next()), '<Tag: t3>') self.assertEqual(repr(next(i1)), '<Tag: t3>')
qs = X.objects.all() qs = X.objects.all()
self.assertEqual(bool(qs), False) self.assertEqual(bool(qs), False)

View File

@ -489,7 +489,7 @@ def fieldsTest(format, self):
# Serialize then deserialize the test database # Serialize then deserialize the test database
serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3')) serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3'))
result = serializers.deserialize(format, serialized_data).next() result = next(serializers.deserialize(format, serialized_data))
# Check that the deserialized object contains data in only the serialized fields. # Check that the deserialized object contains data in only the serialized fields.
self.assertEqual(result.object.field1, 'first') self.assertEqual(result.object.field1, 'first')