Replaced foo.next() with next(foo).
The next() built-in was introduced in Python 2.6 and is compatible with Python 3.
This commit is contained in:
parent
1c1a229632
commit
169b1a404c
|
@ -53,7 +53,7 @@ class AdminForm(object):
|
|||
except (KeyError, IndexError):
|
||||
pass
|
||||
try:
|
||||
return iter(self.form).next()
|
||||
return next(iter(self.form))
|
||||
except StopIteration:
|
||||
return None
|
||||
|
||||
|
|
|
@ -46,9 +46,9 @@ class Point(GEOSGeometry):
|
|||
|
||||
cs = capi.create_cs(c_uint(1), c_uint(ndim))
|
||||
i = iter(coords)
|
||||
capi.cs_setx(cs, 0, i.next())
|
||||
capi.cs_sety(cs, 0, i.next())
|
||||
if ndim == 3: capi.cs_setz(cs, 0, i.next())
|
||||
capi.cs_setx(cs, 0, next(i))
|
||||
capi.cs_sety(cs, 0, next(i))
|
||||
if ndim == 3: capi.cs_setz(cs, 0, next(i))
|
||||
|
||||
return capi.create_point(cs)
|
||||
|
||||
|
|
|
@ -39,7 +39,7 @@ class ROCIFField(RegexField):
|
|||
key_iter = iter(key)
|
||||
checksum = 0
|
||||
for digit in value[1:]:
|
||||
checksum += int(digit) * int(key_iter.next())
|
||||
checksum += int(digit) * int(next(key_iter))
|
||||
checksum = checksum * 10 % 11
|
||||
if checksum == 10:
|
||||
checksum = 0
|
||||
|
@ -79,7 +79,7 @@ class ROCNPField(RegexField):
|
|||
checksum = 0
|
||||
value_iter = iter(value)
|
||||
for digit in key:
|
||||
checksum += int(digit) * int(value_iter.next())
|
||||
checksum += int(digit) * int(next(value_iter))
|
||||
checksum %= 11
|
||||
if checksum == 10:
|
||||
checksum = 1
|
||||
|
|
|
@ -69,7 +69,7 @@ class Storage(object):
|
|||
count = itertools.count(1)
|
||||
while self.exists(name):
|
||||
# file_ext includes the dot.
|
||||
name = os.path.join(dir_name, "%s_%s%s" % (file_root, count.next(), file_ext))
|
||||
name = os.path.join(dir_name, "%s_%s%s" % (file_root, next(count), file_ext))
|
||||
|
||||
return name
|
||||
|
||||
|
|
|
@ -754,7 +754,7 @@ class CursorIterator(object):
|
|||
return self
|
||||
|
||||
def next(self):
|
||||
return _rowfactory(self.iter.next(), self.cursor)
|
||||
return _rowfactory(next(self.iter), self.cursor)
|
||||
|
||||
|
||||
def _rowfactory(row, cursor):
|
||||
|
|
|
@ -128,7 +128,7 @@ class QuerySet(object):
|
|||
if self._result_cache is not None:
|
||||
return bool(self._result_cache)
|
||||
try:
|
||||
iter(self).next()
|
||||
next(iter(self))
|
||||
except StopIteration:
|
||||
return False
|
||||
return True
|
||||
|
@ -877,7 +877,7 @@ class QuerySet(object):
|
|||
if self._iter:
|
||||
try:
|
||||
for i in range(num or ITER_CHUNK_SIZE):
|
||||
self._result_cache.append(self._iter.next())
|
||||
self._result_cache.append(next(self._iter))
|
||||
except StopIteration:
|
||||
self._iter = None
|
||||
|
||||
|
@ -1147,7 +1147,7 @@ class EmptyQuerySet(QuerySet):
|
|||
def iterator(self):
|
||||
# This slightly odd construction is because we need an empty generator
|
||||
# (it raises StopIteration immediately).
|
||||
yield iter([]).next()
|
||||
yield next(iter([]))
|
||||
|
||||
def all(self):
|
||||
"""
|
||||
|
|
|
@ -1082,7 +1082,7 @@ def empty_iter():
|
|||
"""
|
||||
Returns an iterator containing no results.
|
||||
"""
|
||||
yield iter([]).next()
|
||||
yield next(iter([]))
|
||||
|
||||
|
||||
def order_modified_iter(cursor, trim, sentinel):
|
||||
|
|
|
@ -1140,10 +1140,10 @@ class Query(object):
|
|||
# join list) an outer join.
|
||||
join_it = iter(join_list)
|
||||
table_it = iter(self.tables)
|
||||
join_it.next(), table_it.next()
|
||||
next(join_it), next(table_it)
|
||||
unconditional = False
|
||||
for join in join_it:
|
||||
table = table_it.next()
|
||||
table = next(table_it)
|
||||
# Once we hit an outer join, all subsequent joins must
|
||||
# also be promoted, regardless of whether they have been
|
||||
# promoted as a result of this pass through the tables.
|
||||
|
@ -1774,7 +1774,7 @@ class Query(object):
|
|||
entry_params = []
|
||||
pos = entry.find("%s")
|
||||
while pos != -1:
|
||||
entry_params.append(param_iter.next())
|
||||
entry_params.append(next(param_iter))
|
||||
pos = entry.find("%s", pos + 2)
|
||||
select_pairs[name] = (entry, entry_params)
|
||||
# This is order preserving, since self.extra_select is a SortedDict.
|
||||
|
|
|
@ -668,7 +668,7 @@ class HttpResponse(object):
|
|||
return self
|
||||
|
||||
def next(self):
|
||||
chunk = self._iterator.next()
|
||||
chunk = next(self._iterator)
|
||||
if isinstance(chunk, unicode):
|
||||
chunk = chunk.encode(self._charset)
|
||||
return str(chunk)
|
||||
|
|
|
@ -291,7 +291,7 @@ class LazyStream(object):
|
|||
while remaining != 0:
|
||||
assert remaining > 0, 'remaining bytes to read should never go negative'
|
||||
|
||||
chunk = self.next()
|
||||
chunk = next(self)
|
||||
|
||||
emitting = chunk[:remaining]
|
||||
self.unget(chunk[remaining:])
|
||||
|
@ -313,7 +313,7 @@ class LazyStream(object):
|
|||
output = self._leftover
|
||||
self._leftover = ''
|
||||
else:
|
||||
output = self._producer.next()
|
||||
output = next(self._producer)
|
||||
self._unget_history = []
|
||||
self.position += len(output)
|
||||
return output
|
||||
|
@ -410,7 +410,7 @@ class BoundaryIter(object):
|
|||
before the boundary, throw away the boundary bytes themselves, and push the
|
||||
post-boundary bytes back on the stream.
|
||||
|
||||
The future calls to .next() after locating the boundary will raise a
|
||||
The future calls to next() after locating the boundary will raise a
|
||||
StopIteration exception.
|
||||
"""
|
||||
|
||||
|
|
|
@ -173,7 +173,7 @@ class Token(object):
|
|||
sentinal = bit[2] + ')'
|
||||
trans_bit = [bit]
|
||||
while not bit.endswith(sentinal):
|
||||
bit = bits.next()
|
||||
bit = next(bits)
|
||||
trans_bit.append(bit)
|
||||
bit = ' '.join(trans_bit)
|
||||
split.append(bit)
|
||||
|
|
|
@ -66,7 +66,7 @@ class CycleNode(Node):
|
|||
# First time the node is rendered in template
|
||||
context.render_context[self] = itertools_cycle(self.cyclevars)
|
||||
cycle_iter = context.render_context[self]
|
||||
value = cycle_iter.next().resolve(context)
|
||||
value = next(cycle_iter).resolve(context)
|
||||
if self.variable_name:
|
||||
context[self.variable_name] = value
|
||||
if self.silent:
|
||||
|
|
|
@ -165,7 +165,7 @@ class IfParser(object):
|
|||
|
||||
self.tokens = mapped_tokens
|
||||
self.pos = 0
|
||||
self.current_token = self.next()
|
||||
self.current_token = next(self)
|
||||
|
||||
def translate_token(self, token):
|
||||
try:
|
||||
|
@ -193,11 +193,11 @@ class IfParser(object):
|
|||
|
||||
def expression(self, rbp=0):
|
||||
t = self.current_token
|
||||
self.current_token = self.next()
|
||||
self.current_token = next(self)
|
||||
left = t.nud(self)
|
||||
while rbp < self.current_token.lbp:
|
||||
t = self.current_token
|
||||
self.current_token = self.next()
|
||||
self.current_token = next(self)
|
||||
left = t.led(left, self)
|
||||
return left
|
||||
|
||||
|
|
|
@ -75,7 +75,7 @@ def normalize(pattern):
|
|||
# at the next character and possibly go around without consuming another
|
||||
# one at the top of the loop.
|
||||
try:
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
except StopIteration:
|
||||
return zip([u''], [[]])
|
||||
|
||||
|
@ -105,14 +105,14 @@ def normalize(pattern):
|
|||
result = result[:start] + [inner]
|
||||
elif ch == '[':
|
||||
# Replace ranges with the first character in the range.
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
result.append(ch)
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
while escaped or ch != ']':
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
elif ch == '(':
|
||||
# Some kind of group.
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
if ch != '?' or escaped:
|
||||
# A positional group
|
||||
name = "_%d" % num_args
|
||||
|
@ -120,7 +120,7 @@ def normalize(pattern):
|
|||
result.append(Group(((u"%%(%s)s" % name), name)))
|
||||
walk_to_end(ch, pattern_iter)
|
||||
else:
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
if ch in "iLmsu#":
|
||||
# All of these are ignorable. Walk to the end of the
|
||||
# group.
|
||||
|
@ -133,7 +133,7 @@ def normalize(pattern):
|
|||
# we cannot reverse.
|
||||
raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
|
||||
else:
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
if ch not in ('<', '='):
|
||||
raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
|
||||
# We are in a named capturing group. Extra the name and
|
||||
|
@ -144,10 +144,10 @@ def normalize(pattern):
|
|||
else:
|
||||
terminal_char = ')'
|
||||
name = []
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
while ch != terminal_char:
|
||||
name.append(ch)
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
param = ''.join(name)
|
||||
# Named backreferences have already consumed the
|
||||
# parenthesis.
|
||||
|
@ -183,7 +183,7 @@ def normalize(pattern):
|
|||
result.append(ch)
|
||||
|
||||
if consume_next:
|
||||
ch, escaped = pattern_iter.next()
|
||||
ch, escaped = next(pattern_iter)
|
||||
else:
|
||||
consume_next = True
|
||||
except StopIteration:
|
||||
|
@ -208,7 +208,7 @@ def next_char(input_iter):
|
|||
if ch != '\\':
|
||||
yield ch, False
|
||||
continue
|
||||
ch = input_iter.next()
|
||||
ch = next(input_iter)
|
||||
representative = ESCAPE_MAPPINGS.get(ch, ch)
|
||||
if representative is None:
|
||||
continue
|
||||
|
@ -245,7 +245,7 @@ def get_quantifier(ch, input_iter):
|
|||
"""
|
||||
if ch in '*?+':
|
||||
try:
|
||||
ch2, escaped = input_iter.next()
|
||||
ch2, escaped = next(input_iter)
|
||||
except StopIteration:
|
||||
ch2 = None
|
||||
if ch2 == '?':
|
||||
|
@ -256,14 +256,14 @@ def get_quantifier(ch, input_iter):
|
|||
|
||||
quant = []
|
||||
while ch != '}':
|
||||
ch, escaped = input_iter.next()
|
||||
ch, escaped = next(input_iter)
|
||||
quant.append(ch)
|
||||
quant = quant[:-1]
|
||||
values = ''.join(quant).split(',')
|
||||
|
||||
# Consume the trailing '?', if necessary.
|
||||
try:
|
||||
ch, escaped = input_iter.next()
|
||||
ch, escaped = next(input_iter)
|
||||
except StopIteration:
|
||||
ch = None
|
||||
if ch == '?':
|
||||
|
|
|
@ -26,7 +26,7 @@ def wrap(text, width):
|
|||
text = force_unicode(text)
|
||||
def _generator():
|
||||
it = iter(text.split(' '))
|
||||
word = it.next()
|
||||
word = next(it)
|
||||
yield word
|
||||
pos = len(word) - word.rfind('\n') - 1
|
||||
for word in it:
|
||||
|
|
|
@ -569,7 +569,7 @@ A naive implementation of ``CycleNode`` might look something like this:
|
|||
def __init__(self, cyclevars):
|
||||
self.cycle_iter = itertools.cycle(cyclevars)
|
||||
def render(self, context):
|
||||
return self.cycle_iter.next()
|
||||
return next(self.cycle_iter)
|
||||
|
||||
But, suppose we have two templates rendering the template snippet from above at
|
||||
the same time:
|
||||
|
@ -603,7 +603,7 @@ Let's refactor our ``CycleNode`` implementation to use the ``render_context``:
|
|||
if self not in context.render_context:
|
||||
context.render_context[self] = itertools.cycle(self.cyclevars)
|
||||
cycle_iter = context.render_context[self]
|
||||
return cycle_iter.next()
|
||||
return next(cycle_iter)
|
||||
|
||||
Note that it's perfectly safe to store global information that will not change
|
||||
throughout the life of the ``Node`` as an attribute. In the case of
|
||||
|
|
|
@ -493,23 +493,23 @@ class SerializationTests(TestCase):
|
|||
|
||||
data = serializers.serialize('python', [Event(dt=dt)])
|
||||
self.assertEqual(data[0]['fields']['dt'], dt)
|
||||
obj = serializers.deserialize('python', data).next().object
|
||||
obj = next(serializers.deserialize('python', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('json', [Event(dt=dt)])
|
||||
self.assertIn('"fields": {"dt": "2011-09-01T13:20:30"}', data)
|
||||
obj = serializers.deserialize('json', data).next().object
|
||||
obj = next(serializers.deserialize('json', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('xml', [Event(dt=dt)])
|
||||
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30</field>', data)
|
||||
obj = serializers.deserialize('xml', data).next().object
|
||||
obj = next(serializers.deserialize('xml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
if 'yaml' in serializers.get_serializer_formats():
|
||||
data = serializers.serialize('yaml', [Event(dt=dt)])
|
||||
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30'}", data)
|
||||
obj = serializers.deserialize('yaml', data).next().object
|
||||
obj = next(serializers.deserialize('yaml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
def test_naive_datetime_with_microsecond(self):
|
||||
|
@ -517,23 +517,23 @@ class SerializationTests(TestCase):
|
|||
|
||||
data = serializers.serialize('python', [Event(dt=dt)])
|
||||
self.assertEqual(data[0]['fields']['dt'], dt)
|
||||
obj = serializers.deserialize('python', data).next().object
|
||||
obj = next(serializers.deserialize('python', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('json', [Event(dt=dt)])
|
||||
self.assertIn('"fields": {"dt": "2011-09-01T13:20:30.405"}', data)
|
||||
obj = serializers.deserialize('json', data).next().object
|
||||
obj = next(serializers.deserialize('json', data)).object
|
||||
self.assertEqual(obj.dt, dt.replace(microsecond=405000))
|
||||
|
||||
data = serializers.serialize('xml', [Event(dt=dt)])
|
||||
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30.405060</field>', data)
|
||||
obj = serializers.deserialize('xml', data).next().object
|
||||
obj = next(serializers.deserialize('xml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
if 'yaml' in serializers.get_serializer_formats():
|
||||
data = serializers.serialize('yaml', [Event(dt=dt)])
|
||||
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30.405060'}", data)
|
||||
obj = serializers.deserialize('yaml', data).next().object
|
||||
obj = next(serializers.deserialize('yaml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
def test_aware_datetime_with_microsecond(self):
|
||||
|
@ -541,23 +541,23 @@ class SerializationTests(TestCase):
|
|||
|
||||
data = serializers.serialize('python', [Event(dt=dt)])
|
||||
self.assertEqual(data[0]['fields']['dt'], dt)
|
||||
obj = serializers.deserialize('python', data).next().object
|
||||
obj = next(serializers.deserialize('python', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('json', [Event(dt=dt)])
|
||||
self.assertIn('"fields": {"dt": "2011-09-01T17:20:30.405+07:00"}', data)
|
||||
obj = serializers.deserialize('json', data).next().object
|
||||
obj = next(serializers.deserialize('json', data)).object
|
||||
self.assertEqual(obj.dt, dt.replace(microsecond=405000))
|
||||
|
||||
data = serializers.serialize('xml', [Event(dt=dt)])
|
||||
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30.405060+07:00</field>', data)
|
||||
obj = serializers.deserialize('xml', data).next().object
|
||||
obj = next(serializers.deserialize('xml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
if 'yaml' in serializers.get_serializer_formats():
|
||||
data = serializers.serialize('yaml', [Event(dt=dt)])
|
||||
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30.405060+07:00'}", data)
|
||||
obj = serializers.deserialize('yaml', data).next().object
|
||||
obj = next(serializers.deserialize('yaml', data)).object
|
||||
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
|
||||
|
||||
def test_aware_datetime_in_utc(self):
|
||||
|
@ -565,23 +565,23 @@ class SerializationTests(TestCase):
|
|||
|
||||
data = serializers.serialize('python', [Event(dt=dt)])
|
||||
self.assertEqual(data[0]['fields']['dt'], dt)
|
||||
obj = serializers.deserialize('python', data).next().object
|
||||
obj = next(serializers.deserialize('python', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('json', [Event(dt=dt)])
|
||||
self.assertIn('"fields": {"dt": "2011-09-01T10:20:30Z"}', data)
|
||||
obj = serializers.deserialize('json', data).next().object
|
||||
obj = next(serializers.deserialize('json', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('xml', [Event(dt=dt)])
|
||||
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T10:20:30+00:00</field>', data)
|
||||
obj = serializers.deserialize('xml', data).next().object
|
||||
obj = next(serializers.deserialize('xml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
if 'yaml' in serializers.get_serializer_formats():
|
||||
data = serializers.serialize('yaml', [Event(dt=dt)])
|
||||
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 10:20:30+00:00'}", data)
|
||||
obj = serializers.deserialize('yaml', data).next().object
|
||||
obj = next(serializers.deserialize('yaml', data)).object
|
||||
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
|
||||
|
||||
def test_aware_datetime_in_local_timezone(self):
|
||||
|
@ -589,23 +589,23 @@ class SerializationTests(TestCase):
|
|||
|
||||
data = serializers.serialize('python', [Event(dt=dt)])
|
||||
self.assertEqual(data[0]['fields']['dt'], dt)
|
||||
obj = serializers.deserialize('python', data).next().object
|
||||
obj = next(serializers.deserialize('python', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('json', [Event(dt=dt)])
|
||||
self.assertIn('"fields": {"dt": "2011-09-01T13:20:30+03:00"}', data)
|
||||
obj = serializers.deserialize('json', data).next().object
|
||||
obj = next(serializers.deserialize('json', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('xml', [Event(dt=dt)])
|
||||
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30+03:00</field>', data)
|
||||
obj = serializers.deserialize('xml', data).next().object
|
||||
obj = next(serializers.deserialize('xml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
if 'yaml' in serializers.get_serializer_formats():
|
||||
data = serializers.serialize('yaml', [Event(dt=dt)])
|
||||
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30+03:00'}", data)
|
||||
obj = serializers.deserialize('yaml', data).next().object
|
||||
obj = next(serializers.deserialize('yaml', data)).object
|
||||
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
|
||||
|
||||
def test_aware_datetime_in_other_timezone(self):
|
||||
|
@ -613,23 +613,23 @@ class SerializationTests(TestCase):
|
|||
|
||||
data = serializers.serialize('python', [Event(dt=dt)])
|
||||
self.assertEqual(data[0]['fields']['dt'], dt)
|
||||
obj = serializers.deserialize('python', data).next().object
|
||||
obj = next(serializers.deserialize('python', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('json', [Event(dt=dt)])
|
||||
self.assertIn('"fields": {"dt": "2011-09-01T17:20:30+07:00"}', data)
|
||||
obj = serializers.deserialize('json', data).next().object
|
||||
obj = next(serializers.deserialize('json', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
data = serializers.serialize('xml', [Event(dt=dt)])
|
||||
self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30+07:00</field>', data)
|
||||
obj = serializers.deserialize('xml', data).next().object
|
||||
obj = next(serializers.deserialize('xml', data)).object
|
||||
self.assertEqual(obj.dt, dt)
|
||||
|
||||
if 'yaml' in serializers.get_serializer_formats():
|
||||
data = serializers.serialize('yaml', [Event(dt=dt)])
|
||||
self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30+07:00'}", data)
|
||||
obj = serializers.deserialize('yaml', data).next().object
|
||||
obj = next(serializers.deserialize('yaml', data)).object
|
||||
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
|
||||
|
||||
|
||||
|
|
|
@ -281,7 +281,7 @@ class HttpResponseTests(unittest.TestCase):
|
|||
my_iter = r.__iter__()
|
||||
while True:
|
||||
try:
|
||||
result.append(my_iter.next())
|
||||
result.append(next(my_iter))
|
||||
except StopIteration:
|
||||
break
|
||||
#'\xde\x9e' == unichr(1950).encode('utf-8')
|
||||
|
|
|
@ -1398,12 +1398,12 @@ class Queries6Tests(TestCase):
|
|||
# Test that parallel iterators work.
|
||||
qs = Tag.objects.all()
|
||||
i1, i2 = iter(qs), iter(qs)
|
||||
self.assertEqual(repr(i1.next()), '<Tag: t1>')
|
||||
self.assertEqual(repr(i1.next()), '<Tag: t2>')
|
||||
self.assertEqual(repr(i2.next()), '<Tag: t1>')
|
||||
self.assertEqual(repr(i2.next()), '<Tag: t2>')
|
||||
self.assertEqual(repr(i2.next()), '<Tag: t3>')
|
||||
self.assertEqual(repr(i1.next()), '<Tag: t3>')
|
||||
self.assertEqual(repr(next(i1)), '<Tag: t1>')
|
||||
self.assertEqual(repr(next(i1)), '<Tag: t2>')
|
||||
self.assertEqual(repr(next(i2)), '<Tag: t1>')
|
||||
self.assertEqual(repr(next(i2)), '<Tag: t2>')
|
||||
self.assertEqual(repr(next(i2)), '<Tag: t3>')
|
||||
self.assertEqual(repr(next(i1)), '<Tag: t3>')
|
||||
|
||||
qs = X.objects.all()
|
||||
self.assertEqual(bool(qs), False)
|
||||
|
|
|
@ -489,7 +489,7 @@ def fieldsTest(format, self):
|
|||
|
||||
# Serialize then deserialize the test database
|
||||
serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3'))
|
||||
result = serializers.deserialize(format, serialized_data).next()
|
||||
result = next(serializers.deserialize(format, serialized_data))
|
||||
|
||||
# Check that the deserialized object contains data in only the serialized fields.
|
||||
self.assertEqual(result.object.field1, 'first')
|
||||
|
|
Loading…
Reference in New Issue