mirror of https://github.com/django/django.git

Fixed all E226 violations

parent 7288e1b02b
commit c347f78cc1
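For context: E226 is the pycodestyle/flake8 check "missing whitespace around arithmetic operator". Every hunk below makes the same mechanical change. A minimal sketch of the pattern (hypothetical snippet, not a line from Django itself):

    width, height, padding = 4, 3, 1

    # Before: flake8 reports "E226 missing whitespace around arithmetic operator"
    area = width*height + 2*padding

    # After: spaces around the arithmetic operators satisfy E226
    area = width * height + 2 * padding
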
@@ -34,12 +34,12 @@ def paginator_number(cl, i):
     if i == DOT:
         return '... '
     elif i == cl.page_num:
-        return format_html('<span class="this-page">{0}</span> ', i+1)
+        return format_html('<span class="this-page">{0}</span> ', i + 1)
     else:
         return format_html('<a href="{0}"{1}>{2}</a> ',
                            cl.get_query_string({PAGE_VAR: i}),
-                           mark_safe(' class="end"' if i == cl.paginator.num_pages-1 else ''),
-                           i+1)
+                           mark_safe(' class="end"' if i == cl.paginator.num_pages - 1 else ''),
+                           i + 1)


 @register.inclusion_tag('admin/pagination.html')

@@ -412,7 +412,7 @@ def reverse_field_path(model, path):
     for piece in pieces:
         field, model, direct, m2m = parent._meta.get_field_by_name(piece)
         # skip trailing data field if extant:
-        if len(reversed_path) == len(pieces)-1: # final iteration
+        if len(reversed_path) == len(pieces) - 1: # final iteration
             try:
                 get_model_from_relation(field)
             except NotRelationField:

@@ -236,7 +236,7 @@ class ChangeList(six.with_metaclass(RenameChangeListMethods)):
             result_list = self.queryset._clone()
         else:
             try:
-                result_list = paginator.page(self.page_num+1).object_list
+                result_list = paginator.page(self.page_num + 1).object_list
             except InvalidPage:
                 raise IncorrectLookupParameters

@@ -145,7 +145,7 @@ class ListMixin(object):
             del self[:]
         else:
             cache = list(self)
-            for i in range(n-1):
+            for i in range(n - 1):
                 self.extend(cache)
         return self

@@ -134,7 +134,7 @@ class Polygon(GEOSGeometry):
             return capi.get_extring(self.ptr)
         else:
             # Getting the interior ring, have to subtract 1 from the index.
-            return capi.get_intring(self.ptr, index-1)
+            return capi.get_intring(self.ptr, index - 1)

     def _get_single_external(self, index):
         return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)

@@ -170,6 +170,6 @@ class Polygon(GEOSGeometry):
     @property
     def kml(self):
         "Returns the KML representation of this Polygon."
-        inner_kml = ''.join("<innerBoundaryIs>%s</innerBoundaryIs>" % self[i+1].kml
+        inner_kml = ''.join("<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml
                             for i in xrange(self.num_interior_rings))
         return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)

@@ -1081,11 +1081,11 @@ class GEOSTest(unittest.TestCase, TestDataMixin):

         self.assertEqual(ls.project(Point(0, 20)), 10.0)
         self.assertEqual(ls.project(Point(7, 6)), 24)
-        self.assertEqual(ls.project_normalized(Point(0, 20)), 1.0/3)
+        self.assertEqual(ls.project_normalized(Point(0, 20)), 1.0 / 3)

         self.assertEqual(ls.interpolate(10), Point(0, 10))
         self.assertEqual(ls.interpolate(24), Point(10, 6))
-        self.assertEqual(ls.interpolate_normalized(1.0/3), Point(0, 10))
+        self.assertEqual(ls.interpolate_normalized(1.0 / 3), Point(0, 10))

         self.assertEqual(mls.project(Point(0, 20)), 10)
         self.assertEqual(mls.project(Point(7, 6)), 16)

@@ -349,8 +349,8 @@ class ListMixinTest(unittest.TestCase):
         ul.sort()
         self.assertEqual(pl[:], ul[:], 'sort')
         mid = pl[len(pl) // 2]
-        pl.sort(key=lambda x: (mid-x)**2)
-        ul.sort(key=lambda x: (mid-x)**2)
+        pl.sort(key=lambda x: (mid - x) ** 2)
+        ul.sort(key=lambda x: (mid - x) ** 2)
         self.assertEqual(pl[:], ul[:], 'sort w/ key')

         pl.insert(0, pl.pop())

@@ -359,8 +359,8 @@ class ListMixinTest(unittest.TestCase):
         ul.sort(reverse=True)
         self.assertEqual(pl[:], ul[:], 'sort w/ reverse')
         mid = pl[len(pl) // 2]
-        pl.sort(key=lambda x: (mid-x)**2)
-        ul.sort(key=lambda x: (mid-x)**2)
+        pl.sort(key=lambda x: (mid - x) ** 2)
+        ul.sort(key=lambda x: (mid - x) ** 2)
         self.assertEqual(pl[:], ul[:], 'sort w/ key')

     def test_12_arithmetic(self):

@@ -79,7 +79,7 @@ class GoogleZoom(object):
         fac = min(max(sin(DTOR * lat), -0.9999), 0.9999)

         # Calculating the pixel y coordinate.
-        px_y = round(npix + (0.5 * log((1 + fac)/(1 - fac)) * (-1.0 * self._radpp[zoom])))
+        px_y = round(npix + (0.5 * log((1 + fac) / (1 - fac)) * (-1.0 * self._radpp[zoom])))

         # Returning the pixel x, y to the caller of the function.
         return (px_x, px_y)

@@ -116,8 +116,8 @@ class GoogleZoom(object):

         # Getting the lower-left and upper-right lat/lon coordinates
         # for the bounding box of the tile.
-        ll = self.pixel_to_lonlat((px[0]-delta, px[1]-delta), zoom)
-        ur = self.pixel_to_lonlat((px[0]+delta, px[1]+delta), zoom)
+        ll = self.pixel_to_lonlat((px[0] - delta, px[1] - delta), zoom)
+        ur = self.pixel_to_lonlat((px[0] + delta, px[1] + delta), zoom)

         # Constructing the Polygon, representing the tile and returning.
         return Polygon(LinearRing(ll, (ll[0], ur[1]), ur, (ur[0], ll[1]), ll), srid=4326)

@@ -143,10 +143,10 @@ class GoogleZoom(object):
             if (env_w > tile_w) or (env_h > tile_h):
                 if z == 0:
                     raise GoogleMapException('Geometry width and height should not exceed that of the Earth.')
-                return z-1
+                return z - 1

         # Otherwise, we've zoomed in to the max.
-        return self._nzoom-1
+        return self._nzoom - 1

     def get_width_height(self, extent):
         """

@@ -32,7 +32,7 @@ def index(request, sitemaps):
         sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url))

         if pages > 1:
-            for page in range(2, pages+1):
+            for page in range(2, pages + 1):
                 sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page))
     xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites})
     return HttpResponse(xml, content_type='application/xml')

@@ -94,7 +94,7 @@ class DistanceTest(unittest.TestCase):

         a5 = d1 * D(m=10)
         self.assertTrue(isinstance(a5, Area))
-        self.assertEqual(a5.sq_m, 100*10)
+        self.assertEqual(a5.sq_m, 100 * 10)

         with self.assertRaises(TypeError):
             d1 *= D(m=1)

@@ -600,7 +600,7 @@ class LayerMapping(object):
         for i, end in enumerate(indices):
             # Constructing the slice to use for this step; the last slice is
             # special (e.g, [100:] instead of [90:100]).
-            if i+1 == n_i:
+            if i + 1 == n_i:
                 step_slice = slice(beg, None)
             else:
                 step_slice = slice(beg, end)

@@ -148,7 +148,7 @@ def apnumber(value):
         return value
     if not 0 < value < 10:
         return value
-    return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value-1]
+    return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value - 1]


 # Perform the comparison in the default time zone when USE_TZ = True

@@ -233,9 +233,9 @@ class BaseCache(object):
         if value is None:
             raise ValueError("Key '%s' not found" % key)

-        self.set(key, value, version=version+delta)
+        self.set(key, value, version=version + delta)
         self.delete(key, version=version)
-        return version+delta
+        return version + delta

     def decr_version(self, key, delta=1, version=None):
         """Substracts delta from the cache version for the supplied key. Returns

@@ -11,7 +11,7 @@ from django.utils.encoding import force_bytes, python_2_unicode_compatible

 @python_2_unicode_compatible
 class File(FileProxyMixin):
-    DEFAULT_CHUNK_SIZE = 64 * 2**10
+    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10

     def __init__(self, file, name=None):
         self.file = file

@@ -65,7 +65,7 @@ def get_image_dimensions(file_or_path, close=False):
                     raise
             if p.image:
                 return p.image.size
-            chunk_size = chunk_size*2
+            chunk_size *= 2
         return None
     finally:
         if close:

@@ -38,7 +38,7 @@ def _samefile(src, dst):
             os.path.normcase(os.path.abspath(dst)))


-def file_move_safe(old_file_name, new_file_name, chunk_size=1024*64, allow_overwrite=False):
+def file_move_safe(old_file_name, new_file_name, chunk_size=1024 * 64, allow_overwrite=False):
     """
     Moves a file from one location to another in the safest way possible.


@@ -23,7 +23,7 @@ class UploadedFile(File):
     An ``UploadedFile`` object behaves somewhat like a file object and
     represents some file data that the user submitted with a form.
     """
-    DEFAULT_CHUNK_SIZE = 64 * 2**10
+    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10

     def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
         super(UploadedFile, self).__init__(file, name)

@@ -317,7 +317,7 @@ class ManagementUtility(object):
         cword = int(os.environ['COMP_CWORD'])

         try:
-            curr = cwords[cword-1]
+            curr = cwords[cword - 1]
         except IndexError:
             curr = ''

@@ -350,7 +350,7 @@ class ManagementUtility(object):
             options += [(s_opt.get_opt_string(), s_opt.nargs) for s_opt in
                         subcommand_cls.option_list]
             # filter out previously specified options from available options
-            prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
+            prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
             options = [opt for opt in options if opt[0] not in prev_opts]

             # filter options by current input

@@ -69,7 +69,7 @@ class Command(BaseCommand):
             table_output.append(" ".join(field_output))
         full_statement = ["CREATE TABLE %s (" % qn(tablename)]
         for i, line in enumerate(table_output):
-            full_statement.append('    %s%s' % (line, ',' if i < len(table_output)-1 else ''))
+            full_statement.append('    %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
         full_statement.append(');')
         with transaction.commit_on_success_unless_managed():
             curs = connection.cursor()

@@ -147,7 +147,7 @@ def get_mod_func(callback):
         dot = callback.rindex('.')
     except ValueError:
         return callback, ''
-    return callback[:dot], callback[dot+1:]
+    return callback[:dot], callback[dot + 1:]


 class LocaleRegexProvider(object):

@@ -33,7 +33,7 @@ class SeparateDatabaseAndState(Operation):
         base_state = to_state
         for pos, database_operation in enumerate(reversed(self.database_operations)):
             to_state = base_state.clone()
-            for dbop in self.database_operations[:-(pos+1)]:
+            for dbop in self.database_operations[:-(pos + 1)]:
                 dbop.state_forwards(app_label, to_state)
             from_state = base_state.clone()
             database_operation.state_forwards(app_label, from_state)

@@ -48,13 +48,13 @@ class MigrationOptimizer(object):
         new_operations = []
         for i, operation in enumerate(operations):
             # Compare it to each operation after it
-            for j, other in enumerate(operations[i+1:]):
+            for j, other in enumerate(operations[i + 1:]):
                 result = self.reduce(operation, other)
                 if result is not None:
                     # Optimize! Add result, then remaining others, then return
                     new_operations.extend(result)
-                    new_operations.extend(operations[i+1:i+1+j])
-                    new_operations.extend(operations[i+j+2:])
+                    new_operations.extend(operations[i + 1:i + 1 + j])
+                    new_operations.extend(operations[i + j + 2:])
                     return new_operations
                 if not self.can_optimize_through(operation, other, app_label):
                     new_operations.append(operation)

@@ -1359,7 +1359,7 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None,
                 next = requested[f.name]
             else:
                 next = None
-            klass_info = get_klass_info(f.rel.to, max_depth=max_depth, cur_depth=cur_depth+1,
+            klass_info = get_klass_info(f.rel.to, max_depth=max_depth, cur_depth=cur_depth + 1,
                                         requested=next, only_load=only_load)
             related_fields.append((f, klass_info))

@@ -62,7 +62,7 @@ class SelectDateWidget(Widget):
             self.years = years
         else:
             this_year = datetime.date.today().year
-            self.years = range(this_year, this_year+10)
+            self.years = range(this_year, this_year + 10)

         # Optional dict of months to use in the "month" select box.
         if months:

@@ -363,7 +363,7 @@ class BaseFormSet(object):
         if self.can_order:
             # Only pre-fill the ordering field for initial forms.
             if index is not None and index < self.initial_form_count():
-                form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), initial=index+1, required=False)
+                form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), initial=index + 1, required=False)
             else:
                 form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), required=False)
         if self.can_delete:

@@ -572,7 +572,7 @@ class BaseModelFormSet(BaseFormSet):
         if i >= self.initial_form_count() and self.initial_extra:
             # Set initial values for extra forms
             try:
-                kwargs['initial'] = self.initial_extra[i-self.initial_form_count()]
+                kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
             except IndexError:
                 pass
         return super(BaseModelFormSet, self)._construct_form(i, **kwargs)

@@ -94,7 +94,7 @@ class MultiPartParser(object):
         # For compatibility with low-level network APIs (with 32-bit integers),
         # the chunk size should be < 2^31, but still divisible by 4.
         possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
-        self._chunk_size = min([2**31-4] + possible_sizes)
+        self._chunk_size = min([2 ** 31 - 4] + possible_sizes)

         self._meta = META
         self._encoding = encoding or settings.DEFAULT_CHARSET

@@ -267,7 +267,7 @@ class MultiPartParser(object):

     def IE_sanitize(self, filename):
         """Cleanup filename from Internet Explorer full paths."""
-        return filename and filename[filename.rfind("\\")+1:].strip()
+        return filename and filename[filename.rfind("\\") + 1:].strip()


 class LazyStream(six.Iterator):

@@ -512,11 +512,11 @@ class BoundaryIter(six.Iterator):
             end = index
             next = index + len(self._boundary)
             # backup over CRLF
-            last = max(0, end-1)
-            if data[last:last+1] == b'\n':
+            last = max(0, end - 1)
+            if data[last:last + 1] == b'\n':
                 end -= 1
-            last = max(0, end-1)
-            if data[last:last+1] == b'\r':
+            last = max(0, end - 1)
+            if data[last:last + 1] == b'\r':
                 end -= 1
             return end, next

@@ -623,7 +623,7 @@ def parse_header(line):
         i = p.find(b'=')
         if i >= 0:
             name = p[:i].strip().lower().decode('ascii')
-            value = p[i+1:].strip()
+            value = p[i + 1:].strip()
             if len(value) >= 2 and value[:1] == value[-1:] == b'"':
                 value = value[1:-1]
                 value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"')

@@ -680,14 +680,14 @@ def unordered_list(value, autoescape=None):
                 sublist_item = title
                 title = ''
             elif i < list_length - 1:
-                next_item = list_[i+1]
+                next_item = list_[i + 1]
                 if next_item and isinstance(next_item, (list, tuple)):
                     # The next item is a sub-list.
                     sublist_item = next_item
                     # We've processed the next item now too.
                     i += 1
             if sublist_item:
-                sublist = _helper(sublist_item, tabs+1)
+                sublist = _helper(sublist_item, tabs + 1)
                 sublist = '\n%s<ul>\n%s\n%s</ul>\n%s' % (indent, sublist,
                                                          indent, indent)
                 output.append('%s<li>%s%s</li>' % (indent,

@@ -171,7 +171,7 @@ class ForNode(Node):
         for i, item in enumerate(values):
             # Shortcuts for current loop iteration number.
             loop_dict['counter0'] = i
-            loop_dict['counter'] = i+1
+            loop_dict['counter'] = i + 1
             # Reverse counter iteration numbers.
             loop_dict['revcounter'] = len_values - i
             loop_dict['revcounter0'] = len_values - i - 1

@@ -832,7 +832,7 @@ def do_for(parser, token):
         raise TemplateSyntaxError("'for' tag received an invalid argument:"
                                   " %s" % token.contents)

-    sequence = parser.compile_filter(bits[in_index+1])
+    sequence = parser.compile_filter(bits[in_index + 1])
     nodelist_loop = parser.parse(('empty', 'endfor',))
     token = parser.next_token()
     if token.contents == 'empty':

@@ -158,7 +158,7 @@ class IfParser(object):
         i = 0
         while i < l:
             token = tokens[i]
-            if token == "not" and i + 1 < l and tokens[i+1] == "in":
+            if token == "not" and i + 1 < l and tokens[i + 1] == "in":
                 token = "not in"
                 i += 1  # skip 'in'
             mapped_tokens.append(self.translate_token(token))

@@ -203,10 +203,10 @@ def reorder_suite(suite, classes):
     """
     class_count = len(classes)
     suite_class = type(suite)
-    bins = [suite_class() for i in range(class_count+1)]
+    bins = [suite_class() for i in range(class_count + 1)]
     partition_suite(suite, classes, bins)
     for i in range(class_count):
-        bins[0].addTests(bins[i+1])
+        bins[0].addTests(bins[i + 1])
     return bins[0]


@@ -303,7 +303,7 @@ class DateFormat(TimeFormat):
         weekday = self.data.weekday() + 1
         day_of_year = self.z()
         if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
-            if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
+            if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)):
                 week_number = 53
             else:
                 week_number = 52

@@ -83,8 +83,8 @@ def strftime(dt, fmt):
     s1 = time.strftime(fmt, (year,) + timetuple[1:])
     sites1 = _findall(s1, str(year))

-    s2 = time.strftime(fmt, (year+28,) + timetuple[1:])
-    sites2 = _findall(s2, str(year+28))
+    s2 = time.strftime(fmt, (year + 28,) + timetuple[1:])
+    sites2 = _findall(s2, str(year + 28))

     sites = []
     for site in sites1:

@@ -94,5 +94,5 @@ def strftime(dt, fmt):
     s = s1
     syear = "%04d" % (dt.year,)
     for site in sites:
-        s = s[:site] + syear + s[site+4:]
+        s = s[:site] + syear + s[site + 4:]
     return s

@@ -25,7 +25,7 @@ def literals(choices, prefix="", suffix=""):
     individually.

     """
-    return "|".join(prefix+re.escape(c)+suffix for c in choices.split())
+    return "|".join(prefix + re.escape(c) + suffix for c in choices.split())


 class Lexer(object):

@@ -59,7 +59,7 @@ gettext_noop = gettext_lazy = _ = gettext
 def to_locale(language):
     p = language.find('-')
     if p >= 0:
-        return language[:p].lower()+'_'+language[p+1:].upper()
+        return language[:p].lower() + '_' + language[p + 1:].upper()
     else:
         return language.lower()

@@ -55,12 +55,12 @@ def to_locale(language, to_lower=False):
     p = language.find('-')
     if p >= 0:
         if to_lower:
-            return language[:p].lower()+'_'+language[p+1:].lower()
+            return language[:p].lower() + '_' + language[p + 1:].lower()
         else:
             # Get correct locale for sr-latn
-            if len(language[p+1:]) > 2:
-                return language[:p].lower()+'_'+language[p+1].upper()+language[p+2:].lower()
-            return language[:p].lower()+'_'+language[p+1:].upper()
+            if len(language[p + 1:]) > 2:
+                return language[:p].lower() + '_' + language[p + 1].upper() + language[p + 2:].lower()
+            return language[:p].lower() + '_' + language[p + 1:].upper()
     else:
         return language.lower()

@@ -69,7 +69,7 @@ def to_language(locale):
     """Turns a locale name (en_US) into a language name (en-us)."""
     p = locale.find('_')
     if p >= 0:
-        return locale[:p].lower()+'-'+locale[p+1:].lower()
+        return locale[:p].lower() + '-' + locale[p + 1:].lower()
     else:
         return locale.lower()

@@ -26,8 +26,8 @@ def linebreak_iter(template_source):
     yield 0
     p = template_source.find('\n')
     while p >= 0:
-        yield p+1
-        p = template_source.find('\n', p+1)
+        yield p + 1
+        p = template_source.find('\n', p + 1)
     yield len(template_source) + 1


@@ -302,7 +302,7 @@ class ExceptionReporter(object):
             end = getattr(self.exc_value, 'end', None)
             if start is not None and end is not None:
                 unicode_str = self.exc_value.args[1]
-                unicode_hint = smart_text(unicode_str[max(start-5, 0):min(end+5, len(unicode_str))], 'ascii', errors='replace')
+                unicode_hint = smart_text(unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))], 'ascii', errors='replace')
         from django import get_version
         c = {
             'is_email': self.is_email,

@@ -419,7 +419,7 @@ class ExceptionReporter(object):

         pre_context = source[lower_bound:lineno]
         context_line = source[lineno]
-        post_context = source[lineno+1:upper_bound]
+        post_context = source[lineno + 1:upper_bound]

         return lower_bound, pre_context, context_line, post_context

@@ -68,8 +68,8 @@ def fixliterals(fname):
             new.append(m.group(0))
             continue

-        sys.stdout.write("\n"+"-"*80+"\n")
-        sys.stdout.write(data[prev_start+1:m.start()])
+        sys.stdout.write("\n" + "-" * 80 + "\n")
+        sys.stdout.write(data[prev_start + 1:m.start()])
         sys.stdout.write(colorize(m.group(0), fg="red"))
         sys.stdout.write(data[m.end():next_end])
         sys.stdout.write("\n\n")

@@ -161,7 +161,7 @@ def make_template_info(filename, root_dirs):
     relative_filenames from the supplied filename and root template directories
     """
     return Template(filename,
-                    [filename[len(d)+1:] for d in root_dirs if filename.startswith(d)])
+                    [filename[len(d) + 1:] for d in root_dirs if filename.startswith(d)])


 class Template(object):

@@ -4,7 +4,7 @@ install-script = scripts/rpm-install.sh

 [flake8]
 exclude=./django/utils/dictconfig.py,./django/contrib/comments/*,./django/utils/unittest.py,./tests/comment_tests/*,./django/test/_doctest.py,./django/utils/six.py,./django/conf/app_template/*
-ignore=E124,E125,E127,E128,E226,E251,E501,W601
+ignore=E124,E125,E127,E128,E251,E501,W601

 [metadata]
 license-file = LICENSE

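The setup.cfg hunk above is what keeps the fix in place: with E226 removed from the ignore list, any future violation fails the project's flake8 run. One way to check a single file locally (a sketch assuming the pycodestyle package is installed; the file name is illustrative):

    import pycodestyle

    # Select only the E226 check; StyleGuide and check_files are
    # pycodestyle's documented API for programmatic runs.
    style = pycodestyle.StyleGuide(select=['E226'])
    report = style.check_files(['example.py'])
    print('E226 violations:', report.total_errors)
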
@@ -61,7 +61,7 @@ class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParam
 class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):

     def queryset(self, request, queryset):
-        raise 1/0
+        raise 1 / 0


 class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):

@@ -346,7 +346,7 @@ class AggregationTests(TestCase):
     def test_aggregate_fexpr(self):
         # Aggregates can be used with F() expressions
         # ... where the F() is pushed into the HAVING clause
-        qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_books__lt=F('num_awards')/2).order_by('name').values('name', 'num_books', 'num_awards')
+        qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_books__lt=F('num_awards') / 2).order_by('name').values('name', 'num_books', 'num_awards')
         self.assertQuerysetEqual(
             qs, [
                 {'num_books': 1, 'name': 'Morgan Kaufmann', 'num_awards': 9},

@@ -355,7 +355,7 @@ class AggregationTests(TestCase):
             lambda p: p,
         )

-        qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards')/2).order_by('name').values('name', 'num_books', 'num_awards')
+        qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards') / 2).order_by('name').values('name', 'num_books', 'num_awards')
         self.assertQuerysetEqual(
             qs, [
                 {'num_books': 2, 'name': 'Apress', 'num_awards': 3},

@@ -366,7 +366,7 @@ class AggregationTests(TestCase):
         )

         # ... and where the F() references an aggregate
-        qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_awards__gt=2*F('num_books')).order_by('name').values('name', 'num_books', 'num_awards')
+        qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_awards__gt=2 * F('num_books')).order_by('name').values('name', 'num_books', 'num_awards')
         self.assertQuerysetEqual(
             qs, [
                 {'num_books': 1, 'name': 'Morgan Kaufmann', 'num_awards': 9},

@@ -375,7 +375,7 @@ class AggregationTests(TestCase):
             lambda p: p,
         )

-        qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards')/2).order_by('name').values('name', 'num_books', 'num_awards')
+        qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards') / 2).order_by('name').values('name', 'num_books', 'num_awards')
         self.assertQuerysetEqual(
             qs, [
                 {'num_books': 2, 'name': 'Apress', 'num_awards': 3},

@@ -94,7 +94,7 @@ class BulkCreateTests(TestCase):
         with override_settings(DEBUG=True):
             connection.queries = []
             TwoFields.objects.bulk_create([
-                TwoFields(f1=i, f2=i+1) for i in range(0, 1001)
+                TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
             ])
         self.assertEqual(TwoFields.objects.count(), 1001)
         self.assertEqual(

@@ -115,7 +115,7 @@ class BulkCreateTests(TestCase):
         with override_settings(DEBUG=True):
             connection.queries = []
             TwoFields.objects.bulk_create([
-                TwoFields(f1=i, f2=i+1) for i in range(0, 1001)
+                TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
             ])
             self.assertTrue(len(connection.queries) < 10)

@@ -127,7 +127,7 @@ class BulkCreateTests(TestCase):
         with override_settings(DEBUG=True):
             connection.queries = []
             TwoFields.objects.bulk_create([
-                TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i+1)
+                TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
                 for i in range(100000, 101000)])
             self.assertEqual(TwoFields.objects.count(), 1000)
             # We can't assume much about the ID's created, except that the above

@@ -145,7 +145,7 @@ class BulkCreateTests(TestCase):
         with override_settings(DEBUG=True):
             connection.queries = []
             TwoFields.objects.bulk_create([
-                TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i+1)
+                TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
                 for i in range(100000, 101000)])
             self.assertTrue(len(connection.queries) < 10)

@@ -436,13 +436,13 @@ class BaseCacheTests(object):
         it is an absolute expiration timestamp instead of a relative
         offset. Test that we honour this convention. Refs #12399.
         '''
-        self.cache.set('key1', 'eggs', 60*60*24*30 + 1)  # 30 days + 1 second
+        self.cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1)  # 30 days + 1 second
         self.assertEqual(self.cache.get('key1'), 'eggs')

-        self.cache.add('key2', 'ham', 60*60*24*30 + 1)
+        self.cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1)
         self.assertEqual(self.cache.get('key2'), 'ham')

-        self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60*60*24*30 + 1)
+        self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
         self.assertEqual(self.cache.get('key3'), 'sausage')
         self.assertEqual(self.cache.get('key4'), 'lobster bisque')

@@ -1000,7 +1000,7 @@ class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
         """incr/decr does not modify expiry time (matches memcached behavior)"""
         key = 'value'
         _key = self.cache.make_key(key)
-        self.cache.set(key, 1, timeout=self.cache.default_timeout*10)
+        self.cache.set(key, 1, timeout=self.cache.default_timeout * 10)
         expire = self.cache._expire_info[_key]
         self.cache.incr(key)
         self.assertEqual(expire, self.cache._expire_info[_key])

@@ -613,15 +613,15 @@ class DefaultFiltersTests(TestCase):
         # NOTE: \xa0 avoids wrapping between value and unit
         self.assertEqual(filesizeformat(1023), '1023\xa0bytes')
         self.assertEqual(filesizeformat(1024), '1.0\xa0KB')
-        self.assertEqual(filesizeformat(10*1024), '10.0\xa0KB')
-        self.assertEqual(filesizeformat(1024*1024-1), '1024.0\xa0KB')
-        self.assertEqual(filesizeformat(1024*1024), '1.0\xa0MB')
-        self.assertEqual(filesizeformat(1024*1024*50), '50.0\xa0MB')
-        self.assertEqual(filesizeformat(1024*1024*1024-1), '1024.0\xa0MB')
-        self.assertEqual(filesizeformat(1024*1024*1024), '1.0\xa0GB')
-        self.assertEqual(filesizeformat(1024*1024*1024*1024), '1.0\xa0TB')
-        self.assertEqual(filesizeformat(1024*1024*1024*1024*1024), '1.0\xa0PB')
-        self.assertEqual(filesizeformat(1024*1024*1024*1024*1024*2000),
+        self.assertEqual(filesizeformat(10 * 1024), '10.0\xa0KB')
+        self.assertEqual(filesizeformat(1024 * 1024 - 1), '1024.0\xa0KB')
+        self.assertEqual(filesizeformat(1024 * 1024), '1.0\xa0MB')
+        self.assertEqual(filesizeformat(1024 * 1024 * 50), '50.0\xa0MB')
+        self.assertEqual(filesizeformat(1024 * 1024 * 1024 - 1), '1024.0\xa0MB')
+        self.assertEqual(filesizeformat(1024 * 1024 * 1024), '1.0\xa0GB')
+        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024), '1.0\xa0TB')
+        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024), '1.0\xa0PB')
+        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000),
                          '2000.0\xa0PB')
         self.assertEqual(filesizeformat(complex(1, -1)), '0\xa0bytes')
         self.assertEqual(filesizeformat(""), '0\xa0bytes')

@@ -680,16 +680,16 @@ class DefaultFiltersI18NTests(TransRealMixin, TestCase):
         with self.settings(USE_L10N=True), translation.override('de', deactivate=True):
             self.assertEqual(filesizeformat(1023), '1023\xa0Bytes')
             self.assertEqual(filesizeformat(1024), '1,0\xa0KB')
-            self.assertEqual(filesizeformat(10*1024), '10,0\xa0KB')
-            self.assertEqual(filesizeformat(1024*1024-1), '1024,0\xa0KB')
-            self.assertEqual(filesizeformat(1024*1024), '1,0\xa0MB')
-            self.assertEqual(filesizeformat(1024*1024*50), '50,0\xa0MB')
-            self.assertEqual(filesizeformat(1024*1024*1024-1), '1024,0\xa0MB')
-            self.assertEqual(filesizeformat(1024*1024*1024), '1,0\xa0GB')
-            self.assertEqual(filesizeformat(1024*1024*1024*1024), '1,0\xa0TB')
-            self.assertEqual(filesizeformat(1024*1024*1024*1024*1024),
+            self.assertEqual(filesizeformat(10 * 1024), '10,0\xa0KB')
+            self.assertEqual(filesizeformat(1024 * 1024 - 1), '1024,0\xa0KB')
+            self.assertEqual(filesizeformat(1024 * 1024), '1,0\xa0MB')
+            self.assertEqual(filesizeformat(1024 * 1024 * 50), '50,0\xa0MB')
+            self.assertEqual(filesizeformat(1024 * 1024 * 1024 - 1), '1024,0\xa0MB')
+            self.assertEqual(filesizeformat(1024 * 1024 * 1024), '1,0\xa0GB')
+            self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024), '1,0\xa0TB')
+            self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024),
                              '1,0\xa0PB')
-            self.assertEqual(filesizeformat(1024*1024*1024*1024*1024*2000),
+            self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000),
                              '2000,0\xa0PB')
             self.assertEqual(filesizeformat(complex(1, -1)), '0\xa0Bytes')
             self.assertEqual(filesizeformat(""), '0\xa0Bytes')

@@ -74,7 +74,7 @@ class ExpressionsTests(TestCase):

         # We can perform arithmetic operations in expressions
         # Make sure we have 2 spare chairs
-        company_query.update(num_chairs=F("num_employees")+2)
+        company_query.update(num_chairs=F("num_employees") + 2)
         self.assertQuerysetEqual(
             company_query, [
                 {

@@ -760,11 +760,11 @@ class FieldsTests(SimpleTestCase):
     def test_url_regex_ticket11198(self):
         f = URLField()
         # hangs "forever" if catastrophic backtracking in ticket:#11198 not fixed
-        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X"*200,))
+        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 200,))

         # a second test, to make sure the problem is really addressed, even on
         # domains that don't fail the domain label length check in the regex
-        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X"*60,))
+        self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'", f.clean, 'http://%s' % ("X" * 60,))

     def test_urlfield_2(self):
         f = URLField(required=False)

@@ -283,26 +283,26 @@ class LookupTests(TestCase):
             Article.objects.extra(select={'id_plus_one': 'id+1'})
             .order_by('id').values_list('id_plus_one', 'id'),
             [
-                (self.a1.id+1, self.a1.id),
-                (self.a2.id+1, self.a2.id),
-                (self.a3.id+1, self.a3.id),
-                (self.a4.id+1, self.a4.id),
-                (self.a5.id+1, self.a5.id),
-                (self.a6.id+1, self.a6.id),
-                (self.a7.id+1, self.a7.id)
+                (self.a1.id + 1, self.a1.id),
+                (self.a2.id + 1, self.a2.id),
+                (self.a3.id + 1, self.a3.id),
+                (self.a4.id + 1, self.a4.id),
+                (self.a5.id + 1, self.a5.id),
+                (self.a6.id + 1, self.a6.id),
+                (self.a7.id + 1, self.a7.id)
             ],
             transform=identity)
         self.assertQuerysetEqual(
             Article.objects.extra(select={'id_plus_one': 'id+1'})
             .order_by('id').values_list('id', 'id_plus_one'),
             [
-                (self.a1.id, self.a1.id+1),
-                (self.a2.id, self.a2.id+1),
-                (self.a3.id, self.a3.id+1),
-                (self.a4.id, self.a4.id+1),
-                (self.a5.id, self.a5.id+1),
-                (self.a6.id, self.a6.id+1),
-                (self.a7.id, self.a7.id+1)
+                (self.a1.id, self.a1.id + 1),
+                (self.a2.id, self.a2.id + 1),
+                (self.a3.id, self.a3.id + 1),
+                (self.a4.id, self.a4.id + 1),
+                (self.a5.id, self.a5.id + 1),
+                (self.a6.id, self.a6.id + 1),
+                (self.a7.id, self.a7.id + 1)
             ],
             transform=identity)
         self.assertQuerysetEqual(

@@ -162,7 +162,7 @@ class RequestsTests(SimpleTestCase):
         response.set_cookie('max_age', max_age=10)
         max_age_cookie = response.cookies['max_age']
         self.assertEqual(max_age_cookie['max-age'], 10)
-        self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))
+        self.assertEqual(max_age_cookie['expires'], cookie_date(time.time() + 10))

     def test_httponly_cookie(self):
         response = HttpResponse()

@@ -9,7 +9,7 @@ from .models import (SyndicatedArticle, ExclusiveArticle, CustomArticle,
 class SitesFrameworkTestCase(TestCase):
     def setUp(self):
         Site.objects.get_or_create(id=settings.SITE_ID, domain="example.com", name="example.com")
-        Site.objects.create(id=settings.SITE_ID+1, domain="example2.com", name="example2.com")
+        Site.objects.create(id=settings.SITE_ID + 1, domain="example2.com", name="example2.com")

     def test_site_fk(self):
         article = ExclusiveArticle.objects.create(title="Breaking News!", site_id=settings.SITE_ID)

@@ -18,9 +18,9 @@ class SitesFrameworkTestCase(TestCase):
     def test_sites_m2m(self):
         article = SyndicatedArticle.objects.create(title="Fresh News!")
         article.sites.add(Site.objects.get(id=settings.SITE_ID))
-        article.sites.add(Site.objects.get(id=settings.SITE_ID+1))
+        article.sites.add(Site.objects.get(id=settings.SITE_ID + 1))
         article2 = SyndicatedArticle.objects.create(title="More News!")
-        article2.sites.add(Site.objects.get(id=settings.SITE_ID+1))
+        article2.sites.add(Site.objects.get(id=settings.SITE_ID + 1))
         self.assertEqual(SyndicatedArticle.on_site.all().get(), article)

     def test_custom_named_field(self):

@@ -28,28 +28,28 @@ class TimesinceTests(unittest.TestCase):

     def test_ignore_microseconds_and_seconds(self):
         """ Microseconds and seconds are ignored. """
-        self.assertEqual(timesince(self.t, self.t+self.onemicrosecond),
+        self.assertEqual(timesince(self.t, self.t + self.onemicrosecond),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t+self.onesecond),
+        self.assertEqual(timesince(self.t, self.t + self.onesecond),
                          '0\xa0minutes')

     def test_other_units(self):
         """ Test other units. """
-        self.assertEqual(timesince(self.t, self.t+self.oneminute),
+        self.assertEqual(timesince(self.t, self.t + self.oneminute),
                          '1\xa0minute')
-        self.assertEqual(timesince(self.t, self.t+self.onehour), '1\xa0hour')
-        self.assertEqual(timesince(self.t, self.t+self.oneday), '1\xa0day')
-        self.assertEqual(timesince(self.t, self.t+self.oneweek), '1\xa0week')
-        self.assertEqual(timesince(self.t, self.t+self.onemonth),
+        self.assertEqual(timesince(self.t, self.t + self.onehour), '1\xa0hour')
+        self.assertEqual(timesince(self.t, self.t + self.oneday), '1\xa0day')
+        self.assertEqual(timesince(self.t, self.t + self.oneweek), '1\xa0week')
+        self.assertEqual(timesince(self.t, self.t + self.onemonth),
                          '1\xa0month')
-        self.assertEqual(timesince(self.t, self.t+self.oneyear), '1\xa0year')
+        self.assertEqual(timesince(self.t, self.t + self.oneyear), '1\xa0year')

     def test_multiple_units(self):
         """ Test multiple units. """
         self.assertEqual(timesince(self.t,
-            self.t+2*self.oneday+6*self.onehour), '2\xa0days, 6\xa0hours')
+            self.t + 2 * self.oneday + 6 * self.onehour), '2\xa0days, 6\xa0hours')
         self.assertEqual(timesince(self.t,
-            self.t+2*self.oneweek+2*self.oneday), '2\xa0weeks, 2\xa0days')
+            self.t + 2 * self.oneweek + 2 * self.oneday), '2\xa0weeks, 2\xa0days')

     def test_display_first_unit(self):
         """

@@ -57,42 +57,42 @@ class TimesinceTests(unittest.TestCase):
         displayed.
         """
         self.assertEqual(timesince(self.t,
-            self.t+2*self.oneweek+3*self.onehour+4*self.oneminute),
+            self.t + 2 * self.oneweek + 3 * self.onehour + 4 * self.oneminute),
             '2\xa0weeks')

         self.assertEqual(timesince(self.t,
-            self.t+4*self.oneday+5*self.oneminute), '4\xa0days')
+            self.t + 4 * self.oneday + 5 * self.oneminute), '4\xa0days')

     def test_display_second_before_first(self):
         """
         When the second date occurs before the first, we should always
         get 0 minutes.
         """
-        self.assertEqual(timesince(self.t, self.t-self.onemicrosecond),
+        self.assertEqual(timesince(self.t, self.t - self.onemicrosecond),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.onesecond),
+        self.assertEqual(timesince(self.t, self.t - self.onesecond),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.oneminute),
+        self.assertEqual(timesince(self.t, self.t - self.oneminute),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.onehour),
+        self.assertEqual(timesince(self.t, self.t - self.onehour),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.oneday),
+        self.assertEqual(timesince(self.t, self.t - self.oneday),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.oneweek),
+        self.assertEqual(timesince(self.t, self.t - self.oneweek),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.onemonth),
+        self.assertEqual(timesince(self.t, self.t - self.onemonth),
                          '0\xa0minutes')
-        self.assertEqual(timesince(self.t, self.t-self.oneyear),
+        self.assertEqual(timesince(self.t, self.t - self.oneyear),
                          '0\xa0minutes')
         self.assertEqual(timesince(self.t,
-            self.t-2*self.oneday-6*self.onehour), '0\xa0minutes')
+            self.t - 2 * self.oneday - 6 * self.onehour), '0\xa0minutes')
         self.assertEqual(timesince(self.t,
-            self.t-2*self.oneweek-2*self.oneday), '0\xa0minutes')
+            self.t - 2 * self.oneweek - 2 * self.oneday), '0\xa0minutes')
         self.assertEqual(timesince(self.t,
-            self.t-2*self.oneweek-3*self.onehour-4*self.oneminute),
+            self.t - 2 * self.oneweek - 3 * self.onehour - 4 * self.oneminute),
             '0\xa0minutes')
         self.assertEqual(timesince(self.t,
-            self.t-4*self.oneday-5*self.oneminute), '0\xa0minutes')
+            self.t - 4 * self.oneday - 5 * self.oneminute), '0\xa0minutes')

     @requires_tz_support
     def test_different_timezones(self):