Fixed assorted flake8 errors.
parent 695bc0d191
commit b67ab75e82
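Most of the changes below follow a few recurring cleanups. A minimal before/after sketch of those patterns, drawn from the hunks themselves; the flake8 rule codes in the comments are assumed labels, since the commit message does not name them:

import os          # one import per line (assumed E401); was "import os, sys, time, signal, traceback"
import signal
import sys
import time
import traceback

FIRST_DAY_OF_WEEK = 1  # Monday  (space after '#', assumed E262/E265; was "1 #Monday")

extend = True
if extend is True:  # identity check instead of "== True" (assumed E712)
    pass

for page in range(0, 5):  # no space before the call parenthesis (assumed E211); was "range (0, 5)"
    pass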
@@ -12,7 +12,7 @@ YEAR_MONTH_FORMAT = 'F Y'
 MONTH_DAY_FORMAT = 'j F'
 SHORT_DATE_FORMAT = 'd-m-Y'
 SHORT_DATETIME_FORMAT = 'd-m-Y G.i.s'
-FIRST_DAY_OF_WEEK = 1 #Monday
+FIRST_DAY_OF_WEEK = 1 # Monday
 
 # The *_INPUT_FORMATS strings use the Python strftime format syntax,
 # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
@@ -12,7 +12,7 @@ YEAR_MONTH_FORMAT = r'Y. \g. F'
 MONTH_DAY_FORMAT = 'j. F'
 SHORT_DATE_FORMAT = r'j.m.Y'
 SHORT_DATETIME_FORMAT = 'j.m.Y H:i:s'
-FIRST_DAY_OF_WEEK = 1 #Monday
+FIRST_DAY_OF_WEEK = 1 # Monday
 
 # The *_INPUT_FORMATS strings use the Python strftime format syntax,
 # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
@@ -101,7 +101,7 @@ class GeoModelAdmin(ModelAdmin):
 'num_zoom' : self.num_zoom,
 'max_zoom' : self.max_zoom,
 'min_zoom' : self.min_zoom,
-'units' : self.units, #likely shoud get from object
+'units' : self.units, # likely should get from object
 'max_resolution' : self.max_resolution,
 'max_extent' : self.max_extent,
 'modifiable' : self.modifiable,
@@ -151,7 +151,7 @@ class OGRGeometry(GDALBase):
 def from_bbox(cls, bbox):
 "Constructs a Polygon from a bounding box (4-tuple)."
 x0, y0, x1, y1 = bbox
 return OGRGeometry( 'POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
 x0, y0, x0, y1, x1, y1, x1, y0, x0, y0) )
 
 ### Geometry set-like operations ###
@@ -105,7 +105,7 @@ class SpatialReference(GDALBase):
 doesn't exist. Can also take a tuple as a parameter, (target, child),
 where child is the index of the attribute in the WKT. For example:
 
->>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]')
+>>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]'
 >>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326
 >>> print(srs['GEOGCS'])
 WGS 84
@@ -204,7 +204,7 @@ class OGRGeomTest(unittest.TestCase, TestDataMixin):
 "Testing Polygon objects."
 
 # Testing `from_bbox` class method
-bbox = (-180,-90,180,90)
+bbox = (-180, -90, 180, 90)
 p = OGRGeometry.from_bbox( bbox )
 self.assertEqual(bbox, p.extent)
 
@@ -17,12 +17,14 @@ class Polygon(GEOSGeometry):
 
 Examples of initialization, where shell, hole1, and hole2 are
 valid LinearRing geometries:
+>>> from django.contrib.gis.geos import LinearRing, Polygon
+>>> shell = hole1 = hole2 = LinearRing()
 >>> poly = Polygon(shell, hole1, hole2)
 >>> poly = Polygon(shell, (hole1, hole2))
 
-Example where a tuple parameters are used:
+>>> # Example where a tuple parameters are used:
 >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (0, 10), (0, 0)),
-((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
+... ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
 """
 if not args:
 raise TypeError('Must provide at least one LinearRing, or a tuple, to initialize a Polygon.')
@@ -33,9 +33,9 @@ def api_get_extent(x): return x.extent
 def api_get_area(x): return x.area
 def api_get_length(x): return x.length
 
-geos_function_tests = [ val for name, val in vars().items()
+geos_function_tests = [val for name, val in vars().items()
 if hasattr(val, '__call__')
-and name.startswith('api_get_') ]
+and name.startswith('api_get_')]
 
 
 @skipUnless(HAS_GEOS, "Geos is required.")
@@ -16,11 +16,14 @@ class UserListA(ListMixin):
 self._list = self._mytype(i_list)
 super(UserListA, self).__init__(*args, **kwargs)
 
-def __len__(self): return len(self._list)
+def __len__(self):
+return len(self._list)
 
-def __str__(self): return str(self._list)
+def __str__(self):
+return str(self._list)
 
-def __repr__(self): return repr(self._list)
+def __repr__(self):
+return repr(self._list)
 
 def _set_list(self, length, items):
 # this would work:
@@ -81,7 +81,7 @@ class GoogleMap(object):
 # level and a center coordinate are provided with polygons/polylines,
 # no automatic determination will occur.
 self.calc_zoom = False
 if self.polygons or self.polylines or self.markers:
 if center is None or zoom is None:
 self.calc_zoom = True
 
@@ -189,7 +189,7 @@ class DistanceTest(TestCase):
 self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
 if postgis:
 # PostGIS uses sphere-only distances by default, testing these as well.
 qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point)
 for i, c in enumerate(qs):
 self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
 
@@ -568,7 +568,7 @@ class GeoQuerySetTest(TestCase):
 # The reference KML depends on the version of PostGIS used
 # (the output stopped including altitude in 1.3.3).
 if connection.ops.spatial_version >= (1, 3, 3):
 ref_kml = '<Point><coordinates>-104.609252,38.255001</coordinates></Point>'
 else:
 ref_kml = '<Point><coordinates>-104.609252,38.255001,0</coordinates></Point>'
 
@@ -163,8 +163,10 @@ class LayerMapTest(TestCase):
 
 # Passing in invalid ForeignKey mapping parameters -- must be a dictionary
 # mapping for the model the ForeignKey points to.
-bad_fk_map1 = copy(co_mapping); bad_fk_map1['state'] = 'name'
-bad_fk_map2 = copy(co_mapping); bad_fk_map2['state'] = {'nombre' : 'State'}
+bad_fk_map1 = copy(co_mapping)
+bad_fk_map1['state'] = 'name'
+bad_fk_map2 = copy(co_mapping)
+bad_fk_map2['state'] = {'nombre' : 'State'}
 self.assertRaises(TypeError, LayerMapping, County, co_shp, bad_fk_map1, transform=False)
 self.assertRaises(LayerMapError, LayerMapping, County, co_shp, bad_fk_map2, transform=False)
 
@@ -163,7 +163,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertFalse(invalid.is_valid())
 self.assertTrue('Invalid geometry value' in str(invalid.errors))
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='point']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='point']:
 self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())
 
 def test_multipointfield(self):
@@ -176,7 +176,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertMapWidget(form)
 self.assertFalse(PointForm().is_valid())
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='multipoint']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='multipoint']:
 self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())
 
 def test_linestringfield(self):
@@ -189,7 +189,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertMapWidget(form)
 self.assertFalse(LineStringForm().is_valid())
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='linestring']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='linestring']:
 self.assertFalse(LineStringForm(data={'p': invalid.wkt}).is_valid())
 
 def test_multilinestringfield(self):
@@ -202,7 +202,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertMapWidget(form)
 self.assertFalse(LineStringForm().is_valid())
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='multilinestring']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='multilinestring']:
 self.assertFalse(LineStringForm(data={'p': invalid.wkt}).is_valid())
 
 def test_polygonfield(self):
@@ -215,7 +215,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertMapWidget(form)
 self.assertFalse(PolygonForm().is_valid())
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='polygon']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='polygon']:
 self.assertFalse(PolygonForm(data={'p': invalid.wkt}).is_valid())
 
 def test_multipolygonfield(self):
@@ -228,7 +228,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertMapWidget(form)
 self.assertFalse(PolygonForm().is_valid())
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='multipolygon']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='multipolygon']:
 self.assertFalse(PolygonForm(data={'p': invalid.wkt}).is_valid())
 
 def test_geometrycollectionfield(self):
@@ -241,7 +241,7 @@ class SpecializedFieldTest(SimpleTestCase):
 self.assertMapWidget(form)
 self.assertFalse(GeometryForm().is_valid())
 
-for invalid in [geom for key, geom in self.geometries.items() if key!='geometrycollection']:
+for invalid in [geo for key, geo in self.geometries.items() if key!='geometrycollection']:
 self.assertFalse(GeometryForm(data={'g': invalid.wkt}).is_valid())
 
 def test_osm_widget(self):
@@ -10,10 +10,11 @@ def precision_wkt(geom, prec):
 integer or a string). If the precision is an integer, then the decimal
 places of coordinates WKT will be truncated to that number:
 
+>>> from django.contrib.gis.geos import Point
 >>> pnt = Point(5, 23)
 >>> pnt.wkt
 'POINT (5.0000000000000000 23.0000000000000000)'
->>> precision(geom, 1)
+>>> precision_wkt(pnt, 1)
 'POINT (5.0 23.0)'
 
 If the precision is a string, it must be valid Python format string
@@ -199,6 +199,8 @@ def load_handler(path, *args, **kwargs):
 Given a path to a handler, return an instance of that handler.
 
 E.g.::
+>>> from django.http import HttpRequest
+>>> request = HttpRequest()
 >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
 <TemporaryFileUploadHandler object at 0x...>
 
@@ -187,7 +187,7 @@ class TimestampSigner(Signer):
 Retrieve original value and check it wasn't signed more
 than max_age seconds ago.
 """
 result = super(TimestampSigner, self).unsign(value)
 value, timestamp = result.rsplit(self.sep, 1)
 timestamp = baseconv.base62.decode(timestamp)
 if max_age is not None:
@@ -164,7 +164,7 @@ class ModelBase(type):
 # Basic setup for proxy models.
 if is_proxy:
 base = None
-for parent in [cls for cls in parents if hasattr(cls, '_meta')]:
+for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
 if parent._meta.abstract:
 if parent._meta.fields:
 raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
@@ -142,11 +142,14 @@ class FileDescriptor(object):
 The descriptor for the file attribute on the model instance. Returns a
 FieldFile when accessed so you can do stuff like::
 
+>>> from myapp.models import MyModel
+>>> instance = MyModel.objects.get(pk=1)
 >>> instance.file.size
 
 Assigns a file object on assignment so you can do::
 
->>> instance.file = File(...)
+>>> with open('/tmp/hello.world', 'r') as f:
+... instance.file = File(f)
 
 """
 def __init__(self, field):
@@ -23,7 +23,7 @@ def safeRef(target, onDelete = None):
 if target.__self__ is not None:
 # Turn a bound method into a BoundMethodWeakref instance.
 # Keep track of these instances for lookup by disconnect().
-assert hasattr(target, '__func__'), """safeRef target %r has __self__, but no __func__, don't know how to create reference"""%( target,)
+assert hasattr(target, '__func__'), """safeRef target %r has __self__, but no __func__, don't know how to create reference""" % (target,)
 reference = get_bound_method_weakref(
 target=target,
 onDelete=onDelete
@@ -144,7 +144,7 @@ class BoundMethodWeakref(object):
 
 def __str__(self):
 """Give a friendly representation of the object"""
-return """%s( %s.%s )"""%(
+return """%s( %s.%s )""" % (
 self.__class__.__name__,
 self.selfName,
 self.funcName,
@@ -567,7 +567,7 @@ class FileField(Field):
 raise ValidationError(self.error_messages['invalid'], code='invalid')
 
 if self.max_length is not None and len(file_name) > self.max_length:
 params = {'max': self.max_length, 'length': len(file_name)}
 raise ValidationError(self.error_messages['max_length'], code='max_length', params=params)
 if not file_name:
 raise ValidationError(self.error_messages['invalid'], code='invalid')
@@ -117,7 +117,7 @@ def media_property(cls):
 if definition:
 extend = getattr(definition, 'extend', True)
 if extend:
-if extend == True:
+if extend is True:
 m = base
 else:
 m = Media()
@@ -6,7 +6,7 @@ _standard_context_processors = None
 # We need the CSRF processor no matter what the user has in their settings,
 # because otherwise it is a security vulnerability, and we can't afford to leave
 # this to human error or failure to read migration instructions.
 _builtin_context_processors = ('django.core.context_processors.csrf',)
 
 class ContextPopException(Exception):
 "pop() has been called more times than push()"
@@ -60,4 +60,4 @@ def do_cache(parser, token):
 return CacheNode(nodelist,
 parser.compile_filter(tokens[1]),
 tokens[2], # fragment_name can't be a variable.
-[parser.compile_filter(token) for token in tokens[3:]])
+[parser.compile_filter(t) for t in tokens[3:]])
@@ -28,7 +28,11 @@
 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-import os, sys, time, signal, traceback
+import os
+import signal
+import sys
+import time
+import traceback
 
 try:
 from django.utils.six.moves import _thread as thread
@@ -144,7 +144,7 @@ def get_template_dirs():
 from django.conf import settings
 dirs = set()
 if ('django.template.loaders.filesystem.load_template_source' in settings.TEMPLATE_LOADERS
 or 'django.template.loaders.filesystem.Loader' in settings.TEMPLATE_LOADERS):
 dirs.update(map(unicode, settings.TEMPLATE_DIRS))
 
 if ('django.template.loaders.app_directories.load_template_source' in settings.TEMPLATE_LOADERS
@@ -505,7 +505,7 @@ class ChangeListTests(TestCase):
 admin.site.register(UnorderedObject, UnorderedObjectAdmin)
 model_admin = UnorderedObjectAdmin(UnorderedObject, admin.site)
 counter = 0 if ascending else 51
-for page in range (0, 5):
+for page in range(0, 5):
 request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser)
 response = model_admin.changelist_view(request)
 for result in response.context_data['cl'].result_list:
@@ -550,7 +550,7 @@ class ChangeListTests(TestCase):
 admin.site.register(OrderedObject, OrderedObjectAdmin)
 model_admin = OrderedObjectAdmin(OrderedObject, admin.site)
 counter = 0 if ascending else 51
-for page in range (0, 5):
+for page in range(0, 5):
 request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser)
 response = model_admin.changelist_view(request)
 for result in response.context_data['cl'].result_list:
@@ -588,7 +588,7 @@ class ChangeListTests(TestCase):
 user_parents = self._create_superuser('parents')
 
 # Test with user 'noparents'
 m = DynamicListFilterChildAdmin(Child, admin.site)
 request = self._mocked_authenticated_request('/child/', user_noparents)
 response = m.changelist_view(request)
 self.assertEqual(response.context_data['cl'].list_filter, ['name', 'age'])
@@ -170,7 +170,7 @@ class TestInline(TestCase):
 holder = Holder.objects.create(pk=123456789, dummy=42)
 inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
 response = self.client.get('/admin/admin_inlines/holder/%i/' % holder.id)
-inner_shortcut = 'r/%s/%s/'%(ContentType.objects.get_for_model(inner).pk, inner.pk)
+inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
 self.assertContains(response, inner_shortcut)
 
 def test_custom_pk_shortcut(self):
@@ -182,8 +182,8 @@ class TestInline(TestCase):
 child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
 child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
 response = self.client.get('/admin/admin_inlines/parentmodelwithcustompk/foo/')
-child1_shortcut = 'r/%s/%s/'%(ContentType.objects.get_for_model(child1).pk, child1.pk)
-child2_shortcut = 'r/%s/%s/'%(ContentType.objects.get_for_model(child2).pk, child2.pk)
+child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
+child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
 self.assertContains(response, child1_shortcut)
 self.assertContains(response, child2_shortcut)
 
@@ -1,6 +1,6 @@
 from django.conf.urls import patterns, include
 
 from . import widgetadmin
 
 
 urlpatterns = patterns('',
@@ -1017,20 +1017,20 @@ class AggregationTests(TestCase):
 
 tests aggregations with generic reverse relations
 """
-b = Book.objects.get(name='Practical Django Projects')
-ItemTag.objects.create(object_id=b.id, tag='intermediate',
-content_type=ContentType.objects.get_for_model(b))
-ItemTag.objects.create(object_id=b.id, tag='django',
-content_type=ContentType.objects.get_for_model(b))
+django_book = Book.objects.get(name='Practical Django Projects')
+ItemTag.objects.create(object_id=django_book.id, tag='intermediate',
+content_type=ContentType.objects.get_for_model(django_book))
+ItemTag.objects.create(object_id=django_book.id, tag='django',
+content_type=ContentType.objects.get_for_model(django_book))
 # Assign a tag to model with same PK as the book above. If the JOIN
 # used in aggregation doesn't have content type as part of the
 # condition the annotation will also count the 'hi mom' tag for b.
-wmpk = WithManualPK.objects.create(id=b.pk)
+wmpk = WithManualPK.objects.create(id=django_book.pk)
 ItemTag.objects.create(object_id=wmpk.id, tag='hi mom',
 content_type=ContentType.objects.get_for_model(wmpk))
-b = Book.objects.get(name__startswith='Paradigms of Artificial Intelligence')
-ItemTag.objects.create(object_id=b.id, tag='intermediate',
-content_type=ContentType.objects.get_for_model(b))
+ai_book = Book.objects.get(name__startswith='Paradigms of Artificial Intelligence')
+ItemTag.objects.create(object_id=ai_book.id, tag='intermediate',
+content_type=ContentType.objects.get_for_model(ai_book))
 
 self.assertEqual(Book.objects.aggregate(Count('tags')), {'tags__count': 3})
 results = Book.objects.annotate(Count('tags')).order_by('-tags__count', 'name')
@@ -149,7 +149,7 @@ class DummyCacheTests(unittest.TestCase):
 'ascii': 'ascii_value',
 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
-'ascii2': {'x' : 1 }
+'ascii2': {'x' : 1}
 }
 for (key, value) in stuff.items():
 self.cache.set(key, value)
@@ -434,7 +434,7 @@ class BaseCacheTests(object):
 it is an absolute expiration timestamp instead of a relative
 offset. Test that we honour this convention. Refs #12399.
 '''
-self.cache.set('key1', 'eggs', 60*60*24*30 + 1) #30 days + 1 second
+self.cache.set('key1', 'eggs', 60*60*24*30 + 1) # 30 days + 1 second
 self.assertEqual(self.cache.get('key1'), 'eggs')
 
 self.cache.add('key2', 'ham', 60*60*24*30 + 1)
@@ -1432,7 +1432,7 @@ class CacheI18nTest(TestCase):
 self.assertEqual(key, key2)
 
 @override_settings(USE_I18N=False, USE_L10N=False)
-def test_cache_key_no_i18n (self):
+def test_cache_key_no_i18n(self):
 request = self._get_request()
 lang = translation.get_language()
 tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
@@ -367,7 +367,7 @@ class FTimeDeltaTests(TestCase):
 def test_delta_invalid_op_mod(self):
 raised = False
 try:
-r = repr(Experiment.objects.filter(end__lt=F('start')%self.deltas[0]))
+r = repr(Experiment.objects.filter(end__lt=F('start') % self.deltas[0]))
 except TypeError:
 raised = True
 self.assertTrue(raised, "TypeError not raised on attempt to modulo divide datetime by timedelta.")
@@ -1071,7 +1071,7 @@ class FormsFormsetTestCase(TestCase):
 
 def test_formset_total_error_count(self):
 """A valid formset should have 0 total errors."""
 data = [ # formset_data, expected error count
 ([('Calexico', '100')], 0),
 ([('Calexico', '')], 1),
 ([('', 'invalid')], 2),
@@ -113,7 +113,7 @@ class QueryTestCase(TestCase):
 dive = Book.objects.using('other').create(title="Dive into Python",
 published=datetime.date(2009, 5, 4))
 
 dive = Book.objects.using('other').get(published=datetime.date(2009, 5, 4))
 self.assertEqual(dive.title, "Dive into Python")
 self.assertRaises(Book.DoesNotExist, Book.objects.using('default').get, published=datetime.date(2009, 5, 4))
 
@@ -125,7 +125,7 @@ class QueryTestCase(TestCase):
 self.assertEqual(dive.title, "Dive into Python")
 self.assertRaises(Book.DoesNotExist, Book.objects.using('default').get, title__iexact="dive INTO python")
 
 dive = Book.objects.using('other').get(published__year=2009)
 self.assertEqual(dive.title, "Dive into Python")
 self.assertEqual(dive.published, datetime.date(2009, 5, 4))
 self.assertRaises(Book.DoesNotExist, Book.objects.using('default').get, published__year=2009)
@@ -24,7 +24,7 @@ class OrLookupsTests(TestCase):
 
 def test_filter_or(self):
 self.assertQuerysetEqual(
 Article.objects.filter(headline__startswith='Hello') | Article.objects.filter(headline__startswith='Goodbye'), [
 'Hello',
 'Goodbye',
 'Hello and goodbye'
@@ -321,9 +321,9 @@ class GenericRelationTests(TestCase):
 [t.created_by for t in TaggedItem.objects.all()])
 
 def test_generic_relation(self):
-b = Bookmark.objects.create(url='http://www.djangoproject.com/')
-t1 = TaggedItem.objects.create(content_object=b, tag='django')
-t2 = TaggedItem.objects.create(content_object=b, tag='python')
+bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/')
+t1 = TaggedItem.objects.create(content_object=bookmark, tag='django')
+t2 = TaggedItem.objects.create(content_object=bookmark, tag='python')
 
 with self.assertNumQueries(2):
 tags = [t.tag for b in Bookmark.objects.prefetch_related('tags')
@@ -509,8 +509,8 @@ class NullableTest(TestCase):
 co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else []
 for e in qs]
 
 qs2 = Employee.objects.select_related('boss')
 co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else []
 for e in qs2]
 
 self.assertEqual(co_serfs, co_serfs2)
@@ -522,8 +522,8 @@ class NullableTest(TestCase):
 co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else []
 for e in qs]
 
 qs2 = Employee.objects.all()
 co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else []
 for e in qs2]
 
 self.assertEqual(co_serfs, co_serfs2)
@@ -1558,7 +1558,9 @@ class Queries5Tests(TestCase):
 # extra()
 qs = Ranking.objects.extra(select={'good': 'case when rank > 2 then 1 else 0 end'})
 dicts = qs.values().order_by('id')
-for d in dicts: del d['id']; del d['author_id']
+for d in dicts:
+del d['id']
+del d['author_id']
 self.assertEqual(
 [sorted(d.items()) for d in dicts],
 [[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]]
|
||||||
# problem here was that b__name generates a LOUTER JOIN, then
|
# problem here was that b__name generates a LOUTER JOIN, then
|
||||||
# b__c__name generates join to c, which the ORM tried to promote but
|
# b__c__name generates join to c, which the ORM tried to promote but
|
||||||
# failed as that join isn't nullable.
|
# failed as that join isn't nullable.
|
||||||
q_obj = (
|
q_obj = (
|
||||||
Q(d__name='foo')|
|
Q(d__name='foo')|
|
||||||
Q(b__name='foo')|
|
Q(b__name='foo')|
|
||||||
Q(b__c__name='foo')
|
Q(b__c__name='foo')
|
||||||
|
|
|
@@ -2,7 +2,7 @@ from django.db import models
 
 
 class Article(models.Model):
 title = models.CharField(max_length=100)
 publication_date = models.DateField()
 
 class Meta:
@@ -10,6 +10,6 @@ class Article(models.Model):
 
 
 class AlternateArticle(models.Model):
 title = models.CharField(max_length=100)
 publication_date = models.DateField()
 byline = models.CharField(max_length=100)