mirror of https://github.com/django/django.git

commit b31eea069c: Merge branch 'master' into schema-alteration

@@ -5,4 +5,4 @@ MANIFEST
dist/
docs/_build/
tests/coverage_html/
tests/.coverage
tests/.coverage

AUTHORS

@@ -36,6 +36,7 @@ The PRIMARY AUTHORS are (and/or have been):
* Preston Holmes
* Simon Charette
* Donald Stufft
* Marc Tamlyn

More information on the main contributors to Django can be found in
docs/internals/committers.txt.

@@ -121,6 +122,7 @@ answer newbie questions, and generally made Django that much better:
Chris Cahoon <chris.cahoon@gmail.com>
Juan Manuel Caicedo <juan.manuel.caicedo@gmail.com>
Trevor Caira <trevor@caira.com>
Aaron Cannon <cannona@fireantproductions.com>
Brett Cannon <brett@python.org>
Ricardo Javier Cárdenes Medina <ricardo.cardenes@gmail.com>
Jeremy Carbaugh <jcarbaugh@gmail.com>

@@ -539,7 +541,6 @@ answer newbie questions, and generally made Django that much better:
Aaron Swartz <http://www.aaronsw.com/>
Ville Säävuori <http://www.unessa.net/>
Mart Sõmermaa <http://mrts.pri.ee/>
Marc Tamlyn
Christian Tanzer <tanzer@swing.co.at>
Tyler Tarabula <tyler.tarabula@gmail.com>
Tyson Tate <tyson@fallingbullets.com>

@@ -576,7 +576,7 @@ DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFil
###########

# The name of the class to use to run the test suite
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
TEST_RUNNER = 'django.test.runner.DiscoverRunner'

############
# FIXTURES #
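
Note: the hunk above changes the default TEST_RUNNER from the old suite-based runner to unittest discovery. A minimal sketch of pinning the previous behaviour in a project's settings while migrating (the settings module shown is hypothetical, not part of this commit):

    # settings.py (hypothetical project settings)
    # Keep the pre-discovery runner until tests follow the test_*.py
    # naming convention, then drop this line to get the new default,
    # django.test.runner.DiscoverRunner.
    TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'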

@@ -37,7 +37,7 @@ from django.utils.encoding import force_text
HORIZONTAL, VERTICAL = 1, 2
# returns the <ul> class for a given radio_admin field
get_ul_class = lambda x: 'radiolist%s' % ((x == HORIZONTAL) and ' inline' or '')
get_ul_class = lambda x: 'radiolist%s' % (' inline' if x == HORIZONTAL else '')

class IncorrectLookupParameters(Exception):

@@ -189,7 +189,7 @@ class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)):
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = db_field.blank and _('None') or None
kwargs['empty_label'] = _('None') if db_field.blank else None

queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
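
Note: this merge repeatedly swaps the old `cond and a or b` idiom for conditional expressions, as in get_ul_class and empty_label above. A small illustrative sketch (not from the diff) of why the old idiom misbehaves when the "true" value is falsy:

    def empty_label_old(blank, none_label=''):
        # Old idiom: if none_label is falsy (e.g. an empty translation),
        # this falls through to None even when blank is True.
        return blank and none_label or None

    def empty_label_new(blank, none_label=''):
        # Conditional expression: returns none_label whenever blank is True.
        return none_label if blank else None

    assert empty_label_old(True) is None   # surprising
    assert empty_label_new(True) == ''     # intended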

@@ -1 +0,0 @@
from .test_fields import TestFieldType

@@ -267,7 +267,7 @@ def model_detail(request, app_label, model_name):
return render_to_response('admin_doc/model_detail.html', {
'root_path': urlresolvers.reverse('admin:index'),
'name': '%s.%s' % (opts.app_label, opts.object_name),
'summary': _("Fields on %s objects") % opts.object_name,
'summary': _("Attributes on %s objects") % opts.object_name,
'description': model.__doc__,
'fields': fields,
}, context_instance=RequestContext(request))

@@ -171,7 +171,7 @@ class AuthenticationForm(forms.Form):
# Set the label for the "username" field.
UserModel = get_user_model()
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
if not self.fields['username'].label:
if self.fields['username'].label is None:
self.fields['username'].label = capfirst(self.username_field.verbose_name)

def clean(self):

@@ -9,7 +9,7 @@ from django.conf import settings
from django.test.signals import setting_changed
from django.utils import importlib
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_bytes, force_str
from django.utils.encoding import force_bytes, force_str, force_text
from django.core.exceptions import ImproperlyConfigured
from django.utils.crypto import (
pbkdf2, constant_time_compare, get_random_string)

@@ -263,13 +263,13 @@ class BCryptSHA256PasswordHasher(BasePasswordHasher):
Secure password hashing using the bcrypt algorithm (recommended)

This is considered by many to be the most secure algorithm but you
must first install the py-bcrypt library. Please be warned that
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt_sha256"
digest = hashlib.sha256
library = ("py-bcrypt", "bcrypt")
library = ("bcrypt", "bcrypt")
rounds = 12

def salt(self):

@@ -291,7 +291,7 @@ class BCryptSHA256PasswordHasher(BasePasswordHasher):
password = force_bytes(password)

data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, data)
return "%s$%s" % (self.algorithm, force_text(data))

def verify(self, password, encoded):
algorithm, data = encoded.split('$', 1)

@@ -307,6 +307,9 @@ class BCryptSHA256PasswordHasher(BasePasswordHasher):
else:
password = force_bytes(password)

# Ensure that our data is a bytestring
data = force_bytes(data)

return constant_time_compare(data, bcrypt.hashpw(password, data))

def safe_summary(self, encoded):

@@ -326,7 +329,7 @@ class BCryptPasswordHasher(BCryptSHA256PasswordHasher):
Secure password hashing using the bcrypt algorithm

This is considered by many to be the most secure algorithm but you
must first install the py-bcrypt library. Please be warned that
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
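
Note: the hasher now targets the bcrypt package instead of py-bcrypt and explicitly converts hashpw() output with force_text()/force_bytes(). A simplified standalone sketch of the encode/verify round trip (it mirrors the logic above but omits bcrypt_sha256's SHA-256 pre-hashing step; assumes the bcrypt package is installed):

    import bcrypt
    from django.utils.crypto import constant_time_compare
    from django.utils.encoding import force_bytes, force_text

    def encode(password, salt, algorithm='bcrypt'):
        data = bcrypt.hashpw(force_bytes(password), salt)  # bytes in, bytes out
        return "%s$%s" % (algorithm, force_text(data))

    def verify(password, encoded):
        algorithm, data = encoded.split('$', 1)
        data = force_bytes(data)  # ensure a bytestring before re-hashing
        return constant_time_compare(data, bcrypt.hashpw(force_bytes(password), data))

    # encoded = encode('lètmein', bcrypt.gensalt(12))
    # assert verify('lètmein', encoded)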

@@ -1,16 +1 @@
from django.contrib.auth.tests.test_custom_user import *
from django.contrib.auth.tests.test_auth_backends import *
from django.contrib.auth.tests.test_basic import *
from django.contrib.auth.tests.test_context_processors import *
from django.contrib.auth.tests.test_decorators import *
from django.contrib.auth.tests.test_forms import *
from django.contrib.auth.tests.test_remote_user import *
from django.contrib.auth.tests.test_management import *
from django.contrib.auth.tests.test_models import *
from django.contrib.auth.tests.test_handlers import *
from django.contrib.auth.tests.test_hashers import *
from django.contrib.auth.tests.test_signals import *
from django.contrib.auth.tests.test_tokens import *
from django.contrib.auth.tests.test_views import *

# The password for the fixture data users is 'password'

@@ -1,6 +1,8 @@
from __future__ import unicode_literals

import os

from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.contrib.auth.forms import (UserCreationForm, AuthenticationForm,
PasswordChangeForm, SetPasswordForm, UserChangeForm, PasswordResetForm,

@@ -13,6 +15,7 @@ from django.test.utils import override_settings
from django.utils.encoding import force_text
from django.utils._os import upath
from django.utils import translation
from django.utils.text import capfirst
from django.utils.translation import ugettext as _

@@ -146,6 +149,24 @@ class AuthenticationFormTest(TestCase):
form = CustomAuthenticationForm()
self.assertEqual(form['username'].label, "Name")

def test_username_field_label_not_set(self):

class CustomAuthenticationForm(AuthenticationForm):
username = CharField()

form = CustomAuthenticationForm()
UserModel = get_user_model()
username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
self.assertEqual(form.fields['username'].label, capfirst(username_field.verbose_name))

def test_username_field_label_empty_string(self):

class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label='')

form = CustomAuthenticationForm()
self.assertEqual(form.fields['username'].label, "")

@skipIfCustomUser
@override_settings(USE_TZ=False, PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
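
Note: because the form now tests `label is None`, an explicitly empty label on a subclass is preserved instead of being overwritten. A brief sketch mirroring the new tests (assumes a configured Django project):

    from django.contrib.auth.forms import AuthenticationForm
    from django.forms import CharField

    class BlankLabelLoginForm(AuthenticationForm):  # hypothetical subclass
        # label='' is kept; only label=None falls back to
        # capfirst(username_field.verbose_name).
        username = CharField(label='')

    assert BlankLabelLoginForm().fields['username'].label == ''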

@@ -2,7 +2,7 @@ from __future__ import unicode_literals
from django.contrib.auth.handlers.modwsgi import check_password, groups_for_user
from django.contrib.auth.models import User, Group
from django.contrib.auth.tests import CustomUser
from django.contrib.auth.tests.test_custom_user import CustomUser
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.test import TransactionTestCase
from django.test.utils import override_settings

@@ -92,7 +92,7 @@ class TestUtilsHashPass(unittest.TestCase):
self.assertFalse(check_password('lètmeiz', encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "crypt")

@skipUnless(bcrypt, "py-bcrypt not installed")
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_sha256(self):
encoded = make_password('lètmein', hasher='bcrypt_sha256')
self.assertTrue(is_password_usable(encoded))

@@ -108,7 +108,7 @@ class TestUtilsHashPass(unittest.TestCase):
self.assertTrue(check_password(password, encoded))
self.assertFalse(check_password(password[:72], encoded))

@skipUnless(bcrypt, "py-bcrypt not installed")
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt(self):
encoded = make_password('lètmein', hasher='bcrypt')
self.assertTrue(is_password_usable(encoded))

@@ -5,7 +5,7 @@ from django.contrib.auth import models, management
from django.contrib.auth.management import create_permissions
from django.contrib.auth.management.commands import changepassword
from django.contrib.auth.models import User
from django.contrib.auth.tests import CustomUser
from django.contrib.auth.tests.test_custom_user import CustomUser
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.core.management import call_command
from django.core.management.base import CommandError

@@ -37,7 +37,7 @@ def next_redirect(request, fallback, **get_kwargs):
else:
anchor = ''

joiner = ('?' in next) and '&' or '?'
joiner = '&' if '?' in next else '?'
next += joiner + urlencode(get_kwargs) + anchor
return HttpResponseRedirect(next)

@@ -75,7 +75,7 @@ def shortcut(request, content_type_id, object_id):
# If all that malarkey found an object domain, use it. Otherwise, fall back
# to whatever get_absolute_url() returned.
if object_domain is not None:
protocol = request.is_secure() and 'https' or 'http'
protocol = 'https' if request.is_secure() else 'http'
return http.HttpResponseRedirect('%s://%s%s'
% (protocol, object_domain, absurl))
else:

@@ -1,6 +0,0 @@
from django.contrib.flatpages.tests.test_csrf import *
from django.contrib.flatpages.tests.test_forms import *
from django.contrib.flatpages.tests.test_models import *
from django.contrib.flatpages.tests.test_middleware import *
from django.contrib.flatpages.tests.test_templatetags import *
from django.contrib.flatpages.tests.test_views import *

@@ -1,2 +0,0 @@
from django.contrib.formtools.tests.tests import *
from django.contrib.formtools.tests.wizard import *

@@ -5,7 +5,7 @@ This is a URLconf to be loaded by tests.py. Add any URLs needed for tests only.
from __future__ import absolute_import

from django.conf.urls import patterns, url
from django.contrib.formtools.tests import TestFormPreview
from django.contrib.formtools.tests.tests import TestFormPreview

from django.contrib.formtools.tests.forms import TestForm

@@ -17,7 +17,7 @@ from django.contrib.formtools.wizard.views import (WizardView,
class DummyRequest(http.HttpRequest):
def __init__(self, POST=None):
super(DummyRequest, self).__init__()
self.method = POST and "POST" or "GET"
self.method = "POST" if POST else "GET"
if POST is not None:
self.POST.update(POST)
self.session = {}

@@ -44,6 +44,7 @@ class GeometryField(Field):
# The OpenGIS Geometry name.
geom_type = 'GEOMETRY'
form_class = forms.GeometryField

# Geodetic units.
geodetic_units = ('Decimal Degree', 'degree')

@@ -201,11 +202,14 @@ class GeometryField(Field):
return connection.ops.geo_db_type(self)

def formfield(self, **kwargs):
defaults = {'form_class' : forms.GeometryField,
defaults = {'form_class' : self.form_class,
'geom_type' : self.geom_type,
'srid' : self.srid,
}
defaults.update(kwargs)
if (self.dim > 2 and not 'widget' in kwargs and
not getattr(defaults['form_class'].widget, 'supports_3d', False)):
defaults['widget'] = forms.Textarea
return super(GeometryField, self).formfield(**defaults)

def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):

@@ -267,28 +271,35 @@ class GeometryField(Field):
# The OpenGIS Geometry Type Fields
class PointField(GeometryField):
geom_type = 'POINT'
form_class = forms.PointField
description = _("Point")

class LineStringField(GeometryField):
geom_type = 'LINESTRING'
form_class = forms.LineStringField
description = _("Line string")

class PolygonField(GeometryField):
geom_type = 'POLYGON'
form_class = forms.PolygonField
description = _("Polygon")

class MultiPointField(GeometryField):
geom_type = 'MULTIPOINT'
form_class = forms.MultiPointField
description = _("Multi-point")

class MultiLineStringField(GeometryField):
geom_type = 'MULTILINESTRING'
form_class = forms.MultiLineStringField
description = _("Multi-line string")

class MultiPolygonField(GeometryField):
geom_type = 'MULTIPOLYGON'
form_class = forms.MultiPolygonField
description = _("Multi polygon")

class GeometryCollectionField(GeometryField):
geom_type = 'GEOMETRYCOLLECTION'
form_class = forms.GeometryCollectionField
description = _("Geometry collection")
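
Note: each concrete geometry field now carries a form_class, so formfield() returns the matching specialised form field rather than a generic GeometryField. A hedged sketch of the effect on a hypothetical model (names are illustrative; requires a working GeoDjango setup):

    from django.contrib.gis.db import models
    from django.contrib.gis import forms

    class City(models.Model):  # hypothetical example model
        point = models.PointField()

        class Meta:
            app_label = 'example'

    # formfield() now defaults to forms.PointField instead of forms.GeometryField.
    assert isinstance(City._meta.get_field('point').formfield(), forms.PointField)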

@@ -121,7 +121,7 @@ class GeoSQLCompiler(compiler.SQLCompiler):
"""
result = []
if opts is None:
opts = self.query.model._meta
opts = self.query.get_meta()
aliases = set()
only_load = self.deferred_to_columns()
seen = self.query.included_inherited_models.copy()

@@ -247,7 +247,7 @@ class GeoSQLCompiler(compiler.SQLCompiler):
used. If `column` is specified, it will be used instead of the value
in `field.column`.
"""
if table_alias is None: table_alias = self.query.model._meta.db_table
if table_alias is None: table_alias = self.query.get_meta().db_table
return "%s.%s" % (self.quote_name_unless_alias(table_alias),
self.connection.ops.quote_name(column or field.column))

@@ -1,2 +1,5 @@
from django.forms import *
from django.contrib.gis.forms.fields import GeometryField
from .fields import (GeometryField, GeometryCollectionField, PointField,
MultiPointField, LineStringField, MultiLineStringField, PolygonField,
MultiPolygonField)
from .widgets import BaseGeometryWidget, OpenLayersWidget, OSMWidget

@@ -9,6 +9,7 @@ from django.utils.translation import ugettext_lazy as _
# While this couples the geographic forms to the GEOS library,
# it decouples from database (by not importing SpatialBackend).
from django.contrib.gis.geos import GEOSException, GEOSGeometry, fromstr
from .widgets import OpenLayersWidget

class GeometryField(forms.Field):

@@ -17,7 +18,8 @@ class GeometryField(forms.Field):
accepted by GEOSGeometry is accepted by this form. By default,
this includes WKT, HEXEWKB, WKB (in a buffer), and GeoJSON.
"""
widget = forms.Textarea
widget = OpenLayersWidget
geom_type = 'GEOMETRY'

default_error_messages = {
'required' : _('No geometry value provided.'),

@@ -31,12 +33,13 @@ class GeometryField(forms.Field):
# Pop out attributes from the database field, or use sensible
# defaults (e.g., allow None).
self.srid = kwargs.pop('srid', None)
self.geom_type = kwargs.pop('geom_type', 'GEOMETRY')
self.geom_type = kwargs.pop('geom_type', self.geom_type)
if 'null' in kwargs:
kwargs.pop('null', True)
warnings.warn("Passing 'null' keyword argument to GeometryField is deprecated.",
DeprecationWarning, stacklevel=2)
super(GeometryField, self).__init__(**kwargs)
self.widget.attrs['geom_type'] = self.geom_type

def to_python(self, value):
"""

@@ -98,3 +101,31 @@ class GeometryField(forms.Field):
else:
# Check for change of state of existence
return bool(initial) != bool(data)

class GeometryCollectionField(GeometryField):
geom_type = 'GEOMETRYCOLLECTION'

class PointField(GeometryField):
geom_type = 'POINT'

class MultiPointField(GeometryField):
geom_type = 'MULTIPOINT'

class LineStringField(GeometryField):
geom_type = 'LINESTRING'

class MultiLineStringField(GeometryField):
geom_type = 'MULTILINESTRING'

class PolygonField(GeometryField):
geom_type = 'POLYGON'

class MultiPolygonField(GeometryField):
geom_type = 'MULTIPOLYGON'

@@ -0,0 +1,112 @@
from __future__ import unicode_literals

import logging

from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSGeometry, GEOSException
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six
from django.utils import translation

logger = logging.getLogger('django.contrib.gis')

class BaseGeometryWidget(Widget):
"""
The base class for rich geometry widgets.
Renders a map using the WKT of the geometry.
"""
geom_type = 'GEOMETRY'
map_srid = 4326
map_width = 600
map_height = 400
display_wkt = False

supports_3d = False
template_name = '' # set on subclasses

def __init__(self, attrs=None):
self.attrs = {}
for key in ('geom_type', 'map_srid', 'map_width', 'map_height', 'display_wkt'):
self.attrs[key] = getattr(self, key)
if attrs:
self.attrs.update(attrs)

def render(self, name, value, attrs=None):
# If a string reaches here (via a validation error on another
# field) then just reconstruct the Geometry.
if isinstance(value, six.string_types):
try:
value = GEOSGeometry(value)
except (GEOSException, ValueError) as err:
logger.error(
"Error creating geometry from value '%s' (%s)" % (
value, err)
)
value = None

wkt = ''
if value:
# Check that srid of value and map match
if value.srid != self.map_srid:
try:
ogr = value.ogr
ogr.transform(self.map_srid)
wkt = ogr.wkt
except gdal.OGRException as err:
logger.error(
"Error transforming geometry from srid '%s' to srid '%s' (%s)" % (
value.srid, self.map_srid, err)
)
else:
wkt = value.wkt

context = self.build_attrs(attrs,
name=name,
module='geodjango_%s' % name.replace('-','_'), # JS-safe
wkt=wkt,
geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
STATIC_URL=settings.STATIC_URL,
LANGUAGE_BIDI=translation.get_language_bidi(),
)
return loader.render_to_string(self.template_name, context)

class OpenLayersWidget(BaseGeometryWidget):
template_name = 'gis/openlayers.html'
class Media:
js = (
'http://openlayers.org/api/2.11/OpenLayers.js',
'gis/js/OLMapWidget.js',
)

class OSMWidget(BaseGeometryWidget):
"""
An OpenLayers/OpenStreetMap-based widget.
"""
template_name = 'gis/openlayers-osm.html'
default_lon = 5
default_lat = 47

class Media:
js = (
'http://openlayers.org/api/2.11/OpenLayers.js',
'http://www.openstreetmap.org/openlayers/OpenStreetMap.js',
'gis/js/OLMapWidget.js',
)

@property
def map_srid(self):
# Use the official spherical mercator projection SRID on versions
# of GDAL that support it; otherwise, fallback to 900913.
if gdal.HAS_GDAL and gdal.GDAL_VERSION >= (1, 7):
return 3857
else:
return 900913

def render(self, name, value, attrs=None):
return super(self, OSMWidget).render(name, value,
{'default_lon': self.default_lon, 'default_lat': self.default_lat})
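
Note: BaseGeometryWidget collects its map parameters into self.attrs and renders through a template, so map widgets are subclassed rather than hand-rendered. A minimal sketch of a custom widget (class name and template path are illustrative, not part of the commit):

    from django.contrib.gis.forms.widgets import BaseGeometryWidget

    class MyMapWidget(BaseGeometryWidget):   # hypothetical subclass
        template_name = 'myapp/mymap.html'   # template draws the map
        map_srid = 3857                      # picked up into self.attrs by __init__
        map_width = 800
        map_height = 500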

@@ -31,6 +31,9 @@
to a non-existant file location (e.g., `GDAL_LIBRARY_PATH='/null/path'`;
setting to None/False/'' will not work as a string must be given).
"""
from django.contrib.gis.gdal.error import check_err, OGRException, OGRIndexError, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType

# Attempting to import objects that depend on the GDAL library. The
# HAS_GDAL flag will be set to True if the library is present on
# the system.

@@ -41,7 +44,7 @@ try:
from django.contrib.gis.gdal.srs import SpatialReference, CoordTransform
from django.contrib.gis.gdal.geometries import OGRGeometry
HAS_GDAL = True
except Exception:
except OGRException:
HAS_GDAL = False

try:

@@ -50,5 +53,3 @@ except ImportError:
# No ctypes, but don't raise an exception.
pass

from django.contrib.gis.gdal.error import check_err, OGRException, OGRIndexError, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType

@@ -1,28 +0,0 @@
"""
Module for executing all of the GDAL tests. None
of these tests require the use of the database.
"""
from __future__ import absolute_import

from django.utils.unittest import TestSuite, TextTestRunner

# Importing the GDAL test modules.
from . import test_driver, test_ds, test_envelope, test_geom, test_srs

test_suites = [test_driver.suite(),
test_ds.suite(),
test_envelope.suite(),
test_geom.suite(),
test_srs.suite(),
]

def suite():
"Builds a test suite for the GDAL tests."
s = TestSuite()
for test_suite in test_suites:
s.addTest(test_suite)
return s

def run(verbosity=1):
"Runs the GDAL tests."
TextTestRunner(verbosity=verbosity).run(suite())

@@ -1,5 +1,10 @@
import unittest
from django.contrib.gis.gdal import Driver, OGRException
from django.contrib.gis.gdal import HAS_GDAL
from django.utils import unittest
from django.utils.unittest import skipUnless

if HAS_GDAL:
from django.contrib.gis.gdal import Driver, OGRException

valid_drivers = ('ESRI Shapefile', 'MapInfo File', 'TIGER', 'S57', 'DGN',
'Memory', 'CSV', 'GML', 'KML')

@@ -12,6 +17,8 @@ aliases = {'eSrI' : 'ESRI Shapefile',
'sHp' : 'ESRI Shapefile',
}

@skipUnless(HAS_GDAL, "GDAL is required")
class DriverTest(unittest.TestCase):

def test01_valid_driver(self):

@@ -30,11 +37,3 @@ class DriverTest(unittest.TestCase):
for alias, full_name in aliases.items():
dr = Driver(alias)
self.assertEqual(full_name, str(dr))

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(DriverTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
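
Note: the GDAL test modules now guard library-dependent imports behind HAS_GDAL and decorate test cases with skipUnless instead of shipping suite()/run() helpers. A minimal sketch of the pattern for a new test module (test name is illustrative):

    from django.contrib.gis.gdal import HAS_GDAL
    from django.utils import unittest
    from django.utils.unittest import skipUnless

    if HAS_GDAL:
        # Import objects that need the GDAL C library only when it is available.
        from django.contrib.gis.gdal import Driver

    @skipUnless(HAS_GDAL, "GDAL is required")
    class DriverSmokeTest(unittest.TestCase):  # hypothetical test
        def test_shapefile_driver(self):
            self.assertEqual('ESRI Shapefile', str(Driver('ESRI Shapefile')))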

@@ -1,32 +1,38 @@
import os
import unittest
from django.contrib.gis.gdal import DataSource, Envelope, OGRGeometry, OGRException, OGRIndexError, GDAL_VERSION
from django.contrib.gis.gdal.field import OFTReal, OFTInteger, OFTString

from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geometry.test_data import get_ds_file, TestDS, TEST_DATA
from django.utils import unittest
from django.utils.unittest import skipUnless

if HAS_GDAL:
from django.contrib.gis.gdal import DataSource, Envelope, OGRGeometry, OGRException, OGRIndexError, GDAL_VERSION
from django.contrib.gis.gdal.field import OFTReal, OFTInteger, OFTString

# List of acceptable data sources.
ds_list = (
TestDS('test_point', nfeat=5, nfld=3, geom='POINT', gtype=1, driver='ESRI Shapefile',
fields={'dbl' : OFTReal, 'int' : OFTInteger, 'str' : OFTString,},
extent=(-1.35011,0.166623,-0.524093,0.824508), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]',
field_values={'dbl' : [float(i) for i in range(1, 6)], 'int' : list(range(1, 6)), 'str' : [str(i) for i in range(1, 6)]},
fids=range(5)),
TestDS('test_vrt', ext='vrt', nfeat=3, nfld=3, geom='POINT', gtype='Point25D', driver='VRT',
fields={'POINT_X' : OFTString, 'POINT_Y' : OFTString, 'NUM' : OFTString}, # VRT uses CSV, which all types are OFTString.
extent=(1.0, 2.0, 100.0, 523.5), # Min/Max from CSV
field_values={'POINT_X' : ['1.0', '5.0', '100.0'], 'POINT_Y' : ['2.0', '23.0', '523.5'], 'NUM' : ['5', '17', '23']},
fids=range(1,4)),
TestDS('test_poly', nfeat=3, nfld=3, geom='POLYGON', gtype=3,
driver='ESRI Shapefile',
fields={'float' : OFTReal, 'int' : OFTInteger, 'str' : OFTString,},
extent=(-1.01513,-0.558245,0.161876,0.839637), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'),
)

bad_ds = (TestDS('foo'),)

# List of acceptable data sources.
ds_list = (TestDS('test_point', nfeat=5, nfld=3, geom='POINT', gtype=1, driver='ESRI Shapefile',
fields={'dbl' : OFTReal, 'int' : OFTInteger, 'str' : OFTString,},
extent=(-1.35011,0.166623,-0.524093,0.824508), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]',
field_values={'dbl' : [float(i) for i in range(1, 6)], 'int' : list(range(1, 6)), 'str' : [str(i) for i in range(1, 6)]},
fids=range(5)),
TestDS('test_vrt', ext='vrt', nfeat=3, nfld=3, geom='POINT', gtype='Point25D', driver='VRT',
fields={'POINT_X' : OFTString, 'POINT_Y' : OFTString, 'NUM' : OFTString}, # VRT uses CSV, which all types are OFTString.
extent=(1.0, 2.0, 100.0, 523.5), # Min/Max from CSV
field_values={'POINT_X' : ['1.0', '5.0', '100.0'], 'POINT_Y' : ['2.0', '23.0', '523.5'], 'NUM' : ['5', '17', '23']},
fids=range(1,4)),
TestDS('test_poly', nfeat=3, nfld=3, geom='POLYGON', gtype=3,
driver='ESRI Shapefile',
fields={'float' : OFTReal, 'int' : OFTInteger, 'str' : OFTString,},
extent=(-1.01513,-0.558245,0.161876,0.839637), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'),
)

bad_ds = (TestDS('foo'),
)

@skipUnless(HAS_GDAL, "GDAL is required")
class DataSourceTest(unittest.TestCase):

def test01_valid_shp(self):

@@ -236,11 +242,3 @@ class DataSourceTest(unittest.TestCase):
feat = ds[0][0]
# Reference value obtained using `ogrinfo`.
self.assertEqual(676586997978, feat.get('ALAND10'))

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(DataSourceTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -1,5 +1,9 @@
from django.contrib.gis.gdal import Envelope, OGRException
from django.contrib.gis.gdal import HAS_GDAL
from django.utils import unittest
from django.utils.unittest import skipUnless

if HAS_GDAL:
from django.contrib.gis.gdal import Envelope, OGRException

class TestPoint(object):

@@ -7,11 +11,13 @@ class TestPoint(object):
self.x = x
self.y = y

@skipUnless(HAS_GDAL, "GDAL is required")
class EnvelopeTest(unittest.TestCase):

def setUp(self):
self.e = Envelope(0, 0, 5, 5)

def test01_init(self):
"Testing Envelope initilization."
e1 = Envelope((0, 0, 5, 5))

@@ -85,11 +91,3 @@ class EnvelopeTest(unittest.TestCase):
self.assertEqual((-1, 0, 5, 5), self.e)
self.e.expand_to_include(TestPoint(10, 10))
self.assertEqual((-1, 0, 10, 10), self.e)

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(EnvelopeTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -5,12 +5,19 @@ try:
except ImportError:
import pickle

from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType, OGRException,
OGRIndexError, SpatialReference, CoordTransform, GDAL_VERSION)
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geometry.test_data import TestDataMixin
from django.utils.six.moves import xrange
from django.utils import unittest
from django.utils.unittest import skipUnless

if HAS_GDAL:
from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType,
OGRException, OGRIndexError, SpatialReference, CoordTransform,
GDAL_VERSION)

@skipUnless(HAS_GDAL, "GDAL is required")
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."

@@ -476,11 +483,3 @@ class OGRGeomTest(unittest.TestCase, TestDataMixin):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertNotEqual(None, OGRGeometry('POINT(0 0)'))
self.assertEqual(False, OGRGeometry('LINESTRING(0 0, 1 1)') == 3)

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(OGRGeomTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -1,5 +1,9 @@
from django.contrib.gis.gdal import SpatialReference, CoordTransform, OGRException, SRSException
from django.contrib.gis.gdal import HAS_GDAL
from django.utils import unittest
from django.utils.unittest import skipUnless

if HAS_GDAL:
from django.contrib.gis.gdal import SpatialReference, CoordTransform, OGRException, SRSException

class TestSRS:

@@ -46,6 +50,8 @@ well_known = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",637813

bad_srlist = ('Foobar', 'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',)

@skipUnless(HAS_GDAL, "GDAL is required")
class SpatialRefTest(unittest.TestCase):

def test01_wkt(self):

@@ -155,11 +161,3 @@ class SpatialRefTest(unittest.TestCase):
self.assertEqual('EPSG', s1['AUTHORITY'])
self.assertEqual(4326, int(s1['AUTHORITY', 1]))
self.assertEqual(None, s1['FOOBAR'])

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(SpatialRefTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -3,16 +3,28 @@ from __future__ import unicode_literals
import os
from django.conf import settings
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.geoip import GeoIP, GeoIPException
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.geoip import HAS_GEOIP
from django.utils import unittest
from django.utils.unittest import skipUnless

from django.utils import six

if HAS_GEOIP:
from . import GeoIP, GeoIPException

if HAS_GEOS:
from ..geos import GEOSGeometry

# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_DATA path should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat'.

@skipUnless(HAS_GEOIP and getattr(settings, "GEOIP_PATH", None),
"GeoIP is required along with the GEOIP_DATA setting.")
class GeoIPTest(unittest.TestCase):

def test01_init(self):

@@ -70,6 +82,7 @@ class GeoIPTest(unittest.TestCase):
self.assertEqual({'country_code' : 'US', 'country_name' : 'United States'},
g.country(query))

@skipUnless(HAS_GEOS, "Geos is required")
def test04_city(self):
"Testing GeoIP city querying methods."
g = GeoIP(country='<foo>')

@@ -105,12 +118,3 @@ class GeoIPTest(unittest.TestCase):
g = GeoIP()
d = g.city("www.osnabrueck.de")
self.assertEqual('Osnabrück', d['city'])

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(GeoIPTest))
return s

def run(verbosity=1):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -3,12 +3,18 @@ The GeoDjango GEOS module. Please consult the GeoDjango documentation
for more details:
http://geodjango.org/docs/geos.html
"""
from django.contrib.gis.geos.geometry import GEOSGeometry, wkt_regex, hex_regex
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos.linestring import LineString, LinearRing
from django.contrib.gis.geos.polygon import Polygon
from django.contrib.gis.geos.collections import GeometryCollection, MultiPoint, MultiLineString, MultiPolygon
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.io import WKTReader, WKTWriter, WKBReader, WKBWriter
from django.contrib.gis.geos.factory import fromfile, fromstr
from django.contrib.gis.geos.libgeos import geos_version, geos_version_info, GEOS_PREPARE
try:
from .libgeos import geos_version, geos_version_info, GEOS_PREPARE
HAS_GEOS = True
except ImportError:
HAS_GEOS = False

if HAS_GEOS:
from .geometry import GEOSGeometry, wkt_regex, hex_regex
from .point import Point
from .linestring import LineString, LinearRing
from .polygon import Polygon
from .collections import GeometryCollection, MultiPoint, MultiLineString, MultiPolygon
from .error import GEOSException, GEOSIndexError
from .io import WKTReader, WKTWriter, WKBReader, WKBWriter
from .factory import fromfile, fromstr

@@ -1,28 +0,0 @@
"""
GEOS Testing module.
"""
from __future__ import absolute_import

from django.utils.unittest import TestSuite, TextTestRunner
from . import test_geos, test_io, test_geos_mutation, test_mutable_list

test_suites = [
test_geos.suite(),
test_io.suite(),
test_geos_mutation.suite(),
test_mutable_list.suite(),
]

def suite():
"Builds a test suite for the GEOS tests."
s = TestSuite()
for suite in test_suites:
s.addTest(suite)
return s

def run(verbosity=1):
"Runs the GEOS tests."
TextTestRunner(verbosity=verbosity).run(suite())

if __name__ == '__main__':
run(2)

@@ -6,20 +6,28 @@ import random
from binascii import a2b_hex, b2a_hex
from io import BytesIO

from django.contrib.gis.gdal import HAS_GDAL

from django.contrib.gis import memoryview
from django.contrib.gis.geos import (GEOSException, GEOSIndexError, GEOSGeometry,
GeometryCollection, Point, MultiPoint, Polygon, MultiPolygon, LinearRing,
LineString, MultiLineString, fromfile, fromstr, geos_version_info)
from django.contrib.gis.geos.base import gdal, numpy, GEOSBase
from django.contrib.gis.geos.libgeos import GEOS_PREPARE
from django.contrib.gis.geometry.test_data import TestDataMixin

from django.utils.encoding import force_bytes
from django.utils import six
from django.utils.six.moves import xrange
from django.utils import unittest
from django.utils.unittest import skipUnless

from .. import HAS_GEOS

if HAS_GEOS:
from .. import (GEOSException, GEOSIndexError, GEOSGeometry,
GeometryCollection, Point, MultiPoint, Polygon, MultiPolygon, LinearRing,
LineString, MultiLineString, fromfile, fromstr, geos_version_info,
GEOS_PREPARE)
from ..base import gdal, numpy, GEOSBase

@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSTest(unittest.TestCase, TestDataMixin):

@property

@@ -198,7 +206,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertEqual(srid, poly.shell.srid)
self.assertEqual(srid, fromstr(poly.ewkt).srid) # Checking export

@unittest.skipUnless(gdal.HAS_GDAL, "gdal is required")
@skipUnless(HAS_GDAL, "GDAL is required.")
def test_json(self):
"Testing GeoJSON input/output (via GDAL)."
for g in self.geometries.json_geoms:

@@ -662,6 +670,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
p3 = fromstr(p1.hex, srid=-1) # -1 is intended.
self.assertEqual(-1, p3.srid)

@skipUnless(HAS_GDAL, "GDAL is required.")
def test_custom_srid(self):
""" Test with a srid unknown from GDAL """
pnt = Point(111200, 220900, srid=999999)

@@ -851,7 +860,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
# And, they should be equal.
self.assertEqual(gc1, gc2)

@unittest.skipUnless(gdal.HAS_GDAL, "gdal is required")
@skipUnless(HAS_GDAL, "GDAL is required.")
def test_gdal(self):
"Testing `ogr` and `srs` properties."
g1 = fromstr('POINT(5 23)')

@@ -878,7 +887,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertNotEqual(poly._ptr, cpy1._ptr)
self.assertNotEqual(poly._ptr, cpy2._ptr)

@unittest.skipUnless(gdal.HAS_GDAL, "gdal is required to transform geometries")
@skipUnless(HAS_GDAL, "GDAL is required to transform geometries")
def test_transform(self):
"Testing `transform` method."
orig = GEOSGeometry('POINT (-104.609 38.255)', 4326)

@@ -903,7 +912,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)

@unittest.skipUnless(gdal.HAS_GDAL, "gdal is required to transform geometries")
@skipUnless(HAS_GDAL, "GDAL is required to transform geometries")
def test_transform_3d(self):
p3d = GEOSGeometry('POINT (5 23 100)', 4326)
p3d.transform(2774)

@@ -912,6 +921,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
else:
self.assertIsNone(p3d.z)

@skipUnless(HAS_GDAL, "GDAL is required.")
def test_transform_noop(self):
""" Testing `transform` method (SRID match) """
# transform() should no-op if source & dest SRIDs match,

@@ -962,6 +972,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
self.assertRaises(GEOSException, g.transform, 2774, clone=True)

@skipUnless(HAS_GDAL, "GDAL is required.")
def test_transform_nogdal(self):
""" Testing `transform` method (GDAL not available) """
old_has_gdal = gdal.HAS_GDAL

@@ -1016,7 +1027,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertEqual(geom, tmpg)
if not no_srid: self.assertEqual(geom.srid, tmpg.srid)

@unittest.skipUnless(GEOS_PREPARE, "geos >= 3.1.0 is required")
@skipUnless(HAS_GEOS and GEOS_PREPARE, "geos >= 3.1.0 is required")
def test_prepared(self):
"Testing PreparedGeometry support."
# Creating a simple multipolygon and getting a prepared version.

@@ -1043,7 +1054,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
for geom, merged in zip(ref_geoms, ref_merged):
self.assertEqual(merged, geom.merged)

@unittest.skipUnless(GEOS_PREPARE, "geos >= 3.1.0 is required")
@skipUnless(HAS_GEOS and GEOS_PREPARE, "geos >= 3.1.0 is required")
def test_valid_reason(self):
"Testing IsValidReason support"

@@ -1058,7 +1069,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertIsInstance(g.valid_reason, six.string_types)
self.assertTrue(g.valid_reason.startswith("Too few points in geometry component"))

@unittest.skipUnless(geos_version_info()['version'] >= '3.2.0', "geos >= 3.2.0 is required")
@skipUnless(HAS_GEOS and geos_version_info()['version'] >= '3.2.0', "geos >= 3.2.0 is required")
def test_linearref(self):
"Testing linear referencing"

@@ -1091,12 +1102,3 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertTrue(m, msg="Unable to parse the version string '%s'" % v_init)
self.assertEqual(m.group('version'), v_geos)
self.assertEqual(m.group('capi_version'), v_capi)

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(GEOSTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -2,15 +2,23 @@
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.

from django.contrib.gis.geos import *
from django.contrib.gis.geos.error import GEOSIndexError
from django.utils import unittest
from django.utils.unittest import skipUnless

from .. import HAS_GEOS

if HAS_GEOS:
from .. import *
from ..error import GEOSIndexError

def getItem(o,i): return o[i]
def delItem(o,i): del o[i]
def setItem(o,i,v): o[i] = v

def api_get_distance(x): return x.distance(Point(-200,-200))
if HAS_GEOS:
def api_get_distance(x): return x.distance(Point(-200,-200))

def api_get_buffer(x): return x.buffer(10)
def api_get_geom_typeid(x): return x.geom_typeid
def api_get_num_coords(x): return x.num_coords

@@ -29,6 +37,8 @@ geos_function_tests = [ val for name, val in vars().items()
if hasattr(val, '__call__')
and name.startswith('api_get_') ]

@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSMutationTest(unittest.TestCase):
"""
Tests Pythonic Mutability of Python GEOS geometry wrappers

@@ -122,14 +132,3 @@ class GEOSMutationTest(unittest.TestCase):
lsa = MultiPoint(*map(Point,((5,5),(3,-2),(8,1))))
for f in geos_function_tests:
self.assertEqual(f(lsa), f(mp), 'MultiPoint ' + f.__name__)

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(GEOSMutationTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

if __name__ == '__main__':
run()

@@ -4,10 +4,16 @@ import binascii
import unittest

from django.contrib.gis import memoryview
from django.contrib.gis.geos import GEOSGeometry, WKTReader, WKTWriter, WKBReader, WKBWriter, geos_version_info
from django.utils import six
from django.utils.unittest import skipUnless

from .. import HAS_GEOS

if HAS_GEOS:
from .. import GEOSGeometry, WKTReader, WKTWriter, WKBReader, WKBWriter, geos_version_info

@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSIOTest(unittest.TestCase):

def test01_wktreader(self):

@@ -109,11 +115,3 @@ class GEOSIOTest(unittest.TestCase):
wkb_w.srid = True
self.assertEqual(hex3d_srid, wkb_w.write_hex(g))
self.assertEqual(wkb3d_srid, wkb_w.write(g))

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(GEOSIOTest))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

@@ -395,15 +395,3 @@ class ListMixinTest(unittest.TestCase):

class ListMixinTestSingle(ListMixinTest):
listType = UserListB

def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(ListMixinTest))
s.addTest(unittest.makeSuite(ListMixinTestSingle))
return s

def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())

if __name__ == '__main__':
run()

@@ -20,7 +20,7 @@ def index(request, sitemaps):
"""
current_site = get_current_site(request)
sites = []
protocol = request.is_secure() and 'https' or 'http'
protocol = 'https' if request.is_secure() else 'http'
for section, site in sitemaps.items():
if callable(site):
pages = site().paginator.num_pages
@ -0,0 +1,371 @@
|
|||
(function() {
|
||||
/**
|
||||
* Transforms an array of features to a single feature with the merged
|
||||
* geometry of geom_type
|
||||
*/
|
||||
OpenLayers.Util.properFeatures = function(features, geom_type) {
|
||||
if (features.constructor == Array) {
|
||||
var geoms = [];
|
||||
for (var i=0; i<features.length; i++) {
|
||||
geoms.push(features[i].geometry);
|
||||
}
|
||||
var geom = new geom_type(geoms);
|
||||
features = new OpenLayers.Feature.Vector(geom);
|
||||
}
|
||||
return features;
|
||||
}
|
||||
|
||||
/**
|
||||
* @requires OpenLayers/Format/WKT.js
|
||||
*/
|
||||
|
||||
/**
|
||||
* Class: OpenLayers.Format.DjangoWKT
|
||||
* Class for reading Well-Known Text, with workarounds to successfully parse
|
||||
* geometries and collections as returnes by django.contrib.gis.geos.
|
||||
*
|
||||
* Inherits from:
|
||||
* - <OpenLayers.Format.WKT>
|
||||
*/
|
||||
|
||||
OpenLayers.Format.DjangoWKT = OpenLayers.Class(OpenLayers.Format.WKT, {
|
||||
initialize: function(options) {
|
||||
OpenLayers.Format.WKT.prototype.initialize.apply(this, [options]);
|
||||
this.regExes.justComma = /\s*,\s*/;
|
||||
},
|
||||
|
||||
parse: {
|
||||
'point': function(str) {
|
||||
var coords = OpenLayers.String.trim(str).split(this.regExes.spaces);
|
||||
return new OpenLayers.Feature.Vector(
|
||||
new OpenLayers.Geometry.Point(coords[0], coords[1])
|
||||
);
|
||||
},
|
||||
|
||||
'multipoint': function(str) {
|
||||
var point;
|
||||
var points = OpenLayers.String.trim(str).split(this.regExes.justComma);
|
||||
var components = [];
|
||||
for(var i=0, len=points.length; i<len; ++i) {
|
||||
point = points[i].replace(this.regExes.trimParens, '$1');
|
||||
components.push(this.parse.point.apply(this, [point]).geometry);
|
||||
}
|
||||
return new OpenLayers.Feature.Vector(
|
||||
new OpenLayers.Geometry.MultiPoint(components)
|
||||
);
|
||||
},
|
||||
|
||||
'linestring': function(str) {
|
||||
var points = OpenLayers.String.trim(str).split(',');
|
||||
var components = [];
|
||||
for(var i=0, len=points.length; i<len; ++i) {
|
||||
components.push(this.parse.point.apply(this, [points[i]]).geometry);
|
||||
}
|
||||
return new OpenLayers.Feature.Vector(
|
||||
new OpenLayers.Geometry.LineString(components)
|
||||
);
|
||||
},
|
||||
|
||||
'multilinestring': function(str) {
|
||||
var line;
|
||||
var lines = OpenLayers.String.trim(str).split(this.regExes.parenComma);
|
||||
var components = [];
|
||||
for(var i=0, len=lines.length; i<len; ++i) {
|
||||
line = lines[i].replace(this.regExes.trimParens, '$1');
|
||||
components.push(this.parse.linestring.apply(this, [line]).geometry);
|
||||
}
|
||||
return new OpenLayers.Feature.Vector(
|
||||
new OpenLayers.Geometry.MultiLineString(components)
|
||||
);
|
||||
},
|
||||
|
||||
'polygon': function(str) {
|
||||
var ring, linestring, linearring;
|
||||
var rings = OpenLayers.String.trim(str).split(this.regExes.parenComma);
|
||||
var components = [];
|
||||
for(var i=0, len=rings.length; i<len; ++i) {
|
||||
ring = rings[i].replace(this.regExes.trimParens, '$1');
|
||||
linestring = this.parse.linestring.apply(this, [ring]).geometry;
|
||||
linearring = new OpenLayers.Geometry.LinearRing(linestring.components);
|
||||
components.push(linearring);
|
||||
}
|
||||
return new OpenLayers.Feature.Vector(
|
||||
new OpenLayers.Geometry.Polygon(components)
|
||||
);
|
||||
},
|
||||
|
||||
'multipolygon': function(str) {
|
||||
var polygon;
|
||||
var polygons = OpenLayers.String.trim(str).split(this.regExes.doubleParenComma);
|
||||
var components = [];
|
||||
for(var i=0, len=polygons.length; i<len; ++i) {
|
||||
polygon = polygons[i].replace(this.regExes.trimParens, '$1');
|
||||
components.push(this.parse.polygon.apply(this, [polygon]).geometry);
|
||||
}
|
||||
return new OpenLayers.Feature.Vector(
|
||||
new OpenLayers.Geometry.MultiPolygon(components)
|
||||
);
|
||||
},
|
||||
|
||||
'geometrycollection': function(str) {
|
||||
// separate components of the collection with |
|
||||
str = str.replace(/,\s*([A-Za-z])/g, '|$1');
|
||||
var wktArray = OpenLayers.String.trim(str).split('|');
|
||||
var components = [];
|
||||
for(var i=0, len=wktArray.length; i<len; ++i) {
|
||||
components.push(OpenLayers.Format.WKT.prototype.read.apply(this,[wktArray[i]]));
|
||||
}
|
||||
return components;
|
||||
}
|
||||
},
|
||||
|
||||
extractGeometry: function(geometry) {
|
||||
var type = geometry.CLASS_NAME.split('.')[2].toLowerCase();
|
||||
if (!this.extract[type]) {
|
||||
return null;
|
||||
}
|
||||
if (this.internalProjection && this.externalProjection) {
|
||||
geometry = geometry.clone();
|
||||
geometry.transform(this.internalProjection, this.externalProjection);
|
||||
}
|
||||
var wktType = type == 'collection' ? 'GEOMETRYCOLLECTION' : type.toUpperCase();
|
||||
var data = wktType + '(' + this.extract[type].apply(this, [geometry]) + ')';
|
||||
return data;
|
||||
},
|
||||
|
||||
/**
|
||||
* Patched write: successfully writes WKT for geometries and
|
||||
* geometrycollections.
|
||||
*/
|
||||
write: function(features) {
|
||||
var collection, geometry, type, data, isCollection;
|
||||
isCollection = features.geometry.CLASS_NAME == "OpenLayers.Geometry.Collection";
|
||||
var pieces = [];
|
||||
if (isCollection) {
|
||||
collection = features.geometry.components;
|
||||
pieces.push('GEOMETRYCOLLECTION(');
|
||||
for (var i=0, len=collection.length; i<len; ++i) {
|
||||
if (i>0) {
|
||||
pieces.push(',');
|
||||
}
|
||||
pieces.push(this.extractGeometry(collection[i]));
|
||||
}
|
||||
pieces.push(')');
|
||||
} else {
|
||||
pieces.push(this.extractGeometry(features.geometry));
|
||||
}
|
||||
return pieces.join('');
|
||||
},
|
||||
|
||||
CLASS_NAME: "OpenLayers.Format.DjangoWKT"
|
||||
});
|
||||
|
||||
function MapWidget(options) {
|
||||
this.map = null;
|
||||
this.controls = null;
|
||||
this.panel = null;
|
||||
this.layers = {};
|
||||
this.wkt_f = new OpenLayers.Format.DjangoWKT();
|
||||
|
||||
// Mapping from OGRGeomType name to OpenLayers.Geometry name
|
||||
if (options['geom_name'] == 'Unknown') options['geom_type'] = OpenLayers.Geometry;
|
||||
else if (options['geom_name'] == 'GeometryCollection') options['geom_type'] = OpenLayers.Geometry.Collection;
|
||||
else options['geom_type'] = eval('OpenLayers.Geometry' + options['geom_name']);
|
||||
|
||||
// Default options
|
||||
this.options = {
|
||||
color: 'ee9900',
|
||||
default_lat: 0,
|
||||
default_lon: 0,
|
||||
default_zoom: 4,
|
||||
is_collection: options['geom_type'] instanceof OpenLayers.Geometry.Collection,
|
||||
layerswitcher: false,
|
||||
map_options: {},
|
||||
map_srid: 4326,
|
||||
modifiable: true,
|
||||
mouse_position: false,
|
||||
opacity: 0.4,
|
||||
point_zoom: 12,
|
||||
scale_text: false,
|
||||
scrollable: true
|
||||
};
|
||||
|
||||
// Altering using user-provied options
|
||||
for (var property in options) {
|
||||
if (options.hasOwnProperty(property)) {
|
||||
this.options[property] = options[property];
|
||||
}
|
||||
}
|
||||
|
||||
this.map = new OpenLayers.Map(this.options.map_id, this.options.map_options);
|
||||
if (this.options.base_layer) this.layers.base = this.options.base_layer;
|
||||
else this.layers.base = new OpenLayers.Layer.WMS('OpenLayers WMS', 'http://vmap0.tiles.osgeo.org/wms/vmap0', {layers: 'basic'});
|
||||
this.map.addLayer(this.layers.base);
|
||||
|
||||
var defaults_style = {
|
||||
'fillColor': '#' + this.options.color,
|
||||
'fillOpacity': this.options.opacity,
|
||||
'strokeColor': '#' + this.options.color,
|
||||
};
|
||||
if (this.options.geom_name == 'LineString') {
|
||||
defaults_style['strokeWidth'] = 3;
|
||||
}
|
||||
var styleMap = new OpenLayers.StyleMap({'default': OpenLayers.Util.applyDefaults(defaults_style, OpenLayers.Feature.Vector.style['default'])});
|
||||
this.layers.vector = new OpenLayers.Layer.Vector(" " + this.options.name, {styleMap: styleMap});
|
||||
this.map.addLayer(this.layers.vector);
|
||||
wkt = document.getElementById(this.options.id).value;
|
||||
if (wkt) {
|
||||
var feat = OpenLayers.Util.properFeatures(this.read_wkt(wkt), this.options.geom_type);
|
||||
this.write_wkt(feat);
|
||||
if (this.options.is_collection) {
|
||||
for (var i=0; i<this.num_geom; i++) {
|
||||
this.layers.vector.addFeatures([new OpenLayers.Feature.Vector(feat.geometry.components[i].clone())]);
|
||||
}
|
||||
} else {
|
||||
this.layers.vector.addFeatures([feat]);
|
||||
}
|
||||
this.map.zoomToExtent(feat.geometry.getBounds());
|
||||
if (this.options.geom_name == 'Point') {
|
||||
this.map.zoomTo(this.options.point_zoom);
|
||||
}
|
||||
} else {
|
||||
this.map.setCenter(this.defaultCenter(), this.options.default_zoom);
|
||||
}
|
||||
this.layers.vector.events.on({'featuremodified': this.modify_wkt, scope: this});
|
||||
this.layers.vector.events.on({'featureadded': this.add_wkt, scope: this});
|
||||
|
||||
this.getControls(this.layers.vector);
|
||||
this.panel.addControls(this.controls);
|
||||
this.map.addControl(this.panel);
|
||||
this.addSelectControl();
|
||||
|
||||
if (this.options.mouse_position) {
|
||||
this.map.addControl(new OpenLayers.Control.MousePosition());
|
||||
}
|
||||
if (this.options.scale_text) {
|
||||
this.map.addControl(new OpenLayers.Control.Scale());
|
||||
}
|
||||
if (this.options.layerswitcher) {
|
||||
this.map.addControl(new OpenLayers.Control.LayerSwitcher());
|
||||
}
|
||||
if (!this.options.scrollable) {
|
||||
this.map.getControlsByClass('OpenLayers.Control.Navigation')[0].disableZoomWheel();
|
||||
}
|
||||
if (wkt) {
|
||||
if (this.options.modifiable) {
|
||||
this.enableEditing();
|
||||
}
|
||||
} else {
|
||||
this.enableDrawing();
|
||||
}
|
||||
}
|
||||
|
||||
MapWidget.prototype.get_ewkt = function(feat) {
|
||||
return "SRID=" + this.options.map_srid + ";" + this.wkt_f.write(feat);
|
||||
};
|
||||
|
||||
MapWidget.prototype.read_wkt = function(wkt) {
|
||||
var prefix = 'SRID=' + this.options.map_srid + ';';
|
||||
if (wkt.indexOf(prefix) === 0) {
|
||||
wkt = wkt.slice(prefix.length);
|
||||
}
|
||||
return this.wkt_f.read(wkt);
|
||||
};
|
||||
|
||||
MapWidget.prototype.write_wkt = function(feat) {
|
||||
feat = OpenLayers.Util.properFeatures(feat, this.options.geom_type);
|
||||
if (this.options.is_collection) {
|
||||
this.num_geom = feat.geometry.components.length;
|
||||
} else {
|
||||
this.num_geom = 1;
|
||||
}
|
||||
document.getElementById(this.options.id).value = this.get_ewkt(feat);
|
||||
};
|
||||
|
||||
MapWidget.prototype.add_wkt = function(event) {
|
||||
if (this.options.is_collection) {
|
||||
var feat = new OpenLayers.Feature.Vector(new this.options.geom_type());
|
||||
for (var i=0; i<this.layers.vector.features.length; i++) {
|
||||
feat.geometry.addComponents([this.layers.vector.features[i].geometry]);
|
||||
}
|
||||
this.write_wkt(feat);
|
||||
} else {
|
||||
if (this.layers.vector.features.length > 1) {
|
||||
var old_feats = [this.layers.vector.features[0]];
|
||||
this.layers.vector.removeFeatures(old_feats);
|
||||
this.layers.vector.destroyFeatures(old_feats);
|
||||
}
|
||||
this.write_wkt(event.feature);
|
||||
}
|
||||
};
|
||||
|
||||
MapWidget.prototype.modify_wkt = function(event) {
|
||||
if (this.options.is_collection) {
|
||||
if (this.options.geom_name == 'MultiPoint') {
|
||||
this.add_wkt(event);
|
||||
return;
|
||||
} else {
|
||||
var feat = new OpenLayers.Feature.Vector(new this.options.geom_type());
|
||||
for (var i=0; i<this.num_geom; i++) {
|
||||
feat.geometry.addComponents([this.layers.vector.features[i].geometry]);
|
||||
}
|
||||
this.write_wkt(feat);
|
||||
}
|
||||
} else {
|
||||
this.write_wkt(event.feature);
|
||||
}
|
||||
};
|
||||
|
||||
MapWidget.prototype.deleteFeatures = function() {
|
||||
this.layers.vector.removeFeatures(this.layers.vector.features);
|
||||
this.layers.vector.destroyFeatures();
|
||||
};
|
||||
|
||||
MapWidget.prototype.clearFeatures = function() {
|
||||
this.deleteFeatures();
|
||||
document.getElementById(this.options.id).value = '';
|
||||
this.map.setCenter(this.defaultCenter(), this.options.default_zoom);
|
||||
};
|
||||
|
||||
MapWidget.prototype.defaultCenter = function() {
|
||||
var center = new OpenLayers.LonLat(this.options.default_lon, this.options.default_lat);
|
||||
if (this.options.map_srid) {
|
||||
return center.transform(new OpenLayers.Projection("EPSG:4326"), this.map.getProjectionObject());
|
||||
}
|
||||
return center;
|
||||
};
|
||||
|
||||
MapWidget.prototype.addSelectControl = function() {
|
||||
var select = new OpenLayers.Control.SelectFeature(this.layers.vector, {'toggle': true, 'clickout': true});
|
||||
this.map.addControl(select);
|
||||
select.activate();
|
||||
};
|
||||
|
||||
MapWidget.prototype.enableDrawing = function () {
|
||||
this.map.getControlsByClass('OpenLayers.Control.DrawFeature')[0].activate();
|
||||
};
|
||||
|
||||
MapWidget.prototype.enableEditing = function () {
|
||||
this.map.getControlsByClass('OpenLayers.Control.ModifyFeature')[0].activate();
|
||||
};
|
||||
|
||||
MapWidget.prototype.getControls = function(layer) {
|
||||
this.panel = new OpenLayers.Control.Panel({'displayClass': 'olControlEditingToolbar'});
|
||||
this.controls = [new OpenLayers.Control.Navigation()];
|
||||
if (!this.options.modifiable && layer.features.length)
|
||||
return;
|
||||
if (this.options.geom_name == 'LineString' || this.options.geom_name == 'Unknown') {
|
||||
this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Path, {'displayClass': 'olControlDrawFeaturePath'}));
|
||||
}
|
||||
if (this.options.geom_name == 'Polygon' || this.options.geom_name == 'Unknown') {
|
||||
this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Polygon, {'displayClass': 'olControlDrawFeaturePolygon'}));
|
||||
}
|
||||
if (this.options.geom_name == 'Point' || this.options.geom_name == 'Unknown') {
|
||||
this.controls.push(new OpenLayers.Control.DrawFeature(layer, OpenLayers.Handler.Point, {'displayClass': 'olControlDrawFeaturePoint'}));
|
||||
}
|
||||
if (this.options.modifiable) {
|
||||
this.controls.push(new OpenLayers.Control.ModifyFeature(layer, {'displayClass': 'olControlModifyFeature'}));
|
||||
}
|
||||
};
|
||||
window.MapWidget = MapWidget;
|
||||
})();
|
|
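For context on the EWKT handling above: get_ewkt() prepends an 'SRID=<srid>;' prefix to the WKT that read_wkt() strips off again, mirroring what GeoDjango's GEOSGeometry does on the Python side. A minimal sketch, assuming GEOS is available (the coordinates are illustrative):

from django.contrib.gis.geos import GEOSGeometry

geom = GEOSGeometry('SRID=4326;POINT(-95.36 29.76)')  # EWKT in
print(geom.srid)  # 4326 -- the prefix becomes the geometry's SRID
print(geom.wkt)   # plain WKT, without the SRID prefix
print(geom.ewkt)  # round-trips back to 'SRID=4326;POINT (...)'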
@ -12,7 +12,7 @@ OpenLayers.Projection.addTransform("EPSG:4326", "EPSG:3857", OpenLayers.Layer.Sp
|
|||
{{ module }}.is_point = {{ is_point|yesno:"true,false" }};
|
||||
{% endblock %}
|
||||
{{ module }}.get_ewkt = function(feat){
|
||||
return 'SRID={{ srid }};' + {{ module }}.wkt_f.write(feat);
|
||||
return 'SRID={{ srid|unlocalize }};' + {{ module }}.wkt_f.write(feat);
|
||||
};
|
||||
{{ module }}.read_wkt = function(wkt){
|
||||
// OpenLayers cannot handle EWKT -- we make sure to strip it out.
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
{% extends "gis/openlayers.html" %}
|
||||
{% load l10n %}
|
||||
|
||||
{% block map_options %}var map_options = {
|
||||
maxExtent: new OpenLayers.Bounds(-20037508,-20037508,20037508,20037508),
|
||||
maxResolution: 156543.0339,
|
||||
numZoomLevels: 20,
|
||||
units: 'm'
|
||||
};{% endblock %}
|
||||
|
||||
{% block options %}{{ block.super }}
|
||||
options['scale_text'] = true;
|
||||
options['mouse_position'] = true;
|
||||
options['default_lon'] = {{ default_lon|unlocalize }};
|
||||
options['default_lat'] = {{ default_lat|unlocalize }};
|
||||
options['base_layer'] = new OpenLayers.Layer.OSM.Mapnik("OpenStreetMap (Mapnik)");
|
||||
{% endblock %}
|
|
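The maxExtent bounds in the block above are the edges of the spherical Mercator (EPSG:3857/900913) world square. A quick stand-alone check of where the ±20037508 figure comes from:

import math

EARTH_RADIUS = 6378137  # WGS84 semi-major axis, in metres
print(math.pi * EARTH_RADIUS)  # ~20037508.34, the half-width used above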
@ -0,0 +1,34 @@
|
|||
<style type="text/css">{% block map_css %}
|
||||
#{{ id }}_map { width: {{ map_width }}px; height: {{ map_height }}px; }
|
||||
#{{ id }}_map .aligned label { float: inherit; }
|
||||
#{{ id }}_div_map { position: relative; vertical-align: top; float: {{ LANGUAGE_BIDI|yesno:"right,left" }}; }
|
||||
{% if not display_wkt %}#{{ id }} { display: none; }{% endif %}
|
||||
.olControlEditingToolbar .olControlModifyFeatureItemActive {
|
||||
background-image: url("{{ STATIC_URL }}admin/img/gis/move_vertex_on.png");
|
||||
background-repeat: no-repeat;
|
||||
}
|
||||
.olControlEditingToolbar .olControlModifyFeatureItemInactive {
|
||||
background-image: url("{{ STATIC_URL }}admin/img/gis/move_vertex_off.png");
|
||||
background-repeat: no-repeat;
|
||||
}{% endblock %}
|
||||
</style>
|
||||
|
||||
<div id="{{ id }}_div_map">
|
||||
<div id="{{ id }}_map"></div>
|
||||
<span class="clear_features"><a href="javascript:{{ module }}.clearFeatures()">Delete all Features</a></span>
|
||||
{% if display_wkt %}<p> WKT debugging window:</p>{% endif %}
|
||||
<textarea id="{{ id }}" class="vWKTField required" cols="150" rows="10" name="{{ name }}">{{ wkt }}</textarea>
|
||||
<script type="text/javascript">
|
||||
{% block map_options %}var map_options = {};{% endblock %}
|
||||
{% block options %}var options = {
|
||||
geom_name: '{{ geom_type }}',
|
||||
id: '{{ id }}',
|
||||
map_id: '{{ id }}_map',
|
||||
map_options: map_options,
|
||||
map_srid: {{ map_srid }},
|
||||
name: '{{ name }}'
|
||||
};
|
||||
{% endblock %}
|
||||
var {{ module }} = new MapWidget(options);
|
||||
</script>
|
||||
</div>
|
|
@ -1,13 +1,4 @@
|
|||
from django.conf import settings
|
||||
from django.test.simple import build_suite, DjangoTestSuiteRunner
|
||||
from django.utils import unittest
|
||||
|
||||
from .test_geoforms import GeometryFieldTest
|
||||
from .test_measure import DistanceTest, AreaTest
|
||||
from .test_spatialrefsys import SpatialRefSysTest
|
||||
|
||||
|
||||
def geo_apps(namespace=True, runtests=False):
|
||||
def geo_apps():
|
||||
"""
|
||||
Returns a list of GeoDjango test applications that reside in
|
||||
`django.contrib.gis.tests` that can be used with the current
|
||||
|
@ -36,88 +27,4 @@ def geo_apps(namespace=True, runtests=False):
|
|||
# 3D apps use LayerMapping, which uses GDAL, and require GEOS 3.1+.
|
||||
if connection.ops.postgis and GEOS_PREPARE:
|
||||
apps.append('geo3d')
|
||||
if runtests:
|
||||
return [('django.contrib.gis.tests', app) for app in apps]
|
||||
elif namespace:
|
||||
return ['django.contrib.gis.tests.%s' % app
|
||||
for app in apps]
|
||||
else:
|
||||
return apps
|
||||
|
||||
|
||||
def geodjango_suite(apps=True):
|
||||
"""
|
||||
Returns a TestSuite consisting only of GeoDjango tests that can be run.
|
||||
"""
|
||||
import sys
|
||||
from django.db.models import get_app
|
||||
|
||||
suite = unittest.TestSuite()
|
||||
|
||||
# Adding the GEOS tests.
|
||||
from django.contrib.gis.geos import tests as geos_tests
|
||||
suite.addTest(geos_tests.suite())
|
||||
|
||||
# Adding GDAL tests, and any test suite that depends on GDAL, to the
|
||||
# suite if GDAL is available.
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
if HAS_GDAL:
|
||||
from django.contrib.gis.gdal import tests as gdal_tests
|
||||
suite.addTest(gdal_tests.suite())
|
||||
else:
|
||||
sys.stderr.write('GDAL not available - no tests requiring GDAL will be run.\n')
|
||||
|
||||
# Add GeoIP tests to the suite, if the library and data is available.
|
||||
from django.contrib.gis.geoip import HAS_GEOIP
|
||||
if HAS_GEOIP and hasattr(settings, 'GEOIP_PATH'):
|
||||
from django.contrib.gis.geoip import tests as geoip_tests
|
||||
suite.addTest(geoip_tests.suite())
|
||||
|
||||
# Finally, adding the suites for each of the GeoDjango test apps.
|
||||
if apps:
|
||||
for app_name in geo_apps(namespace=False):
|
||||
suite.addTest(build_suite(get_app(app_name)))
|
||||
|
||||
return suite
|
||||
|
||||
|
||||
class GeoDjangoTestSuiteRunner(DjangoTestSuiteRunner):
|
||||
|
||||
def setup_test_environment(self, **kwargs):
|
||||
super(GeoDjangoTestSuiteRunner, self).setup_test_environment(**kwargs)
|
||||
|
||||
# Saving original values of INSTALLED_APPS, ROOT_URLCONF, and SITE_ID.
|
||||
self.old_installed = getattr(settings, 'INSTALLED_APPS', None)
|
||||
self.old_root_urlconf = getattr(settings, 'ROOT_URLCONF', '')
|
||||
self.old_site_id = getattr(settings, 'SITE_ID', None)
|
||||
|
||||
# Constructing the new INSTALLED_APPS, and including applications
|
||||
# within the GeoDjango test namespace.
|
||||
new_installed = [
|
||||
'django.contrib.sites',
|
||||
'django.contrib.sitemaps',
|
||||
'django.contrib.gis',
|
||||
]
|
||||
|
||||
# Calling out to `geo_apps` to get GeoDjango applications supported
|
||||
# for testing.
|
||||
new_installed.extend(geo_apps())
|
||||
settings.INSTALLED_APPS = list(self.old_installed) + new_installed
|
||||
|
||||
# SITE_ID needs to be set
|
||||
settings.SITE_ID = 1
|
||||
|
||||
# ROOT_URLCONF needs to be set, else `AttributeErrors` are raised
|
||||
# when TestCases are torn down that have `urls` defined.
|
||||
settings.ROOT_URLCONF = ''
|
||||
|
||||
|
||||
def teardown_test_environment(self, **kwargs):
|
||||
super(GeoDjangoTestSuiteRunner, self).teardown_test_environment(**kwargs)
|
||||
settings.INSTALLED_APPS = self.old_installed
|
||||
settings.ROOT_URLCONF = self.old_root_urlconf
|
||||
settings.SITE_ID = self.old_site_id
|
||||
|
||||
|
||||
def build_suite(self, test_labels, extra_tests=None, **kwargs):
|
||||
return geodjango_suite()
|
||||
return [('django.contrib.gis.tests', app) for app in apps]
|
||||
|
|
|
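The hunks that follow convert the GeoDjango test modules to a common guard pattern: imports that need GEOS, GDAL or a spatial database happen only when the corresponding flag is true, and the whole TestCase is skipped otherwise. A minimal sketch of that pattern (the test body is illustrative, not taken from the suite):

from django.contrib.gis.geos import HAS_GEOS
from django.test import TestCase
from django.utils.unittest import skipUnless

if HAS_GEOS:
    # Only import objects that need the GEOS C library when it is present.
    from django.contrib.gis.geos import Point


@skipUnless(HAS_GEOS, "GEOS is required.")
class ExamplePointTest(TestCase):
    def test_xy(self):
        self.assertEqual(Point(1, 2).x, 1.0)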
@ -2,24 +2,33 @@ from __future__ import absolute_import
|
|||
|
||||
from django.db import connection
|
||||
from django.db.models import Q
|
||||
from django.contrib.gis.geos import GEOSGeometry, LineString
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.measure import D # alias for Distance
|
||||
from django.contrib.gis.tests.utils import oracle, postgis, spatialite, no_oracle, no_spatialite
|
||||
from django.contrib.gis.tests.utils import (
|
||||
HAS_SPATIAL_DB, mysql, oracle, postgis, spatialite, no_oracle, no_spatialite
|
||||
)
|
||||
from django.test import TestCase
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import (AustraliaCity, Interstate, SouthTexasInterstate,
|
||||
SouthTexasCity, SouthTexasCityFt, CensusZipcode, SouthTexasZipcode)
|
||||
if HAS_GEOS and HAS_SPATIAL_DB:
|
||||
from django.contrib.gis.geos import GEOSGeometry, LineString
|
||||
|
||||
from .models import (AustraliaCity, Interstate, SouthTexasInterstate,
|
||||
SouthTexasCity, SouthTexasCityFt, CensusZipcode, SouthTexasZipcode)
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB and not mysql,
|
||||
"Geos and spatial db (not mysql) are required.")
|
||||
class DistanceTest(TestCase):
|
||||
|
||||
# A point we are testing distances with -- using a WGS84
|
||||
# coordinate that'll be implicitly transformed to
|
||||
# the coordinate system of the field, EPSG:32140 (Texas South Central
|
||||
# w/units in meters)
|
||||
stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
|
||||
# Another one for Australia
|
||||
au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
|
||||
if HAS_GEOS and HAS_SPATIAL_DB:
|
||||
# A point we are testing distances with -- using a WGS84
|
||||
# coordinate that'll be implicitly transformed to
|
||||
# the coordinate system of the field, EPSG:32140 (Texas South Central
|
||||
# w/units in meters)
|
||||
stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
|
||||
# Another one for Australia
|
||||
au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
|
||||
|
||||
def get_names(self, qs):
|
||||
cities = [c.name for c in qs]
|
||||
|
|
|
@ -3,14 +3,22 @@ from __future__ import absolute_import, unicode_literals
|
|||
import os
|
||||
import re
|
||||
|
||||
from django.contrib.gis.db.models import Union, Extent3D
|
||||
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
|
||||
from django.contrib.gis.utils import LayerMapping, LayerMapError
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import postgis
|
||||
from django.test import TestCase
|
||||
from django.utils._os import upath
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import (City3D, Interstate2D, Interstate3D, InterstateProj2D,
|
||||
InterstateProj3D, Point2D, Point3D, MultiPoint3D, Polygon2D, Polygon3D)
|
||||
if HAS_GEOS:
|
||||
from django.contrib.gis.db.models import Union, Extent3D
|
||||
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
|
||||
|
||||
from .models import (City3D, Interstate2D, Interstate3D, InterstateProj2D,
|
||||
InterstateProj3D, Point2D, Point3D, MultiPoint3D, Polygon2D, Polygon3D)
|
||||
|
||||
if HAS_GDAL:
|
||||
from django.contrib.gis.utils import LayerMapping, LayerMapError
|
||||
|
||||
|
||||
data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data'))
|
||||
|
@ -54,6 +62,7 @@ bbox_data = (
|
|||
)
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_GDAL and postgis, "Geos, GDAL and postgis are required.")
|
||||
class Geo3DTest(TestCase):
|
||||
"""
|
||||
Only a subset of the PostGIS routines are 3D-enabled, and this TestCase
|
||||
|
|
|
@ -1,12 +1,18 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
from django.test import TestCase
|
||||
from django.contrib.gis import admin
|
||||
from django.contrib.gis.geos import GEOSGeometry, Point
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import City
|
||||
if HAS_GEOS and HAS_SPATIAL_DB:
|
||||
from django.contrib.gis import admin
|
||||
from django.contrib.gis.geos import Point
|
||||
|
||||
from .models import City
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
|
||||
class GeoAdminTest(TestCase):
|
||||
urls = 'django.contrib.gis.tests.geoadmin.urls'
|
||||
|
||||
|
@ -24,10 +30,7 @@ class GeoAdminTest(TestCase):
|
|||
result)
|
||||
|
||||
def test_olmap_WMS_rendering(self):
|
||||
admin.site.unregister(City)
|
||||
admin.site.register(City, admin.GeoModelAdmin)
|
||||
|
||||
geoadmin = admin.site._registry[City]
|
||||
geoadmin = admin.GeoModelAdmin(City, admin.site)
|
||||
result = geoadmin.get_map_widget(City._meta.get_field('point'))(
|
||||
).render('point', Point(-79.460734, 40.18476))
|
||||
self.assertIn(
|
||||
|
|
|
@ -4,11 +4,16 @@ from xml.dom import minidom
|
|||
|
||||
from django.conf import settings
|
||||
from django.contrib.sites.models import Site
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
|
||||
from django.test import TestCase
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import City
|
||||
if HAS_GEOS:
|
||||
from .models import City
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
|
||||
class GeoFeedTest(TestCase):
|
||||
|
||||
urls = 'django.contrib.gis.tests.geoapp.urls'
|
||||
|
|
|
@ -3,14 +3,19 @@ from __future__ import absolute_import, unicode_literals
|
|||
|
||||
from datetime import datetime
|
||||
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import no_mysql, no_spatialite
|
||||
from django.contrib.gis.shortcuts import render_to_kmz
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
|
||||
from django.db.models import Count, Min
|
||||
from django.test import TestCase
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import City, PennsylvaniaCity, State, Truth
|
||||
if HAS_GEOS:
|
||||
from .models import City, PennsylvaniaCity, State, Truth
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
|
||||
class GeoRegressionTests(TestCase):
|
||||
|
||||
def test_update(self):
|
||||
|
@ -72,8 +77,8 @@ class GeoRegressionTests(TestCase):
|
|||
t1 = Truth.objects.create(val=True)
|
||||
t2 = Truth.objects.create(val=False)
|
||||
|
||||
val1 = Truth.objects.get(pk=1).val
|
||||
val2 = Truth.objects.get(pk=2).val
|
||||
val1 = Truth.objects.get(pk=t1.pk).val
|
||||
val2 = Truth.objects.get(pk=t2.pk).val
|
||||
# verify types -- shouldn't be 0/1
|
||||
self.assertIsInstance(val1, bool)
|
||||
self.assertIsInstance(val2, bool)
|
||||
|
|
|
@ -5,12 +5,17 @@ from xml.dom import minidom
|
|||
import zipfile
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
|
||||
from django.contrib.sites.models import Site
|
||||
from django.test import TestCase
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import City, Country
|
||||
if HAS_GEOS:
|
||||
from .models import City, Country
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
|
||||
class GeoSitemapTest(TestCase):
|
||||
|
||||
urls = 'django.contrib.gis.tests.geoapp.urls'
|
||||
|
|
|
@ -3,26 +3,31 @@ from __future__ import absolute_import
|
|||
import re
|
||||
|
||||
from django.db import connection
|
||||
from django.db.utils import DatabaseError
|
||||
from django.contrib.gis import gdal
|
||||
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
|
||||
Point, LineString, LinearRing, Polygon, GeometryCollection)
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import (
|
||||
no_mysql, no_oracle, no_spatialite,
|
||||
mysql, oracle, postgis, spatialite)
|
||||
from django.test import TestCase
|
||||
from django.utils import six, unittest
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import Country, City, PennsylvaniaCity, State, Track
|
||||
if HAS_GEOS:
|
||||
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
|
||||
Point, LineString, LinearRing, Polygon, GeometryCollection)
|
||||
|
||||
from .test_feeds import GeoFeedTest
|
||||
from .test_regress import GeoRegressionTests
|
||||
from .test_sitemaps import GeoSitemapTest
|
||||
from .models import Country, City, PennsylvaniaCity, State, Track
|
||||
|
||||
|
||||
if not spatialite:
|
||||
if HAS_GEOS and not spatialite:
|
||||
from .models import Feature, MinusOneSRID
|
||||
|
||||
|
||||
def postgis_bug_version():
|
||||
spatial_version = getattr(connection.ops, "spatial_version", (0,0,0))
|
||||
return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
|
||||
class GeoModelTest(TestCase):
|
||||
|
||||
def test_fixtures(self):
|
||||
|
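The postgis_bug_version() helper above relies on lexicographic tuple comparison, so it matches exactly the PostGIS 2.0.0 and 2.0.1 releases. For instance:

print((2, 0, 0) <= (2, 0, 1) <= (2, 0, 1))  # True  -> lookups marked as expected failures
print((2, 0, 0) <= (2, 0, 2) <= (2, 0, 1))  # False -> tests run normally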
@ -197,6 +202,7 @@ class GeoModelTest(TestCase):
|
|||
self.assertTrue(isinstance(cities2[0].point, Point))
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
|
||||
class GeoLookupTest(TestCase):
|
||||
|
||||
@no_mysql
|
||||
|
@ -297,7 +303,7 @@ class GeoLookupTest(TestCase):
|
|||
|
||||
# The left/right lookup tests are known failures on PostGIS 2.0/2.0.1
|
||||
# http://trac.osgeo.org/postgis/ticket/2035
|
||||
if connection.ops.postgis and (2, 0, 0) <= connection.ops.spatial_version <= (2, 0, 1):
|
||||
if postgis_bug_version():
|
||||
test_left_right_lookups = unittest.expectedFailure(test_left_right_lookups)
|
||||
|
||||
def test_equals_lookups(self):
|
||||
|
@ -382,6 +388,7 @@ class GeoLookupTest(TestCase):
|
|||
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
|
||||
class GeoQuerySetTest(TestCase):
|
||||
# Please keep the tests in GeoQuerySet method's alphabetic order
|
||||
|
||||
|
|
|
@ -5,14 +5,19 @@ from __future__ import absolute_import
|
|||
|
||||
import os
|
||||
|
||||
from django.contrib.gis import gdal
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.measure import D
|
||||
from django.contrib.gis.tests.utils import postgis
|
||||
from django.test import TestCase
|
||||
from django.utils._os import upath
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import City, County, Zipcode
|
||||
if HAS_GEOS:
|
||||
from .models import City, County, Zipcode
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
|
||||
class GeographyTest(TestCase):
|
||||
|
||||
def test01_fixture_load(self):
|
||||
|
@ -54,11 +59,11 @@ class GeographyTest(TestCase):
|
|||
htown = City.objects.get(name='Houston')
|
||||
self.assertRaises(ValueError, City.objects.get, point__exact=htown.point)
|
||||
|
||||
@skipUnless(HAS_GDAL, "GDAL is required.")
|
||||
def test05_geography_layermapping(self):
|
||||
"Testing LayerMapping support on models with geography fields."
|
||||
# There is a similar test in `layermap` that uses the same data set,
|
||||
# but the County model here is a bit different.
|
||||
if not gdal.HAS_GDAL: return
|
||||
from django.contrib.gis.utils import LayerMapping
|
||||
|
||||
# Getting the shapefile and mapping dictionary.
|
||||
|
|
|
@ -4,13 +4,19 @@ import os
|
|||
|
||||
from django.db import connections
|
||||
from django.test import TestCase
|
||||
from django.contrib.gis.gdal import Driver
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
from django.contrib.gis.geometry.test_data import TEST_DATA
|
||||
from django.contrib.gis.utils.ogrinspect import ogrinspect
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import AllOGRFields
|
||||
if HAS_GDAL:
|
||||
from django.contrib.gis.gdal import Driver
|
||||
from django.contrib.gis.utils.ogrinspect import ogrinspect
|
||||
|
||||
from .models import AllOGRFields
|
||||
|
||||
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIAL_DB, "GDAL and spatial db are required.")
|
||||
class OGRInspectTest(TestCase):
|
||||
maxDiff = 1024
|
||||
|
||||
|
|
|
@ -5,19 +5,23 @@ import os
|
|||
from copy import copy
|
||||
from decimal import Decimal
|
||||
|
||||
from django.contrib.gis.gdal import DataSource
|
||||
from django.contrib.gis.tests.utils import mysql
|
||||
from django.contrib.gis.utils.layermapping import (LayerMapping, LayerMapError,
|
||||
InvalidDecimal, MissingForeignKey)
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB, mysql
|
||||
from django.db import router
|
||||
from django.conf import settings
|
||||
from django.test import TestCase
|
||||
from django.utils import unittest
|
||||
from django.utils.unittest import skipUnless
|
||||
from django.utils._os import upath
|
||||
|
||||
from .models import (
|
||||
City, County, CountyFeat, Interstate, ICity1, ICity2, Invalid, State,
|
||||
city_mapping, co_mapping, cofeat_mapping, inter_mapping)
|
||||
if HAS_GDAL:
|
||||
from django.contrib.gis.utils.layermapping import (LayerMapping,
|
||||
LayerMapError, InvalidDecimal, MissingForeignKey)
|
||||
from django.contrib.gis.gdal import DataSource
|
||||
|
||||
from .models import (
|
||||
City, County, CountyFeat, Interstate, ICity1, ICity2, Invalid, State,
|
||||
city_mapping, co_mapping, cofeat_mapping, inter_mapping)
|
||||
|
||||
|
||||
shp_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), os.pardir, 'data'))
|
||||
|
@ -32,6 +36,7 @@ NUMS = [1, 2, 1, 19, 1] # Number of polygons for each.
|
|||
STATES = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
|
||||
|
||||
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIAL_DB, "GDAL and spatial db are required.")
|
||||
class LayerMapTest(TestCase):
|
||||
|
||||
def test_init(self):
|
||||
|
@ -310,6 +315,7 @@ class OtherRouter(object):
|
|||
return True
|
||||
|
||||
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIAL_DB, "GDAL and spatial db are required.")
|
||||
class LayerMapRouterTest(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
|
|
@ -2,15 +2,20 @@ from __future__ import absolute_import
|
|||
|
||||
from datetime import date
|
||||
|
||||
from django.contrib.gis.geos import GEOSGeometry, Point, MultiPoint
|
||||
from django.contrib.gis.db.models import Collect, Count, Extent, F, Union
|
||||
from django.contrib.gis.geometry.backend import Geometry
|
||||
from django.contrib.gis.tests.utils import mysql, oracle, no_mysql, no_oracle, no_spatialite
|
||||
from django.contrib.gis.geos import HAS_GEOS
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB, mysql, oracle, no_mysql, no_oracle, no_spatialite
|
||||
from django.test import TestCase
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
from .models import City, Location, DirectoryEntry, Parcel, Book, Author, Article
|
||||
if HAS_GEOS:
|
||||
from django.contrib.gis.db.models import Collect, Count, Extent, F, Union
|
||||
from django.contrib.gis.geometry.backend import Geometry
|
||||
from django.contrib.gis.geos import GEOSGeometry, Point, MultiPoint
|
||||
|
||||
from .models import City, Location, DirectoryEntry, Parcel, Book, Author, Article
|
||||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
|
||||
class RelatedGeoModelTest(TestCase):
|
||||
|
||||
def test02_select_related(self):
|
||||
|
|
|
@ -1,24 +1,25 @@
|
|||
from django.forms import ValidationError
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
from django.contrib.gis.tests.utils import HAS_SPATIALREFSYS
|
||||
from django.test import SimpleTestCase
|
||||
from django.utils import six
|
||||
from django.utils import unittest
|
||||
from django.utils.unittest import skipUnless
|
||||
|
||||
|
||||
if HAS_SPATIALREFSYS:
|
||||
from django.contrib.gis import forms
|
||||
from django.contrib.gis.geos import GEOSGeometry
|
||||
|
||||
@unittest.skipUnless(HAS_GDAL and HAS_SPATIALREFSYS, "GeometryFieldTest needs gdal support and a spatial database")
|
||||
class GeometryFieldTest(unittest.TestCase):
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS, "GeometryFieldTest needs gdal support and a spatial database")
|
||||
class GeometryFieldTest(SimpleTestCase):
|
||||
|
||||
def test00_init(self):
|
||||
def test_init(self):
|
||||
"Testing GeometryField initialization with defaults."
|
||||
fld = forms.GeometryField()
|
||||
for bad_default in ('blah', 3, 'FoO', None, 0):
|
||||
self.assertRaises(ValidationError, fld.clean, bad_default)
|
||||
|
||||
def test01_srid(self):
|
||||
def test_srid(self):
|
||||
"Testing GeometryField with a SRID set."
|
||||
# Input that doesn't specify the SRID is assumed to be in the SRID
|
||||
# of the input field.
|
||||
|
@ -34,7 +35,7 @@ class GeometryFieldTest(unittest.TestCase):
|
|||
cleaned_geom = fld.clean('SRID=4326;POINT (-95.363151 29.763374)')
|
||||
self.assertTrue(xform_geom.equals_exact(cleaned_geom, tol))
|
||||
|
||||
def test02_null(self):
|
||||
def test_null(self):
|
||||
"Testing GeometryField's handling of null (None) geometries."
|
||||
# Form fields, by default, are required (`required=True`)
|
||||
fld = forms.GeometryField()
|
||||
|
@ -46,7 +47,7 @@ class GeometryFieldTest(unittest.TestCase):
|
|||
fld = forms.GeometryField(required=False)
|
||||
self.assertIsNone(fld.clean(None))
|
||||
|
||||
def test03_geom_type(self):
|
||||
def test_geom_type(self):
|
||||
"Testing GeometryField's handling of different geometry types."
|
||||
# By default, all geometry types are allowed.
|
||||
fld = forms.GeometryField()
|
||||
|
@ -60,7 +61,7 @@ class GeometryFieldTest(unittest.TestCase):
|
|||
# but rejected by `clean`
|
||||
self.assertRaises(forms.ValidationError, pnt_fld.clean, 'LINESTRING(0 0, 1 1)')
|
||||
|
||||
def test04_to_python(self):
|
||||
def test_to_python(self):
|
||||
"""
|
||||
Testing to_python returns a correct GEOSGeometry object or
|
||||
a ValidationError
|
||||
|
@ -74,13 +75,169 @@ class GeometryFieldTest(unittest.TestCase):
|
|||
self.assertRaises(forms.ValidationError, fld.to_python, wkt)
|
||||
|
||||
|
||||
def suite():
|
||||
s = unittest.TestSuite()
|
||||
s.addTest(unittest.makeSuite(GeometryFieldTest))
|
||||
return s
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
|
||||
"SpecializedFieldTest needs gdal support and a spatial database")
|
||||
class SpecializedFieldTest(SimpleTestCase):
|
||||
def setUp(self):
|
||||
self.geometries = {
|
||||
'point': GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)"),
|
||||
'multipoint': GEOSGeometry("SRID=4326;MULTIPOINT("
|
||||
"(13.18634033203125 14.504356384277344),"
|
||||
"(13.207969665527 14.490966796875),"
|
||||
"(13.177070617675 14.454917907714))"),
|
||||
'linestring': GEOSGeometry("SRID=4326;LINESTRING("
|
||||
"-8.26171875 -0.52734375,"
|
||||
"-7.734375 4.21875,"
|
||||
"6.85546875 3.779296875,"
|
||||
"5.44921875 -3.515625)"),
|
||||
'multilinestring': GEOSGeometry("SRID=4326;MULTILINESTRING("
|
||||
"(-16.435546875 -2.98828125,"
|
||||
"-17.2265625 2.98828125,"
|
||||
"-0.703125 3.515625,"
|
||||
"-1.494140625 -3.33984375),"
|
||||
"(-8.0859375 -5.9765625,"
|
||||
"8.525390625 -8.7890625,"
|
||||
"12.392578125 -0.87890625,"
|
||||
"10.01953125 7.646484375))"),
|
||||
'polygon': GEOSGeometry("SRID=4326;POLYGON("
|
||||
"(-1.669921875 6.240234375,"
|
||||
"-3.8671875 -0.615234375,"
|
||||
"5.9765625 -3.955078125,"
|
||||
"18.193359375 3.955078125,"
|
||||
"9.84375 9.4921875,"
|
||||
"-1.669921875 6.240234375))"),
|
||||
'multipolygon': GEOSGeometry("SRID=4326;MULTIPOLYGON("
|
||||
"((-17.578125 13.095703125,"
|
||||
"-17.2265625 10.8984375,"
|
||||
"-13.974609375 10.1953125,"
|
||||
"-13.359375 12.744140625,"
|
||||
"-15.732421875 13.7109375,"
|
||||
"-17.578125 13.095703125)),"
|
||||
"((-8.525390625 5.537109375,"
|
||||
"-8.876953125 2.548828125,"
|
||||
"-5.888671875 1.93359375,"
|
||||
"-5.09765625 4.21875,"
|
||||
"-6.064453125 6.240234375,"
|
||||
"-8.525390625 5.537109375)))"),
|
||||
'geometrycollection': GEOSGeometry("SRID=4326;GEOMETRYCOLLECTION("
|
||||
"POINT(5.625 -0.263671875),"
|
||||
"POINT(6.767578125 -3.603515625),"
|
||||
"POINT(8.525390625 0.087890625),"
|
||||
"POINT(8.0859375 -2.13134765625),"
|
||||
"LINESTRING("
|
||||
"6.273193359375 -1.175537109375,"
|
||||
"5.77880859375 -1.812744140625,"
|
||||
"7.27294921875 -2.230224609375,"
|
||||
"7.657470703125 -1.25244140625))"),
|
||||
}
|
||||
|
||||
def run(verbosity=2):
|
||||
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
||||
def assertMapWidget(self, form_instance):
|
||||
"""
|
||||
Make sure the MapWidget js is passed in the form media and a MapWidget
|
||||
is actually created
|
||||
"""
|
||||
self.assertTrue(form_instance.is_valid())
|
||||
rendered = form_instance.as_p()
|
||||
self.assertIn('new MapWidget(options);', rendered)
|
||||
self.assertIn('gis/js/OLMapWidget.js', str(form_instance.media))
|
||||
|
||||
if __name__=="__main__":
|
||||
run()
|
||||
def assertTextarea(self, geom, rendered):
|
||||
"""Makes sure the wkt and a textarea are in the content"""
|
||||
|
||||
self.assertIn('<textarea ', rendered)
|
||||
self.assertIn('required', rendered)
|
||||
self.assertIn(geom.wkt, rendered)
|
||||
|
||||
def test_pointfield(self):
|
||||
class PointForm(forms.Form):
|
||||
p = forms.PointField()
|
||||
|
||||
geom = self.geometries['point']
|
||||
form = PointForm(data={'p': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(PointForm().is_valid())
|
||||
invalid = PointForm(data={'p': 'some invalid geom'})
|
||||
self.assertFalse(invalid.is_valid())
|
||||
self.assertTrue('Invalid geometry value' in str(invalid.errors))
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='point']:
|
||||
self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())
|
||||
|
||||
def test_multipointfield(self):
|
||||
class PointForm(forms.Form):
|
||||
p = forms.MultiPointField()
|
||||
|
||||
geom = self.geometries['multipoint']
|
||||
form = PointForm(data={'p': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(PointForm().is_valid())
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='multipoint']:
|
||||
self.assertFalse(PointForm(data={'p': invalid.wkt}).is_valid())
|
||||
|
||||
def test_linestringfield(self):
|
||||
class LineStringForm(forms.Form):
|
||||
l = forms.LineStringField()
|
||||
|
||||
geom = self.geometries['linestring']
|
||||
form = LineStringForm(data={'l': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(LineStringForm().is_valid())
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='linestring']:
|
||||
self.assertFalse(LineStringForm(data={'l': invalid.wkt}).is_valid())
|
||||
|
||||
def test_multilinestringfield(self):
|
||||
class LineStringForm(forms.Form):
|
||||
l = forms.MultiLineStringField()
|
||||
|
||||
geom = self.geometries['multilinestring']
|
||||
form = LineStringForm(data={'l': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(LineStringForm().is_valid())
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='multilinestring']:
|
||||
self.assertFalse(LineStringForm(data={'l': invalid.wkt}).is_valid())
|
||||
|
||||
def test_polygonfield(self):
|
||||
class PolygonForm(forms.Form):
|
||||
p = forms.PolygonField()
|
||||
|
||||
geom = self.geometries['polygon']
|
||||
form = PolygonForm(data={'p': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(PolygonForm().is_valid())
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='polygon']:
|
||||
self.assertFalse(PolygonForm(data={'p': invalid.wkt}).is_valid())
|
||||
|
||||
def test_multipolygonfield(self):
|
||||
class PolygonForm(forms.Form):
|
||||
p = forms.MultiPolygonField()
|
||||
|
||||
geom = self.geometries['multipolygon']
|
||||
form = PolygonForm(data={'p': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(PolygonForm().is_valid())
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='multipolygon']:
|
||||
self.assertFalse(PolygonForm(data={'p': invalid.wkt}).is_valid())
|
||||
|
||||
def test_geometrycollectionfield(self):
|
||||
class GeometryForm(forms.Form):
|
||||
g = forms.GeometryCollectionField()
|
||||
|
||||
geom = self.geometries['geometrycollection']
|
||||
form = GeometryForm(data={'g': geom})
|
||||
self.assertTextarea(geom, form.as_p())
|
||||
self.assertMapWidget(form)
|
||||
self.assertFalse(GeometryForm().is_valid())
|
||||
|
||||
for invalid in [geom for key, geom in self.geometries.items() if key!='geometrycollection']:
|
||||
self.assertFalse(GeometryForm(data={'g': invalid.wkt}).is_valid())
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
from django.db import connection
|
||||
from django.contrib.gis.gdal import HAS_GDAL
|
||||
from django.contrib.gis.tests.utils import (no_mysql, oracle, postgis,
|
||||
spatialite, HAS_SPATIALREFSYS, SpatialRefSys)
|
||||
|
|
|
@ -35,3 +35,12 @@ elif spatialite:
|
|||
else:
|
||||
HAS_SPATIALREFSYS = False
|
||||
SpatialRefSys = None
|
||||
|
||||
|
||||
def has_spatial_db():
|
||||
# All databases must have spatial backends to run GeoDjango tests.
|
||||
spatial_dbs = [name for name, db_dict in settings.DATABASES.items()
|
||||
if db_dict['ENGINE'].startswith('django.contrib.gis')]
|
||||
return len(spatial_dbs) == len(settings.DATABASES)
|
||||
|
||||
HAS_SPATIAL_DB = has_spatial_db()
|
||||
|
|
|
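has_spatial_db() checks that every entry in settings.DATABASES uses a GeoDjango engine. An illustrative, self-contained rendition of the same check (the database aliases and engines here are made up):

DATABASES = {
    'default': {'ENGINE': 'django.contrib.gis.db.backends.postgis'},
    'legacy': {'ENGINE': 'django.db.backends.sqlite3'},
}
spatial_dbs = [name for name, db_dict in DATABASES.items()
               if db_dict['ENGINE'].startswith('django.contrib.gis')]
print(len(spatial_dbs) == len(DATABASES))  # False: 'legacy' is not spatial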
@ -201,7 +201,7 @@ class LayerMapping(object):
|
|||
if not (ltype.name.startswith(gtype.name) or self.make_multi(ltype, model_field)):
|
||||
raise LayerMapError('Invalid mapping geometry; model has %s%s, '
|
||||
'layer geometry type is %s.' %
|
||||
(fld_name, (coord_dim == 3 and '(dim=3)') or '', ltype))
|
||||
(fld_name, '(dim=3)' if coord_dim == 3 else '', ltype))
|
||||
|
||||
# Setting the `geom_field` attribute w/the name of the model field
|
||||
# that is a Geometry. Also setting the coordinate dimension
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
from django.contrib.messages.tests.test_cookie import CookieTest
|
||||
from django.contrib.messages.tests.test_fallback import FallbackTest
|
||||
from django.contrib.messages.tests.test_middleware import MiddlewareTest
|
||||
from django.contrib.messages.tests.test_session import SessionTest
|
||||
from django.contrib.messages.tests.test_mixins import SuccessMessageMixinTests
|
|
@ -1,8 +1,7 @@
|
|||
from django.views.generic.edit import FormMixin
|
||||
from django.contrib import messages
|
||||
|
||||
|
||||
class SuccessMessageMixin(FormMixin):
|
||||
class SuccessMessageMixin(object):
|
||||
"""
|
||||
Adds a success message on successful form submission.
|
||||
"""
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
from .test_flatpages import FlatpagesSitemapTests
|
||||
from .test_generic import GenericViewsSitemapTests
|
||||
from .test_http import HTTPSitemapTests
|
||||
from .test_https import HTTPSSitemapTests, HTTPSDetectionSitemapTests
|
|
@ -174,7 +174,7 @@ Type 'yes' to continue, or 'no' to cancel: """
|
|||
"%(destination)s%(unmodified)s%(post_processed)s.\n")
|
||||
summary = template % {
|
||||
'modified_count': modified_count,
|
||||
'identifier': 'static file' + (modified_count != 1 and 's' or ''),
|
||||
'identifier': 'static file' + ('' if modified_count == 1 else 's'),
|
||||
'action': self.symlink and 'symlinked' or 'copied',
|
||||
'destination': (destination_path and " to '%s'"
|
||||
% destination_path or ''),
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Utility functions for handling images.
|
||||
|
||||
Requires PIL, as you might imagine.
|
||||
Requires Pillow (or PIL), as you might imagine.
|
||||
"""
|
||||
import zlib
|
||||
|
||||
|
@ -35,11 +35,7 @@ def get_image_dimensions(file_or_path, close=False):
|
|||
'close' to True to close the file at the end if it is initially in an open
|
||||
state.
|
||||
"""
|
||||
# Try to import PIL in either of the two ways it can end up installed.
|
||||
try:
|
||||
from PIL import ImageFile as PILImageFile
|
||||
except ImportError:
|
||||
import ImageFile as PILImageFile
|
||||
from django.utils.image import ImageFile as PILImageFile
|
||||
|
||||
p = PILImageFile.Parser()
|
||||
if hasattr(file_or_path, 'read'):
|
||||
|
|
|
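The hunk above delegates the PIL/Pillow import dance to django.utils.image. A hedged sketch of the kind of shim such a module provides -- the real implementation also performs format and version checks, so treat the exact contents as an assumption:

# Assumed shape of a Pillow/PIL import shim; not the literal django.utils.image.
try:
    from PIL import Image, ImageFile  # Pillow, or a packaged PIL
except ImportError:
    import Image                      # old-style standalone PIL install
    import ImageFile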
@ -200,9 +200,9 @@ class FileSystemStorage(Storage):
|
|||
getattr(os, 'O_BINARY', 0))
|
||||
# The current umask value is masked out by os.open!
|
||||
fd = os.open(full_path, flags, 0o666)
|
||||
_file = None
|
||||
try:
|
||||
locks.lock(fd, locks.LOCK_EX)
|
||||
_file = None
|
||||
for chunk in content.chunks():
|
||||
if _file is None:
|
||||
mode = 'wb' if isinstance(chunk, bytes) else 'wt'
|
||||
|
|
|
@ -63,7 +63,7 @@ def find_management_module(app_name):
|
|||
|
||||
while parts:
|
||||
part = parts.pop()
|
||||
f, path, descr = imp.find_module(part, path and [path] or None)
|
||||
f, path, descr = imp.find_module(part, [path] if path else None)
|
||||
if f:
|
||||
f.close()
|
||||
return path
|
||||
|
|
|
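On the find_management_module() change above: imp.find_module() searches sys.path when its second argument is None, which is why the expression collapses to None rather than an empty path list. A small stand-alone example (imp is deprecated on Python 3 but still available):

import imp

f, path, descr = imp.find_module('json')  # path list None -> search sys.path
if f:
    f.close()  # packages return f=None; plain modules return an open file
print(path)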
@ -60,7 +60,7 @@ class OutputWrapper(object):
|
|||
return getattr(self._out, name)
|
||||
|
||||
def write(self, msg, style_func=None, ending=None):
|
||||
ending = ending is None and self.ending or ending
|
||||
ending = self.ending if ending is None else ending
|
||||
if ending and not msg.endswith(ending):
|
||||
msg += ending
|
||||
style_func = [f for f in (style_func, self.style_func, lambda x:x)
|
||||
|
@ -311,7 +311,7 @@ class BaseCommand(object):
|
|||
error_text = s.read()
|
||||
raise CommandError("One or more models did not validate:\n%s" % error_text)
|
||||
if display_num_errors:
|
||||
self.stdout.write("%s error%s found" % (num_errors, num_errors != 1 and 's' or ''))
|
||||
self.stdout.write("%s error%s found" % (num_errors, '' if num_errors == 1 else 's'))
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""
|
||||
|
|
|
@ -44,7 +44,7 @@ class Command(LabelCommand):
|
|||
elif f.unique:
|
||||
field_output.append("UNIQUE")
|
||||
if f.db_index:
|
||||
unique = f.unique and "UNIQUE " or ""
|
||||
unique = "UNIQUE " if f.unique else ""
|
||||
index_output.append("CREATE %sINDEX %s ON %s (%s);" % \
|
||||
(unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
|
||||
qn(f.name)))
|
||||
|
|
|
@ -65,7 +65,7 @@ class Command(BaseCommand):
|
|||
elif self.use_ipv6 and not _fqdn:
|
||||
raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
|
||||
if not self.addr:
|
||||
self.addr = self.use_ipv6 and '::1' or '127.0.0.1'
|
||||
self.addr = '::1' if self.use_ipv6 else '127.0.0.1'
|
||||
self._raw_ipv6 = bool(self.use_ipv6)
|
||||
self.run(*args, **options)
|
||||
|
||||
|
@ -86,7 +86,7 @@ class Command(BaseCommand):
|
|||
|
||||
threading = options.get('use_threading')
|
||||
shutdown_message = options.get('shutdown_message', '')
|
||||
quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'
|
||||
quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
|
||||
|
||||
self.stdout.write("Validating models...\n\n")
|
||||
self.validate(display_num_errors=True)
|
||||
|
|
|
@ -105,14 +105,10 @@ def get_validation_errors(outfile, app=None):
|
|||
if isinstance(f, models.FileField) and not f.upload_to:
|
||||
e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
|
||||
if isinstance(f, models.ImageField):
|
||||
# Try to import PIL in either of the two ways it can end up installed.
|
||||
try:
|
||||
from PIL import Image
|
||||
from django.utils.image import Image
|
||||
except ImportError:
|
||||
try:
|
||||
import Image
|
||||
except ImportError:
|
||||
e.add(opts, '"%s": To use ImageFields, you need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/ .' % f.name)
|
||||
e.add(opts, '"%s": To use ImageFields, you need to install Pillow. Get it at https://pypi.python.org/pypi/Pillow.' % f.name)
|
||||
if isinstance(f, models.BooleanField) and getattr(f, 'null', False):
|
||||
e.add(opts, '"%s": BooleanFields do not accept null values. Use a NullBooleanField instead.' % f.name)
|
||||
if isinstance(f, models.FilePathField) and not (f.allow_files or f.allow_folders):
|
||||
|
|
|
@ -161,9 +161,7 @@ class DeserializedObject(object):
|
|||
def save(self, save_m2m=True, using=None):
|
||||
# Call save on the Model baseclass directly. This bypasses any
|
||||
# model-defined save. The save is also forced to be raw.
|
||||
# This ensures that the data that is deserialized is literally
|
||||
# what came from the file, not post-processed by pre_save/save
|
||||
# methods.
|
||||
# raw=True is passed to any pre/post_save signals.
|
||||
models.Model.save_base(self.object, using=using, raw=True)
|
||||
if self.m2m_data and save_m2m:
|
||||
for accessor_name, object_list in self.m2m_data.items():
|
||||
|
|
|
@ -630,7 +630,7 @@ class Model(six.with_metaclass(ModelBase)):
|
|||
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
|
||||
if pk_set and not force_insert:
|
||||
base_qs = cls._base_manager.using(using)
|
||||
values = [(f, None, (raw and getattr(self, f.attname) or f.pre_save(self, False)))
|
||||
values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
|
||||
for f in non_pks]
|
||||
if not values:
|
||||
# We can end up here when saving a model in inheritance chain where
|
||||
|
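The values change above is more than style: with the old 'a and b or c' idiom, a falsy attribute value (0, '', None) would fall through to pre_save() even when raw=True, which the conditional expression avoids. A tiny illustration with made-up values:

raw, attr_value = True, 0                       # falsy field value, raw save
old = raw and attr_value or 'pre_save called'   # -> 'pre_save called' (wrong)
new = attr_value if raw else 'pre_save called'  # -> 0 (what was intended)
print(old, new)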
@ -697,8 +697,8 @@ class Model(six.with_metaclass(ModelBase)):
|
|||
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
|
||||
if not self.pk:
|
||||
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
|
||||
op = is_next and 'gt' or 'lt'
|
||||
order = not is_next and '-' or ''
|
||||
op = 'gt' if is_next else 'lt'
|
||||
order = '' if is_next else '-'
|
||||
param = force_text(getattr(self, field.attname))
|
||||
q = Q(**{'%s__%s' % (field.name, op): param})
|
||||
q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
|
||||
|
@ -711,8 +711,8 @@ class Model(six.with_metaclass(ModelBase)):
|
|||
def _get_next_or_previous_in_order(self, is_next):
|
||||
cachename = "__%s_order_cache" % is_next
|
||||
if not hasattr(self, cachename):
|
||||
op = is_next and 'gt' or 'lt'
|
||||
order = not is_next and '-_order' or '_order'
|
||||
op = 'gt' if is_next else 'lt'
|
||||
order = '_order' if is_next else '-_order'
|
||||
order_field = self._meta.order_with_respect_to
|
||||
obj = self._default_manager.filter(**{
|
||||
order_field.name: getattr(self, order_field.attname)
|
||||
|
|
|
@ -468,7 +468,7 @@ class Field(object):
|
|||
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):
|
||||
"""Returns choices with a default blank choices included, for use
|
||||
as SelectField choices for this field."""
|
||||
first_choice = include_blank and blank_choice or []
|
||||
first_choice = blank_choice if include_blank else []
|
||||
if self.choices:
|
||||
return first_choice + list(self.choices)
|
||||
rel_model = self.rel.to
|
||||
|
@ -491,7 +491,7 @@ class Field(object):
|
|||
"""
|
||||
Returns flattened choices with a default blank choice included.
|
||||
"""
|
||||
first_choice = include_blank and blank_choice or []
|
||||
first_choice = blank_choice if include_blank else []
|
||||
return first_choice + list(self.flatchoices)
|
||||
|
||||
def _get_val_from_obj(self, obj):
|
||||
|
|
|
@ -349,7 +349,7 @@ class Options(object):
|
|||
"""
|
||||
Returns the requested field by name. Raises FieldDoesNotExist on error.
|
||||
"""
|
||||
to_search = many_to_many and (self.fields + self.many_to_many) or self.fields
|
||||
to_search = (self.fields + self.many_to_many) if many_to_many else self.fields
|
||||
for f in to_search:
|
||||
if f.name == name:
|
||||
return f
|
||||
|
|
|
@ -869,7 +869,7 @@ class QuerySet(object):
|
|||
"""
|
||||
if self.query.extra_order_by or self.query.order_by:
|
||||
return True
|
||||
elif self.query.default_ordering and self.query.model._meta.ordering:
|
||||
elif self.query.default_ordering and self.query.get_meta().ordering:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
|
|
@ -27,7 +27,7 @@ class RelatedObject(object):
|
|||
Analogue of django.db.models.fields.Field.get_choices, provided
|
||||
initially for utilisation by RelatedFieldListFilter.
|
||||
"""
|
||||
first_choice = include_blank and blank_choice or []
|
||||
first_choice = blank_choice if include_blank else []
|
||||
queryset = self.model._default_manager.all()
|
||||
if limit_to_currently_related:
|
||||
queryset = queryset.complex_filter(
|
||||
|
|
|
@ -112,7 +112,7 @@ class StdDev(Aggregate):
|
|||
|
||||
def __init__(self, col, sample=False, **extra):
|
||||
super(StdDev, self).__init__(col, **extra)
|
||||
self.sql_function = sample and 'STDDEV_SAMP' or 'STDDEV_POP'
|
||||
self.sql_function = 'STDDEV_SAMP' if sample else 'STDDEV_POP'
|
||||
|
||||
class Sum(Aggregate):
|
||||
sql_function = 'SUM'
|
||||
|
@ -122,4 +122,4 @@ class Variance(Aggregate):
|
|||
|
||||
def __init__(self, col, sample=False, **extra):
|
||||
super(Variance, self).__init__(col, **extra)
|
||||
self.sql_function = sample and 'VAR_SAMP' or 'VAR_POP'
|
||||
self.sql_function = 'VAR_SAMP' if sample else 'VAR_POP'
|
||||
|
|
|
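As a reminder of what the two SQL functions compute: the _SAMP variants divide by n - 1 and the _POP variants by n. A quick stand-alone check (the statistics module needs Python 3.4+):

import statistics

data = [2.0, 4.0, 6.0]
print(statistics.stdev(data))   # 2.0    -- sample, like STDDEV_SAMP/VAR_SAMP
print(statistics.pstdev(data))  # ~1.633 -- population, like STDDEV_POP/VAR_POP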
@ -32,7 +32,7 @@ class SQLCompiler(object):
|
|||
# cleaned. We are not using a clone() of the query here.
|
||||
"""
|
||||
if not self.query.tables:
|
||||
self.query.join((None, self.query.model._meta.db_table, None))
|
||||
self.query.join((None, self.query.get_meta().db_table, None))
|
||||
if (not self.query.select and self.query.default_cols and not
|
||||
self.query.included_inherited_models):
|
||||
self.query.setup_inherited_models()
|
||||
|
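These compiler hunks replace direct self.query.model._meta access with self.query.get_meta(). A minimal sketch of what such an accessor is assumed to look like -- the actual definition lives in django/db/models/sql/query.py and may differ:

class Query(object):
    def get_meta(self):
        # Assumed accessor: returns the Options object previously reached via
        # `self.model._meta`, giving subclasses a single override point.
        return self.model._meta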
@ -260,7 +260,7 @@ class SQLCompiler(object):
|
|||
"""
|
||||
result = []
|
||||
if opts is None:
|
||||
opts = self.query.model._meta
|
||||
opts = self.query.get_meta()
|
||||
qn = self.quote_name_unless_alias
|
||||
qn2 = self.connection.ops.quote_name
|
||||
aliases = set()
|
||||
|
@ -309,7 +309,7 @@ class SQLCompiler(object):
|
|||
qn = self.quote_name_unless_alias
|
||||
qn2 = self.connection.ops.quote_name
|
||||
result = []
|
||||
opts = self.query.model._meta
|
||||
opts = self.query.get_meta()
|
||||
|
||||
for name in self.query.distinct_fields:
|
||||
parts = name.split(LOOKUP_SEP)
|
||||
|
@ -338,7 +338,7 @@ class SQLCompiler(object):
|
|||
ordering = self.query.order_by
|
||||
else:
|
||||
ordering = (self.query.order_by
|
||||
or self.query.model._meta.ordering
|
||||
or self.query.get_meta().ordering
|
||||
or [])
|
||||
qn = self.quote_name_unless_alias
|
||||
qn2 = self.connection.ops.quote_name
|
||||
|
@ -388,7 +388,7 @@ class SQLCompiler(object):
|
|||
# 'col' is of the form 'field' or 'field1__field2' or
|
||||
# '-field1__field2__field', etc.
|
||||
for table, cols, order in self.find_ordering_name(field,
|
||||
self.query.model._meta, default_order=asc):
|
||||
self.query.get_meta(), default_order=asc):
|
||||
for col in cols:
|
||||
if (table, col) not in processed_pairs:
|
||||
elt = '%s.%s' % (qn(table), qn2(col))
|
||||
|
@ -512,7 +512,7 @@ class SQLCompiler(object):
|
|||
# Extra tables can end up in self.tables, but not in the
|
||||
# alias_map if they aren't in a join. That's OK. We skip them.
|
||||
continue
|
||||
alias_str = (alias != name and ' %s' % alias or '')
|
||||
alias_str = '' if alias == name else (' %s' % alias)
|
||||
if join_type and not first:
|
||||
extra_cond = join_field.get_extra_restriction(
|
||||
self.query.where_class, alias, lhs)
|
||||
|
@ -532,7 +532,7 @@ class SQLCompiler(object):
|
|||
(qn(lhs), qn2(lhs_col), qn(alias), qn2(rhs_col)))
|
||||
result.append('%s)' % extra_sql)
|
||||
else:
|
||||
connector = not first and ', ' or ''
|
||||
connector = '' if first else ', '
|
||||
result.append('%s%s%s' % (connector, qn(name), alias_str))
|
||||
first = False
|
||||
for t in self.query.extra_tables:
|
||||
|
@ -541,7 +541,7 @@ class SQLCompiler(object):
|
|||
# calls increments the refcount, so an alias refcount of one means
|
||||
# this is the only reference.
|
||||
if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
|
||||
connector = not first and ', ' or ''
|
||||
connector = '' if first else ', '
|
||||
result.append('%s%s' % (connector, qn(alias)))
|
||||
first = False
|
||||
return result, from_params
@ -556,10 +556,10 @@ class SQLCompiler(object):
            select_cols = self.query.select + self.query.related_select_cols
            # Just the column, not the fields.
            select_cols = [s[0] for s in select_cols]
            if (len(self.query.model._meta.concrete_fields) == len(self.query.select)
            if (len(self.query.get_meta().concrete_fields) == len(self.query.select)
                    and self.connection.features.allows_group_by_pk):
                self.query.group_by = [
                    (self.query.model._meta.db_table, self.query.model._meta.pk.column)
                    (self.query.get_meta().db_table, self.query.get_meta().pk.column)
                ]
                select_cols = []
            seen = set()

@ -716,14 +716,14 @@ class SQLCompiler(object):
                    if self.query.select:
                        fields = [f.field for f in self.query.select]
                    else:
                        fields = self.query.model._meta.concrete_fields
                        fields = self.query.get_meta().concrete_fields
                    fields = fields + [f.field for f in self.query.related_select_cols]

                    # If the field was deferred, exclude it from being passed
                    # into `resolve_columns` because it wasn't selected.
                    only_load = self.deferred_to_columns()
                    if only_load:
                        db_table = self.query.model._meta.db_table
                        db_table = self.query.get_meta().db_table
                        fields = [f for f in fields if db_table in only_load and
                                  f.column in only_load[db_table]]
                row = self.resolve_columns(row, fields)

@ -825,7 +825,7 @@ class SQLInsertCompiler(SQLCompiler):
        # We don't need quote_name_unless_alias() here, since these are all
        # going to be column names (so we can avoid the extra overhead).
        qn = self.connection.ops.quote_name
        opts = self.query.model._meta
        opts = self.query.get_meta()
        result = ['INSERT INTO %s' % qn(opts.db_table)]

        has_fields = bool(self.query.fields)

@ -887,7 +887,7 @@ class SQLInsertCompiler(SQLCompiler):
        if self.connection.features.can_return_id_from_insert:
            return self.connection.ops.fetch_returned_insert_id(cursor)
        return self.connection.ops.last_insert_id(cursor,
                self.query.model._meta.db_table, self.query.model._meta.pk.column)
                self.query.get_meta().db_table, self.query.get_meta().pk.column)


class SQLDeleteCompiler(SQLCompiler):

@ -959,7 +959,7 @@ class SQLUpdateCompiler(SQLCompiler):
        related queries are not available.
        """
        cursor = super(SQLUpdateCompiler, self).execute_sql(result_type)
        rows = cursor and cursor.rowcount or 0
        rows = cursor.rowcount if cursor else 0
        is_empty = cursor is None
        del cursor
        for query in self.query.get_related_updates():

@ -992,7 +992,7 @@ class SQLUpdateCompiler(SQLCompiler):
        query.bump_prefix()
        query.extra = {}
        query.select = []
        query.add_fields([query.model._meta.pk.name])
        query.add_fields([query.get_meta().pk.name])
        # Recheck the count - it is possible that fiddling with the select
        # fields above removes tables from the query. Refs #18304.
        count = query.count_active_tables()


@ -532,7 +532,7 @@ class Query(object):

        # Ordering uses the 'rhs' ordering, unless it has none, in which case
        # the current ordering is used.
        self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by
        self.order_by = rhs.order_by[:] if rhs.order_by else self.order_by
        self.extra_order_by = rhs.extra_order_by or self.extra_order_by

    def deferred_to_data(self, target, callback):

@ -552,7 +552,7 @@ class Query(object):
        field_names, defer = self.deferred_loading
        if not field_names:
            return
        orig_opts = self.model._meta
        orig_opts = self.get_meta()
        seen = {}
        must_include = {orig_opts.concrete_model: set([orig_opts.pk])}
        for field_name in field_names:

@ -818,7 +818,7 @@ class Query(object):
            alias = self.tables[0]
            self.ref_alias(alias)
        else:
            alias = self.join((None, self.model._meta.db_table, None))
            alias = self.join((None, self.get_meta().db_table, None))
        return alias

    def count_active_tables(self):

@ -906,7 +906,7 @@ class Query(object):
        whereas column determination is a later part, and side-effect, of
        as_sql()).
        """
        opts = self.model._meta
        opts = self.get_meta()
        root_alias = self.tables[0]
        seen = {None: root_alias}

@ -1624,7 +1624,7 @@ class Query(object):
                    "Cannot add count col with multiple cols in 'select': %r" % self.select
                count = self.aggregates_module.Count(self.select[0].col)
        else:
            opts = self.model._meta
            opts = self.get_meta()
            if not self.select:
                count = self.aggregates_module.Count(
                    (self.join((None, opts.db_table, None)), opts.pk.column),

@ -1732,7 +1732,7 @@ class Query(object):
        field_names = set(field_names)
        if 'pk' in field_names:
            field_names.remove('pk')
            field_names.add(self.model._meta.pk.name)
            field_names.add(self.get_meta().pk.name)

        if defer:
            # Remove any existing deferred names from the current set before


@ -41,12 +41,12 @@ class DeleteQuery(Query):
        lot of values in pk_list.
        """
        if not field:
            field = self.model._meta.pk
            field = self.get_meta().pk
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            where = self.where_class()
            where.add((Constraint(None, field.column, field), 'in',
                       pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]), AND)
            self.do_query(self.model._meta.db_table, where, using=using)
            self.do_query(self.get_meta().db_table, where, using=using)

    def delete_qs(self, query, using):
        """

@ -112,7 +112,7 @@ class UpdateQuery(Query):
                         related_updates=self.related_updates.copy(), **kwargs)

    def update_batch(self, pk_list, values, using):
        pk_field = self.model._meta.pk
        pk_field = self.get_meta().pk
        self.add_update_values(values)
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            self.where = self.where_class()

@ -129,7 +129,7 @@ class UpdateQuery(Query):
        """
        values_seq = []
        for name, val in six.iteritems(values):
            field, model, direct, m2m = self.model._meta.get_field_by_name(name)
            field, model, direct, m2m = self.get_meta().get_field_by_name(name)
            if not direct or m2m:
                raise FieldError('Cannot update model field %r (only non-relations and foreign keys permitted).' % field)
            if model:

@ -236,7 +236,7 @@ class DateQuery(Query):
            )
        except FieldError:
            raise FieldDoesNotExist("%s has no field named '%s'" % (
                self.model._meta.object_name, field_name
                self.get_meta().object_name, field_name
            ))
        field = result[0]
        self._check_field(field)  # overridden in DateTimeQuery

@ -245,7 +245,7 @@ class DateQuery(Query):
        self.clear_select_clause()
        self.select = [SelectInfo(select, None)]
        self.distinct = True
        self.order_by = order == 'ASC' and [1] or [-1]
        self.order_by = [1] if order == 'ASC' else [-1]

        if field.null:
            self.add_filter(("%s__isnull" % field_name, False))
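
The recurring edit in these hunks replaces direct ``self.model._meta`` access with a call to ``get_meta()`` on the query. A minimal sketch of what such a helper looks like; the body shown is an assumption inferred from the call sites above, not copied from the changeset:

    class QuerySketch(object):
        """Illustrative stand-in for the SQL Query class."""
        def __init__(self, model):
            self.model = model

        def get_meta(self):
            # Assumed behaviour: one place that answers "which Options object
            # does this query operate on?", so callers no longer reach through
            # self.model._meta themselves.
            return self.model._meta
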
@ -602,13 +602,9 @@ class ImageField(FileField):
        if f is None:
            return None

        # Try to import PIL in either of the two ways it can end up installed.
        try:
            from PIL import Image
        except ImportError:
            import Image
        from django.utils.image import Image

        # We need to get a file object for PIL. We might have a path or we might
        # We need to get a file object for Pillow. We might have a path or we might
        # have to read the data into memory.
        if hasattr(data, 'temporary_file_path'):
            file = data.temporary_file_path()

@ -623,12 +619,8 @@ class ImageField(FileField):
            # image in memory, which is a DoS vector. See #3848 and #18520.
            # verify() must be called immediately after the constructor.
            Image.open(file).verify()
        except ImportError:
            # Under PyPy, it is possible to import PIL. However, the underlying
            # _imaging C module isn't available, so an ImportError will be
            # raised. Catch and re-raise.
            raise
        except Exception: # Python Imaging Library doesn't recognize it as an image
        except Exception:
            # Pillow (or PIL) doesn't recognize it as an image.
            six.reraise(ValidationError, ValidationError(self.error_messages['invalid_image']), sys.exc_info()[2])
        if hasattr(f, 'seek') and callable(f.seek):
            f.seek(0)


@ -523,7 +523,7 @@ class BoundField(object):
        widget = self.field.widget
        id_ = widget.attrs.get('id') or self.auto_id
        if id_:
            attrs = attrs and flatatt(attrs) or ''
            attrs = flatatt(attrs) if attrs else ''
            contents = format_html('<label for="{0}"{1}>{2}</label>',
                                   widget.id_for_label(id_), attrs, contents
                                   )


@ -119,7 +119,7 @@ class BaseFormSet(object):
            return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
        else:
            # Use the length of the initial data if it's there, 0 otherwise.
            initial_forms = self.initial and len(self.initial) or 0
            initial_forms = len(self.initial) if self.initial else 0
        return initial_forms

    def _construct_forms(self):


@ -775,7 +775,7 @@ class MultiWidget(Widget):
    You'll probably want to use this class with MultiValueField.
    """
    def __init__(self, widgets, attrs=None):
        self.widgets = [isinstance(w, type) and w() or w for w in widgets]
        self.widgets = [w() if isinstance(w, type) else w for w in widgets]
        super(MultiWidget, self).__init__(attrs)

    def render(self, name, value, attrs=None):


@ -292,7 +292,7 @@ class LazyStream(six.Iterator):

    def read(self, size=None):
        def parts():
            remaining = (size is not None and [size] or [self._remaining])[0]
            remaining = self._remaining if size is None else size
            # do the whole thing in one shot if no limit was provided.
            if remaining is None:
                yield b''.join(self)


@ -641,7 +641,7 @@ class FilterExpression(object):
                                    (name, len(nondefs), plen))

                # Defaults can be overridden.
                defaults = defaults and list(defaults) or []
                defaults = list(defaults) if defaults else []
                try:
                    for parg in provided:
                        defaults.pop(0)


@ -127,7 +127,7 @@ class ForNode(Node):
        self.nodelist_empty = nodelist_empty

    def __repr__(self):
        reversed_text = self.is_reversed and ' reversed' or ''
        reversed_text = ' reversed' if self.is_reversed else ''
        return "<For Node: for %s in %s, tail_len: %d%s>" % \
            (', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
             reversed_text)

@ -788,7 +788,7 @@ def do_for(parser, token):
                                  " words: %s" % token.contents)

    is_reversed = bits[-1] == 'reversed'
    in_index = is_reversed and -3 or -2
    in_index = -3 if is_reversed else -2
    if bits[in_index] != 'in':
        raise TemplateSyntaxError("'for' statements should use the format"
                                  " 'for x in y': %s" % token.contents)


@ -49,6 +49,13 @@ files containing doctests. There are also many ways to override parts
of doctest's default behaviors. See the Library Reference Manual for
details.
"""
import warnings

warnings.warn(
    "The django.test._doctest module is deprecated; "
    "use the doctest module from the Python standard library instead.",
    PendingDeprecationWarning)


__docformat__ = 'reStructuredText en'
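
The warning added above points users at the standard library. A minimal sketch of the suggested replacement, collecting doctests with stdlib ``doctest``; the module name ``myapp.utils`` is a placeholder, not something from this changeset:

    import doctest

    def load_tests(loader, tests, ignore):
        # ELLIPSIS mirrors the default flag the deprecated DocTestRunner set.
        tests.addTests(doctest.DocTestSuite('myapp.utils', optionflags=doctest.ELLIPSIS))
        return tests
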

@ -0,0 +1,289 @@
import os
from optparse import make_option

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.utils import setup_test_environment, teardown_test_environment
from django.utils import unittest
from django.utils.unittest import TestSuite, defaultTestLoader


class DiscoverRunner(object):
    """
    A Django test runner that uses unittest2 test discovery.
    """

    test_loader = defaultTestLoader
    reorder_by = (TestCase, )
    option_list = (
        make_option('-t', '--top-level-directory',
            action='store', dest='top_level', default=None,
            help='Top level of project for unittest discovery.'),
        make_option('-p', '--pattern', action='store', dest='pattern',
            default="test*.py",
            help='The test matching pattern. Defaults to test*.py.'),
    )

    def __init__(self, pattern=None, top_level=None,
                 verbosity=1, interactive=True, failfast=False,
                 **kwargs):

        self.pattern = pattern
        self.top_level = top_level

        self.verbosity = verbosity
        self.interactive = interactive
        self.failfast = failfast

    def setup_test_environment(self, **kwargs):
        setup_test_environment()
        settings.DEBUG = False
        unittest.installHandler()

    def build_suite(self, test_labels=None, extra_tests=None, **kwargs):
        suite = TestSuite()
        test_labels = test_labels or ['.']
        extra_tests = extra_tests or []

        discover_kwargs = {}
        if self.pattern is not None:
            discover_kwargs['pattern'] = self.pattern
        if self.top_level is not None:
            discover_kwargs['top_level_dir'] = self.top_level

        for label in test_labels:
            kwargs = discover_kwargs.copy()
            tests = None

            label_as_path = os.path.abspath(label)

            # if a module, or "module.ClassName[.method_name]", just run those
            if not os.path.exists(label_as_path):
                tests = self.test_loader.loadTestsFromName(label)
            elif os.path.isdir(label_as_path) and not self.top_level:
                # Try to be a bit smarter than unittest about finding the
                # default top-level for a given directory path, to avoid
                # breaking relative imports. (Unittest's default is to set
                # top-level equal to the path, which means relative imports
                # will result in "Attempted relative import in non-package.").

                # We'd be happy to skip this and require dotted module paths
                # (which don't cause this problem) instead of file paths (which
                # do), but in the case of a directory in the cwd, which would
                # be equally valid if considered as a top-level module or as a
                # directory path, unittest unfortunately prefers the latter.

                top_level = label_as_path
                while True:
                    init_py = os.path.join(top_level, '__init__.py')
                    if os.path.exists(init_py):
                        try_next = os.path.dirname(top_level)
                        if try_next == top_level:
                            # __init__.py all the way down? give up.
                            break
                        top_level = try_next
                        continue
                    break
                kwargs['top_level_dir'] = top_level

            if not (tests and tests.countTestCases()):
                # if no tests found, it's probably a package; try discovery
                tests = self.test_loader.discover(start_dir=label, **kwargs)

                # make unittest forget the top-level dir it calculated from this
                # run, to support running tests from two different top-levels.
                self.test_loader._top_level_dir = None

            suite.addTests(tests)

        for test in extra_tests:
            suite.addTest(test)

        return reorder_suite(suite, self.reorder_by)

    def setup_databases(self, **kwargs):
        return setup_databases(self.verbosity, self.interactive, **kwargs)

    def run_suite(self, suite, **kwargs):
        return unittest.TextTestRunner(
            verbosity=self.verbosity,
            failfast=self.failfast,
        ).run(suite)

    def teardown_databases(self, old_config, **kwargs):
        """
        Destroys all the non-mirror databases.
        """
        old_names, mirrors = old_config
        for connection, old_name, destroy in old_names:
            if destroy:
                connection.creation.destroy_test_db(old_name, self.verbosity)

    def teardown_test_environment(self, **kwargs):
        unittest.removeHandler()
        teardown_test_environment()

    def suite_result(self, suite, result, **kwargs):
        return len(result.failures) + len(result.errors)

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        """
        Run the unit tests for all the test labels in the provided list.

        Test labels should be dotted Python paths to test modules, test
        classes, or test methods.

        A list of 'extra' tests may also be provided; these tests
        will be added to the test suite.

        Returns the number of tests that failed.
        """
        self.setup_test_environment()
        suite = self.build_suite(test_labels, extra_tests)
        old_config = self.setup_databases()
        result = self.run_suite(suite)
        self.teardown_databases(old_config)
        self.teardown_test_environment()
        return self.suite_result(suite, result)


def dependency_ordered(test_databases, dependencies):
    """
    Reorder test_databases into an order that honors the dependencies
    described in TEST_DEPENDENCIES.
    """
    ordered_test_databases = []
    resolved_databases = set()

    # Maps db signature to dependencies of all it's aliases
    dependencies_map = {}

    # sanity check - no DB can depend on it's own alias
    for sig, (_, aliases) in test_databases:
        all_deps = set()
        for alias in aliases:
            all_deps.update(dependencies.get(alias, []))
        if not all_deps.isdisjoint(aliases):
            raise ImproperlyConfigured(
                "Circular dependency: databases %r depend on each other, "
                "but are aliases." % aliases)
        dependencies_map[sig] = all_deps

    while test_databases:
        changed = False
        deferred = []

        # Try to find a DB that has all it's dependencies met
        for signature, (db_name, aliases) in test_databases:
            if dependencies_map[signature].issubset(resolved_databases):
                resolved_databases.update(aliases)
                ordered_test_databases.append((signature, (db_name, aliases)))
                changed = True
            else:
                deferred.append((signature, (db_name, aliases)))

        if not changed:
            raise ImproperlyConfigured(
                "Circular dependency in TEST_DEPENDENCIES")
        test_databases = deferred
    return ordered_test_databases
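
A small, hypothetical illustration of the ordering contract above; only the module path comes from this changeset, while the signatures, database names and aliases are invented for the example:

    from django.test.runner import dependency_ordered

    test_databases = [
        ('sig-replica', ('test_replica', set(['replica']))),
        ('sig-default', ('test_default', set(['default']))),
    ]
    dependencies = {'replica': ['default']}

    ordered = dependency_ordered(test_databases, dependencies)
    # The 'sig-default' entry comes first, because the 'replica' alias
    # declares a dependency on the 'default' alias.
    assert [sig for sig, _ in ordered] == ['sig-default', 'sig-replica']
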

def reorder_suite(suite, classes):
    """
    Reorders a test suite by test type.

    `classes` is a sequence of types

    All tests of type classes[0] are placed first, then tests of type
    classes[1], etc. Tests with no match in classes are placed last.
    """
    class_count = len(classes)
    bins = [unittest.TestSuite() for i in range(class_count+1)]
    partition_suite(suite, classes, bins)
    for i in range(class_count):
        bins[0].addTests(bins[i+1])
    return bins[0]


def partition_suite(suite, classes, bins):
    """
    Partitions a test suite by test type.

    classes is a sequence of types
    bins is a sequence of TestSuites, one more than classes

    Tests of type classes[i] are added to bins[i],
    tests with no match found in classes are place in bins[-1]
    """
    for test in suite:
        if isinstance(test, unittest.TestSuite):
            partition_suite(test, classes, bins)
        else:
            for i in range(len(classes)):
                if isinstance(test, classes[i]):
                    bins[i].addTest(test)
                    break
            else:
                bins[-1].addTest(test)


def setup_databases(verbosity, interactive, **kwargs):
    from django.db import connections, DEFAULT_DB_ALIAS

    # First pass -- work out which databases actually need to be created,
    # and which ones are test mirrors or duplicate entries in DATABASES
    mirrored_aliases = {}
    test_databases = {}
    dependencies = {}
    for alias in connections:
        connection = connections[alias]
        if connection.settings_dict['TEST_MIRROR']:
            # If the database is marked as a test mirror, save
            # the alias.
            mirrored_aliases[alias] = (
                connection.settings_dict['TEST_MIRROR'])
        else:
            # Store a tuple with DB parameters that uniquely identify it.
            # If we have two aliases with the same values for that tuple,
            # we only need to create the test database once.
            item = test_databases.setdefault(
                connection.creation.test_db_signature(),
                (connection.settings_dict['NAME'], set())
            )
            item[1].add(alias)

            if 'TEST_DEPENDENCIES' in connection.settings_dict:
                dependencies[alias] = (
                    connection.settings_dict['TEST_DEPENDENCIES'])
            else:
                if alias != DEFAULT_DB_ALIAS:
                    dependencies[alias] = connection.settings_dict.get(
                        'TEST_DEPENDENCIES', [DEFAULT_DB_ALIAS])

    # Second pass -- actually create the databases.
    old_names = []
    mirrors = []

    for signature, (db_name, aliases) in dependency_ordered(
            test_databases.items(), dependencies):
        test_db_name = None
        # Actually create the database for the first connection

        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, True))
            if test_db_name is None:
                test_db_name = connection.creation.create_test_db(
                    verbosity, autoclobber=not interactive)
            else:
                connection.settings_dict['NAME'] = test_db_name

    for alias, mirror_alias in mirrored_aliases.items():
        mirrors.append((alias, connections[alias].settings_dict['NAME']))
        connections[alias].settings_dict['NAME'] = (
            connections[mirror_alias].settings_dict['NAME'])

    return old_names, mirrors
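
A short usage sketch for the runner defined above. It can also be selected project-wide through the TEST_RUNNER setting; the test label below is illustrative only, and a configured settings module plus database access are assumed:

    from django.test.runner import DiscoverRunner

    runner = DiscoverRunner(pattern='test*.py', verbosity=1, interactive=False)
    failures = runner.run_tests(['myapp.tests.MyTests.test_something'])
    # run_tests() returns the number of failed tests, suitable for sys.exit().
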

@ -1,10 +1,15 @@
import unittest as real_unittest
"""
This module is pending deprecation as of Django 1.6 and will be removed in
version 1.8.

"""

import unittest as real_unittest
import warnings

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models import get_app, get_apps
from django.test import _doctest as doctest
from django.test.utils import setup_test_environment, teardown_test_environment
from django.test import runner
from django.test.testcases import OutputChecker, DocTestRunner
from django.utils import unittest
from django.utils.importlib import import_module

@ -12,6 +17,11 @@ from django.utils.module_loading import module_has_submodule

__all__ = ('DjangoTestSuiteRunner',)

warnings.warn(
    "The django.test.simple module and DjangoTestSuiteRunner are deprecated; "
    "use django.test.runner.DiscoverRunner instead.",
    PendingDeprecationWarning)

# The module name for tests outside models.py
TEST_MODULE = 'tests'

@ -154,97 +164,7 @@ def build_test(label):
    return unittest.TestSuite(tests)


def partition_suite(suite, classes, bins):
    """
    Partitions a test suite by test type.

    classes is a sequence of types
    bins is a sequence of TestSuites, one more than classes

    Tests of type classes[i] are added to bins[i],
    tests with no match found in classes are place in bins[-1]
    """
    for test in suite:
        if isinstance(test, unittest.TestSuite):
            partition_suite(test, classes, bins)
        else:
            for i in range(len(classes)):
                if isinstance(test, classes[i]):
                    bins[i].addTest(test)
                    break
            else:
                bins[-1].addTest(test)


def reorder_suite(suite, classes):
    """
    Reorders a test suite by test type.

    `classes` is a sequence of types

    All tests of type classes[0] are placed first, then tests of type
    classes[1], etc. Tests with no match in classes are placed last.
    """
    class_count = len(classes)
    bins = [unittest.TestSuite() for i in range(class_count+1)]
    partition_suite(suite, classes, bins)
    for i in range(class_count):
        bins[0].addTests(bins[i+1])
    return bins[0]


def dependency_ordered(test_databases, dependencies):
    """
    Reorder test_databases into an order that honors the dependencies
    described in TEST_DEPENDENCIES.
    """
    ordered_test_databases = []
    resolved_databases = set()

    # Maps db signature to dependencies of all it's aliases
    dependencies_map = {}

    # sanity check - no DB can depend on it's own alias
    for sig, (_, aliases) in test_databases:
        all_deps = set()
        for alias in aliases:
            all_deps.update(dependencies.get(alias, []))
        if not all_deps.isdisjoint(aliases):
            raise ImproperlyConfigured(
                "Circular dependency: databases %r depend on each other, "
                "but are aliases." % aliases)
        dependencies_map[sig] = all_deps

    while test_databases:
        changed = False
        deferred = []

        # Try to find a DB that has all it's dependencies met
        for signature, (db_name, aliases) in test_databases:
            if dependencies_map[signature].issubset(resolved_databases):
                resolved_databases.update(aliases)
                ordered_test_databases.append((signature, (db_name, aliases)))
                changed = True
            else:
                deferred.append((signature, (db_name, aliases)))

        if not changed:
            raise ImproperlyConfigured(
                "Circular dependency in TEST_DEPENDENCIES")
        test_databases = deferred
    return ordered_test_databases


class DjangoTestSuiteRunner(object):
    def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
        self.verbosity = verbosity
        self.interactive = interactive
        self.failfast = failfast

    def setup_test_environment(self, **kwargs):
        setup_test_environment()
        settings.DEBUG = False
        unittest.installHandler()
class DjangoTestSuiteRunner(runner.DiscoverRunner):

    def build_suite(self, test_labels, extra_tests=None, **kwargs):
        suite = unittest.TestSuite()

@ -264,109 +184,4 @@ class DjangoTestSuiteRunner(object):
        for test in extra_tests:
            suite.addTest(test)

        return reorder_suite(suite, (unittest.TestCase,))

    def setup_databases(self, **kwargs):
        from django.db import connections, DEFAULT_DB_ALIAS

        # First pass -- work out which databases actually need to be created,
        # and which ones are test mirrors or duplicate entries in DATABASES
        mirrored_aliases = {}
        test_databases = {}
        dependencies = {}
        for alias in connections:
            connection = connections[alias]
            if connection.settings_dict['TEST_MIRROR']:
                # If the database is marked as a test mirror, save
                # the alias.
                mirrored_aliases[alias] = (
                    connection.settings_dict['TEST_MIRROR'])
            else:
                # Store a tuple with DB parameters that uniquely identify it.
                # If we have two aliases with the same values for that tuple,
                # we only need to create the test database once.
                item = test_databases.setdefault(
                    connection.creation.test_db_signature(),
                    (connection.settings_dict['NAME'], set())
                )
                item[1].add(alias)

                if 'TEST_DEPENDENCIES' in connection.settings_dict:
                    dependencies[alias] = (
                        connection.settings_dict['TEST_DEPENDENCIES'])
                else:
                    if alias != DEFAULT_DB_ALIAS:
                        dependencies[alias] = connection.settings_dict.get(
                            'TEST_DEPENDENCIES', [DEFAULT_DB_ALIAS])

        # Second pass -- actually create the databases.
        old_names = []
        mirrors = []

        for signature, (db_name, aliases) in dependency_ordered(
                test_databases.items(), dependencies):
            test_db_name = None
            # Actually create the database for the first connection

            for alias in aliases:
                connection = connections[alias]
                old_names.append((connection, db_name, True))
                if test_db_name is None:
                    test_db_name = connection.creation.create_test_db(
                        self.verbosity, autoclobber=not self.interactive)
                else:
                    connection.settings_dict['NAME'] = test_db_name

        for alias, mirror_alias in mirrored_aliases.items():
            mirrors.append((alias, connections[alias].settings_dict['NAME']))
            connections[alias].settings_dict['NAME'] = (
                connections[mirror_alias].settings_dict['NAME'])

        return old_names, mirrors

    def run_suite(self, suite, **kwargs):
        return unittest.TextTestRunner(
            verbosity=self.verbosity, failfast=self.failfast).run(suite)

    def teardown_databases(self, old_config, **kwargs):
        """
        Destroys all the non-mirror databases.
        """
        old_names, mirrors = old_config
        for connection, old_name, destroy in old_names:
            if destroy:
                connection.creation.destroy_test_db(old_name, self.verbosity)

    def teardown_test_environment(self, **kwargs):
        unittest.removeHandler()
        teardown_test_environment()

    def suite_result(self, suite, result, **kwargs):
        return len(result.failures) + len(result.errors)

    def run_tests(self, test_labels, extra_tests=None, **kwargs):
        """
        Run the unit tests for all the test labels in the provided list.
        Labels must be of the form:
         - app.TestClass.test_method
            Run a single specific test method
         - app.TestClass
            Run all the test methods in a given class
         - app
            Search for doctests and unittests in the named application.

        When looking for tests, the test runner will look in the models and
        tests modules for the application.

        A list of 'extra' tests may also be provided; these tests
        will be added to the test suite.

        Returns the number of tests that failed.
        """
        self.setup_test_environment()
        suite = self.build_suite(test_labels, extra_tests)
        old_config = self.setup_databases()
        result = self.run_suite(suite)
        self.teardown_databases(old_config)
        self.teardown_test_environment()
        return self.suite_result(suite, result)
        return runner.reorder_suite(suite, (unittest.TestCase,))


@ -97,6 +97,12 @@ def assert_and_parse_html(self, html, user_msg, msg):


class OutputChecker(doctest.OutputChecker):
    def __init__(self):
        warnings.warn(
            "The django.test.testcases.OutputChecker class is deprecated; "
            "use the doctest module from the Python standard library instead.",
            PendingDeprecationWarning)

    def check_output(self, want, got, optionflags):
        """
        The entry method for doctest output checking. Defers to a sequence of

@ -151,6 +157,10 @@ class OutputChecker(doctest.OutputChecker):

class DocTestRunner(doctest.DocTestRunner):
    def __init__(self, *args, **kwargs):
        warnings.warn(
            "The django.test.testcases.DocTestRunner class is deprecated; "
            "use the doctest module from the Python standard library instead.",
            PendingDeprecationWarning)
        doctest.DocTestRunner.__init__(self, *args, **kwargs)
        self.optionflags = doctest.ELLIPSIS


@ -234,7 +234,7 @@ class DateFormat(TimeFormat):

    def T(self):
        "Time zone of this machine; e.g. 'EST' or 'MDT'"
        name = self.timezone and self.timezone.tzname(self.data) or None
        name = self.timezone.tzname(self.data) if self.timezone else None
        if name is None:
            name = self.format('O')
        return six.text_type(name)


@ -187,7 +187,10 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):

    If autoescape is True, the link text and URLs will get autoescaped.
    """
    trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
    def trim_url(x, limit=trim_url_limit):
        if limit is None or len(x) <= limit:
            return x
        return '%s...' % x[:max(0, limit - 3)]
    safe_input = isinstance(text, SafeData)
    words = word_split_re.split(force_text(text))
    for i, word in enumerate(words):
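
The lambda was unpacked into the named helper above; a quick behavioural illustration, with example URL values that are not taken from the changeset:

    def trim_url(x, limit=None):
        if limit is None or len(x) <= limit:
            return x
        return '%s...' % x[:max(0, limit - 3)]

    assert trim_url('www.example.com') == 'www.example.com'
    assert trim_url('www.example.com', limit=10) == 'www.exa...'
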

@ -0,0 +1,148 @@
# -*- coding: utf-8 -*-
"""
To provide a shim layer over Pillow/PIL situation until the PIL support is
removed.


Combinations To Account For
===========================

* Pillow:

    * never has ``_imaging`` under any Python
    * has the ``Image.alpha_composite``, which may aid in detection

* PIL

    * CPython 2.x may have _imaging (& work)
    * CPython 2.x may *NOT* have _imaging (broken & needs a error message)
    * CPython 3.x doesn't work
    * PyPy will *NOT* have _imaging (but works?)

Restated, that looks like:

* If we're on Python 2.x, it could be either Pillow or PIL:

    * If ``import _imaging`` results in ``ImportError``, either they have a
      working Pillow installation or a broken PIL installation, so we need to
      detect further:

        * To detect, we first ``import Image``.
        * If ``Image`` has a ``alpha_composite`` attribute present, only Pillow
          has this, so we assume it's working.
        * If ``Image`` DOES NOT have a ``alpha_composite``attribute, it must be
          PIL & is a broken (likely C compiler-less) install, which we need to
          warn the user about.

    * If ``import _imaging`` works, it must be PIL & is a working install.

* Python 3.x

    * If ``import Image`` works, it must be Pillow, since PIL isn't Python 3.x
      compatible.

* PyPy

    * If ``import _imaging`` results in ``ImportError``, it could be either
      Pillow or PIL, both of which work without it on PyPy, so we're fine.


Approach
========

* Attempt to import ``Image``

    * ``ImportError`` - nothing is installed, toss an exception
    * Either Pillow or the PIL is installed, so continue detecting

* Attempt to ``hasattr(Image, 'alpha_composite')``

    * If it works, it's Pillow & working
    * If it fails, we've got a PIL install, continue detecting

        * The only option here is that we're on Python 2.x or PyPy, of which
          we only care about if we're on CPython.
        * If we're on CPython, attempt to ``import _imaging``

            * ``ImportError`` - Bad install, toss an exception

"""
from __future__ import unicode_literals

import warnings

from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _


Image = None
_imaging = None
ImageFile = None


def _detect_image_library():
    global Image
    global _imaging
    global ImageFile

    # Skip re-attempting to import if we've already run detection.
    if Image is not None:
        return Image, _imaging, ImageFile

    # Assume it's not there.
    PIL_imaging = False

    try:
        # Try from the Pillow (or one variant of PIL) install location first.
        from PIL import Image as PILImage
    except ImportError as err:
        try:
            # If that failed, try the alternate import syntax for PIL.
            import Image as PILImage
        except ImportError as err:
            # Neither worked, so it's likely not installed.
            raise ImproperlyConfigured(
                _("Neither Pillow nor PIL could be imported: %s" % err)
            )

    # ``Image.alpha_composite`` was added to Pillow in SHA: e414c6 & is not
    # available in any version of the PIL.
    if hasattr(PILImage, 'alpha_composite'):
        PIL_imaging = False
    else:
        # We're dealing with the PIL. Determine if we're on CPython & if
        # ``_imaging`` is available.
        import platform

        # This is the Alex Approved™ way.
        # See http://mail.python.org/pipermail//pypy-dev/2011-November/008739.html
        if platform.python_implementation().lower() == 'cpython':
            # We're on CPython (likely 2.x). Since a C compiler is needed to
            # produce a fully-working PIL & will create a ``_imaging`` module,
            # we'll attempt to import it to verify their kit works.
            try:
                import _imaging as PIL_imaging
            except ImportError as err:
                raise ImproperlyConfigured(
                    _("The '_imaging' module for the PIL could not be " +
                      "imported: %s" % err)
                )

    # Try to import ImageFile as well.
    try:
        from PIL import ImageFile as PILImageFile
    except ImportError:
        import ImageFile as PILImageFile

    # Finally, warn about deprecation...
    if PIL_imaging is not False:
        warnings.warn(
            "Support for the PIL will be removed in Django 1.8. Please " +
            "uninstall it & install Pillow instead.",
            PendingDeprecationWarning
        )

    return PILImage, PIL_imaging, PILImageFile


Image, _imaging, ImageFile = _detect_image_library()
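
A minimal sketch of how the shim above is consumed elsewhere in this changeset (the ImageField hunk imports ``Image`` from ``django.utils.image`` and calls ``verify()``); the helper name and path argument below are illustrative:

    from django.utils.image import Image  # raises ImproperlyConfigured if neither library is usable

    def looks_like_an_image(path):
        try:
            # verify() must be called immediately after open(), as noted above.
            Image.open(path).verify()
        except Exception:
            return False
        return True
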

@ -111,7 +111,7 @@ class AdminEmailHandler(logging.Handler):

        message = "%s\n\n%s" % (stack_trace, request_repr)
        reporter = ExceptionReporter(request, is_email=True, *exc_info)
        html_message = self.include_html and reporter.get_traceback_html() or None
        html_message = reporter.get_traceback_html() if self.include_html else None
        mail.mail_admins(subject, message, fail_silently=True,
                         html_message=html_message,
                         connection=self.connection())


@ -651,7 +651,10 @@ def parse_accept_lang_header(lang_string):
        first, lang, priority = pieces[i : i + 3]
        if first:
            return []
        priority = priority and float(priority) or 1.0
        if priority:
            priority = float(priority)
        if not priority: # if priority is 0.0 at this point make it 1.0
            priority = 1.0
        result.append((lang, priority))
    result.sort(key=lambda k: k[1], reverse=True)
    return result
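
A behavioural sketch of the block above: an absent q-value falls back to 1.0 and entries come back sorted by priority. The header string is an example, and the import path assumes the function lives in django.utils.translation.trans_real, which this hunk does not state:

    from django.utils.translation.trans_real import parse_accept_lang_header

    parse_accept_lang_header('da, en-gb;q=0.8, en;q=0.7')
    # -> [('da', 1.0), ('en-gb', 0.8), ('en', 0.7)]
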

@ -20,7 +20,7 @@ class Node(object):
        Constructs a new Node. If no connector is given, the default will be
        used.
        """
        self.children = children and children[:] or []
        self.children = children[:] if children else []
        self.connector = connector or self.default
        self.negated = negated


@ -125,7 +125,7 @@ class TestLoader(unittest.TestLoader):
            return self.loadTestsFromTestCase(obj)
        elif (isinstance(obj, types.UnboundMethodType) and
              isinstance(parent, type) and
              issubclass(parent, case.TestCase)):
              issubclass(parent, unittest.TestCase)):
            return self.suiteClass([parent(obj.__name__)])
        elif isinstance(obj, unittest.TestSuite):
            return obj


@ -27,7 +27,7 @@ to make it dead easy, even for someone who may not be intimately familiar with
that area of the code, to understand the problem and verify the fix:

* Are there clear instructions on how to reproduce the bug? If this
  touches a dependency (such as PIL), a contrib module, or a specific
  touches a dependency (such as Pillow/PIL), a contrib module, or a specific
  database, are those instructions clear enough even for someone not
  familiar with it?


@ -185,8 +185,7 @@ testing of Django applications:
    * **Testing:**
      :doc:`Introduction <topics/testing/index>` |
      :doc:`Writing and running tests <topics/testing/overview>` |
      :doc:`Advanced topics <topics/testing/advanced>` |
      :doc:`Doctests <topics/testing/doctests>`
      :doc:`Advanced topics <topics/testing/advanced>`

    * **Deployment:**
      :doc:`Overview <howto/deployment/index>` |