Limited lines to 119 characters in django/

refs #23395.
Tim Graham 2014-09-04 08:15:09 -04:00
parent f8bb8436e5
commit 1101467ce0
112 changed files with 1239 additions and 412 deletions


@ -99,7 +99,8 @@ class Settings(BaseSettings):
mod = importlib.import_module(self.SETTINGS_MODULE)
except ImportError as e:
raise ImportError(
"Could not import settings '%s' (Is it on sys.path? Is there an import error in the settings file?): %s"
"Could not import settings '%s' (Is it on sys.path? Is there an "
"import error in the settings file?): %s"
% (self.SETTINGS_MODULE, e)
)
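Most of the rewrites in this commit rely on Python's implicit concatenation of adjacent string literals: inside parentheses the fragments are joined at compile time, so the wrapped form builds exactly the same string as the original long line. A minimal sketch of the technique (the message text is illustrative, not taken from the diff):

# Adjacent string literals are concatenated at compile time, so the
# wrapped form is byte-for-byte identical to the single long literal.
single = "Could not import settings 'foo' (Is it on sys.path?): boom"
wrapped = (
    "Could not import settings 'foo' "
    "(Is it on sys.path?): boom"
)
assert single == wrapped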


@ -459,18 +459,31 @@ MIDDLEWARE_CLASSES = (
# SESSIONS #
############
SESSION_CACHE_ALIAS = 'default' # Cache to store session data if using the cache session backend.
SESSION_COOKIE_NAME = 'sessionid' # Cookie name. This can be whatever you want.
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_DOMAIN = None # A string like ".example.com", or None for standard domain cookie.
SESSION_COOKIE_SECURE = False # Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_PATH = '/' # The path of the session cookie.
SESSION_COOKIE_HTTPONLY = True # Whether to use the non-RFC standard httpOnly flag (IE, FF3+, others)
SESSION_SAVE_EVERY_REQUEST = False # Whether to save the session data on every request.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False # Whether a user's session cookie expires when the Web browser is closed.
SESSION_ENGINE = 'django.contrib.sessions.backends.db' # The module to store session data
SESSION_FILE_PATH = None # Directory to store session files if using the file session module. If None, the backend will use a sensible default.
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer' # class to serialize session data
# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = 'default'
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = 'sessionid'
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like ".example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = '/'
# Whether to use the non-RFC standard httpOnly flag (IE, FF3+, others)
SESSION_COOKIE_HTTPONLY = True
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
#########
# CACHE #


@ -33,13 +33,17 @@ TIME_INPUT_FORMATS = (
)
DATETIME_INPUT_FORMATS = (
# With time in %H:%M:%S :
'%d-%m-%Y %H:%M:%S', '%d-%m-%y %H:%M:%S', '%Y-%m-%d %H:%M:%S', # '20-01-2009 15:23:35', '20-01-09 15:23:35', '2009-01-20 15:23:35'
'%d/%m/%Y %H:%M:%S', '%d/%m/%y %H:%M:%S', '%Y/%m/%d %H:%M:%S', # '20/01/2009 15:23:35', '20/01/09 15:23:35', '2009/01/20 15:23:35'
'%d-%m-%Y %H:%M:%S', '%d-%m-%y %H:%M:%S', '%Y-%m-%d %H:%M:%S',
# '20-01-2009 15:23:35', '20-01-09 15:23:35', '2009-01-20 15:23:35'
'%d/%m/%Y %H:%M:%S', '%d/%m/%y %H:%M:%S', '%Y/%m/%d %H:%M:%S',
# '20/01/2009 15:23:35', '20/01/09 15:23:35', '2009/01/20 15:23:35'
# '%d %b %Y %H:%M:%S', '%d %b %y %H:%M:%S', # '20 jan 2009 15:23:35', '20 jan 09 15:23:35'
# '%d %B %Y %H:%M:%S', '%d %B %y %H:%M:%S', # '20 januari 2009 15:23:35', '20 januari 09 15:23:35'
# With time in %H:%M:%S.%f :
'%d-%m-%Y %H:%M:%S.%f', '%d-%m-%y %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S.%f', # '20-01-2009 15:23:35.000200', '20-01-09 15:23:35.000200', '2009-01-20 15:23:35.000200'
'%d/%m/%Y %H:%M:%S.%f', '%d/%m/%y %H:%M:%S.%f', '%Y/%m/%d %H:%M:%S.%f', # '20/01/2009 15:23:35.000200', '20/01/09 15:23:35.000200', '2009/01/20 15:23:35.000200'
'%d-%m-%Y %H:%M:%S.%f', '%d-%m-%y %H:%M:%S.%f', '%Y-%m-%d %H:%M:%S.%f',
# '20-01-2009 15:23:35.000200', '20-01-09 15:23:35.000200', '2009-01-20 15:23:35.000200'
'%d/%m/%Y %H:%M:%S.%f', '%d/%m/%y %H:%M:%S.%f', '%Y/%m/%d %H:%M:%S.%f',
# '20/01/2009 15:23:35.000200', '20/01/09 15:23:35.000200', '2009/01/20 15:23:35.000200'
# With time in %H.%M:%S :
'%d-%m-%Y %H.%M:%S', '%d-%m-%y %H.%M:%S', # '20-01-2009 15.23:35', '20-01-09 15.23:35'
'%d/%m/%Y %H.%M:%S', '%d/%m/%y %H.%M:%S', # '20/01/2009 15.23:35', '20/01/09 15.23:35'


@ -23,7 +23,10 @@ Compiler library and Java version 6 or later."""
compiler = os.path.expanduser(options.compiler)
if not os.path.exists(compiler):
sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)
sys.exit(
"Google Closure compiler jar file %s not found. Please use the -c "
"option to specify the path." % compiler
)
if not options.file:
if options.verbose:


@ -287,7 +287,8 @@ class BaseModelAdminChecks(object):
if not (isinstance(field, models.ForeignKey) or field.choices):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an instance of ForeignKey, and does not have a 'choices' definition." % (
"The value of '%s' refers to '%s', which is not an "
"instance of ForeignKey, and does not have a 'choices' definition." % (
label, field_name
),
hint=None,
@ -592,7 +593,8 @@ class ModelAdminChecks(BaseModelAdminChecks):
if field is None:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, an attribute of '%s', or an attribute or method on '%s.%s'." % (
"The value of '%s' refers to '%s', which is not a "
"callable, an attribute of '%s', or an attribute or method on '%s.%s'." % (
label, item, cls.__name__, model._meta.app_label, model._meta.object_name
),
hint=None,
@ -619,7 +621,8 @@ class ModelAdminChecks(BaseModelAdminChecks):
# This is a deliberate repeat of E108; there's more than one path
# required to test this condition.
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, an attribute of '%s', or an attribute or method on '%s.%s'." % (
"The value of '%s' refers to '%s', which is not a callable, "
"an attribute of '%s', or an attribute or method on '%s.%s'." % (
label, item, cls.__name__, model._meta.app_label, model._meta.object_name
),
hint=None,


@ -105,7 +105,10 @@ class Fieldline(object):
yield AdminField(self.form, field, is_first=(i == 0))
def errors(self):
return mark_safe('\n'.join(self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields).strip('\n'))
return mark_safe(
'\n'.join(self.form[f].errors.as_ul()
for f in self.fields if f not in self.readonly_fields).strip('\n')
)
class AdminField(object):


@ -16,7 +16,10 @@ DELETION = 3
class LogEntryManager(models.Manager):
def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''):
e = self.model(None, None, user_id, content_type_id, smart_text(object_id), object_repr[:200], action_flag, change_message)
e = self.model(
None, None, user_id, content_type_id, smart_text(object_id),
object_repr[:200], action_flag, change_message
)
e.save()


@ -272,7 +272,10 @@ class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)):
self.admin_site, using=db)
kwargs['help_text'] = ''
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(db_field.verbose_name, (db_field.name in self.filter_vertical))
kwargs['widget'] = widgets.FilteredSelectMultiple(
db_field.verbose_name,
db_field.name in self.filter_vertical
)
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
@ -1131,7 +1134,10 @@ class ModelAdmin(BaseModelAdmin):
(opts.app_label, opts.model_name),
args=(quote(pk_value),),
current_app=self.admin_site.name)
post_url_continue = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue)
post_url_continue = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts},
post_url_continue
)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
@ -1833,7 +1839,8 @@ class InlineModelAdmin(BaseModelAdmin):
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation, suitable to be an item in a list
# Translators: Model verbose name and instance representation,
# suitable to be an item in a list.
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}


@ -239,9 +239,11 @@ class AdminSite(object):
url(r'^login/$', self.login, name='login'),
url(r'^logout/$', wrap(self.logout), name='logout'),
url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True), name='password_change_done'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut), name='view_on_site'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
name='view_on_site'),
]
# Add in each model's views, and create a list of valid URLS for the
@ -408,7 +410,11 @@ class AdminSite(object):
app_dict[app_label] = {
'name': apps.get_app_config(app_label).verbose_name,
'app_label': app_label,
'app_url': reverse('admin:app_list', kwargs={'app_label': app_label}, current_app=self.name),
'app_url': reverse(
'admin:app_list',
kwargs={'app_label': app_label},
current_app=self.name,
),
'has_module_perms': has_module_perms,
'models': [model_dict],
}


@ -251,7 +251,10 @@ def items_for_result(cl, result, form):
link_or_text = format_html(
'<a href="{0}"{1}>{2}</a>',
url,
format_html(' onclick="opener.dismissRelatedLookupPopup(window, &#39;{0}&#39;); return false;"', result_id) if cl.is_popup else '',
format_html(
' onclick="opener.dismissRelatedLookupPopup(window, '
'&#39;{0}&#39;); return false;"', result_id
) if cl.is_popup else '',
result_repr)
yield format_html('<{0}{1}>{2}</{3}>',


@ -32,9 +32,15 @@ def submit_row(context):
save_as = context['save_as']
ctx = {
'opts': opts,
'show_delete_link': not is_popup and context['has_delete_permission'] and change and context.get('show_delete', True),
'show_delete_link': (
not is_popup and context['has_delete_permission'] and
change and context.get('show_delete', True)
),
'show_save_as_new': not is_popup and change and save_as,
'show_save_and_add_another': context['has_add_permission'] and not is_popup and (not save_as or context['add']),
'show_save_and_add_another': (
context['has_add_permission'] and not is_popup and
(not save_as or context['add'])
),
'show_save_and_continue': not is_popup and context['has_change_permission'],
'is_popup': is_popup,
'show_save': True,
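For long expressions rather than long strings, the pattern above wraps the whole expression in parentheses so the condition can span several lines without backslash continuations and without changing its value. A sketch with hypothetical flags standing in for the template-context values:

# Hypothetical stand-ins for the context values used in submit_row().
is_popup, has_delete_permission, change, show_delete = False, True, True, True

# Parentheses let the boolean expression span lines; the result is unchanged.
show_delete_link = (
    not is_popup and has_delete_permission and
    change and show_delete
)
assert show_delete_link is True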


@ -18,7 +18,9 @@ class AdminLogNode(template.Node):
user_id = self.user
if not user_id.isdigit():
user_id = context[self.user].pk
context[self.varname] = LogEntry.objects.filter(user__pk=user_id).select_related('content_type', 'user')[:int(self.limit)]
context[self.varname] = LogEntry.objects.filter(
user__pk=user_id,
).select_related('content_type', 'user')[:int(self.limit)]
return ''


@ -197,8 +197,11 @@ class BaseValidator(object):
try:
model._meta.get_field(field)
except models.FieldDoesNotExist:
raise ImproperlyConfigured("%s.readonly_fields[%d], %r is not a callable or an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name))
raise ImproperlyConfigured(
"%s.readonly_fields[%d], %r is not a callable or "
"an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name)
)
class ModelAdminValidator(BaseValidator):
@ -248,14 +251,20 @@ class ModelAdminValidator(BaseValidator):
try:
model._meta.get_field(field)
except models.FieldDoesNotExist:
raise ImproperlyConfigured("%s.list_display[%d], %r is not a callable or an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name))
raise ImproperlyConfigured(
"%s.list_display[%d], %r is not a callable or "
"an attribute of %r or found in the model %r."
% (cls.__name__, idx, field, cls.__name__, model._meta.object_name)
)
else:
# getattr(model, field) could be an X_RelatedObjectsDescriptor
f = fetch_attr(cls, model, "list_display[%d]" % idx, field)
if isinstance(f, models.ManyToManyField):
raise ImproperlyConfigured("'%s.list_display[%d]', '%s' is a ManyToManyField which is not supported."
% (cls.__name__, idx, field))
raise ImproperlyConfigured(
"'%s.list_display[%d]', '%s' is a ManyToManyField "
"which is not supported."
% (cls.__name__, idx, field)
)
def validate_list_display_links(self, cls, model):
" Validate that list_display_links either is None or a unique subset of list_display."
@ -433,5 +442,8 @@ def fetch_attr(cls, model, label, field):
try:
return getattr(model, field)
except AttributeError:
raise ImproperlyConfigured("'%s.%s' refers to '%s' that is neither a field, method or property of model '%s.%s'."
% (cls.__name__, label, field, model._meta.app_label, model.__name__))
raise ImproperlyConfigured(
"'%s.%s' refers to '%s' that is neither a field, method or "
"property of model '%s.%s'."
% (cls.__name__, label, field, model._meta.app_label, model.__name__)
)


@ -102,7 +102,15 @@ def create_reference_role(rolename, urlbase):
options = {}
if content is None:
content = []
node = docutils.nodes.reference(rawtext, text, refuri=(urlbase % (inliner.document.settings.link_base, text.lower())), **options)
node = docutils.nodes.reference(
rawtext,
text,
refuri=(urlbase % (
inliner.document.settings.link_base,
text.lower(),
)),
**options
)
return [node], []
docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
@ -113,7 +121,15 @@ def default_reference_role(name, rawtext, text, lineno, inliner, options=None, c
if content is None:
content = []
context = inliner.document.settings.default_reference_context
node = docutils.nodes.reference(rawtext, text, refuri=(ROLES[context] % (inliner.document.settings.link_base, text.lower())), **options)
node = docutils.nodes.reference(
rawtext,
text,
refuri=(ROLES[context] % (
inliner.document.settings.link_base,
text.lower(),
)),
**options
)
return [node], []
if docutils_is_available:


@ -218,7 +218,10 @@ class ModelDetailView(BaseAdminDocsView):
for field in opts.many_to_many:
data_type = field.rel.to.__name__
app_label = field.rel.to._meta.app_label
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': app_label, 'object_name': data_type}
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
'app_label': app_label,
'object_name': data_type,
}
fields.append({
'name': "%s.all" % field.name,
"data_type": 'List',
@ -250,7 +253,10 @@ class ModelDetailView(BaseAdminDocsView):
# Gather related objects
for rel in opts.get_all_related_objects() + opts.get_all_related_many_to_many_objects():
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {'app_label': rel.opts.app_label, 'object_name': rel.opts.object_name}
verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
'app_label': rel.opts.app_label,
'object_name': rel.opts.object_name,
}
accessor = rel.get_accessor_name()
fields.append({
'name': "%s.all" % accessor,


@ -25,7 +25,10 @@ def get_backends():
for backend_path in settings.AUTHENTICATION_BACKENDS:
backends.append(load_backend(backend_path))
if not backends:
raise ImproperlyConfigured('No authentication backends have been defined. Does AUTHENTICATION_BACKENDS contain anything?')
raise ImproperlyConfigured(
'No authentication backends have been defined. Does '
'AUTHENTICATION_BACKENDS contain anything?'
)
return backends
@ -136,7 +139,9 @@ def get_user_model():
except ValueError:
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
except LookupError:
raise ImproperlyConfigured("AUTH_USER_MODEL refers to model '%s' that has not been installed" % settings.AUTH_USER_MODEL)
raise ImproperlyConfigured(
"AUTH_USER_MODEL refers to model '%s' that has not been installed" % settings.AUTH_USER_MODEL
)
def get_user(request):


@ -91,8 +91,13 @@ class Command(BaseCommand):
if default_username:
input_msg += " (leave blank to use '%s')" % default_username
username_rel = self.username_field.rel
input_msg = force_str('%s%s: ' % (input_msg,
' (%s.%s)' % (username_rel.to._meta.object_name, username_rel.field_name) if username_rel else ''))
input_msg = force_str('%s%s: ' % (
input_msg,
' (%s.%s)' % (
username_rel.to._meta.object_name,
username_rel.field_name
) if username_rel else '')
)
username = self.get_input_data(self.username_field, input_msg, default_username)
if not username:
continue


@ -99,7 +99,9 @@ class PermissionsRequiredDecoratorTest(TestCase):
def test_permissioned_denied_exception_raised(self):
@permission_required(['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission'], raise_exception=True)
@permission_required([
'auth.add_customuser', 'auth.change_customuser', 'non-existent-permission'
], raise_exception=True)
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')


@ -519,7 +519,10 @@ class PasswordResetFormTest(TestCase):
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
self.assertEqual(message.get_all('to'), [email])
self.assertTrue(re.match(r'^http://example.com/reset/[\w/-]+', message.get_payload(0).get_payload()))
self.assertTrue(re.match(r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$', message.get_payload(1).get_payload()))
self.assertTrue(
re.match(r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$',
message.get_payload(1).get_payload())
)
class ReadOnlyPasswordHashTest(TestCase):


@ -121,7 +121,10 @@ class ChangepasswordManagementCommandTestCase(TestCase):
command.execute(username="joe", stdout=self.stdout)
command_output = self.stdout.getvalue().strip()
self.assertEqual(command_output, "Changing password for user 'joe'\nPassword changed successfully for user 'joe'")
self.assertEqual(
command_output,
"Changing password for user 'joe'\nPassword changed successfully for user 'joe'"
)
self.assertTrue(models.User.objects.get(username="joe").check_password("not qwerty"))
def test_that_max_tries_exits_1(self):


@ -78,7 +78,8 @@ urlpatterns += [
url(r'^password_reset_from_email/$', views.password_reset, dict(from_email='staffmember@example.com')),
url(r'^password_reset/custom_redirect/$', views.password_reset, dict(post_reset_redirect='/custom/')),
url(r'^password_reset/custom_redirect/named/$', views.password_reset, dict(post_reset_redirect='password_reset')),
url(r'^password_reset/html_email_template/$', views.password_reset, dict(html_email_template_name='registration/html_password_reset_email.html')),
url(r'^password_reset/html_email_template/$', views.password_reset,
dict(html_email_template_name='registration/html_password_reset_email.html')),
url(r'^reset/custom/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.password_reset_confirm,
dict(post_reset_redirect='/custom/')),


@ -104,7 +104,10 @@ class GenericForeignKey(object):
"'%s.%s' is not a ForeignKey." % (
self.model._meta.object_name, self.ct_field
),
hint="GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
hint=(
"GenericForeignKeys must use a ForeignKey to "
"'contenttypes.ContentType' as the 'content_type' field."
),
obj=self,
id='contenttypes.E003',
)
@ -115,7 +118,10 @@ class GenericForeignKey(object):
"'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % (
self.model._meta.object_name, self.ct_field
),
hint="GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
hint=(
"GenericForeignKeys must use a ForeignKey to "
"'contenttypes.ContentType' as the 'content_type' field."
),
obj=self,
id='contenttypes.E004',
)


@ -8,7 +8,8 @@ class FlatPageAdmin(admin.ModelAdmin):
form = FlatpageForm
fieldsets = (
(None, {'fields': ('url', 'title', 'content', 'sites')}),
(_('Advanced options'), {'classes': ('collapse',), 'fields': ('enable_comments', 'registration_required', 'template_name')}),
(_('Advanced options'), {'classes': ('collapse',),
'fields': ('enable_comments', 'registration_required', 'template_name')}),
)
list_display = ('url', 'title')
list_filter = ('sites', 'enable_comments', 'registration_required')


@ -14,7 +14,11 @@ class FlatPage(models.Model):
content = models.TextField(_('content'), blank=True)
enable_comments = models.BooleanField(_('enable comments'), default=False)
template_name = models.CharField(_('template name'), max_length=70, blank=True,
help_text=_("Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'."))
help_text=_(
"Example: 'flatpages/contact_page.html'. If this isn't provided, "
"the system will use 'flatpages/default.html'."
),
)
registration_required = models.BooleanField(_('registration required'),
help_text=_("If this is checked, only logged-in users will be able to view the page."),
default=False)


@ -237,7 +237,12 @@ class SpatialRefSysMixin(object):
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
units_regex = re.compile(
r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)'
r'(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)",'
r'"(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,'
r'AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$'
)
@property
def srs(self):


@ -16,8 +16,11 @@ class OracleIntrospection(DatabaseIntrospection):
try:
# Querying USER_SDO_GEOM_METADATA to get the SRID and dimension information.
try:
cursor.execute('SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s',
(table_name.upper(), geo_col.upper()))
cursor.execute(
'SELECT "DIMINFO", "SRID" FROM "USER_SDO_GEOM_METADATA" '
'WHERE "TABLE_NAME"=%s AND "COLUMN_NAME"=%s',
(table_name.upper(), geo_col.upper())
)
row = cursor.fetchone()
except Exception as msg:
new_msg = (


@ -3,12 +3,18 @@ from django.db.backends.sqlite3.schema import DatabaseSchemaEditor
class SpatialiteSchemaEditor(DatabaseSchemaEditor):
sql_add_geometry_column = "SELECT AddGeometryColumn(%(table)s, %(column)s, %(srid)s, %(geom_type)s, %(dim)s, %(null)s)"
sql_add_geometry_column = (
"SELECT AddGeometryColumn(%(table)s, %(column)s, %(srid)s, "
"%(geom_type)s, %(dim)s, %(null)s)"
)
sql_add_spatial_index = "SELECT CreateSpatialIndex(%(table)s, %(column)s)"
sql_drop_spatial_index = "DROP TABLE idx_%(table)s_%(column)s"
sql_remove_geometry_metadata = "SELECT DiscardGeometryColumn(%(table)s, %(column)s)"
sql_discard_geometry_columns = "DELETE FROM %(geom_table)s WHERE f_table_name = %(table)s"
sql_update_geometry_columns = "UPDATE %(geom_table)s SET f_table_name = %(new_table)s WHERE f_table_name = %(old_table)s"
sql_update_geometry_columns = (
"UPDATE %(geom_table)s SET f_table_name = %(new_table)s "
"WHERE f_table_name = %(old_table)s"
)
geometry_tables = [
"geometry_columns",


@ -619,7 +619,10 @@ class GeoQuerySet(QuerySet):
geodetic = unit_name.lower() in geo_field.geodetic_units
if geodetic and not connection.features.supports_distance_geodetic:
raise ValueError('This database does not support linear distance calculations on geodetic coordinate systems.')
raise ValueError(
'This database does not support linear distance '
'calculations on geodetic coordinate systems.'
)
if distance:
if self.query.transformed_srid:
@ -661,7 +664,10 @@ class GeoQuerySet(QuerySet):
if not isinstance(geo_field, PointField):
raise ValueError('Spherical distance calculation only supported on PointFields.')
if not str(Geometry(six.memoryview(params[0].ewkb)).geom_type) == 'Point':
raise ValueError('Spherical distance calculation only supported with Point Geometry parameters')
raise ValueError(
'Spherical distance calculation only supported with '
'Point Geometry parameters'
)
# The `function` procedure argument needs to be set differently for
# geodetic distance calculations.
if spheroid:


@ -39,7 +39,9 @@ reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False
test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p])
get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p])
set_spatial_filter = void_output(lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False)
set_spatial_filter_rect = void_output(lgdal.OGR_L_SetSpatialFilterRect, [c_void_p, c_double, c_double, c_double, c_double], errcheck=False)
set_spatial_filter_rect = void_output(lgdal.OGR_L_SetSpatialFilterRect,
[c_void_p, c_double, c_double, c_double, c_double], errcheck=False
)
### Feature Definition Routines ###
get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p])
@ -56,7 +58,9 @@ get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p])
get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p])
get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int])
get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p])
get_field_as_datetime = int_output(lgdal.OGR_F_GetFieldAsDateTime, [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p])
get_field_as_datetime = int_output(lgdal.OGR_F_GetFieldAsDateTime,
[c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p]
)
get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int])
get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int])
get_field_as_string = const_string_output(lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int])


@ -79,7 +79,9 @@ get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii')
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(lgdal.OGR_G_GetPoint, [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False)
get_point = void_output(lgdal.OGR_G_GetPoint,
[c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False
)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)
# Topology routines.


@ -53,7 +53,9 @@ angular_units = units_func(lgdal.OSRGetAngularUnits)
# For exporting to WKT, PROJ.4, "Pretty" WKT, and XML.
to_wkt = string_output(std_call('OSRExportToWkt'), [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_proj = string_output(std_call('OSRExportToProj4'), [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_pretty_wkt = string_output(std_call('OSRExportToPrettyWkt'), [c_void_p, POINTER(c_char_p), c_int], offset=-2, decoding='ascii')
to_pretty_wkt = string_output(std_call('OSRExportToPrettyWkt'),
[c_void_p, POINTER(c_char_p), c_int], offset=-2, decoding='ascii'
)
# Memory leak fixed in GDAL 1.5; still exists in 1.4.
to_xml = string_output(lgdal.OSRExportToXML, [c_void_p, POINTER(c_char_p), c_char_p], offset=-2, decoding='ascii')


@ -14,19 +14,42 @@ if HAS_GDAL:
TestDS('test_point', nfeat=5, nfld=3, geom='POINT', gtype=1, driver='ESRI Shapefile',
fields={'dbl': OFTReal, 'int': OFTInteger, 'str': OFTString},
extent=(-1.35011, 0.166623, -0.524093, 0.824508), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]',
field_values={'dbl': [float(i) for i in range(1, 6)], 'int': list(range(1, 6)), 'str': [str(i) for i in range(1, 6)]},
fids=range(5)),
srs_wkt=(
'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",'
'6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",'
'0.017453292519943295]]'
),
field_values={
'dbl': [float(i) for i in range(1, 6)],
'int': list(range(1, 6)),
'str': [str(i) for i in range(1, 6)],
},
fids=range(5)
),
TestDS('test_vrt', ext='vrt', nfeat=3, nfld=3, geom='POINT', gtype='Point25D', driver='VRT',
fields={'POINT_X': OFTString, 'POINT_Y': OFTString, 'NUM': OFTString}, # VRT uses CSV, so all types are OFTString.
fields={
'POINT_X': OFTString,
'POINT_Y': OFTString,
'NUM': OFTString,
}, # VRT uses CSV, so all types are OFTString.
extent=(1.0, 2.0, 100.0, 523.5), # Min/Max from CSV
field_values={'POINT_X': ['1.0', '5.0', '100.0'], 'POINT_Y': ['2.0', '23.0', '523.5'], 'NUM': ['5', '17', '23']},
fids=range(1, 4)),
field_values={
'POINT_X': ['1.0', '5.0', '100.0'],
'POINT_Y': ['2.0', '23.0', '523.5'],
'NUM': ['5', '17', '23'],
},
fids=range(1, 4)
),
TestDS('test_poly', nfeat=3, nfld=3, geom='POLYGON', gtype=3,
driver='ESRI Shapefile',
fields={'float': OFTReal, 'int': OFTInteger, 'str': OFTString},
extent=(-1.01513, -0.558245, 0.161876, 0.839637), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'),
srs_wkt=(
'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",'
'6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",'
'0.017453292519943295]]'
),
)
)
bad_ds = (TestDS('foo'),)
@ -222,7 +245,10 @@ class DataSourceTest(unittest.TestCase):
# Setting the spatial filter with an OGRGeometry for buffer centering
# around Houston.
filter_geom = OGRGeometry('POLYGON((-96.363151 28.763374,-94.363151 28.763374,-94.363151 30.763374,-96.363151 30.763374,-96.363151 28.763374))')
filter_geom = OGRGeometry(
'POLYGON((-96.363151 28.763374,-94.363151 28.763374,'
'-94.363151 30.763374,-96.363151 30.763374,-96.363151 28.763374))'
)
lyr.spatial_filter = filter_geom
self.assertEqual(filter_geom, lyr.spatial_filter)
feats = [feat for feat in lyr]


@ -14,27 +14,56 @@ class TestSRS:
setattr(self, key, value)
# Some Spatial Reference examples
srlist = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
srlist = (
TestSRS(
'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",'
'0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
proj='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ',
epsg=4326, projected=False, geographic=True, local=False,
lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'GEOGCS': ('EPSG', '4326'), 'spheroid': ('EPSG', '7030')},
attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'), ('primem|authority', 'EPSG'),),
),
TestSRS('PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
TestSRS(
'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",'
'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],'
'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
'PARAMETER["standard_parallel_1",30.28333333333333],'
'PARAMETER["standard_parallel_2",28.38333333333333],'
'PARAMETER["latitude_of_origin",27.83333333333333],'
'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],'
'PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],'
'AUTHORITY["EPSG","32140"]]',
proj=None, epsg=32140, projected=True, geographic=False, local=False,
lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'PROJCS': ('EPSG', '32140'), 'spheroid': ('EPSG', '7019'), 'unit': ('EPSG', '9001')},
attr=(('DATUM', 'North_American_Datum_1983'), (('SPHEROID', 2), '298.257222101'), ('PROJECTION', 'Lambert_Conformal_Conic_2SP'),),
attr=(
('DATUM', 'North_American_Datum_1983'),
(('SPHEROID', 2), '298.257222101'),
('PROJECTION', 'Lambert_Conformal_Conic_2SP'),
),
TestSRS('PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Foot_US",0.3048006096012192]]',
),
TestSRS(
'PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",'
'GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",'
'SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],'
'UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
'PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],'
'PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],'
'PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],'
'UNIT["Foot_US",0.3048006096012192]]',
proj=None, epsg=None, projected=True, geographic=False, local=False,
lin_name='Foot_US', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199,
auth={'PROJCS': (None, None)},
attr=(('PROJCS|GeOgCs|spheroid', 'GRS_1980'), (('projcs', 9), 'UNIT'), (('projcs', 11), None),),
),
# This is really ESRI format, not WKT -- but the import should work the same
TestSRS('LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]',
TestSRS(
'LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]',
esri=True, proj=None, epsg=None, projected=False, geographic=False, local=True,
lin_name='Meter', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
attr=(('LOCAL_DATUM', 'Local Datum'), ('unit', 'Meter')),
@ -42,14 +71,79 @@ srlist = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,29
)
# Well-Known Names
well_known = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]', wk='WGS84', name='WGS 84', attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84'))),
TestSRS('GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4322"]]', wk='WGS72', name='WGS 72', attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72'))),
TestSRS('GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]]', wk='NAD27', name='NAD27', attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866'))),
TestSRS('GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]]', wk='NAD83', name='NAD83', attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980'))),
TestSRS('PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",DATUM["New_Zealand_Geodetic_Datum_1949",SPHEROID["International 1924",6378388,297,AUTHORITY["EPSG","7022"]],TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.28991152777778],PARAMETER["central_meridian",172.1090281944444],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]', wk='EPSG:27216', name='NZGD49 / Karamea Circuit', attrs=(('PROJECTION', 'Transverse_Mercator'), ('SPHEROID', 'International 1924'))),
well_known = (
TestSRS(
'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,'
'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
wk='WGS84', name='WGS 84',
attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84')),
),
TestSRS(
'GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,'
'AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4322"]]',
wk='WGS72', name='WGS 72',
attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72')),
),
TestSRS(
'GEOGCS["NAD27",DATUM["North_American_Datum_1927",'
'SPHEROID["Clarke 1866",6378206.4,294.9786982138982,'
'AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4267"]]',
wk='NAD27', name='NAD27',
attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866'))
),
TestSRS(
'GEOGCS["NAD83",DATUM["North_American_Datum_1983",'
'SPHEROID["GRS 1980",6378137,298.257222101,'
'AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],'
'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4269"]]',
wk='NAD83', name='NAD83',
attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980')),
),
TestSRS(
'PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",'
'DATUM["New_Zealand_Geodetic_Datum_1949",'
'SPHEROID["International 1924",6378388,297,'
'AUTHORITY["EPSG","7022"]],'
'TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],'
'AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,'
'AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,'
'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],'
'PROJECTION["Transverse_Mercator"],'
'PARAMETER["latitude_of_origin",-41.28991152777778],'
'PARAMETER["central_meridian",172.1090281944444],'
'PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],'
'PARAMETER["false_northing",700000],'
'UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]',
wk='EPSG:27216', name='NZGD49 / Karamea Circuit',
attrs=(('PROJECTION', 'Transverse_Mercator'), ('SPHEROID', 'International 1924')),
),
)
bad_srlist = ('Foobar', 'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',)
bad_srlist = (
'Foobar',
'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",'
'DATUM["North_American_Datum_1983",'
'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],'
'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
'PARAMETER["standard_parallel_1",30.28333333333333],'
'PARAMETER["standard_parallel_2",28.38333333333333],'
'PARAMETER["latitude_of_origin",27.83333333333333],'
'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],'
'PARAMETER["false_northing",4000000],UNIT["metre",1,'
'AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
)
@skipUnless(HAS_GDAL, "GDAL is required")


@ -6,7 +6,8 @@ import re
hex_regex = re.compile(r'^[0-9A-F]+$', re.I)
wkt_regex = re.compile(r'^(SRID=(?P<srid>\-?\d+);)?'
r'(?P<wkt>'
r'(?P<type>POINT|LINESTRING|LINEARRING|POLYGON|MULTIPOINT|MULTILINESTRING|MULTIPOLYGON|GEOMETRYCOLLECTION)'
r'(?P<type>POINT|LINESTRING|LINEARRING|POLYGON|MULTIPOINT|'
r'MULTILINESTRING|MULTIPOLYGON|GEOMETRYCOLLECTION)'
r'[ACEGIMLONPSRUTYZ\d,\.\-\(\) ]+)$',
re.I)
json_regex = re.compile(r'^(\s+)?\{.*}(\s+)?$', re.DOTALL)
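Implicit concatenation also applies to the raw-string regex fragments above: splitting the (?P<type>...) alternation across adjacent r'...' literals compiles to the same pattern as the one-line form. A small check using an abbreviated pattern (not the full wkt_regex):

import re

one_line = re.compile(r'^(?P<type>POINT|LINESTRING|POLYGON)\b', re.I)
wrapped = re.compile(
    r'^(?P<type>POINT|LINESTRING|'
    r'POLYGON)\b',
    re.I,
)
# The joined source is identical, so the compiled patterns behave alike.
assert one_line.pattern == wrapped.pattern
assert wrapped.match('polygon') is not None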


@ -98,7 +98,8 @@ class Polygon(GEOSGeometry):
else:
return capi.geom_clone(g.ptr)
def _construct_ring(self, param, msg='Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings'):
def _construct_ring(self, param, msg=(
'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')):
"Helper routine for trying to construct a ring from the given parameter."
if isinstance(param, LinearRing):
return param


@ -8,7 +8,10 @@ from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
def check_cs_ptr(result, func, cargs):
"Error checking on routines that return Geometries."
if not result:
raise GEOSException('Error encountered checking Coordinate Sequence returned from GEOS C function "%s".' % func.__name__)
raise GEOSException(
'Error encountered checking Coordinate Sequence returned from GEOS '
'C function "%s".' % func.__name__
)
return result


@ -315,7 +315,8 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
self.assertEqual(ls, LineString(ls.tuple)) # tuple
self.assertEqual(ls, LineString(*ls.tuple)) # as individual arguments
self.assertEqual(ls, LineString([list(tup) for tup in ls.tuple])) # as list
self.assertEqual(ls.wkt, LineString(*tuple(Point(tup) for tup in ls.tuple)).wkt) # Point individual arguments
# Point individual arguments
self.assertEqual(ls.wkt, LineString(*tuple(Point(tup) for tup in ls.tuple)).wkt)
if numpy:
self.assertEqual(ls, LineString(numpy.array(ls.tuple))) # as numpy array
@ -652,7 +653,10 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
# Test conversion from custom to a known srid
c2w = gdal.CoordTransform(
gdal.SpatialReference('+proj=mill +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +R_A +ellps=WGS84 +datum=WGS84 +units=m +no_defs'),
gdal.SpatialReference(
'+proj=mill +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +R_A +ellps=WGS84 '
'+datum=WGS84 +units=m +no_defs'
),
gdal.SpatialReference(4326))
new_pnt = pnt.transform(c2w, clone=True)
self.assertEqual(new_pnt.srid, 4326)


@ -107,6 +107,8 @@ class Command(BaseCommand):
rev_mapping = dict((v, k) for k, v in mapping_dict.items())
output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
'%s_mapping = {' % model_name.lower()])
output.extend(" '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields)
output.extend(" '%s' : '%s'," % (
rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields
)
output.extend([" '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
return '\n'.join(output) + '\n'


@ -11,7 +11,8 @@
* In the template:
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
{{ google.xhtml }}
<head>
<title>Google Maps via GeoDjango</title>


@ -40,7 +40,10 @@ class GoogleMap(object):
try:
self.key = settings.GOOGLE_MAPS_API_KEY
except AttributeError:
raise GoogleMapException('Google Maps API Key not found (try adding GOOGLE_MAPS_API_KEY to your settings).')
raise GoogleMapException(
'Google Maps API Key not found (try adding '
'GOOGLE_MAPS_API_KEY to your settings).'
)
else:
self.key = key


@ -102,11 +102,13 @@ class DistanceTest(TestCase):
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140)) FROM distapp_southtexascity;
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278)) FROM distapp_southtexascityft;
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
# Oracle 11 thinks this is not a projected coordinate system, so it's
# not tested.
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
@ -146,7 +148,8 @@ class DistanceTest(TestCase):
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)))
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326)) FROM distapp_australiacity ORDER BY name;
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
@ -156,8 +159,10 @@ class DistanceTest(TestCase):
self.assertAlmostEqual(distance, city.distance.m, 0)
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326), 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326)) FROM distapp_australiacity WHERE (NOT (id = 11)); st_distance_sphere
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
# FROM distapp_australiacity WHERE (NOT (id = 11)); st_distance_sphere
if connection.ops.postgis and connection.ops.proj_version_tuple() >= (4, 7, 0):
# PROJ.4 versions 4.7+ have updated datums, and thus different
# distance values.
@ -204,7 +209,9 @@ class DistanceTest(TestCase):
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140), ST_GeomFromText('<buffer_wkt>', 32140)) FROM "distapp_censuszipcode";
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
@ -230,14 +237,18 @@ class DistanceTest(TestCase):
# Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
# (thus, Houston and Southside place will be excluded as tested in
# the `test02_dwithin` above).
qs1 = SouthTexasCity.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(point__distance_lte=(self.stx_pnt, D(km=20)))
qs1 = SouthTexasCity.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
# Can't determine the units on SpatiaLite from PROJ.4 string, and
# Oracle 11 incorrectly thinks it is not projected.
if spatialite or oracle:
dist_qs = (qs1,)
else:
qs2 = SouthTexasCityFt.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(point__distance_lte=(self.stx_pnt, D(km=20)))
qs2 = SouthTexasCityFt.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
dist_qs = (qs1, qs2)
for qs in dist_qs:
@ -269,8 +280,8 @@ class DistanceTest(TestCase):
self.get_names(dist_qs))
# Too many params (4 in this case) should raise a ValueError.
self.assertRaises(ValueError, len,
AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4')))
queryset = AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
self.assertRaises(ValueError, len, queryset)
# Not enough params should raise a ValueError.
self.assertRaises(ValueError, len,
@ -325,7 +336,8 @@ class DistanceTest(TestCase):
Test the `length` GeoQuerySet method.
"""
# Reference query (should use `length_spheroid`).
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]');
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563,
# AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668


@ -44,7 +44,17 @@ city_dict = dict((name, coords) for name, coords in city_data)
# http://seamless.usgs.gov/products/9arc.php
interstate_data = (
('I-45',
'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,'
'-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,'
'-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,'
'-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,'
'-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,'
'-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,'
'-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,'
'-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,'
'-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,'
'-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,'
'-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
(11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858,
15.386, 13.168, 15.104, 16.516, 13.923, 14.385, 15.16,
15.544, 14.975, 15.688, 16.099, 15.197, 17.268, 19.857,
@ -56,7 +66,8 @@ interstate_data = (
# system 32140), with elevation values from the National Elevation Dataset
# (see above).
bbox_data = (
'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,942051.75 4208366.38,941527.97 4225693.20))',
'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,'
'942051.75 4208366.38,941527.97 4225693.20))',
(21.71, 13.21, 9.12, 16.40, 21.71)
)
@ -184,7 +195,11 @@ class Geo3DTest(TestCase):
# PostGIS query that returned the reference EWKT for this test:
# `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;`
self._load_city_data()
ref_ewkt = 'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
ref_ewkt = (
'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,'
'-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,'
'-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
)
ref_union = GEOSGeometry(ref_ewkt)
union = City3D.objects.aggregate(Union('point'))['point__union']
self.assertTrue(union.hasz)


@ -32,7 +32,8 @@ class GeoAdminTest(TestCase):
result = geoadmin.get_map_widget(City._meta.get_field('point'))(
).render('point', Point(-79.460734, 40.18476))
self.assertIn(
"""geodjango_point.layers.base = new OpenLayers.Layer.WMS("OpenLayers WMS", "http://vmap0.tiles.osgeo.org/wms/vmap0", {layers: \'basic\', format: 'image/jpeg'});""",
"""geodjango_point.layers.base = new OpenLayers.Layer.WMS("OpenLayers WMS", """
""""http://vmap0.tiles.osgeo.org/wms/vmap0", {layers: 'basic', format: 'image/jpeg'});""",
result)
def test_olwidget_has_changed(self):


@ -111,12 +111,16 @@ class GeoModelTest(TestCase):
if oracle:
# San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
# Used the following Oracle SQL to get this value:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)) FROM DUAL;
# SELECT SDO_UTIL.TO_WKTGEOMETRY(
#     SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)
# )
# FROM DUAL;
nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
nad_srid = 41157
else:
# San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)' # Used ogr.py in gdal 1.4.1 for this transform
# Used ogr.py in gdal 1.4.1 for this transform
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)'
nad_srid = 3084
# Constructing & querying with a point from a different SRID. Oracle
@ -493,30 +497,49 @@ class GeoQuerySetTest(TestCase):
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.305196,48.462611]}'
chicago_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
houston_json = (
'{"type":"Point","crs":{"type":"name","properties":'
'{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
)
victoria_json = (
'{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],'
'"coordinates":[-123.305196,48.462611]}'
)
chicago_json = (
'{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},'
'"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
)
if spatialite:
victoria_json = '{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],"coordinates":[-123.305196,48.462611]}'
victoria_json = (
'{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],'
'"coordinates":[-123.305196,48.462611]}'
)
# Precision argument should only be an integer
self.assertRaises(TypeError, City.objects.geojson, precision='foo')
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0)
# FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)
# SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Chicago';
# SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city"
# WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
self.assertEqual(chicago_json, City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson)
self.assertEqual(
chicago_json,
City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson
)
@skipUnlessDBFeature("has_gml_method")
def test_gml(self):
@ -530,12 +553,22 @@ class GeoQuerySetTest(TestCase):
if oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml"><gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ </gml:coordinates></gml:Point>')
gml_regex = re.compile(
r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml">'
r'<gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ '
r'</gml:coordinates></gml:Point>'
)
elif spatialite and connection.ops.spatial_version < (3, 0, 0):
# Spatialite before 3.0 has extra colon in SrsName
gml_regex = re.compile(r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>')
gml_regex = re.compile(
r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." '
r'cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>'
)
else:
gml_regex = re.compile(r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>')
gml_regex = re.compile(
r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
r'-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>'
)
for ptown in [ptown1, ptown2]:
self.assertTrue(gml_regex.match(ptown.gml))
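The regex rewraps above split one pattern across several adjacent raw-string literals. Each fragment keeps its own r prefix, because the prefix applies per literal, and the pieces are concatenated before re.compile ever sees the pattern. A hedged, self-contained sketch:

import re

# The three fragments join into one pattern string, identical to the
# original single-line raw literal.
gml_regex = re.compile(
    r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
    r'-104\.60925\d+,38\.255001'
    r'</gml:coordinates></gml:Point>'
)
assert gml_regex.match(
    '<gml:Point srsName="EPSG:4326"><gml:coordinates>'
    '-104.609250,38.255001</gml:coordinates></gml:Point>'
)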
@ -566,7 +599,12 @@ class GeoQuerySetTest(TestCase):
self.assertRaises(TypeError, Country.objects.make_line)
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
ref_line = GEOSGeometry('LINESTRING(-95.363151 29.763374,-96.801611 32.782057,-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326)
ref_line = GEOSGeometry(
'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,'
'-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,'
'-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)',
srid=4326
)
self.assertEqual(ref_line, City.objects.make_line())
@skipUnlessDBFeature("has_num_geom_method")
@ -601,7 +639,8 @@ class GeoQuerySetTest(TestCase):
"Testing the `point_on_surface` GeoQuerySet method."
# Reference values.
if oracle:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05)) FROM GEOAPP_COUNTRY;
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05))
# FROM GEOAPP_COUNTRY;
ref = {'New Zealand': fromstr('POINT (174.616364 -36.100861)', srid=4326),
'Texas': fromstr('POINT (-103.002434 36.500397)', srid=4326),
}
@ -670,17 +709,29 @@ class GeoQuerySetTest(TestCase):
# to pass into GEOS `equals_exact`.
tol = 0.000000001
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol))
self.assertTrue(
ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol)
)
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country"
# WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr(
'MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))'
)
self.assertTrue(
ref.equals_exact(
Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid,
tol
)
)
@skipUnlessDBFeature("has_svg_method")
def test_svg(self):


@ -71,8 +71,10 @@ class Invalid(models.Model):
# Mapping dictionaries for the models above.
co_mapping = {'name': 'Name',
'state': {'name': 'State'}, # ForeignKeys use another mapping dictionary for the _related_ Model (State in this case).
co_mapping = {
'name': 'Name',
# ForeignKeys use another mapping dictionary for the _related_ Model (State in this case).
'state': {'name': 'State'},
'mpoly': 'MULTIPOLYGON', # Will convert POLYGON features into MULTIPOLYGONS.
}


@ -104,7 +104,9 @@ class RelatedGeoModelTest(TestCase):
ref_u2 = MultiPoint(p2, p3, srid=4326)
u1 = City.objects.unionagg(field_name='location__point')
u2 = City.objects.exclude(name__in=('Roswell', 'Houston', 'Dallas', 'Fort Worth')).unionagg(field_name='location__point')
u2 = City.objects.exclude(
name__in=('Roswell', 'Houston', 'Dallas', 'Fort Worth'),
).unionagg(field_name='location__point')
u3 = aggs['location__point__union']
self.assertEqual(type(u1), MultiPoint)
self.assertEqual(type(u3), MultiPoint)
@ -124,7 +126,11 @@ class RelatedGeoModelTest(TestCase):
"Testing F() expressions on GeometryFields."
# Constructing a dummy parcel border and getting the City instance for
# assigning the FK.
b1 = GEOSGeometry('POLYGON((-97.501205 33.052520,-97.501205 33.052576,-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))', srid=4326)
b1 = GEOSGeometry(
'POLYGON((-97.501205 33.052520,-97.501205 33.052576,'
'-97.501150 33.052576,-97.501150 33.052520,-97.501205 33.052520))',
srid=4326
)
pcity = City.objects.get(name='Aurora')
# First parcel has incorrect center point that is equal to the City;
@ -280,7 +286,10 @@ class RelatedGeoModelTest(TestCase):
# SELECT AsText(ST_Collect("relatedapp_location"."point")) FROM "relatedapp_city" LEFT OUTER JOIN
# "relatedapp_location" ON ("relatedapp_city"."location_id" = "relatedapp_location"."id")
# WHERE "relatedapp_city"."state" = 'TX';
ref_geom = GEOSGeometry('MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,-95.363151 29.763374,-96.801611 32.782057)')
ref_geom = GEOSGeometry(
'MULTIPOINT(-97.516111 33.058333,-96.801611 32.782057,'
'-95.363151 29.763374,-96.801611 32.782057)'
)
c1 = City.objects.filter(state='TX').collect(field_name='location__point')
c2 = City.objects.filter(state='TX').aggregate(Collect('location__point'))['location__point__collect']


@ -8,7 +8,8 @@ from django.test import skipUnlessDBFeature
from django.utils import six
test_srs = ({'srid': 4326,
test_srs = ({
'srid': 4326,
'auth_name': ('EPSG', True),
'auth_srid': 4326,
# Only the beginning, because there are differences depending on installed libs
@ -17,22 +18,26 @@ test_srs = ({'srid': 4326,
'proj4_re': r'\+proj=longlat (\+ellps=WGS84 )?(\+datum=WGS84 |\+towgs84=0,0,0,0,0,0,0 )\+no_defs ',
'spheroid': 'WGS 84', 'name': 'WGS 84',
'geographic': True, 'projected': False, 'spatialite': True,
'ellipsoid': (6378137.0, 6356752.3, 298.257223563), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
# From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'ellipsoid': (6378137.0, 6356752.3, 298.257223563),
'eprec': (1, 1, 9),
},
{'srid': 32140,
}, {
'srid': 32140,
'auth_name': ('EPSG', False),
'auth_srid': 32140,
'srtext': 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980"',
'srtext': (
'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",'
'DATUM["North_American_Datum_1983",SPHEROID["GRS 1980"'
),
'proj4_re': r'\+proj=lcc \+lat_1=30.28333333333333 \+lat_2=28.38333333333333 \+lat_0=27.83333333333333 '
r'\+lon_0=-99 \+x_0=600000 \+y_0=4000000 (\+ellps=GRS80 )?'
r'(\+datum=NAD83 |\+towgs84=0,0,0,0,0,0,0 )?\+units=m \+no_defs ',
'spheroid': 'GRS 1980', 'name': 'NAD83 / Texas South Central',
'geographic': False, 'projected': True, 'spatialite': False,
'ellipsoid': (6378137.0, 6356752.31414, 298.257222101), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
# From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'ellipsoid': (6378137.0, 6356752.31414, 298.257222101),
'eprec': (1, 5, 10),
},
)
})
@unittest.skipUnless(HAS_GDAL, "SpatialRefSysTest needs gdal support")


@ -374,8 +374,11 @@ class LayerMapping(object):
# If we have more than the maximum digits allowed, then throw an
# InvalidDecimal exception.
if n_prec > max_prec:
raise InvalidDecimal('A DecimalField with max_digits %d, decimal_places %d must round to an absolute value less than 10^%d.' %
(model_field.max_digits, model_field.decimal_places, max_prec))
raise InvalidDecimal(
'A DecimalField with max_digits %d, decimal_places %d must '
'round to an absolute value less than 10^%d.' %
(model_field.max_digits, model_field.decimal_places, max_prec)
)
val = d
elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(model_field, models.IntegerField):
# Attempt to convert any OFTReal and OFTString value to an OFTInteger.
@ -406,7 +409,10 @@ class LayerMapping(object):
try:
return rel_model.objects.using(self.using).get(**fk_kwargs)
except ObjectDoesNotExist:
raise MissingForeignKey('No ForeignKey %s model found with keyword arguments: %s' % (rel_model.__name__, fk_kwargs))
raise MissingForeignKey(
'No ForeignKey %s model found with keyword arguments: %s' %
(rel_model.__name__, fk_kwargs)
)
def verify_geom(self, geom, model_field):
"""
@ -571,7 +577,10 @@ class LayerMapping(object):
if strict:
# Bailing out if the `strict` keyword is set.
if not silent:
stream.write('Failed to save the feature (id: %s) into the model with the keyword arguments:\n' % feat.fid)
stream.write(
'Failed to save the feature (id: %s) into the '
'model with the keyword arguments:\n' % feat.fid
)
stream.write('%s\n' % kwargs)
raise
elif not silent:


@ -174,7 +174,8 @@ def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=Non
yield 'class %s(models.Model):' % model_name
for field_name, width, precision, field_type in zip(ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types):
for field_name, width, precision, field_type in zip(
ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types):
# The model field name.
mfield = field_name.lower()
if mfield[-1:] == '_':
@ -188,7 +189,9 @@ def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=Non
# may also be mapped to `DecimalField` if specified in the
# `decimal` keyword.
if field_name.lower() in decimal_fields:
yield ' %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % (mfield, width, precision, kwargs_str)
yield ' %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % (
mfield, width, precision, kwargs_str
)
else:
yield ' %s = models.FloatField(%s)' % (mfield, kwargs_str[2:])
elif field_type is OFTInteger:


@ -150,7 +150,8 @@ def apnumber(value):
return value
if not 0 < value < 10:
return value
return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value - 1]
return (_('one'), _('two'), _('three'), _('four'), _('five'),
_('six'), _('seven'), _('eight'), _('nine'))[value - 1]
# Perform the comparison in the default time zone when USE_TZ = True


@ -72,6 +72,7 @@ urlpatterns = [
url('^add/(debug|info|success|warning|error)/$', add, name='add_message'),
url('^add/msg/$', ContactFormViewWithMsg.as_view(), name='add_success_msg'),
url('^show/$', show, name='show_message'),
url('^template_response/add/(debug|info|success|warning|error)/$', add_template_response, name='add_template_response'),
url('^template_response/add/(debug|info|success|warning|error)/$',
add_template_response, name='add_template_response'),
url('^template_response/show/$', show_template_response, name='show_template_response'),
]


@ -5,7 +5,10 @@ from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Can be run as a cronjob or directly to clean out expired sessions (only with the database backend at the moment)."
help = (
"Can be run as a cronjob or directly to clean out expired sessions "
"(only with the database backend at the moment)."
)
def handle(self, **options):
engine = import_module(settings.SESSION_ENGINE)


@ -179,7 +179,10 @@ class SessionTestsMixin(object):
try:
session.save()
except AttributeError:
self.fail("The session object did not save properly. Middleware may be saving cache items without namespaces.")
self.fail(
"The session object did not save properly. "
"Middleware may be saving cache items without namespaces."
)
self.assertNotEqual(session.session_key, '1')
self.assertEqual(session.get('cat'), None)
session.delete()
@ -597,8 +600,11 @@ class SessionMiddlewareTests(unittest.TestCase):
# Check that the cookie was deleted, not recreated.
# A deleted cookie header looks like:
# Set-Cookie: sessionid=; expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/
self.assertEqual('Set-Cookie: {0}=; expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/'.format(settings.SESSION_COOKIE_NAME),
str(response.cookies[settings.SESSION_COOKIE_NAME]))
self.assertEqual(
'Set-Cookie: {0}=; expires=Thu, 01-Jan-1970 00:00:00 GMT; '
'Max-Age=0; Path=/'.format(settings.SESSION_COOKIE_NAME),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
class CookieSessionTests(SessionTestsMixin, TestCase):


@ -88,7 +88,10 @@ class Sitemap(object):
except Site.DoesNotExist:
pass
if site is None:
raise ImproperlyConfigured("To use sitemaps, either enable the sites framework or pass a Site/RequestSite object in your view.")
raise ImproperlyConfigured(
"To use sitemaps, either enable the sites framework or pass "
"a Site/RequestSite object in your view."
)
domain = site.domain
if getattr(self, 'i18n', False):


@ -62,7 +62,10 @@ class Feed(object):
try:
return item.get_absolute_url()
except AttributeError:
raise ImproperlyConfigured('Give your %s class a get_absolute_url() method, or define an item_link() method in your Feed class.' % item.__class__.__name__)
raise ImproperlyConfigured(
'Give your %s class a get_absolute_url() method, or define an '
'item_link() method in your Feed class.' % item.__class__.__name__
)
def __get_dynamic_attr(self, attname, obj, default=None):
try:


@ -26,7 +26,11 @@ class BaseHandler(object):
]
def __init__(self):
self._request_middleware = self._view_middleware = self._template_response_middleware = self._response_middleware = self._exception_middleware = None
self._request_middleware = None
self._view_middleware = None
self._template_response_middleware = None
self._response_middleware = None
self._exception_middleware = None
def load_middleware(self):
"""


@ -22,13 +22,17 @@ class EmailBackend(ConsoleEmailBackend):
self.file_path = os.path.abspath(self.file_path)
# Make sure that self.file_path is a directory if it exists.
if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
raise ImproperlyConfigured('Path for saving email messages exists, but is not a directory: %s' % self.file_path)
raise ImproperlyConfigured(
'Path for saving email messages exists, but is not a directory: %s' % self.file_path
)
# Try to create it, if it doesn't exist.
elif not os.path.exists(self.file_path):
try:
os.makedirs(self.file_path)
except OSError as err:
raise ImproperlyConfigured('Could not create directory for saving email messages: %s (%s)' % (self.file_path, err))
raise ImproperlyConfigured(
'Could not create directory for saving email messages: %s (%s)' % (self.file_path, err)
)
# Make sure that self.file_path is writable.
if not os.access(self.file_path, os.W_OK):
raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)


@ -393,7 +393,9 @@ class EmailMultiAlternatives(EmailMessage):
bytestrings). The SafeMIMEText class will handle any necessary encoding
conversions.
"""
super(EmailMultiAlternatives, self).__init__(subject, body, from_email, to, bcc, connection, attachments, headers, cc)
super(EmailMultiAlternatives, self).__init__(
subject, body, from_email, to, bcc, connection, attachments, headers, cc
)
self.alternatives = alternatives or []
def attach_alternative(self, content, mimetype):


@ -290,7 +290,12 @@ class BaseCommand(object):
type='choice', choices=['0', '1', '2', '3'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output')
parser.add_option('--settings',
help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.')
help=(
'The Python path to a settings module, e.g. '
'"myproject.settings.main". If this isn\'t provided, the '
'DJANGO_SETTINGS_MODULE environment variable will be used.'
),
)
parser.add_option('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
parser.add_option('--traceback', action='store_true',
@ -307,7 +312,12 @@ class BaseCommand(object):
type=int, choices=[0, 1, 2, 3],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output')
parser.add_argument('--settings',
help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.')
help=(
'The Python path to a settings module, e.g. '
'"myproject.settings.main". If this isn\'t provided, the '
'DJANGO_SETTINGS_MODULE environment variable will be used.'
),
)
parser.add_argument('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".')
parser.add_argument('--traceback', action='store_true',
@ -403,7 +413,7 @@ class BaseCommand(object):
try:
if (self.requires_system_checks and
not options.get('skip_validation') and # This will be removed at the end of deprecation process for `skip_validation`.
not options.get('skip_validation') and # Remove at the end of deprecation for `skip_validation`.
not options.get('skip_checks')):
self.check()
output = self.handle(*args, **options)
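The --settings change shows the adjacent-literal idiom applied to a long keyword argument: by the time add_argument runs, help receives one ordinary string. A runnable sketch with a stand-in parser (not Django's):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--settings',
    # The parenthesized value is a single string once the literals join.
    help=(
        'The Python path to a settings module, e.g. '
        '"myproject.settings.main".'
    ),
)
args = parser.parse_args(['--settings', 'myproject.settings.main'])
assert args.settings == 'myproject.settings.main'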


@ -73,7 +73,8 @@ Are you sure you want to do this?
" * The database isn't running or isn't configured correctly.\n"
" * At least one of the expected database tables doesn't exist.\n"
" * The SQL was invalid.\n"
"Hint: Look at the output of 'django-admin sqlflush'. That's the SQL this command wasn't able to run.\n"
"Hint: Look at the output of 'django-admin sqlflush'. "
"That's the SQL this command wasn't able to run.\n"
"The full error: %s") % (connection.settings_dict['NAME'], e)
six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])


@ -40,7 +40,10 @@ class Command(BaseCommand):
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield "# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table"
yield (
"# * Remove `managed = False` lines if you wish to allow "
"Django to create, modify, and delete the table"
)
yield "# Feel free to rename the models, but don't rename db_table values or field names."
yield "#"
yield "# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'"


@ -73,7 +73,10 @@ class Command(BaseCommand):
"%s in %s" % (", ".join(names), app)
for app, names in conflicts.items()
)
raise CommandError("Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str)
raise CommandError(
"Conflicting migrations detected (%s).\nTo fix them run "
"'python manage.py makemigrations --merge'" % name_str
)
# If they want to merge and there's nothing to merge, then politely exit
if self.merge and not conflicts:
@ -162,7 +165,9 @@ class Command(BaseCommand):
# Alternatively, makemigrations --dry-run --verbosity 3
# will output the migrations to stdout rather than saving
# the file to the disk.
self.stdout.write(self.style.MIGRATE_HEADING("Full migrations file '%s':" % writer.filename) + "\n")
self.stdout.write(self.style.MIGRATE_HEADING(
"Full migrations file '%s':" % writer.filename) + "\n"
)
self.stdout.write("%s\n" % writer.as_string())
def handle_merge(self, loader, conflicts):


@ -26,7 +26,11 @@ class Command(BaseCommand):
parser.add_argument('app_label', nargs='?',
help='App label of an application to synchronize the state.')
parser.add_argument('migration_name', nargs='?',
help='Database state will be brought to the state after that migration. Use the name "zero" to unapply all migrations.')
help=(
'Database state will be brought to the state after that '
'migration. Use the name "zero" to unapply all migrations.'
),
)
parser.add_argument('--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.')
parser.add_argument('--no-initial-data', action='store_false', dest='load_initial_data', default=True,
@ -71,7 +75,10 @@ class Command(BaseCommand):
"%s in %s" % (", ".join(names), app)
for app, names in conflicts.items()
)
raise CommandError("Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str)
raise CommandError(
"Conflicting migrations detected (%s).\nTo fix them run "
"'python manage.py makemigrations --merge'" % name_str
)
# If they supplied command line arguments, work out what they mean.
run_syncdb = False
@ -79,15 +86,21 @@ class Command(BaseCommand):
if options['app_label'] and options['migration_name']:
app_label, migration_name = options['app_label'], options['migration_name']
if app_label not in executor.loader.migrated_apps:
raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label)
raise CommandError(
"App '%s' does not have migrations (you cannot selectively "
"sync unmigrated apps)" % app_label
)
if migration_name == "zero":
targets = [(app_label, None)]
else:
try:
migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
except AmbiguityError:
raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (
migration_name, app_label))
raise CommandError(
"More than one migration matches '%s' in app '%s'. "
"Please be more specific." %
(migration_name, app_label)
)
except KeyError:
raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
migration_name, app_label))
@ -96,7 +109,10 @@ class Command(BaseCommand):
elif options['app_label']:
app_label = options['app_label']
if app_label not in executor.loader.migrated_apps:
raise CommandError("App '%s' does not have migrations (you cannot selectively sync unmigrated apps)" % app_label)
raise CommandError(
"App '%s' does not have migrations (you cannot selectively "
"sync unmigrated apps)" % app_label
)
targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
else:
targets = executor.loader.graph.leaf_nodes()
@ -108,14 +124,25 @@ class Command(BaseCommand):
if self.verbosity >= 1:
self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
if run_syncdb and executor.loader.unmigrated_apps:
self.stdout.write(self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") + (", ".join(executor.loader.unmigrated_apps)))
self.stdout.write(
self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") +
(", ".join(executor.loader.unmigrated_apps))
)
if target_app_labels_only:
self.stdout.write(self.style.MIGRATE_LABEL(" Apply all migrations: ") + (", ".join(set(a for a, n in targets)) or "(none)"))
self.stdout.write(
self.style.MIGRATE_LABEL(" Apply all migrations: ") +
(", ".join(set(a for a, n in targets)) or "(none)")
)
else:
if targets[0][1] is None:
self.stdout.write(self.style.MIGRATE_LABEL(" Unapply all migrations: ") + "%s" % (targets[0][0], ))
self.stdout.write(self.style.MIGRATE_LABEL(
" Unapply all migrations: ") + "%s" % (targets[0][0], )
)
else:
self.stdout.write(self.style.MIGRATE_LABEL(" Target specific migration: ") + "%s, from %s" % (targets[0][1], targets[0][0]))
self.stdout.write(self.style.MIGRATE_LABEL(
" Target specific migration: ") + "%s, from %s"
% (targets[0][1], targets[0][0])
)
# Run the syncdb phase.
# If you ever manage to get rid of this, I owe you many, many drinks.
@ -153,8 +180,15 @@ class Command(BaseCommand):
)
changes = autodetector.changes(graph=executor.loader.graph)
if changes:
self.stdout.write(self.style.NOTICE(" Your models have changes that are not yet reflected in a migration, and so won't be applied."))
self.stdout.write(self.style.NOTICE(" Run 'manage.py makemigrations' to make new migrations, and then re-run 'manage.py migrate' to apply them."))
self.stdout.write(self.style.NOTICE(
" Your models have changes that are not yet reflected "
"in a migration, and so won't be applied."
))
self.stdout.write(self.style.NOTICE(
" Run 'manage.py makemigrations' to make new "
"migrations, and then re-run 'manage.py migrate' to "
"apply them."
))
else:
executor.migrate(targets, plan, fake=options.get("fake", False))
@ -223,15 +257,25 @@ class Command(BaseCommand):
for model in model_list:
# Create the model's database table, if it doesn't already exist.
if self.verbosity >= 3:
self.stdout.write(" Processing %s.%s model\n" % (app_name, model._meta.object_name))
self.stdout.write(
" Processing %s.%s model\n" % (app_name, model._meta.object_name)
)
sql, references = connection.creation.sql_create_model(model, no_style(), seen_models)
seen_models.add(model)
created_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in seen_models:
sql.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
sql.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
sql.extend(
connection.creation.sql_for_pending_references(
refto, no_style(), pending_references,
)
)
sql.extend(
connection.creation.sql_for_pending_references(
model, no_style(), pending_references
)
)
if self.verbosity >= 1 and sql:
self.stdout.write(" Creating table %s\n" % model._meta.db_table)
for statement in sql:
@ -253,18 +297,27 @@ class Command(BaseCommand):
custom_sql = custom_sql_for_model(model, no_style(), connection)
if custom_sql:
if self.verbosity >= 2:
self.stdout.write(" Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
self.stdout.write(
" Installing custom SQL for %s.%s model\n" %
(app_name, model._meta.object_name)
)
try:
with transaction.atomic(using=connection.alias):
for sql in custom_sql:
cursor.execute(sql)
except Exception as e:
self.stderr.write(" Failed to install custom SQL for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
self.stderr.write(
" Failed to install custom SQL for %s.%s model: %s\n"
% (app_name, model._meta.object_name, e)
)
if self.show_traceback:
traceback.print_exc()
else:
if self.verbosity >= 3:
self.stdout.write(" No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
self.stdout.write(
" No custom SQL for %s.%s model\n" %
(app_name, model._meta.object_name)
)
if self.verbosity >= 1:
self.stdout.write(" Installing indexes...\n")
@ -276,20 +329,31 @@ class Command(BaseCommand):
index_sql = connection.creation.sql_indexes_for_model(model, no_style())
if index_sql:
if self.verbosity >= 2:
self.stdout.write(" Installing index for %s.%s model\n" % (app_name, model._meta.object_name))
self.stdout.write(
" Installing index for %s.%s model\n" %
(app_name, model._meta.object_name)
)
savepoint = connection.features.can_rollback_ddl
try:
with transaction.atomic(using=connection.alias, savepoint=connection.features.can_rollback_ddl):
with transaction.atomic(using=connection.alias, savepoint=savepoint):
for sql in index_sql:
cursor.execute(sql)
except Exception as e:
self.stderr.write(" Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
self.stderr.write(
" Failed to install index for %s.%s model: %s\n" %
(app_name, model._meta.object_name, e)
)
finally:
cursor.close()
# Load initial_data fixtures (unless that has been disabled)
if self.load_initial_data:
for app_label in app_labels:
call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=connection.alias, skip_validation=True, app_label=app_label, hide_empty=True)
call_command(
'loaddata', 'initial_data', verbosity=self.verbosity,
database=connection.alias, skip_validation=True,
app_label=app_label, hide_empty=True,
)
return created_models


@ -157,7 +157,9 @@ class Command(BaseCommand):
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
if plan:
self.stdout.write(self.style.NOTICE("\nYou have unapplied migrations; your app may not work properly until they are applied."))
self.stdout.write(self.style.NOTICE(
"\nYou have unapplied migrations; your app may not work properly until they are applied."
))
self.stdout.write(self.style.NOTICE("Run 'python manage.py migrate' to apply them.\n"))
# Kept for backward compatibility


@ -6,7 +6,10 @@ from django.db import connections, DEFAULT_DB_ALIAS
class Command(BaseCommand):
help = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."
help = (
"Returns a list of the SQL statements required to return all tables in "
"the database to the state they were in just after they were installed."
)
output_transaction = True


@ -31,13 +31,22 @@ class Command(BaseCommand):
# Load the current graph state and check that the app and migration they asked for exist
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
if app_label not in executor.loader.migrated_apps:
raise CommandError("App '%s' does not have migrations (so squashmigrations on it makes no sense)" % app_label)
raise CommandError(
"App '%s' does not have migrations (so squashmigrations on "
"it makes no sense)" % app_label
)
try:
migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
except AmbiguityError:
raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % (migration_name, app_label))
raise CommandError(
"More than one migration matches '%s' in app '%s'. Please be "
"more specific." % (migration_name, app_label)
)
except KeyError:
raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (migration_name, app_label))
raise CommandError(
"Cannot find a migration matching '%s' from app '%s'." %
(migration_name, app_label)
)
# Work out the list of predecessor migrations
migrations_to_squash = [
@ -71,7 +80,11 @@ class Command(BaseCommand):
dependencies = set()
for smigration in migrations_to_squash:
if smigration.replaces:
raise CommandError("You cannot squash squashed migrations! Please transition it to a normal migration first: https://docs.djangoproject.com/en/1.7/topics/migrations/#squashing-migrations")
raise CommandError(
"You cannot squash squashed migrations! Please transition "
"it to a normal migration first: "
"https://docs.djangoproject.com/en/1.7/topics/migrations/#squashing-migrations"
)
operations.extend(smigration.operations)
for dependency in smigration.dependencies:
if isinstance(dependency, SwappableTuple):
@ -92,7 +105,10 @@ class Command(BaseCommand):
if len(new_operations) == len(operations):
self.stdout.write(" No optimizations possible.")
else:
self.stdout.write(" Optimized from %s operations to %s operations." % (len(operations), len(new_operations)))
self.stdout.write(
" Optimized from %s operations to %s operations." %
(len(operations), len(new_operations))
)
# Work out the value of replaces (any squashed ones we're re-squashing)
# need to feed their replaces into ours


@ -32,7 +32,10 @@ class Command(BaseCommand):
# Run the development server. Turn off auto-reloading because it causes
# a strange error -- it causes this handle() method to be called
# multiple times.
shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
shutdown_message = (
'\nServer stopped.\nNote that the test database, %r, has not been '
'deleted. You can explore it on your own.' % db_name
)
use_threading = connection.features.test_db_allows_multiple_connections
call_command(
'runserver',


@ -17,7 +17,10 @@ def check_for_migrations(app_config, connection):
from django.db.migrations.loader import MigrationLoader
loader = MigrationLoader(connection)
if app_config.label in loader.migrated_apps:
raise CommandError("App '%s' has migrations. Only the sqlmigrate and sqlflush commands can be used when an app has migrations." % app_config.label)
raise CommandError(
"App '%s' has migrations. Only the sqlmigrate and sqlflush commands "
"can be used when an app has migrations." % app_config.label
)
def sql_create(app_config, style, connection):
@ -176,7 +179,11 @@ def sql_all(app_config, style, connection):
check_for_migrations(app_config, connection)
"Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
return sql_create(app_config, style, connection) + sql_custom(app_config, style, connection) + sql_indexes(app_config, style, connection)
return (
sql_create(app_config, style, connection) +
sql_custom(app_config, style, connection) +
sql_indexes(app_config, style, connection)
)
def _split_statements(content):
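The sql_all rewrite uses the expression half of the idiom: any expression can continue across lines once wrapped in parentheses, which PEP 8 prefers over backslash continuations. A sketch with stand-in lists (hypothetical names):

def sql_all_sketch(create_sql, custom_sql, index_sql):
    # One expression spanning four lines; the parentheses make the
    # continuation explicit without backslashes.
    return (
        create_sql +
        custom_sql +
        index_sql
    )

assert sql_all_sketch(['CREATE'], ['INSERT'], ['INDEX']) == ['CREATE', 'INSERT', 'INDEX']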


@ -111,7 +111,9 @@ def Deserializer(object_list, **options):
continue
if isinstance(field_value, str):
field_value = smart_text(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)
field_value = smart_text(
field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True
)
field = Model._meta.get_field(field_name)


@ -309,7 +309,14 @@ class RegexURLResolver(LocaleRegexProvider):
for name in pattern.reverse_dict:
for matches, pat, defaults in pattern.reverse_dict.getlist(name):
new_matches = normalize(parent_pat + pat)
lookups.appendlist(name, (new_matches, p_pattern + pat, dict(defaults, **pattern.default_kwargs)))
lookups.appendlist(
name,
(
new_matches,
p_pattern + pat,
dict(defaults, **pattern.default_kwargs),
)
)
for namespace, (prefix, sub_pattern) in pattern.namespace_dict.items():
namespaces[namespace] = (p_pattern + prefix, sub_pattern)
for app_name, namespace_list in pattern.app_dict.items():
@ -365,7 +372,14 @@ class RegexURLResolver(LocaleRegexProvider):
if sub_match:
sub_match_dict = dict(match.groupdict(), **self.default_kwargs)
sub_match_dict.update(sub_match.kwargs)
return ResolverMatch(sub_match.func, sub_match.args, sub_match_dict, sub_match.url_name, self.app_name or sub_match.app_name, [self.namespace] + sub_match.namespaces)
return ResolverMatch(
sub_match.func,
sub_match.args,
sub_match_dict,
sub_match.url_name,
self.app_name or sub_match.app_name,
[self.namespace] + sub_match.namespaces
)
tried.append([pattern])
raise Resolver404({'tried': tried, 'path': new_path})
raise Resolver404({'path': path})
@ -436,7 +450,8 @@ class RegexURLResolver(LocaleRegexProvider):
continue
candidate_subs = dict(zip(prefix_args + params, text_args))
else:
if set(kwargs.keys()) | set(defaults.keys()) != set(params) | set(defaults.keys()) | set(prefix_args):
if (set(kwargs.keys()) | set(defaults.keys()) != set(params) |
set(defaults.keys()) | set(prefix_args)):
continue
matches = True
for k, v in defaults.items():


@ -179,12 +179,21 @@ class EmailValidator(object):
return False
def __eq__(self, other):
return isinstance(other, EmailValidator) and (self.domain_whitelist == other.domain_whitelist) and (self.message == other.message) and (self.code == other.code)
return (
isinstance(other, EmailValidator) and
(self.domain_whitelist == other.domain_whitelist) and
(self.message == other.message) and
(self.code == other.code)
)
validate_email = EmailValidator()
slug_re = re.compile(r'^[-a-zA-Z0-9_]+$')
validate_slug = RegexValidator(slug_re, _("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."), 'invalid')
validate_slug = RegexValidator(
slug_re,
_("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."),
'invalid'
)
ipv4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
validate_ipv4_address = RegexValidator(ipv4_re, _('Enter a valid IPv4 address.'), 'invalid')
@ -228,7 +237,11 @@ def ip_address_validators(protocol, unpack_ipv4):
% (protocol, list(ip_address_validator_map)))
comma_separated_int_list_re = re.compile('^[\d,]+$')
validate_comma_separated_integer_list = RegexValidator(comma_separated_int_list_re, _('Enter only digits separated by commas.'), 'invalid')
validate_comma_separated_integer_list = RegexValidator(
comma_separated_int_list_re,
_('Enter only digits separated by commas.'),
'invalid'
)
@deconstructible
@ -250,7 +263,12 @@ class BaseValidator(object):
raise ValidationError(self.message, code=self.code, params=params)
def __eq__(self, other):
return isinstance(other, self.__class__) and (self.limit_value == other.limit_value) and (self.message == other.message) and (self.code == other.code)
return (
isinstance(other, self.__class__) and
(self.limit_value == other.limit_value) and
(self.message == other.message) and
(self.code == other.code)
)
@deconstructible
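The __eq__ rewraps break a long boolean return into one clause per line, keeping each and at the end of its line so the continuation reads top to bottom. A minimal sketch of the shape (simplified class, not the validators above):

class Limit(object):
    def __init__(self, value, message):
        self.value = value
        self.message = message

    def __eq__(self, other):
        # One clause per line, operator kept at the line end.
        return (
            isinstance(other, self.__class__) and
            (self.value == other.value) and
            (self.message == other.message)
        )

assert Limit(3, 'max') == Limit(3, 'max')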


@ -466,7 +466,10 @@ class BaseDatabaseWrapper(object):
"""
Only required when autocommits_when_autocommit_is_off = True.
"""
raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a _start_transaction_under_autocommit() method')
raise NotImplementedError(
'subclasses of BaseDatabaseWrapper may require a '
'_start_transaction_under_autocommit() method'
)
def schema_editor(self, *args, **kwargs):
"Returns a new instance of this backend's SchemaEditor"


@ -423,7 +423,8 @@ class BaseDatabaseCreation(object):
# Make a function to iteratively return every object
def get_objects():
for model in sort_dependencies(app_list):
if not model._meta.proxy and model._meta.managed and router.allow_migrate(self.connection.alias, model):
if (not model._meta.proxy and model._meta.managed and
router.allow_migrate(self.connection.alias, model)):
queryset = model._default_manager.using(self.connection.alias).order_by(model._meta.pk.name)
for obj in queryset.iterator():
yield obj


@ -513,15 +513,18 @@ class DatabaseWrapper(BaseDatabaseWrapper):
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign key references. This method is
intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint checks were off.
Checks each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
detailed information about the invalid reference in the error message.
Raises an IntegrityError on the first invalid foreign key reference
encountered (if any) and provides detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
ALL IMMEDIATE")
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:


@ -13,7 +13,10 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY "
"(%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
)
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"


@ -32,7 +32,10 @@ class SQLCompiler(compiler.SQLCompiler):
high_where = ''
if self.query.high_mark is not None:
high_where = 'WHERE ROWNUM <= %d' % (self.query.high_mark,)
sql = 'SELECT * FROM (SELECT "_SUB".*, ROWNUM AS "_RN" FROM (%s) "_SUB" %s) WHERE "_RN" > %d' % (sql, high_where, self.query.low_mark)
sql = (
'SELECT * FROM (SELECT "_SUB".*, ROWNUM AS "_RN" FROM (%s) '
'"_SUB" %s) WHERE "_RN" > %d' % (sql, high_where, self.query.low_mark)
)
return sql, params


@ -82,7 +82,10 @@ class DatabaseCreation(BaseDatabaseCreation):
return
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME)
confirm = input(
"It appears the test database, %s, already exists. "
"Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME
)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
@ -104,7 +107,10 @@ class DatabaseCreation(BaseDatabaseCreation):
except Exception as e:
sys.stderr.write("Got an error creating the test user: %s\n" % e)
if not autoclobber:
confirm = input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER)
confirm = input(
"It appears the test user, %s, already exists. Type "
"'yes' to delete it, or 'no' to cancel: " % TEST_USER
)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
@ -123,11 +129,14 @@ class DatabaseCreation(BaseDatabaseCreation):
self.connection.close() # done with main user -- test user and tablespaces created
real_settings = settings.DATABASES[self.connection.alias]
real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER']
real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD']
real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = \
self.connection.settings_dict['USER']
real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = \
self.connection.settings_dict['PASSWORD']
real_test_settings = real_settings['TEST']
test_settings = self.connection.settings_dict['TEST']
real_test_settings['USER'] = real_settings['USER'] = test_settings['USER'] = self.connection.settings_dict['USER'] = TEST_USER
real_test_settings['USER'] = real_settings['USER'] = test_settings['USER'] = \
self.connection.settings_dict['USER'] = TEST_USER
real_settings['PASSWORD'] = self.connection.settings_dict['PASSWORD'] = TEST_PASSWD
return self.connection.settings_dict['NAME']
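The SAVED_USER lines fall back to backslash continuations because an assignment statement's target list cannot be wrapped in parentheses the way an expression can. A sketch of the pattern with dummy dicts:

settings_dict = {'USER': 'alice', 'PASSWORD': 'secret'}
real_settings = {}

# The trailing backslash continues the chained assignment; both targets
# end up bound to the same 'alice' string.
real_settings['SAVED_USER'] = settings_dict['SAVED_USER'] = \
    settings_dict['USER']

assert real_settings['SAVED_USER'] == settings_dict['SAVED_USER'] == 'alice'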


@ -93,7 +93,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
def normalize_name(self, name):
"""
Get the properly shortened and uppercased identifier as returned by quote_name(), but without the actual quotes.
Get the properly shortened and uppercased identifier as returned by
quote_name(), but without the actual quotes.
"""
nn = self.quote_name(name)
if nn[0] == '"' and nn[-1] == '"':


@ -40,7 +40,9 @@ class DatabaseCreation(BaseDatabaseCreation):
def sql_table_creation_suffix(self):
test_settings = self.connection.settings_dict['TEST']
assert test_settings['COLLATION'] is None, "PostgreSQL does not support collation setting at database creation time."
assert test_settings['COLLATION'] is None, (
"PostgreSQL does not support collation setting at database creation time."
)
if test_settings['CHARSET']:
return "WITH ENCODING '%s'" % test_settings['CHARSET']
return ''


@ -128,7 +128,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
kc.constraint_name,
kc.column_name,
c.constraint_type,
array(SELECT table_name::text || '.' || column_name::text FROM information_schema.constraint_column_usage WHERE constraint_name = kc.constraint_name)
array(SELECT table_name::text || '.' || column_name::text
FROM information_schema.constraint_column_usage
WHERE constraint_name = kc.constraint_name)
FROM information_schema.key_column_usage AS kc
JOIN information_schema.table_constraints AS c ON
kc.table_schema = c.table_schema AND


@ -164,27 +164,35 @@ class DatabaseOperations(BaseDatabaseOperations):
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" %
(style.SQL_KEYWORD('SELECT'),
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(qn(model._meta.db_table)),
style.SQL_FIELD(f.column),
style.SQL_FIELD(qn(f.column)),
style.SQL_FIELD(qn(f.column)),
style.SQL_KEYWORD('IS NOT'),
style.SQL_KEYWORD('FROM'),
style.SQL_TABLE(qn(model._meta.db_table))))
style.SQL_TABLE(qn(model._meta.db_table)),
)
)
break # Only one AutoField is allowed per model, so don't bother continuing.
for f in model._meta.many_to_many:
if not f.rel.through:
output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" %
(style.SQL_KEYWORD('SELECT'),
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;" % (
style.SQL_KEYWORD('SELECT'),
style.SQL_TABLE(qn(f.m2m_db_table())),
style.SQL_FIELD('id'),
style.SQL_FIELD(qn('id')),
style.SQL_FIELD(qn('id')),
style.SQL_KEYWORD('IS NOT'),
style.SQL_KEYWORD('FROM'),
style.SQL_TABLE(qn(f.m2m_db_table()))))
style.SQL_TABLE(qn(f.m2m_db_table()))
)
)
return output
def prep_for_iexact_query(self, x):


@ -51,7 +51,10 @@ class BaseDatabaseSchemaEditor(object):
sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)"
sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
)
sql_create_inline_fk = None
sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
@ -160,7 +163,10 @@ class BaseDatabaseSchemaEditor(object):
"""
Only used for backends which have requires_literal_defaults feature
"""
raise NotImplementedError('subclasses of BaseDatabaseSchemaEditor for backends which have requires_literal_defaults must provide a prepare_default() method')
raise NotImplementedError(
'subclasses of BaseDatabaseSchemaEditor for backends which have '
'requires_literal_defaults must provide a prepare_default() method'
)
def effective_default(self, field):
"""
@ -234,7 +240,9 @@ class BaseDatabaseSchemaEditor(object):
if self.connection.features.supports_foreign_keys:
self.deferred_sql.append(
self.sql_create_fk % {
"name": self._create_index_name(model, [field.column], suffix="_fk_%s_%s" % (to_table, to_column)),
"name": self._create_index_name(
model, [field.column], suffix="_fk_%s_%s" % (to_table, to_column)
),
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"to_table": self.quote_name(to_table),
@ -491,24 +499,34 @@ class BaseDatabaseSchemaEditor(object):
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params['type']
if (old_type is None and old_field.rel is None) or (new_type is None and new_field.rel is None):
raise ValueError("Cannot alter field %s into %s - they do not properly define db_type (are you using PostGIS 1.5 or badly-written custom fields?)" % (
old_field,
new_field,
))
elif old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created and new_field.rel.through._meta.auto_created):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using PostGIS 1.5 or badly-written custom "
"fields?)" % (old_field, new_field),
)
elif old_type is None and new_type is None and (
old_field.rel.through and new_field.rel.through and
old_field.rel.through._meta.auto_created and
new_field.rel.through._meta.auto_created):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif old_type is None and new_type is None and (old_field.rel.through and new_field.rel.through and not old_field.rel.through._meta.auto_created and not new_field.rel.through._meta.auto_created):
elif old_type is None and new_type is None and (
old_field.rel.through and new_field.rel.through and
not old_field.rel.through._meta.auto_created and
not new_field.rel.through._meta.auto_created):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError("Cannot alter field %s into %s - they are not compatible types (you cannot alter to or from M2M fields, or add or remove through= on M2M fields)" % (
old_field,
new_field,
))
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict)
self._alter_field(model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict)
def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False):
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Actually perform a "physical" (non-ManyToMany) field update."""
# Has unique been removed?
@ -559,7 +577,9 @@ class BaseDatabaseSchemaEditor(object):
}
)
# Removed an index?
if old_field.db_index and not new_field.db_index and not old_field.unique and not (not new_field.unique and old_field.unique):
if (old_field.db_index and not new_field.db_index and
not old_field.unique and not
(not new_field.unique and old_field.unique)):
# Find the index for this field
index_names = self._constraint_names(model, [old_field.column], index=True)
if strict and len(index_names) != 1:
@ -683,7 +703,9 @@ class BaseDatabaseSchemaEditor(object):
}
)
# Added an index?
if not old_field.db_index and new_field.db_index and not new_field.unique and not (not old_field.unique and new_field.unique):
if (not old_field.db_index and new_field.db_index and
not new_field.unique and not
(not old_field.unique and new_field.unique)):
self.execute(
self.sql_create_index % {
"table": self.quote_name(model._meta.db_table),
@ -747,7 +769,9 @@ class BaseDatabaseSchemaEditor(object):
self.execute(
self.sql_create_fk % {
"table": self.quote_name(model._meta.db_table),
"name": self._create_index_name(model, [new_field.column], suffix="_fk_%s_%s" % (to_table, to_column)),
"name": self._create_index_name(
model, [new_field.column], suffix="_fk_%s_%s" % (to_table, to_column)
),
"column": self.quote_name(new_field.column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
@ -816,7 +840,8 @@ class BaseDatabaseSchemaEditor(object):
"""
# Rename the through table
if old_field.rel.through._meta.db_table != new_field.rel.through._meta.db_table:
self.alter_db_table(old_field.rel.through, old_field.rel.through._meta.db_table, new_field.rel.through._meta.db_table)
self.alter_db_table(old_field.rel.through, old_field.rel.through._meta.db_table,
new_field.rel.through._meta.db_table)
# Repoint the FK to the other side
self.alter_field(
new_field.rel.through,
@ -841,7 +866,9 @@ class BaseDatabaseSchemaEditor(object):
index_unique_name = '_%x' % abs(hash((table_name, ','.join(column_names))))
max_length = self.connection.ops.max_name_length() or 200
# If the index name is too long, truncate it
index_name = ('%s_%s%s%s' % (table_name, column_names[0], index_unique_name, suffix)).replace('"', '').replace('.', '_')
index_name = ('%s_%s%s%s' % (
table_name, column_names[0], index_unique_name, suffix,
)).replace('"', '').replace('.', '_')
if len(index_name) > max_length:
part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
index_name = '%s%s' % (table_name[:(max_length - len(part))], part)
@ -856,7 +883,9 @@ class BaseDatabaseSchemaEditor(object):
index_name = "D%s" % index_name[:-1]
return index_name
def _constraint_names(self, model, column_names=None, unique=None, primary_key=None, index=None, foreign_key=None, check=None):
def _constraint_names(self, model, column_names=None, unique=None,
primary_key=None, index=None, foreign_key=None,
check=None):
"""
Returns all constraint names matching the columns and conditions
"""


@ -434,15 +434,18 @@ class DatabaseWrapper(BaseDatabaseWrapper):
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign key references. This method is
intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint checks were off.
Checks each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
detailed information about the invalid reference in the error message.
Raises an IntegrityError on the first invalid foreign key reference
encountered (if any) and provides detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
ALL IMMEDIATE")
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:


@ -62,7 +62,10 @@ class DatabaseCreation(BaseDatabaseCreation):
print("Destroying old test database '%s'..." % self.connection.alias)
if os.access(test_database_name, os.F_OK):
if not autoclobber:
confirm = input("Type 'yes' if you would like to try deleting the test database '%s', or 'no' to cancel: " % test_database_name)
confirm = input(
"Type 'yes' if you would like to try deleting the test "
"database '%s', or 'no' to cancel: " % test_database_name
)
if autoclobber or confirm == 'yes':
try:
os.remove(test_database_name)

View File

@@ -177,7 +177,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
return
self._remake_table(model, delete_fields=[field])
def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False):
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Actually perform a "physical" (non-ManyToMany) field update."""
# Alter by remaking table
self._remake_table(model, alter_fields=[(old_field, new_field)])
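_remake_table exists because SQLite's ALTER TABLE cannot change column definitions in place. The underlying pattern, sketched with plain sqlite3 and hypothetical table names:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.executescript('''
    CREATE TABLE app_item (id INTEGER PRIMARY KEY, price INTEGER);
    -- Rebuild instead of ALTER: new schema, copy rows, swap names.
    CREATE TABLE app_item__new (id INTEGER PRIMARY KEY, price REAL);
    INSERT INTO app_item__new (id, price) SELECT id, price FROM app_item;
    DROP TABLE app_item;
    ALTER TABLE app_item__new RENAME TO app_item;
''')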

View File

@@ -130,14 +130,18 @@ class ModelBase(type):
'DoesNotExist',
subclass_exception(
str('DoesNotExist'),
tuple(x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (ObjectDoesNotExist,),
tuple(
x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (ObjectDoesNotExist,),
module,
attached_to=new_class))
new_class.add_to_class(
'MultipleObjectsReturned',
subclass_exception(
str('MultipleObjectsReturned'),
tuple(x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (MultipleObjectsReturned,),
tuple(
x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (MultipleObjectsReturned,),
module,
attached_to=new_class))
if base_meta and not base_meta.abstract:
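The practical effect of these two add_to_class() calls is the familiar per-model exception API (Book is a hypothetical model):

from django.core.exceptions import ObjectDoesNotExist

try:
    book = Book.objects.get(pk=42)
except Book.DoesNotExist:
    book = None
# Because of the tuple-of-parents base classes built above,
# Book.DoesNotExist is also caught by `except ObjectDoesNotExist`.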
@@ -186,7 +190,10 @@ class ModelBase(type):
for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is not None:
@@ -765,7 +772,10 @@ class Model(six.with_metaclass(ModelBase)):
def delete(self, using=None):
using = using or router.db_for_write(self.__class__, instance=self)
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
assert self._get_pk_val() is not None, (
"%s object can't be deleted because its %s attribute is set to None." %
(self._meta.object_name, self._meta.pk.attname)
)
collector = Collector(using=using)
collector.collect([self])
@@ -785,7 +795,9 @@ class Model(six.with_metaclass(ModelBase)):
param = force_text(getattr(self, field.attname))
q = Q(**{'%s__%s' % (field.name, op): param})
q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by(
'%s%s' % (order, field.name), '%spk' % order
)
try:
return qs[0]
except IndexError:
@@ -1564,7 +1576,9 @@ def method_get_order(ordered_obj, self):
##############################################
def get_absolute_url(opts, func, self, *args, **kwargs):
return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.model_name), func)(self, *args, **kwargs)
return settings.ABSOLUTE_URL_OVERRIDES.get(
'%s.%s' % (opts.app_label, opts.model_name), func
)(self, *args, **kwargs)
########
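The lookup key above is 'app_label.model_name', so an override for a hypothetical blog app would be declared as:

# settings.py
ABSOLUTE_URL_OVERRIDES = {
    'blog.entry': lambda entry: '/archive/%s/' % entry.slug,
}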

View File

@@ -233,7 +233,8 @@ class RelatedField(Field):
if clash_field.get_accessor_name() == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with reverse query name for '%s'." % (field_name, clash_name),
"Reverse query name for '%s' clashes with reverse query name for '%s'."
% (field_name, clash_name),
hint=("Add or change a related_name argument "
"to the definition for '%s' or '%s'.") % (field_name, clash_name),
obj=self,
@@ -906,17 +907,23 @@ def create_many_related_manager(superclass, rel):
queryset = queryset.extra(select=dict(
('_prefetch_related_val_%s' % f.attname,
'%s.%s' % (qn(join_table), qn(f.column))) for f in fk.local_related_fields))
return (queryset,
lambda result: tuple(getattr(result, '_prefetch_related_val_%s' % f.attname) for f in fk.local_related_fields),
return (
queryset,
lambda result: tuple(
getattr(result, '_prefetch_related_val_%s' % f.attname)
for f in fk.local_related_fields
),
lambda inst: tuple(getattr(inst, f.attname) for f in fk.foreign_related_fields),
False,
self.prefetch_cache_name)
self.prefetch_cache_name,
)
def add(self, *objs):
if not rel.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot use add() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." %
"Cannot use add() on a ManyToManyField which specifies an "
"intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
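For readers following along, the documented alternative to add() is creating the intermediary row directly (models here are a hypothetical sketch; Django of this era needs no on_delete argument):

from django.db import models

class Person(models.Model):
    name = models.CharField(max_length=50)

class Group(models.Model):
    members = models.ManyToManyField(Person, through='Membership')

class Membership(models.Model):
    person = models.ForeignKey(Person)
    group = models.ForeignKey(Group)
    date_joined = models.DateField()

# group.members.add(person) raises the AttributeError above; instead:
# Membership.objects.create(person=person, group=group, date_joined=date)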
@@ -933,7 +940,8 @@ def create_many_related_manager(superclass, rel):
if not rel.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot use remove() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." %
"Cannot use remove() on a ManyToManyField which specifies "
"an intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
self._remove_items(self.source_field_name, self.target_field_name, *objs)
@@ -960,7 +968,8 @@ def create_many_related_manager(superclass, rel):
if not self.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." %
"Cannot use create() on a ManyToManyField which specifies "
"an intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
db = router.db_for_write(self.instance.__class__, instance=self.instance)
@@ -1125,7 +1134,10 @@ class ManyRelatedObjectsDescriptor(object):
def __set__(self, instance, value):
if not self.related.field.rel.through._meta.auto_created:
opts = self.related.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
raise AttributeError(
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name)
)
# Force evaluation of `value` in case it's a queryset whose
# value could be affected by `manager.clear()`. Refs #19816.
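The comment is worth unpacking: if the assigned value is a queryset filtered through the very relation being reassigned, clear() would empty it before add() ran. Roughly (hypothetical Group.members relation):

keep = group.members.filter(active=True)  # lazy; re-evaluated on each use
group.members = keep
# The descriptor therefore snapshots first, in effect:
#     value = tuple(value)   # evaluate while the old rows still exist
#     manager.clear()
#     manager.add(*value)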
@@ -1185,7 +1197,10 @@ class ReverseManyRelatedObjectsDescriptor(object):
def __set__(self, instance, value):
if not self.field.rel.through._meta.auto_created:
opts = self.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
raise AttributeError(
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name)
)
# Force evaluation of `value` in case it's a queryset whose
# value could be affected by `manager.clear()`. Refs #19816.
@@ -1204,7 +1219,9 @@ class ForeignObjectRel(object):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), (
"'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
)
self.field = field
self.to = to
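RECURSIVE_RELATIONSHIP_CONSTANT is the string 'self', so the accepted lazy forms are a model class, a 'Model' or 'app.Model' name, or 'self' (hypothetical models):

from django.db import models

class Category(models.Model):
    parent = models.ForeignKey('self', null=True, blank=True)  # recursive

class Entry(models.Model):
    category = models.ForeignKey('catalog.Category')  # resolved at app loading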
@@ -1358,7 +1375,8 @@ class ForeignObject(RelatedField):
model_name = self.rel.to.__name__
return [
checks.Error(
"None of the fields %s on model '%s' have a unique=True constraint." % (field_combination, model_name),
"None of the fields %s on model '%s' have a unique=True constraint."
% (field_combination, model_name),
hint=None,
obj=self,
id='fields.E310',
@@ -1403,7 +1421,11 @@ class ForeignObject(RelatedField):
# If it's already a settings reference, error
if hasattr(kwargs['to'], "setting_name"):
if kwargs['to'].setting_name != swappable_setting:
raise ValueError("Cannot deconstruct a ForeignKey pointing to a model that is swapped in place of more than one model (%s and %s)" % (kwargs['to'].setting_name, swappable_setting))
raise ValueError(
"Cannot deconstruct a ForeignKey pointing to a model "
"that is swapped in place of more than one model (%s and %s)"
% (kwargs['to'].setting_name, swappable_setting)
)
# Set it
from django.db.migrations.writer import SettingsReference
kwargs['to'] = SettingsReference(
@@ -1617,7 +1639,13 @@ class ForeignKey(ForeignObject):
try:
to._meta.model_name
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
assert isinstance(to, six.string_types), (
"%s(%r) is invalid. First parameter to ForeignKey must be "
"either a model, a model name, or the string %r" % (
self.__class__.__name__, to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
else:
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
@@ -1867,8 +1895,18 @@ def create_many_to_many_intermediary_model(field, klass):
return type(str(name), (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(klass, related_name='%s+' % name, db_tablespace=field.db_tablespace, db_constraint=field.rel.db_constraint),
to: models.ForeignKey(to_model, related_name='%s+' % name, db_tablespace=field.db_tablespace, db_constraint=field.rel.db_constraint)
from_: models.ForeignKey(
klass,
related_name='%s+' % name,
db_tablespace=field.db_tablespace,
db_constraint=field.rel.db_constraint,
),
to: models.ForeignKey(
to_model,
related_name='%s+' % name,
db_tablespace=field.db_tablespace,
db_constraint=field.rel.db_constraint,
)
})
@@ -1879,8 +1917,13 @@ class ManyToManyField(RelatedField):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
# Class names must be ASCII in Python 2.x, so we forcibly coerce it here to break early if there's a problem.
assert isinstance(to, six.string_types), (
"%s(%r) is invalid. First parameter to ManyToManyField must be "
"either a model, a model name, or the string %r" %
(self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
)
# Class names must be ASCII in Python 2.x, so we forcibly coerce it
# here to break early if there's a problem.
to = str(to)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
@@ -2169,7 +2212,11 @@ class ManyToManyField(RelatedField):
# If it's already a settings reference, error
if hasattr(kwargs['to'], "setting_name"):
if kwargs['to'].setting_name != swappable_setting:
raise ValueError("Cannot deconstruct a ManyToManyField pointing to a model that is swapped in place of more than one model (%s and %s)" % (kwargs['to'].setting_name, swappable_setting))
raise ValueError(
"Cannot deconstruct a ManyToManyField pointing to a "
"model that is swapped in place of more than one model "
"(%s and %s)" % (kwargs['to'].setting_name, swappable_setting)
)
# Set it
from django.db.migrations.writer import SettingsReference
kwargs['to'] = SettingsReference(

View File

@@ -25,7 +25,10 @@ def ensure_default_manager(cls):
# Create the default manager, if needed.
try:
cls._meta.get_field('objects')
raise ValueError("Model %s must specify a custom Manager, because it has a field named 'objects'" % cls.__name__)
raise ValueError(
"Model %s must specify a custom Manager, because it has a "
"field named 'objects'" % cls.__name__
)
except FieldDoesNotExist:
pass
cls.add_to_class('objects', Manager())
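So a model that really needs a field called 'objects' must declare its own manager under another name, which keeps _default_manager populated and skips the ValueError path (a sketch):

from django.db import models

class Inventory(models.Model):
    objects = models.IntegerField()  # shadows the usual manager name
    instances = models.Manager()     # explicit manager, any other name

# Queries go through Inventory.instances.all().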
@@ -44,7 +47,10 @@ def ensure_default_manager(cls):
getattr(base_class, "use_for_related_fields", False)):
cls.add_to_class('_base_manager', base_class())
return
raise AssertionError("Should never get here. Please report a bug, including your model and model manager setup.")
raise AssertionError(
"Should never get here. Please report a bug, including your "
"model and model manager setup."
)
@python_2_unicode_compatible
@@ -118,7 +124,8 @@ class BaseManager(object):
else:
# if not model._meta.abstract and not model._meta.swapped:
setattr(model, name, ManagerDescriptor(self))
if not getattr(model, '_default_manager', None) or self.creation_counter < model._default_manager.creation_counter:
if (not getattr(model, '_default_manager', None) or
self.creation_counter < model._default_manager.creation_counter):
model._default_manager = self
if model._meta.abstract or (self._inherited and not self.model._meta.proxy):
model._meta.abstract_managers.append((self.creation_counter, name,

View File

@@ -60,7 +60,10 @@ post_save = ModelSignal(providing_args=["instance", "raw", "created", "using", "
pre_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True)
post_delete = ModelSignal(providing_args=["instance", "using"], use_caching=True)
m2m_changed = ModelSignal(providing_args=["action", "instance", "reverse", "model", "pk_set", "using"], use_caching=True)
m2m_changed = ModelSignal(
providing_args=["action", "instance", "reverse", "model", "pk_set", "using"],
use_caching=True,
)
pre_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using"])
post_migrate = Signal(providing_args=["app_config", "verbosity", "interactive", "using"])
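The wrapped m2m_changed declaration is the signal users connect to; the sender is the through model (Group.members is a hypothetical ManyToManyField):

from django.db.models.signals import m2m_changed

def members_changed(sender, instance, action, pk_set, **kwargs):
    if action == 'post_add':
        print('%s gained members %s' % (instance, sorted(pk_set)))

m2m_changed.connect(members_changed, sender=Group.members.through)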

View File

@@ -722,7 +722,10 @@ class SQLCompiler(object):
for rows in self.execute_sql(MULTI):
for row in rows:
if has_aggregate_select:
loaded_fields = self.query.get_loaded_field_names().get(self.query.model, set()) or self.query.select
loaded_fields = (
self.query.get_loaded_field_names().get(self.query.model, set()) or
self.query.select
)
aggregate_start = len(self.query.extra_select) + len(loaded_fields)
aggregate_end = aggregate_start + len(self.query.aggregate_select)
if fields is None:
@@ -894,8 +897,10 @@ class SQLInsertCompiler(SQLCompiler):
if has_fields:
params = values = [
[
f.get_db_prep_save(getattr(obj, f.attname) if self.query.raw else f.pre_save(obj, True), connection=self.connection)
for f in fields
f.get_db_prep_save(
getattr(obj, f.attname) if self.query.raw else f.pre_save(obj, True),
connection=self.connection
) for f in fields
]
for obj in self.query.objs
]

View File

@@ -1390,7 +1390,9 @@ class Query(object):
targets = (final_field.rel.get_related_field(),)
opts = int_model._meta
path.append(PathInfo(final_field.model._meta, opts, targets, final_field, False, True))
cur_names_with_path[1].append(PathInfo(final_field.model._meta, opts, targets, final_field, False, True))
cur_names_with_path[1].append(
PathInfo(final_field.model._meta, opts, targets, final_field, False, True)
)
if hasattr(field, 'get_path_info'):
pathinfos = field.get_path_info()
if not allow_many:

View File

@@ -128,7 +128,10 @@ class UpdateQuery(Query):
for name, val in six.iteritems(values):
field, model, direct, m2m = self.get_meta().get_field_by_name(name)
if not direct or m2m:
raise FieldError('Cannot update model field %r (only non-relations and foreign keys permitted).' % field)
raise FieldError(
'Cannot update model field %r (only non-relations and '
'foreign keys permitted).' % field
)
if model:
self.add_related_update(model, field, val)
continue
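Stated as queryset behaviour: update() may assign local columns and foreign keys, but may not traverse a relation (Entry and blog are hypothetical):

Entry.objects.filter(pub_date__year=2014).update(status='live')  # ok
Entry.objects.update(blog=other_blog)        # ok, direct FK assignment
Entry.objects.update(blog__name='Renamed')   # raises FieldError, as above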

View File

@@ -643,7 +643,10 @@ class FileField(Field):
class ImageField(FileField):
default_error_messages = {
'invalid_image': _("Upload a valid image. The file you uploaded was either not an image or a corrupted image."),
'invalid_image': _(
"Upload a valid image. The file you uploaded was either not an "
"image or a corrupted image."
),
}
def to_python(self, data):

View File

@@ -49,7 +49,10 @@ def get_declared_fields(bases, attrs, with_base_fields=True):
stacklevel=2,
)
fields = [(field_name, attrs.pop(field_name)) for field_name, obj in list(six.iteritems(attrs)) if isinstance(obj, Field)]
fields = [
(field_name, attrs.pop(field_name))
for field_name, obj in list(six.iteritems(attrs)) if isinstance(obj, Field)
]
fields.sort(key=lambda x: x[1].creation_counter)
# If this class is subclassing another Form, add that Form's fields.
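creation_counter is what makes the sort stable, since attrs arrives as an unordered dict on Python 2. The visible result (a sketch):

from django import forms

class ContactForm(forms.Form):
    name = forms.CharField()
    email = forms.EmailField()
    message = forms.CharField(widget=forms.Textarea)

# Declaration order survives the attrs dict:
assert list(ContactForm.base_fields) == ['name', 'email', 'message']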

View File

@@ -969,12 +969,22 @@ def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
if can_fail:
return
raise ValueError(
"'%s.%s' has no ForeignKey to '%s.%s'."
% (model._meta.app_label, model._meta.object_name, parent_model._meta.app_label, parent_model._meta.object_name))
"'%s.%s' has no ForeignKey to '%s.%s'." % (
model._meta.app_label,
model._meta.object_name,
parent_model._meta.app_label,
parent_model._meta.object_name,
)
)
else:
raise ValueError(
"'%s.%s' has more than one ForeignKey to '%s.%s'."
% (model._meta.app_label, model._meta.object_name, parent_model._meta.app_label, parent_model._meta.object_name))
"'%s.%s' has more than one ForeignKey to '%s.%s'." % (
model._meta.app_label,
model._meta.object_name,
parent_model._meta.app_label,
parent_model._meta.object_name,
)
)
return fk
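These are the errors behind inlineformset_factory(); the "more than one ForeignKey" case is resolved by naming the link explicitly (hypothetical models, with two FKs to a Person model):

from django.db import models
from django.forms.models import inlineformset_factory

class Friendship(models.Model):
    from_person = models.ForeignKey(Person, related_name='from_friends')
    to_person = models.ForeignKey(Person, related_name='friends')

FriendshipFormSet = inlineformset_factory(Person, Friendship,
                                          fk_name='from_person')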

View File

@@ -55,16 +55,23 @@ class Media(object):
return mark_safe('\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES])))
def render_js(self):
return [format_html('<script type="text/javascript" src="{0}"></script>', self.absolute_path(path)) for path in self._js]
return [
format_html(
'<script type="text/javascript" src="{0}"></script>',
self.absolute_path(path)
) for path in self._js
]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css.keys())
return chain(*[
[format_html('<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />', self.absolute_path(path), medium)
for path in self._css[medium]]
for medium in media])
return chain(*[[
format_html(
'<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />',
self.absolute_path(path), medium
) for path in self._css[medium]
] for medium in media])
def absolute_path(self, path, prefix=None):
if path.startswith(('http://', 'https://', '/')):
@@ -341,7 +348,10 @@ class ClearableFileInput(FileInput):
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
template_with_initial = '%(initial_text)s: <a href="%(initial_url)s">%(initial)s</a> %(clear_template)s<br />%(input_text)s: %(input)s'
template_with_initial = (
'%(initial_text)s: <a href="%(initial_url)s">%(initial)s</a> '
'%(clear_template)s<br />%(input_text)s: %(input)s'
)
template_with_clear = '%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>'
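The two render methods consume declarations like the following on a form or widget (asset paths hypothetical):

from django import forms

class CalendarWidget(forms.TextInput):
    class Media:
        css = {'all': ('pretty.css',)}
        js = ('animations.js', 'actions.js')

# str(CalendarWidget().media) emits the <link> and <script> tags
# in the formats shown in render_css()/render_js() above.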

View File

@@ -97,7 +97,10 @@ class HttpRequest(object):
def get_full_path(self):
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return '%s%s' % (self.path, ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) if self.META.get('QUERY_STRING', '') else '')
return '%s%s' % (
self.path,
('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) if self.META.get('QUERY_STRING', '') else ''
)
def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None):
"""
@@ -157,7 +160,9 @@ class HttpRequest(object):
try:
header, value = settings.SECURE_PROXY_SSL_HEADER
except ValueError:
raise ImproperlyConfigured('The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.')
raise ImproperlyConfigured(
'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.'
)
if self.META.get(header, None) == value:
return 'https'
# Failing that, fall back to _get_scheme(), which is a hook for
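The tuple the code expects pairs the META key with the value the proxy promises to set, for example:

# settings.py; only safe when the proxy always overrides this header:
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')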

View File

@@ -775,7 +775,8 @@ class Variable(object):
current = getattr(current, bit)
except (TypeError, AttributeError) as e:
# Reraise an AttributeError raised by a @property
if isinstance(e, AttributeError) and not isinstance(current, BaseContext) and bit in dir(current):
if (isinstance(e, AttributeError) and
not isinstance(current, BaseContext) and bit in dir(current)):
raise
try: # list-index lookup
current = current[int(bit)]

View File

@@ -61,7 +61,11 @@ class CsrfTokenNode(Node):
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value. This is usually caused by not using RequestContext.")
warnings.warn(
"A {% csrf_token %} was used in a template, but the context "
"did not provide the value. This is usually caused by not "
"using RequestContext."
)
return ''
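The usual cure for that warning is rendering through a RequestContext so the context processor supplies the token, e.g. with the render() shortcut:

from django.shortcuts import render

def contact(request):
    # render() uses RequestContext, so {% csrf_token %} gets a value.
    return render(request, 'contact.html', {'title': 'Contact'})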
@@ -254,7 +258,8 @@ class IfChangedNode(Node):
if compare_to != state_frame[self]:
state_frame[self] = compare_to
return nodelist_true_output or self.nodelist_true.render(context) # render true block if not already rendered
# render true block if not already rendered
return nodelist_true_output or self.nodelist_true.render(context)
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
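From the template side, this node implements {% ifchanged %}, which renders its true block only when the watched value differs from the previous loop iteration (entries is a hypothetical queryset):

from django.template import Context, Template

t = Template(
    '{% for e in entries %}'
    '{% ifchanged e.pub_date.date %}{{ e.pub_date.date }}{% endifchanged %}'
    ' {{ e.title }}\n'
    '{% endfor %}'
)
output = t.render(Context({'entries': entries}))  # each date printed once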

View File

@@ -102,12 +102,19 @@ def find_template_loader(loader):
else:
# Try loading module the old way - string is full path to callable
if args:
raise ImproperlyConfigured("Error importing template source loader %s - can't pass arguments to function-based loader." % loader)
raise ImproperlyConfigured(
"Error importing template source loader %s - can't pass "
"arguments to function-based loader." % loader
)
func = TemplateLoader
if not func.is_usable:
import warnings
warnings.warn("Your TEMPLATE_LOADERS setting includes %r, but your Python installation doesn't support that type of template loading. Consider removing that line from TEMPLATE_LOADERS." % loader)
warnings.warn(
"Your TEMPLATE_LOADERS setting includes %r, but your Python "
"installation doesn't support that type of template loading. "
"Consider removing that line from TEMPLATE_LOADERS." % loader
)
return None
else:
return func
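For reference, the era-appropriate setting mixes plain class paths with the (path, args) tuple form handled earlier in this function:

# settings.py
TEMPLATE_LOADERS = (
    ('django.template.loaders.cached.Loader', (
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    )),
)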

Some files were not shown because too many files have changed in this diff Show More