Refs #23919 -- Replaced super(ClassName, self) with super().
parent dc165ec8e5
commit d6eaf7c018
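The change is mechanical throughout the patch below: every Python 2 style two-argument call of the form super(ClassName, self) becomes the zero-argument super() that Python 3 resolves from the enclosing class. A minimal sketch of the before/after pattern (the class and method names here are illustrative, not taken from the patch):

class Base:
    def ready(self):
        print('base ready')

class Child(Base):
    def ready(self):
        # Old spelling, removed by this commit: super(Child, self).ready()
        # New spelling, equivalent on Python 3:
        super().ready()
        print('child ready')

Child().ready()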
@@ -67,13 +67,13 @@ class LazySettings(LazyObject):
             self.__dict__.clear()
         else:
             self.__dict__.pop(name, None)
-        super(LazySettings, self).__setattr__(name, value)
+        super().__setattr__(name, value)

     def __delattr__(self, name):
         """
         Delete a setting and clear it from cache if needed.
         """
-        super(LazySettings, self).__delattr__(name)
+        super().__delattr__(name)
         self.__dict__.pop(name, None)

     def configure(self, default_settings=global_settings, **options):
@@ -173,12 +173,12 @@ class UserSettingsHolder:

     def __setattr__(self, name, value):
         self._deleted.discard(name)
-        super(UserSettingsHolder, self).__setattr__(name, value)
+        super().__setattr__(name, value)

     def __delattr__(self, name):
         self._deleted.add(name)
         if hasattr(self, name):
-            super(UserSettingsHolder, self).__delattr__(name)
+            super().__delattr__(name)

     def __dir__(self):
         return sorted(

@@ -19,5 +19,5 @@ class AdminConfig(SimpleAdminConfig):
     """The default AppConfig for admin which does autodiscovery."""

     def ready(self):
-        super(AdminConfig, self).ready()
+        super().ready()
         self.module.autodiscover()

@@ -513,7 +513,7 @@ class BaseModelAdminChecks:
 class ModelAdminChecks(BaseModelAdminChecks):

     def check(self, admin_obj, **kwargs):
-        errors = super(ModelAdminChecks, self).check(admin_obj)
+        errors = super().check(admin_obj)
         errors.extend(self._check_save_as(admin_obj))
         errors.extend(self._check_save_on_top(admin_obj))
         errors.extend(self._check_inlines(admin_obj))
@@ -866,7 +866,7 @@ class ModelAdminChecks(BaseModelAdminChecks):
 class InlineModelAdminChecks(BaseModelAdminChecks):

     def check(self, inline_obj, **kwargs):
-        errors = super(InlineModelAdminChecks, self).check(inline_obj)
+        errors = super().check(inline_obj)
         parent_model = inline_obj.parent_model
         errors.extend(self._check_relation(inline_obj, parent_model))
         errors.extend(self._check_exclude_of_parent_model(inline_obj, parent_model))
@@ -879,7 +879,7 @@ class InlineModelAdminChecks(BaseModelAdminChecks):
     def _check_exclude_of_parent_model(self, obj, parent_model):
         # Do not perform more specific checks if the base checks result in an
         # error.
-        errors = super(InlineModelAdminChecks, self)._check_exclude(obj)
+        errors = super()._check_exclude(obj)
         if errors:
             return []

@@ -64,8 +64,7 @@ class SimpleListFilter(ListFilter):
     parameter_name = None

     def __init__(self, request, params, model, model_admin):
-        super(SimpleListFilter, self).__init__(
-            request, params, model, model_admin)
+        super().__init__(request, params, model, model_admin)
         if self.parameter_name is None:
             raise ImproperlyConfigured(
                 "The list filter '%s' does not specify "
@@ -122,8 +121,7 @@ class FieldListFilter(ListFilter):
         self.field = field
         self.field_path = field_path
         self.title = getattr(field, 'verbose_name', field_path)
-        super(FieldListFilter, self).__init__(
-            request, params, model, model_admin)
+        super().__init__(request, params, model, model_admin)
         for p in self.expected_parameters():
             if p in params:
                 value = params.pop(p)
@@ -165,8 +163,7 @@ class RelatedFieldListFilter(FieldListFilter):
         self.lookup_kwarg_isnull = '%s__isnull' % field_path
         self.lookup_val = request.GET.get(self.lookup_kwarg)
         self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull)
-        super(RelatedFieldListFilter, self).__init__(
-            field, request, params, model, model_admin, field_path)
+        super().__init__(field, request, params, model, model_admin, field_path)
         self.lookup_choices = self.field_choices(field, request, model_admin)
         if hasattr(field, 'verbose_name'):
             self.lookup_title = field.verbose_name
@@ -232,7 +229,7 @@ class BooleanFieldListFilter(FieldListFilter):
         self.lookup_kwarg2 = '%s__isnull' % field_path
         self.lookup_val = request.GET.get(self.lookup_kwarg)
         self.lookup_val2 = request.GET.get(self.lookup_kwarg2)
-        super(BooleanFieldListFilter, self).__init__(field, request, params, model, model_admin, field_path)
+        super().__init__(field, request, params, model, model_admin, field_path)
         if (self.used_parameters and self.lookup_kwarg in self.used_parameters and
                 self.used_parameters[self.lookup_kwarg] in ('1', '0')):
             self.used_parameters[self.lookup_kwarg] = bool(int(self.used_parameters[self.lookup_kwarg]))
@@ -274,8 +271,7 @@ class ChoicesFieldListFilter(FieldListFilter):
         self.lookup_kwarg_isnull = '%s__isnull' % field_path
         self.lookup_val = request.GET.get(self.lookup_kwarg)
         self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull)
-        super(ChoicesFieldListFilter, self).__init__(
-            field, request, params, model, model_admin, field_path)
+        super().__init__(field, request, params, model, model_admin, field_path)

     def expected_parameters(self):
         return [self.lookup_kwarg, self.lookup_kwarg_isnull]
@@ -362,8 +358,7 @@ class DateFieldListFilter(FieldListFilter):
             (_('No date'), {self.field_generic + 'isnull': 'True'}),
             (_('Has date'), {self.field_generic + 'isnull': 'False'}),
         )
-        super(DateFieldListFilter, self).__init__(
-            field, request, params, model, model_admin, field_path)
+        super().__init__(field, request, params, model, model_admin, field_path)

     def expected_parameters(self):
         params = [self.lookup_kwarg_since, self.lookup_kwarg_until]
@@ -404,8 +399,7 @@ class AllValuesFieldListFilter(FieldListFilter):
                                .distinct()
                                .order_by(field.name)
                                .values_list(field.name, flat=True))
-        super(AllValuesFieldListFilter, self).__init__(
-            field, request, params, model, model_admin, field_path)
+        super().__init__(field, request, params, model, model_admin, field_path)

     def expected_parameters(self):
         return [self.lookup_kwarg, self.lookup_kwarg_isnull]

@@ -324,7 +324,7 @@ class InlineAdminForm(AdminForm):
         self.original = original
         self.show_url = original and view_on_site_url is not None
         self.absolute_url = view_on_site_url
-        super(InlineAdminForm, self).__init__(form, fieldsets, prepopulated_fields, readonly_fields, model_admin)
+        super().__init__(form, fieldsets, prepopulated_fields, readonly_fields, model_admin)

     def __iter__(self):
         for name, options in self.fieldsets:
@@ -366,7 +366,7 @@ class InlineAdminForm(AdminForm):
 class InlineFieldset(Fieldset):
     def __init__(self, formset, *args, **kwargs):
         self.formset = formset
-        super(InlineFieldset, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def __iter__(self):
         fk = getattr(self.formset, "fk", None)
@@ -381,7 +381,7 @@ class AdminErrorList(forms.utils.ErrorList):
     Stores all errors for the form/formsets in an add/change stage view.
     """
     def __init__(self, form, inline_formsets):
-        super(AdminErrorList, self).__init__()
+        super().__init__()

         if form.is_bound:
             self.extend(form.errors.values())

@@ -519,7 +519,7 @@ class ModelAdmin(BaseModelAdmin):
         self.model = model
         self.opts = model._meta
         self.admin_site = admin_site
-        super(ModelAdmin, self).__init__()
+        super().__init__()

     def __str__(self):
         return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__)
@@ -1842,7 +1842,7 @@ class InlineModelAdmin(BaseModelAdmin):
         self.parent_model = parent_model
         self.opts = self.model._meta
         self.has_registered_model = admin_site.is_registered(self.model)
-        super(InlineModelAdmin, self).__init__()
+        super().__init__()
         if self.verbose_name is None:
             self.verbose_name = self.model._meta.verbose_name
         if self.verbose_name_plural is None:
@@ -1936,7 +1936,7 @@ class InlineModelAdmin(BaseModelAdmin):
                         raise ValidationError(msg, code='deleting_protected', params=params)

             def is_valid(self):
-                result = super(DeleteProtectedModelForm, self).is_valid()
+                result = super().is_valid()
                 self.hand_clean_DELETE()
                 return result

@@ -1954,7 +1954,7 @@ class InlineModelAdmin(BaseModelAdmin):
         return list(form.base_fields) + list(self.get_readonly_fields(request, obj))

     def get_queryset(self, request):
-        queryset = super(InlineModelAdmin, self).get_queryset(request)
+        queryset = super().get_queryset(request)
         if not self.has_change_permission(request):
             queryset = queryset.none()
         return queryset
@@ -1966,7 +1966,7 @@ class InlineModelAdmin(BaseModelAdmin):
             # to have the change permission for the related model in order to
             # be able to do anything with the intermediate model.
             return self.has_change_permission(request)
-        return super(InlineModelAdmin, self).has_add_permission(request)
+        return super().has_add_permission(request)

     def has_change_permission(self, request, obj=None):
         opts = self.opts
@@ -1987,7 +1987,7 @@ class InlineModelAdmin(BaseModelAdmin):
             # to have the change permission for the related model in order to
             # be able to do anything with the intermediate model.
             return self.has_change_permission(request, obj)
-        return super(InlineModelAdmin, self).has_delete_permission(request, obj)
+        return super().has_delete_permission(request, obj)


 class StackedInline(InlineModelAdmin):

@@ -198,7 +198,7 @@ class AdminSite:
                 def get_urls(self):
                     from django.conf.urls import url

-                    urls = super(MyAdminSite, self).get_urls()
+                    urls = super().get_urls()
                     urls += [
                         url(r'^my_view/$', self.admin_view(some_view))
                     ]

@@ -291,7 +291,7 @@ class ResultList(list):
     # compatibility with existing admin templates.
     def __init__(self, form, *items):
         self.form = form
-        super(ResultList, self).__init__(*items)
+        super().__init__(*items)


 def results(cl):

@@ -175,7 +175,7 @@ def get_deleted_objects(objs, opts, user, admin_site, using):

 class NestedObjects(Collector):
     def __init__(self, *args, **kwargs):
-        super(NestedObjects, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.edges = {}  # {from_instance: [to_instances]}
         self.protected = set()
         self.model_objs = defaultdict(set)
@@ -195,12 +195,12 @@ class NestedObjects(Collector):
                 self.add_edge(None, obj)
             self.model_objs[obj._meta.model].add(obj)
         try:
-            return super(NestedObjects, self).collect(objs, source_attr=source_attr, **kwargs)
+            return super().collect(objs, source_attr=source_attr, **kwargs)
         except models.ProtectedError as e:
             self.protected.update(e.protected_objects)

     def related_objects(self, related, objs):
-        qs = super(NestedObjects, self).related_objects(related, objs)
+        qs = super().related_objects(related, objs)
         return qs.select_related(related.field.name)

     def _nested(self, obj, seen, format_callback):

@@ -29,10 +29,10 @@ class FilteredSelectMultiple(forms.SelectMultiple):
     def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
         self.verbose_name = verbose_name
         self.is_stacked = is_stacked
-        super(FilteredSelectMultiple, self).__init__(attrs, choices)
+        super().__init__(attrs, choices)

     def get_context(self, name, value, attrs=None):
-        context = super(FilteredSelectMultiple, self).get_context(name, value, attrs)
+        context = super().get_context(name, value, attrs)
         context['widget']['attrs']['class'] = 'selectfilter'
         if self.is_stacked:
             context['widget']['attrs']['class'] += 'stacked'
@@ -51,7 +51,7 @@ class AdminDateWidget(forms.DateInput):
         final_attrs = {'class': 'vDateField', 'size': '10'}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminDateWidget, self).__init__(attrs=final_attrs, format=format)
+        super().__init__(attrs=final_attrs, format=format)


 class AdminTimeWidget(forms.TimeInput):
@@ -64,7 +64,7 @@ class AdminTimeWidget(forms.TimeInput):
         final_attrs = {'class': 'vTimeField', 'size': '8'}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminTimeWidget, self).__init__(attrs=final_attrs, format=format)
+        super().__init__(attrs=final_attrs, format=format)


 class AdminSplitDateTime(forms.SplitDateTimeWidget):
@@ -80,7 +80,7 @@ class AdminSplitDateTime(forms.SplitDateTimeWidget):
         forms.MultiWidget.__init__(self, widgets, attrs)

     def get_context(self, name, value, attrs):
-        context = super(AdminSplitDateTime, self).get_context(name, value, attrs)
+        context = super().get_context(name, value, attrs)
         context['date_label'] = _('Date:')
         context['time_label'] = _('Time:')
         return context
@@ -127,10 +127,10 @@ class ForeignKeyRawIdWidget(forms.TextInput):
         self.rel = rel
         self.admin_site = admin_site
         self.db = using
-        super(ForeignKeyRawIdWidget, self).__init__(attrs)
+        super().__init__(attrs)

     def get_context(self, name, value, attrs=None):
-        context = super(ForeignKeyRawIdWidget, self).get_context(name, value, attrs)
+        context = super().get_context(name, value, attrs)
         rel_to = self.rel.model
         if rel_to in self.admin_site._registry:
             # The related object is registered with the same AdminSite
@@ -197,7 +197,7 @@ class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
     template_name = 'admin/widgets/many_to_many_raw_id.html'

     def get_context(self, name, value, attrs=None):
-        context = super(ManyToManyRawIdWidget, self).get_context(name, value, attrs)
+        context = super().get_context(name, value, attrs)
         if self.rel.model in self.admin_site._registry:
             # The related object is registered with the same AdminSite
             context['widget']['attrs']['class'] = 'vManyToManyRawIdAdminField'
@@ -310,7 +310,7 @@ class AdminTextareaWidget(forms.Textarea):
         final_attrs = {'class': 'vLargeTextField'}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminTextareaWidget, self).__init__(attrs=final_attrs)
+        super().__init__(attrs=final_attrs)


 class AdminTextInputWidget(forms.TextInput):
@@ -318,7 +318,7 @@ class AdminTextInputWidget(forms.TextInput):
         final_attrs = {'class': 'vTextField'}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminTextInputWidget, self).__init__(attrs=final_attrs)
+        super().__init__(attrs=final_attrs)


 class AdminEmailInputWidget(forms.EmailInput):
@@ -326,7 +326,7 @@ class AdminEmailInputWidget(forms.EmailInput):
         final_attrs = {'class': 'vTextField'}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminEmailInputWidget, self).__init__(attrs=final_attrs)
+        super().__init__(attrs=final_attrs)


 class AdminURLFieldWidget(forms.URLInput):
@@ -336,17 +336,17 @@ class AdminURLFieldWidget(forms.URLInput):
         final_attrs = {'class': 'vURLField'}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminURLFieldWidget, self).__init__(attrs=final_attrs)
+        super().__init__(attrs=final_attrs)

     def get_context(self, name, value, attrs):
-        context = super(AdminURLFieldWidget, self).get_context(name, value, attrs)
+        context = super().get_context(name, value, attrs)
         context['current_label'] = _('Currently:')
         context['change_label'] = _('Change:')
         context['widget']['href'] = smart_urlquote(context['widget']['value'])
         return context

     def format_value(self, value):
-        value = super(AdminURLFieldWidget, self).format_value(value)
+        value = super().format_value(value)
         return force_text(value)


@@ -357,7 +357,7 @@ class AdminIntegerFieldWidget(forms.NumberInput):
         final_attrs = {'class': self.class_name}
         if attrs is not None:
             final_attrs.update(attrs)
-        super(AdminIntegerFieldWidget, self).__init__(attrs=final_attrs)
+        super().__init__(attrs=final_attrs)


 class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget):

@@ -37,19 +37,19 @@ class BaseAdminDocsView(TemplateView):
             # Display an error message for people without docutils
             self.template_name = 'admin_doc/missing_docutils.html'
             return self.render_to_response(admin.site.each_context(request))
-        return super(BaseAdminDocsView, self).dispatch(request, *args, **kwargs)
+        return super().dispatch(request, *args, **kwargs)

     def get_context_data(self, **kwargs):
        kwargs.update({'root_path': reverse('admin:index')})
        kwargs.update(admin.site.each_context(self.request))
-        return super(BaseAdminDocsView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class BookmarkletsView(BaseAdminDocsView):
     template_name = 'admin_doc/bookmarklets.html'

     def get_context_data(self, **kwargs):
-        context = super(BookmarkletsView, self).get_context_data(**kwargs)
+        context = super().get_context_data(**kwargs)
         context.update({
             'admin_url': "%s://%s%s" % (
                 self.request.scheme, self.request.get_host(), context['root_path'])
@@ -88,7 +88,7 @@ class TemplateTagIndexView(BaseAdminDocsView):
                 'library': tag_library,
             })
         kwargs.update({'tags': tags})
-        return super(TemplateTagIndexView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class TemplateFilterIndexView(BaseAdminDocsView):
@@ -122,7 +122,7 @@ class TemplateFilterIndexView(BaseAdminDocsView):
                 'library': tag_library,
             })
         kwargs.update({'filters': filters})
-        return super(TemplateFilterIndexView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class ViewIndexView(BaseAdminDocsView):
@@ -146,7 +146,7 @@ class ViewIndexView(BaseAdminDocsView):
                 'name': name,
             })
         kwargs.update({'views': views})
-        return super(ViewIndexView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class ViewDetailView(BaseAdminDocsView):
@@ -194,7 +194,7 @@ class ViewDetailView(BaseAdminDocsView):
             'body': body,
             'meta': metadata,
         })
-        return super(ViewDetailView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class ModelIndexView(BaseAdminDocsView):
@@ -203,7 +203,7 @@ class ModelIndexView(BaseAdminDocsView):
     def get_context_data(self, **kwargs):
         m_list = [m._meta for m in apps.get_models()]
         kwargs.update({'models': m_list})
-        return super(ModelIndexView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class ModelDetailView(BaseAdminDocsView):
@@ -333,7 +333,7 @@ class ModelDetailView(BaseAdminDocsView):
             'fields': fields,
             'methods': methods,
         })
-        return super(ModelDetailView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 class TemplateDetailView(BaseAdminDocsView):
@@ -366,7 +366,7 @@ class TemplateDetailView(BaseAdminDocsView):
             'name': template,
             'templates': templates,
         })
-        return super(TemplateDetailView, self).get_context_data(**kwargs)
+        return super().get_context_data(**kwargs)


 ####################

@@ -36,8 +36,7 @@ class GroupAdmin(admin.ModelAdmin):
             # Avoid a major performance hit resolving permission names which
             # triggers a content_type load:
             kwargs['queryset'] = qs.select_related('content_type')
-        return super(GroupAdmin, self).formfield_for_manytomany(
-            db_field, request=request, **kwargs)
+        return super().formfield_for_manytomany(db_field, request=request, **kwargs)


 @admin.register(User)
@@ -69,7 +68,7 @@ class UserAdmin(admin.ModelAdmin):
     def get_fieldsets(self, request, obj=None):
         if not obj:
             return self.add_fieldsets
-        return super(UserAdmin, self).get_fieldsets(request, obj)
+        return super().get_fieldsets(request, obj)

     def get_form(self, request, obj=None, **kwargs):
         """
@@ -79,7 +78,7 @@ class UserAdmin(admin.ModelAdmin):
         if obj is None:
             defaults['form'] = self.add_form
         defaults.update(kwargs)
-        return super(UserAdmin, self).get_form(request, obj, **defaults)
+        return super().get_form(request, obj, **defaults)

     def get_urls(self):
         return [
@@ -88,13 +87,13 @@ class UserAdmin(admin.ModelAdmin):
                 self.admin_site.admin_view(self.user_change_password),
                 name='auth_user_password_change',
             ),
-        ] + super(UserAdmin, self).get_urls()
+        ] + super().get_urls()

     def lookup_allowed(self, lookup, value):
         # See #20078: we don't want to allow any lookups involving passwords.
         if lookup.startswith('password'):
             return False
-        return super(UserAdmin, self).lookup_allowed(lookup, value)
+        return super().lookup_allowed(lookup, value)

     @sensitive_post_parameters_m
     @csrf_protect_m
@@ -127,8 +126,7 @@ class UserAdmin(admin.ModelAdmin):
             'username_help_text': username_field.help_text,
         }
         extra_context.update(defaults)
-        return super(UserAdmin, self).add_view(request, form_url,
-                                               extra_context)
+        return super().add_view(request, form_url, extra_context)

     @sensitive_post_parameters_m
     def user_change_password(self, request, id, form_url=''):
@@ -207,5 +205,4 @@ class UserAdmin(admin.ModelAdmin):
         if '_addanother' not in request.POST and IS_POPUP_VAR not in request.POST:
             request.POST = request.POST.copy()
             request.POST['_continue'] = 1
-        return super(UserAdmin, self).response_add(request, obj,
-                                                   post_url_continue)
+        return super().response_add(request, obj, post_url_continue)

@@ -61,7 +61,7 @@ class AbstractBaseUser(models.Model):
         return getattr(self, self.USERNAME_FIELD)

     def __init__(self, *args, **kwargs):
-        super(AbstractBaseUser, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         # Stores the raw password if set_password() is called so that it can
         # be passed to password_changed() after the model is saved.
         self._password = None
@@ -73,7 +73,7 @@ class AbstractBaseUser(models.Model):
         setattr(self, self.USERNAME_FIELD, self.normalize_username(self.get_username()))

     def save(self, *args, **kwargs):
-        super(AbstractBaseUser, self).save(*args, **kwargs)
+        super().save(*args, **kwargs)
         if self._password is not None:
             password_validation.password_changed(self._password, self)
             self._password = None

@@ -24,7 +24,7 @@ class ReadOnlyPasswordHashWidget(forms.Widget):
     template_name = 'auth/widgets/read_only_password_hash.html'

     def get_context(self, name, value, attrs):
-        context = super(ReadOnlyPasswordHashWidget, self).get_context(name, value, attrs)
+        context = super().get_context(name, value, attrs)
         summary = []
         if not value or value.startswith(UNUSABLE_PASSWORD_PREFIX):
             summary.append({'label': ugettext("No password set.")})
@@ -45,7 +45,7 @@ class ReadOnlyPasswordHashField(forms.Field):

     def __init__(self, *args, **kwargs):
         kwargs.setdefault("required", False)
-        super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def bound_data(self, data, initial):
         # Always return initial because the widget doesn't
@@ -58,7 +58,7 @@ class ReadOnlyPasswordHashField(forms.Field):

 class UsernameField(forms.CharField):
     def to_python(self, value):
-        return unicodedata.normalize('NFKC', super(UsernameField, self).to_python(value))
+        return unicodedata.normalize('NFKC', super().to_python(value))


 class UserCreationForm(forms.ModelForm):
@@ -88,7 +88,7 @@ class UserCreationForm(forms.ModelForm):
         field_classes = {'username': UsernameField}

     def __init__(self, *args, **kwargs):
-        super(UserCreationForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if self._meta.model.USERNAME_FIELD in self.fields:
             self.fields[self._meta.model.USERNAME_FIELD].widget.attrs.update({'autofocus': True})

@@ -105,7 +105,7 @@ class UserCreationForm(forms.ModelForm):
         return password2

     def save(self, commit=True):
-        user = super(UserCreationForm, self).save(commit=False)
+        user = super().save(commit=False)
         user.set_password(self.cleaned_data["password1"])
         if commit:
             user.save()
@@ -128,7 +128,7 @@ class UserChangeForm(forms.ModelForm):
         field_classes = {'username': UsernameField}

     def __init__(self, *args, **kwargs):
-        super(UserChangeForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         f = self.fields.get('user_permissions')
         if f is not None:
             f.queryset = f.queryset.select_related('content_type')
@@ -170,7 +170,7 @@ class AuthenticationForm(forms.Form):
         """
         self.request = request
         self.user_cache = None
-        super(AuthenticationForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

         # Set the label for the "username" field.
         self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
@@ -310,7 +310,7 @@ class SetPasswordForm(forms.Form):

     def __init__(self, user, *args, **kwargs):
         self.user = user
-        super(SetPasswordForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def clean_new_password2(self):
         password1 = self.cleaned_data.get('new_password1')
@@ -384,7 +384,7 @@ class AdminPasswordChangeForm(forms.Form):

     def __init__(self, user, *args, **kwargs):
         self.user = user
-        super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def clean_password2(self):
         password1 = self.cleaned_data.get('password1')
@@ -410,7 +410,7 @@ class AdminPasswordChangeForm(forms.Form):

     @property
     def changed_data(self):
-        data = super(AdminPasswordChangeForm, self).changed_data
+        data = super().changed_data
         for name in self.fields.keys():
             if name not in data:
                 return []

@@ -22,7 +22,7 @@ class Command(BaseCommand):
     requires_migrations_checks = True

     def __init__(self, *args, **kwargs):
-        super(Command, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.UserModel = get_user_model()
         self.username_field = self.UserModel._meta.get_field(self.UserModel.USERNAME_FIELD)

@@ -56,7 +56,7 @@ class Command(BaseCommand):

     def execute(self, *args, **options):
         self.stdin = options.get('stdin', sys.stdin)  # Used for testing
-        return super(Command, self).execute(*args, **options)
+        return super().execute(*args, **options)

     def handle(self, *args, **options):
         username = options[self.UserModel.USERNAME_FIELD]

@@ -52,7 +52,7 @@ class LoginRequiredMixin(AccessMixin):
     def dispatch(self, request, *args, **kwargs):
         if not request.user.is_authenticated:
             return self.handle_no_permission()
-        return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
+        return super().dispatch(request, *args, **kwargs)


 class PermissionRequiredMixin(AccessMixin):
@@ -88,7 +88,7 @@ class PermissionRequiredMixin(AccessMixin):
     def dispatch(self, request, *args, **kwargs):
         if not self.has_permission():
             return self.handle_no_permission()
-        return super(PermissionRequiredMixin, self).dispatch(request, *args, **kwargs)
+        return super().dispatch(request, *args, **kwargs)


 class UserPassesTestMixin(AccessMixin):
@@ -112,4 +112,4 @@ class UserPassesTestMixin(AccessMixin):
         user_test_result = self.get_test_func()()
         if not user_test_result:
             return self.handle_no_permission()
-        return super(UserPassesTestMixin, self).dispatch(request, *args, **kwargs)
+        return super().dispatch(request, *args, **kwargs)

@@ -340,7 +340,7 @@ class AbstractUser(AbstractBaseUser, PermissionsMixin):
         abstract = True

     def clean(self):
-        super(AbstractUser, self).clean()
+        super().clean()
         self.email = self.__class__.objects.normalize_email(self.email)

     def get_full_name(self):

@@ -63,7 +63,7 @@ class LoginView(SuccessURLAllowedHostsMixin, FormView):
                     "your LOGIN_REDIRECT_URL doesn't point to a login page."
                 )
             return HttpResponseRedirect(redirect_to)
-        return super(LoginView, self).dispatch(request, *args, **kwargs)
+        return super().dispatch(request, *args, **kwargs)

     def get_success_url(self):
         """Ensure the user-originating redirection URL is safe."""
@@ -89,7 +89,7 @@ class LoginView(SuccessURLAllowedHostsMixin, FormView):
         return HttpResponseRedirect(self.get_success_url())

     def get_context_data(self, **kwargs):
-        context = super(LoginView, self).get_context_data(**kwargs)
+        context = super().get_context_data(**kwargs)
         current_site = get_current_site(self.request)
         context.update({
             self.redirect_field_name: self.get_success_url(),
@@ -125,7 +125,7 @@ class LogoutView(SuccessURLAllowedHostsMixin, TemplateView):
         if next_page:
             # Redirect to this page until the session has been cleared.
             return HttpResponseRedirect(next_page)
-        return super(LogoutView, self).dispatch(request, *args, **kwargs)
+        return super().dispatch(request, *args, **kwargs)

     def get_next_page(self):
         if self.next_page is not None:
@@ -153,7 +153,7 @@ class LogoutView(SuccessURLAllowedHostsMixin, TemplateView):
         return next_page

     def get_context_data(self, **kwargs):
-        context = super(LogoutView, self).get_context_data(**kwargs)
+        context = super().get_context_data(**kwargs)
         current_site = get_current_site(self.request)
         context.update({
             'site': current_site,
@@ -356,7 +356,7 @@ class PasswordContextMixin:
     extra_context = None

     def get_context_data(self, **kwargs):
-        context = super(PasswordContextMixin, self).get_context_data(**kwargs)
+        context = super().get_context_data(**kwargs)
         context['title'] = self.title
         if self.extra_context is not None:
             context.update(self.extra_context)
@@ -377,7 +377,7 @@ class PasswordResetView(PasswordContextMixin, FormView):

     @method_decorator(csrf_protect)
     def dispatch(self, *args, **kwargs):
-        return super(PasswordResetView, self).dispatch(*args, **kwargs)
+        return super().dispatch(*args, **kwargs)

     def form_valid(self, form):
         opts = {
@@ -391,7 +391,7 @@ class PasswordResetView(PasswordContextMixin, FormView):
             'extra_email_context': self.extra_email_context,
         }
         form.save(**opts)
-        return super(PasswordResetView, self).form_valid(form)
+        return super().form_valid(form)


 INTERNAL_RESET_URL_TOKEN = 'set-password'
@@ -426,7 +426,7 @@ class PasswordResetConfirmView(PasswordContextMixin, FormView):
                 if self.token_generator.check_token(self.user, session_token):
                     # If the token is valid, display the password reset form.
                     self.validlink = True
-                    return super(PasswordResetConfirmView, self).dispatch(*args, **kwargs)
+                    return super().dispatch(*args, **kwargs)
             else:
                 if self.token_generator.check_token(self.user, token):
                     # Store the token in the session and redirect to the
@@ -450,7 +450,7 @@ class PasswordResetConfirmView(PasswordContextMixin, FormView):
         return user

     def get_form_kwargs(self):
-        kwargs = super(PasswordResetConfirmView, self).get_form_kwargs()
+        kwargs = super().get_form_kwargs()
         kwargs['user'] = self.user
         return kwargs

@@ -459,10 +459,10 @@ class PasswordResetConfirmView(PasswordContextMixin, FormView):
         if self.post_reset_login:
             auth_login(self.request, user)
         del self.request.session[INTERNAL_RESET_SESSION_TOKEN]
-        return super(PasswordResetConfirmView, self).form_valid(form)
+        return super().form_valid(form)

     def get_context_data(self, **kwargs):
-        context = super(PasswordResetConfirmView, self).get_context_data(**kwargs)
+        context = super().get_context_data(**kwargs)
         if self.validlink:
             context['validlink'] = True
         else:
@@ -479,7 +479,7 @@ class PasswordResetCompleteView(PasswordContextMixin, TemplateView):
     title = _('Password reset complete')

     def get_context_data(self, **kwargs):
-        context = super(PasswordResetCompleteView, self).get_context_data(**kwargs)
+        context = super().get_context_data(**kwargs)
         context['login_url'] = resolve_url(settings.LOGIN_URL)
         return context

@@ -545,10 +545,10 @@ class PasswordChangeView(PasswordContextMixin, FormView):
     @method_decorator(csrf_protect)
     @method_decorator(login_required)
     def dispatch(self, *args, **kwargs):
-        return super(PasswordChangeView, self).dispatch(*args, **kwargs)
+        return super().dispatch(*args, **kwargs)

     def get_form_kwargs(self):
-        kwargs = super(PasswordChangeView, self).get_form_kwargs()
+        kwargs = super().get_form_kwargs()
         kwargs['user'] = self.request.user
         return kwargs

@@ -557,7 +557,7 @@ class PasswordChangeView(PasswordContextMixin, FormView):
         # Updating the password logs out all other sessions for the user
         # except the current one.
         update_session_auth_hash(self.request, form.user)
-        return super(PasswordChangeView, self).form_valid(form)
+        return super().form_valid(form)


 class PasswordChangeDoneView(PasswordContextMixin, TemplateView):
@@ -566,4 +566,4 @@ class PasswordChangeDoneView(PasswordContextMixin, TemplateView):

     @method_decorator(login_required)
     def dispatch(self, *args, **kwargs):
-        return super(PasswordChangeDoneView, self).dispatch(*args, **kwargs)
+        return super().dispatch(*args, **kwargs)

@@ -261,12 +261,10 @@ class GenericRel(ForeignObjectRel):
     """

     def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None):
-        super(GenericRel, self).__init__(
-            field, to,
-            related_name=related_query_name or '+',
+        super().__init__(
+            field, to, related_name=related_query_name or '+',
             related_query_name=related_query_name,
-            limit_choices_to=limit_choices_to,
-            on_delete=DO_NOTHING,
+            limit_choices_to=limit_choices_to, on_delete=DO_NOTHING,
         )


@@ -303,15 +301,14 @@ class GenericRelation(ForeignObject):
         # isn't direct, the join is generated reverse along foreign key. So,
         # the from_field is object_id field, to_field is pk because of the
         # reverse join.
-        super(GenericRelation, self).__init__(
-            to, from_fields=[object_id_field], to_fields=[], **kwargs)
+        super().__init__(to, from_fields=[object_id_field], to_fields=[], **kwargs)

         self.object_id_field_name = object_id_field
         self.content_type_field_name = content_type_field
         self.for_concrete_model = for_concrete_model

     def check(self, **kwargs):
-        errors = super(GenericRelation, self).check(**kwargs)
+        errors = super().check(**kwargs)
         errors.extend(self._check_generic_foreign_key_existence())
         return errors

@@ -403,7 +400,7 @@ class GenericRelation(ForeignObject):

     def contribute_to_class(self, cls, name, **kwargs):
         kwargs['private_only'] = True
-        super(GenericRelation, self).contribute_to_class(cls, name, **kwargs)
+        super().contribute_to_class(cls, name, **kwargs)
         self.model = cls
         setattr(cls, self.name, ReverseGenericManyToOneDescriptor(self.remote_field))

@@ -480,7 +477,7 @@ def create_generic_related_manager(superclass, rel):

     class GenericRelatedObjectManager(superclass):
         def __init__(self, instance=None):
-            super(GenericRelatedObjectManager, self).__init__()
+            super().__init__()

             self.instance = instance

@@ -521,12 +518,12 @@ def create_generic_related_manager(superclass, rel):
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
-                queryset = super(GenericRelatedObjectManager, self).get_queryset()
+                queryset = super().get_queryset()
                return self._apply_rel_filters(queryset)

        def get_prefetch_queryset(self, instances, queryset=None):
            if queryset is None:
-                queryset = super(GenericRelatedObjectManager, self).get_queryset()
+                queryset = super().get_queryset()

            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)

@@ -634,21 +631,21 @@ def create_generic_related_manager(superclass, rel):
             kwargs[self.content_type_field_name] = self.content_type
             kwargs[self.object_id_field_name] = self.pk_val
             db = router.db_for_write(self.model, instance=self.instance)
-            return super(GenericRelatedObjectManager, self).using(db).create(**kwargs)
+            return super().using(db).create(**kwargs)
         create.alters_data = True

         def get_or_create(self, **kwargs):
             kwargs[self.content_type_field_name] = self.content_type
             kwargs[self.object_id_field_name] = self.pk_val
             db = router.db_for_write(self.model, instance=self.instance)
-            return super(GenericRelatedObjectManager, self).using(db).get_or_create(**kwargs)
+            return super().using(db).get_or_create(**kwargs)
         get_or_create.alters_data = True

         def update_or_create(self, **kwargs):
             kwargs[self.content_type_field_name] = self.content_type
             kwargs[self.object_id_field_name] = self.pk_val
             db = router.db_for_write(self.model, instance=self.instance)
-            return super(GenericRelatedObjectManager, self).using(db).update_or_create(**kwargs)
+            return super().using(db).update_or_create(**kwargs)
         update_or_create.alters_data = True

     return GenericRelatedObjectManager

@@ -27,11 +27,7 @@ class BaseGenericInlineFormSet(BaseModelFormSet):
                     self.instance, for_concrete_model=self.for_concrete_model),
                 self.ct_fk_field.name: self.instance.pk,
             })
-        super(BaseGenericInlineFormSet, self).__init__(
-            queryset=qs, data=data, files=files,
-            prefix=prefix,
-            **kwargs
-        )
+        super().__init__(queryset=qs, data=data, files=files, prefix=prefix, **kwargs)

     @classmethod
     def get_default_prefix(cls):

|
|||
self.app_label = app_label
|
||||
self.old_model = old_model
|
||||
self.new_model = new_model
|
||||
super(RenameContentType, self).__init__(self.rename_forward, self.rename_backward)
|
||||
super().__init__(self.rename_forward, self.rename_backward)
|
||||
|
||||
def _rename(self, apps, schema_editor, old_model, new_model):
|
||||
ContentType = apps.get_model('contenttypes', 'ContentType')
|
||||
|
|
|
@@ -10,7 +10,7 @@ class ContentTypeManager(models.Manager):
     use_in_migrations = True

     def __init__(self, *args, **kwargs):
-        super(ContentTypeManager, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         # Cache shared by all the get_for_* methods to speed up
         # ContentType retrieval.
         self._cache = {}

@@ -55,4 +55,4 @@ class FlatpageForm(forms.ModelForm):
                 params={'url': url, 'site': site},
             )

-        return super(FlatpageForm, self).clean()
+        return super().clean()

@@ -46,7 +46,7 @@ class GeoModelAdmin(ModelAdmin):
     @property
     def media(self):
         "Injects OpenLayers JavaScript into the admin."
-        media = super(GeoModelAdmin, self).media
+        media = super().media
         media.add_js([self.openlayers_url])
         media.add_js(self.extra_js)
         return media
@@ -62,7 +62,7 @@ class GeoModelAdmin(ModelAdmin):
             kwargs['widget'] = self.get_map_widget(db_field)
             return db_field.formfield(**kwargs)
         else:
-            return super(GeoModelAdmin, self).formfield_for_dbfield(db_field, request, **kwargs)
+            return super().formfield_for_dbfield(db_field, request, **kwargs)

     def get_map_widget(self, db_field):
         """

@@ -116,7 +116,7 @@ class BaseSpatialOperations:
             raise NotImplementedError(
                 "%s spatial aggregation is not supported by this database backend." % expression.name
             )
-        super(BaseSpatialOperations, self).check_expression_support(expression)
+        super().check_expression_support(expression)

     def spatial_aggregate_name(self, agg_name):
         raise NotImplementedError('Aggregate support not implemented for this spatial backend.')

@@ -97,7 +97,7 @@ class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
         return placeholder

     def get_db_converters(self, expression):
-        converters = super(MySQLOperations, self).get_db_converters(expression)
+        converters = super().get_db_converters(expression)
         if isinstance(expression.output_field, GeometryField) and self.uses_invalid_empty_geometry_collection:
             converters.append(self.convert_invalid_empty_geometry_collection)
         return converters

@@ -12,18 +12,18 @@ class MySQLGISSchemaEditor(DatabaseSchemaEditor):
     sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s'

     def __init__(self, *args, **kwargs):
-        super(MySQLGISSchemaEditor, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.geometry_sql = []

     def skip_default(self, field):
         return (
-            super(MySQLGISSchemaEditor, self).skip_default(field) or
+            super().skip_default(field) or
             # Geometry fields are stored as BLOB/TEXT and can't have defaults.
             isinstance(field, GeometryField)
         )

     def column_sql(self, model, field, include_default=False):
-        column_sql = super(MySQLGISSchemaEditor, self).column_sql(model, field, include_default)
+        column_sql = super().column_sql(model, field, include_default)
         # MySQL doesn't support spatial indexes on NULL columns
         if isinstance(field, GeometryField) and field.spatial_index and not field.null:
             qn = self.connection.ops.quote_name
@@ -38,11 +38,11 @@ class MySQLGISSchemaEditor(DatabaseSchemaEditor):
         return column_sql

     def create_model(self, model):
-        super(MySQLGISSchemaEditor, self).create_model(model)
+        super().create_model(model)
         self.create_spatial_indexes()

     def add_field(self, model, field):
-        super(MySQLGISSchemaEditor, self).add_field(model, field)
+        super().add_field(model, field)
         self.create_spatial_indexes()

     def remove_field(self, model, field):
@@ -60,7 +60,7 @@ class MySQLGISSchemaEditor(DatabaseSchemaEditor):
                     "if your storage engine doesn't support them).", sql
                 )

-        super(MySQLGISSchemaEditor, self).remove_field(model, field)
+        super().remove_field(model, field)

     def _create_spatial_index_name(self, model, field):
         return '%s_%s_id' % (model._meta.db_table, field.column)

@@ -49,7 +49,7 @@ class SDORelate(SpatialOperator):

     def as_sql(self, connection, lookup, template_params, sql_params):
         template_params['mask'] = sql_params.pop()
-        return super(SDORelate, self).as_sql(connection, lookup, template_params, sql_params)
+        return super().as_sql(connection, lookup, template_params, sql_params)


 class SDOIsValid(SpatialOperator):
@@ -143,10 +143,10 @@ class OracleOperations(BaseSpatialOperations, DatabaseOperations):
         return unsupported

     def geo_quote_name(self, name):
-        return super(OracleOperations, self).geo_quote_name(name).upper()
+        return super().geo_quote_name(name).upper()

     def get_db_converters(self, expression):
-        converters = super(OracleOperations, self).get_db_converters(expression)
+        converters = super().get_db_converters(expression)
         internal_type = expression.output_field.get_internal_type()
         geometry_fields = (
             'PointField', 'GeometryField', 'LineStringField',
@@ -271,4 +271,4 @@ class OracleOperations(BaseSpatialOperations, DatabaseOperations):
         """
         if placeholder == 'NULL':
             return []
-        return super(OracleOperations, self).modify_insert_params(placeholder, params)
+        return super().modify_insert_params(placeholder, params)

@@ -25,14 +25,14 @@ class OracleGISSchemaEditor(DatabaseSchemaEditor):
     )

     def __init__(self, *args, **kwargs):
-        super(OracleGISSchemaEditor, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.geometry_sql = []

     def geo_quote_name(self, name):
         return self.connection.ops.geo_quote_name(name)

     def column_sql(self, model, field, include_default=False):
-        column_sql = super(OracleGISSchemaEditor, self).column_sql(model, field, include_default)
+        column_sql = super().column_sql(model, field, include_default)
         if isinstance(field, GeometryField):
             db_table = model._meta.db_table
             self.geometry_sql.append(
@@ -58,17 +58,17 @@ class OracleGISSchemaEditor(DatabaseSchemaEditor):
         return column_sql

     def create_model(self, model):
-        super(OracleGISSchemaEditor, self).create_model(model)
+        super().create_model(model)
         self.run_geometry_sql()

     def delete_model(self, model):
-        super(OracleGISSchemaEditor, self).delete_model(model)
+        super().delete_model(model)
         self.execute(self.sql_clear_geometry_table_metadata % {
             'table': self.geo_quote_name(model._meta.db_table),
         })

     def add_field(self, model, field):
-        super(OracleGISSchemaEditor, self).add_field(model, field)
+        super().add_field(model, field)
         self.run_geometry_sql()

     def remove_field(self, model, field):
@@ -81,7 +81,7 @@ class OracleGISSchemaEditor(DatabaseSchemaEditor):
             self.execute(self.sql_drop_spatial_index % {
                 'index': self.quote_name(self._create_spatial_index_name(model, field)),
             })
-        super(OracleGISSchemaEditor, self).remove_field(model, field)
+        super().remove_field(model, field)

     def run_geometry_sql(self):
         for sql in self.geometry_sql:

@@ -12,14 +12,14 @@ class DatabaseWrapper(Psycopg2DatabaseWrapper):
     SchemaEditorClass = PostGISSchemaEditor

     def __init__(self, *args, **kwargs):
-        super(DatabaseWrapper, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         if kwargs.get('alias', '') != NO_DB_ALIAS:
             self.features = DatabaseFeatures(self)
             self.ops = PostGISOperations(self)
             self.introspection = PostGISIntrospection(self)

     def prepare_database(self):
-        super(DatabaseWrapper, self).prepare_database()
+        super().prepare_database()
         # Check that postgis extension is installed.
         with self.cursor() as cursor:
             cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis")

@@ -79,7 +79,7 @@ class PostGISIntrospection(DatabaseIntrospection):
             # performed -- in other words, when this function is called.
             self.postgis_types_reverse = self.get_postgis_types()
             self.data_types_reverse.update(self.postgis_types_reverse)
-        return super(PostGISIntrospection, self).get_field_type(data_type, description)
+        return super().get_field_type(data_type, description)

     def get_geometry_type(self, table_name, geo_col):
         """

@@ -29,7 +29,7 @@ class PostGISOperator(SpatialOperator):
         # polygons. If the raster argument is set to BILATERAL, then the
         # operator cannot handle mixed geom-raster lookups.
         self.raster = raster
-        super(PostGISOperator, self).__init__(**kwargs)
+        super().__init__(**kwargs)

     def as_sql(self, connection, lookup, template_params, *args):
         if lookup.lhs.output_field.geography and not self.geography:
@@ -37,7 +37,7 @@ class PostGISOperator(SpatialOperator):
                              'function/operator.' % (self.func or self.op,))

         template_params = self.check_raster(lookup, template_params)
-        return super(PostGISOperator, self).as_sql(connection, lookup, template_params, *args)
+        return super().as_sql(connection, lookup, template_params, *args)

     def check_raster(self, lookup, template_params):
         # Get rhs value.
@@ -100,7 +100,7 @@ class PostGISDistanceOperator(PostGISOperator):
             else:
                 template_params.update({'op': self.op, 'func': connection.ops.spatial_function_name('DistanceSphere')})
             return sql_template % template_params, sql_params
-        return super(PostGISDistanceOperator, self).as_sql(connection, lookup, template_params, sql_params)
+        return super().as_sql(connection, lookup, template_params, sql_params)


 class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
@@ -149,7 +149,7 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
     unsupported_functions = set()

     def __init__(self, connection):
-        super(PostGISOperations, self).__init__(connection)
+        super().__init__(connection)

         prefix = self.geom_func_prefix

@@ -15,11 +15,11 @@ class PostGISSchemaEditor(DatabaseSchemaEditor):
     def _field_should_be_indexed(self, model, field):
         if getattr(field, 'spatial_index', False):
             return True
-        return super(PostGISSchemaEditor, self)._field_should_be_indexed(model, field)
+        return super()._field_should_be_indexed(model, field)

     def _create_index_sql(self, model, fields, suffix="", sql=None):
         if len(fields) != 1 or not hasattr(fields[0], 'geodetic'):
-            return super(PostGISSchemaEditor, self)._create_index_sql(model, fields, suffix=suffix, sql=sql)
+            return super()._create_index_sql(model, fields, suffix=suffix, sql=sql)

         field = fields[0]
         field_column = self.quote_name(field.column)
@@ -45,9 +45,7 @@ class PostGISSchemaEditor(DatabaseSchemaEditor):
         Special case when dimension changed.
         """
         if not hasattr(old_field, 'dim') or not hasattr(new_field, 'dim'):
-            return super(PostGISSchemaEditor, self)._alter_column_type_sql(
-                table, old_field, new_field, new_type
-            )
+            return super()._alter_column_type_sql(table, old_field, new_field, new_type)

         if old_field.dim == 2 and new_field.dim == 3:
             sql_alter = self.sql_alter_column_to_3d

@@ -34,10 +34,10 @@ class DatabaseWrapper(SQLiteDatabaseWrapper):
                 'Make sure it is in your library path, or set '
                 'SPATIALITE_LIBRARY_PATH in your settings.'
             )
-        super(DatabaseWrapper, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)

     def get_new_connection(self, conn_params):
-        conn = super(DatabaseWrapper, self).get_new_connection(conn_params)
+        conn = super().get_new_connection(conn_params)
         # Enabling extension loading on the SQLite connection.
         try:
             conn.enable_load_extension(True)
@@ -59,7 +59,7 @@ class DatabaseWrapper(SQLiteDatabaseWrapper):
         return conn

     def prepare_database(self):
-        super(DatabaseWrapper, self).prepare_database()
+        super().prepare_database()
         # Check if spatial metadata have been initialized in the database
         with self.cursor() as cursor:
             cursor.execute("PRAGMA table_info(geometry_columns);")

@@ -61,7 +61,7 @@ class SpatiaLiteIntrospection(DatabaseIntrospection):
         return field_type, field_params

     def get_constraints(self, cursor, table_name):
-        constraints = super(SpatiaLiteIntrospection, self).get_constraints(cursor, table_name)
+        constraints = super().get_constraints(cursor, table_name)
         cursor.execute('SELECT f_geometry_column '
                        'FROM geometry_columns '
                        'WHERE f_table_name=%s AND spatial_index_enabled=1', (table_name,))

@@ -28,7 +28,7 @@ class SpatiaLiteDistanceOperator(SpatialOperator):
             })
             sql_params.insert(1, len(lookup.rhs) == 3 and lookup.rhs[-1] == 'spheroid')
             return sql_template % template_params, sql_params
-        return super(SpatiaLiteDistanceOperator, self).as_sql(connection, lookup, template_params, sql_params)
+        return super().as_sql(connection, lookup, template_params, sql_params)


 class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
@@ -261,7 +261,7 @@ class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
         return SpatialiteSpatialRefSys

     def get_db_converters(self, expression):
-        converters = super(SpatiaLiteOperations, self).get_db_converters(expression)
+        converters = super().get_db_converters(expression)
         if hasattr(expression.output_field, 'geom_type'):
             converters.append(self.convert_geometry)
         return converters

@@ -28,7 +28,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
     ]

     def __init__(self, *args, **kwargs):
-        super(SpatialiteSchemaEditor, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
         self.geometry_sql = []

     def geo_quote_name(self, name):
@@ -37,7 +37,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
     def column_sql(self, model, field, include_default=False):
         from django.contrib.gis.db.models.fields import GeometryField
         if not isinstance(field, GeometryField):
-            return super(SpatialiteSchemaEditor, self).column_sql(model, field, include_default)
+            return super().column_sql(model, field, include_default)

         # Geometry columns are created by the `AddGeometryColumn` function
         self.geometry_sql.append(
@@ -75,7 +75,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
         )

     def create_model(self, model):
-        super(SpatialiteSchemaEditor, self).create_model(model)
+        super().create_model(model)
         # Create geometry columns
         for sql in self.geometry_sql:
             self.execute(sql)
@@ -98,7 +98,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
                 )
             except DatabaseError:
                 pass
-        super(SpatialiteSchemaEditor, self).delete_model(model, **kwargs)
+        super().delete_model(model, **kwargs)

     def add_field(self, model, field):
         from django.contrib.gis.db.models.fields import GeometryField
@@ -109,7 +109,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
                 self.execute(sql)
             self.geometry_sql = []
         else:
-            super(SpatialiteSchemaEditor, self).add_field(model, field)
+            super().add_field(model, field)

     def remove_field(self, model, field):
         from django.contrib.gis.db.models.fields import GeometryField
@@ -121,7 +121,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
         if isinstance(field, GeometryField):
             self._remake_table(model, delete_field=field)
         else:
-            super(SpatialiteSchemaEditor, self).remove_field(model, field)
+            super().remove_field(model, field)

     def alter_db_table(self, model, old_db_table, new_db_table):
         from django.contrib.gis.db.models.fields import GeometryField
@@ -135,7 +135,7 @@ class SpatialiteSchemaEditor(DatabaseSchemaEditor):
                     }
                 )
         # Alter table
-        super(SpatialiteSchemaEditor, self).alter_db_table(model, old_db_table, new_db_table)
+        super().alter_db_table(model, old_db_table, new_db_table)
         # Repoint any straggler names
         for geom_table in self.geometry_tables:
             try:

@@ -13,7 +13,7 @@ class GeoAggregate(Aggregate):
# we get the spatial_aggregate_name
connection.ops.check_expression_support(self)
self.function = connection.ops.spatial_aggregate_name(self.name)
return super(GeoAggregate, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)

def as_oracle(self, compiler, connection):
if not hasattr(self, 'tolerance'):

@@ -24,7 +24,7 @@ class GeoAggregate(Aggregate):
return self.as_sql(compiler, connection)

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = super(GeoAggregate, self).resolve_expression(query, allow_joins, reuse, summarize, for_save)
c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
for expr in c.get_source_expressions():
if not hasattr(expr.field, 'geom_type'):
raise ValueError('Geospatial aggregates only allowed on geometry fields.')

@@ -40,7 +40,7 @@ class Extent(GeoAggregate):
is_extent = '2D'

def __init__(self, expression, **extra):
super(Extent, self).__init__(expression, output_field=ExtentField(), **extra)
super().__init__(expression, output_field=ExtentField(), **extra)

def convert_value(self, value, expression, connection, context):
return connection.ops.convert_extent(value, context.get('transformed_srid'))

@@ -51,7 +51,7 @@ class Extent3D(GeoAggregate):
is_extent = '3D'

def __init__(self, expression, **extra):
super(Extent3D, self).__init__(expression, output_field=ExtentField(), **extra)
super().__init__(expression, output_field=ExtentField(), **extra)

def convert_value(self, value, expression, connection, context):
return connection.ops.convert_extent3d(value, context.get('transformed_srid'))
@@ -110,10 +110,10 @@ class BaseSpatialField(Field):
# first parameter, so this works like normal fields.
kwargs['verbose_name'] = verbose_name

super(BaseSpatialField, self).__init__(**kwargs)
super().__init__(**kwargs)

def deconstruct(self):
name, path, args, kwargs = super(BaseSpatialField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
# Always include SRID for less fragility; include spatial index if it's
# not the default value.
kwargs['srid'] = self.srid

@@ -207,7 +207,7 @@ class BaseSpatialField(Field):
geometry or raster value properly and preserves any other lookup
parameters.
"""
value = super(BaseSpatialField, self).get_prep_value(value)
value = super().get_prep_value(value)

# For IsValid lookups, boolean values are allowed.
if isinstance(value, (Expression, bool)):

@@ -292,10 +292,10 @@ class GeometryField(GeoSelectFormatMixin, BaseSpatialField):
self._extent = kwargs.pop('extent', (-180.0, -90.0, 180.0, 90.0))
self._tolerance = kwargs.pop('tolerance', 0.05)

super(GeometryField, self).__init__(verbose_name=verbose_name, **kwargs)
super().__init__(verbose_name=verbose_name, **kwargs)

def deconstruct(self):
name, path, args, kwargs = super(GeometryField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
# Include kwargs if they're not the default values.
if self.dim != 2:
kwargs['dim'] = self.dim

@@ -314,7 +314,7 @@ class GeometryField(GeoSelectFormatMixin, BaseSpatialField):

def get_db_prep_value(self, value, connection, *args, **kwargs):
return connection.ops.Adapter(
super(GeometryField, self).get_db_prep_value(value, connection, *args, **kwargs),
super().get_db_prep_value(value, connection, *args, **kwargs),
**({'geography': True} if self.geography else {})
)

@@ -329,7 +329,7 @@ class GeometryField(GeoSelectFormatMixin, BaseSpatialField):

# ### Routines overloaded from Field ###
def contribute_to_class(self, cls, name, **kwargs):
super(GeometryField, self).contribute_to_class(cls, name, **kwargs)
super().contribute_to_class(cls, name, **kwargs)

# Setup for lazy-instantiated Geometry object.
setattr(cls, self.attname, SpatialProxy(Geometry, self))

@@ -343,7 +343,7 @@ class GeometryField(GeoSelectFormatMixin, BaseSpatialField):
if (self.dim > 2 and 'widget' not in kwargs and
not getattr(defaults['form_class'].widget, 'supports_3d', False)):
defaults['widget'] = forms.Textarea
return super(GeometryField, self).formfield(**defaults)
return super().formfield(**defaults)


# The OpenGIS Geometry Type Fields

@@ -414,7 +414,7 @@ class RasterField(BaseSpatialField):

def db_type(self, connection):
self._check_connection(connection)
return super(RasterField, self).db_type(connection)
return super().db_type(connection)

def from_db_value(self, value, expression, connection, context):
return connection.ops.parse_raster(value)

@@ -424,10 +424,10 @@ class RasterField(BaseSpatialField):
# Prepare raster for writing to database.
if not prepared:
value = connection.ops.deconstruct_raster(value)
return super(RasterField, self).get_db_prep_value(value, connection, prepared)
return super().get_db_prep_value(value, connection, prepared)

def contribute_to_class(self, cls, name, **kwargs):
super(RasterField, self).contribute_to_class(cls, name, **kwargs)
super().contribute_to_class(cls, name, **kwargs)
# Setup for lazy-instantiated Raster object. For large querysets, the
# instantiation of all GDALRasters can potentially be expensive. This
# delays the instantiation of the objects to the moment of evaluation

@@ -444,4 +444,4 @@ class RasterField(BaseSpatialField):
)
except ValueError:
pass
return super(RasterField, self).get_transform(name)
return super().get_transform(name)
@@ -21,7 +21,7 @@ class GeoFunc(Func):
def __init__(self, *expressions, **extra):
if 'output_field' not in extra and self.output_field_class:
extra['output_field'] = self.output_field_class()
super(GeoFunc, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

@property
def name(self):

@@ -46,10 +46,10 @@ class GeoFunc(Func):
self.function = connection.ops.spatial_function_name(self.name)
if any(isinstance(field, RasterField) for field in self.get_source_fields()):
raise TypeError("Geometry functions not supported for raster fields.")
return super(GeoFunc, self).as_sql(compiler, connection, **extra_context)
return super().as_sql(compiler, connection, **extra_context)

def resolve_expression(self, *args, **kwargs):
res = super(GeoFunc, self).resolve_expression(*args, **kwargs)
res = super().resolve_expression(*args, **kwargs)
base_srid = res.srid
if not base_srid:
raise TypeError("Geometry functions can only operate on geometric content.")

@@ -88,7 +88,7 @@ class GeomValue(Value):
self.value = connection.ops.Adapter(self.value, geography=self.geography)
else:
self.value = connection.ops.Adapter(self.value)
return super(GeomValue, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class GeoFuncWithGeoParam(GeoFunc):

@@ -97,7 +97,7 @@ class GeoFuncWithGeoParam(GeoFunc):
raise TypeError("Please provide a geometry object.")
if not hasattr(geom, 'srid') or not geom.srid:
raise ValueError("Please provide a geometry attribute with a defined SRID.")
super(GeoFuncWithGeoParam, self).__init__(expression, GeomValue(geom), *expressions, **extra)
super().__init__(expression, GeomValue(geom), *expressions, **extra)


class SQLiteDecimalToFloatMixin:

@@ -109,7 +109,7 @@ class SQLiteDecimalToFloatMixin:
for expr in self.get_source_expressions():
if hasattr(expr, 'value') and isinstance(expr.value, Decimal):
expr.value = float(expr.value)
return super(SQLiteDecimalToFloatMixin, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class OracleToleranceMixin:

@@ -118,7 +118,7 @@ class OracleToleranceMixin:
def as_oracle(self, compiler, connection):
tol = self.extra.get('tolerance', self.tolerance)
self.template = "%%(function)s(%%(expressions)s, %s)" % tol
return super(OracleToleranceMixin, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class Area(OracleToleranceMixin, GeoFunc):

@@ -141,11 +141,11 @@ class Area(OracleToleranceMixin, GeoFunc):
units_name = geo_field.units_name(connection)
if units_name:
self.output_field.area_att = AreaMeasure.unit_attname(units_name)
return super(Area, self).as_sql(compiler, connection, **extra_context)
return super().as_sql(compiler, connection, **extra_context)

def as_oracle(self, compiler, connection):
self.output_field = AreaField('sq_m')  # Oracle returns area in units of meters.
return super(Area, self).as_oracle(compiler, connection)
return super().as_oracle(compiler, connection)

def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):

@@ -170,7 +170,7 @@ class AsGeoJSON(GeoFunc):
options = 2
if options:
expressions.append(options)
super(AsGeoJSON, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)


class AsGML(GeoFunc):

@@ -181,7 +181,7 @@ class AsGML(GeoFunc):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super(AsGML, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

def as_oracle(self, compiler, connection, **extra_context):
source_expressions = self.get_source_expressions()

@@ -196,7 +196,7 @@ class AsKML(AsGML):
def as_sqlite(self, compiler, connection):
# No version parameter
self.source_expressions.pop(0)
return super(AsKML, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class AsSVG(GeoFunc):

@@ -209,12 +209,12 @@ class AsSVG(GeoFunc):
relative,
self._handle_param(precision, 'precision', int),
]
super(AsSVG, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)


class BoundingCircle(OracleToleranceMixin, GeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super(BoundingCircle, self).__init__(*[expression, num_seg], **extra)
super().__init__(*[expression, num_seg], **extra)

def as_oracle(self, compiler, connection):
clone = self.copy()
@@ -260,7 +260,7 @@ class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFuncWithGeoParam):
if spheroid is not None:
self.spheroid = spheroid
expressions += (self._handle_param(spheroid, 'spheroid', bool),)
super(Distance, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

def as_postgresql(self, compiler, connection):
geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info

@@ -279,12 +279,12 @@ class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFuncWithGeoParam):
self.source_expressions[2] = Value(geo_field._spheroid)
else:
self.function = connection.ops.spatial_function_name('DistanceSphere')
return super(Distance, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)

def as_oracle(self, compiler, connection):
if self.spheroid:
self.source_expressions.pop(2)
return super(Distance, self).as_oracle(compiler, connection)
return super().as_oracle(compiler, connection)

def as_sqlite(self, compiler, connection, **extra_context):
if self.spheroid:

@@ -293,7 +293,7 @@ class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFuncWithGeoParam):
# SpatiaLite returns NULL instead of zero on geodetic coordinates
extra_context['template'] = 'COALESCE(%(function)s(%(expressions)s, %(spheroid)s), 0)'
extra_context['spheroid'] = int(bool(self.spheroid))
return super(Distance, self).as_sql(compiler, connection, **extra_context)
return super().as_sql(compiler, connection, **extra_context)


class Envelope(GeoFunc):

@@ -311,7 +311,7 @@ class GeoHash(GeoFunc):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super(GeoHash, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)


class Intersection(OracleToleranceMixin, GeoFuncWithGeoParam):

@@ -322,7 +322,7 @@ class IsValid(OracleToleranceMixin, GeoFunc):
output_field_class = BooleanField

def as_oracle(self, compiler, connection, **extra_context):
sql, params = super(IsValid, self).as_oracle(compiler, connection, **extra_context)
sql, params = super().as_oracle(compiler, connection, **extra_context)
return "CASE %s WHEN 'TRUE' THEN 1 ELSE 0 END" % sql, params


@@ -331,13 +331,13 @@ class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):

def __init__(self, expr1, spheroid=True, **extra):
self.spheroid = spheroid
super(Length, self).__init__(expr1, **extra)
super().__init__(expr1, **extra)

def as_sql(self, compiler, connection):
geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
if geo_field.geodetic(connection) and not connection.features.supports_length_geodetic:
raise NotImplementedError("This backend doesn't support Length on geodetic fields")
return super(Length, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)

def as_postgresql(self, compiler, connection):
geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info

@@ -351,7 +351,7 @@ class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
dim = min(f.dim for f in self.get_source_fields() if f)
if dim > 2:
self.function = connection.ops.length3d
return super(Length, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)

def as_sqlite(self, compiler, connection):
geo_field = GeometryField(srid=self.srid)

@@ -360,7 +360,7 @@ class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
self.function = 'GeodesicLength'
else:
self.function = 'GreatCircleLength'
return super(Length, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class MakeValid(GeoFunc):

@@ -385,7 +385,7 @@ class NumPoints(GeoFunc):
if self.source_expressions[self.geom_param_pos].output_field.geom_type != 'LINESTRING':
if not connection.features.supports_num_points_poly:
raise TypeError('NumPoints can only operate on LineString content on this database.')
return super(NumPoints, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):

@@ -399,13 +399,13 @@ class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
dim = min(f.dim for f in self.get_source_fields())
if dim > 2:
self.function = connection.ops.perimeter3d
return super(Perimeter, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)

def as_sqlite(self, compiler, connection):
geo_field = GeometryField(srid=self.srid)  # Fake field to get SRID info
if geo_field.geodetic(connection):
raise NotImplementedError("Perimeter cannot use a non-projected field.")
return super(Perimeter, self).as_sql(compiler, connection)
return super().as_sql(compiler, connection)


class PointOnSurface(OracleToleranceMixin, GeoFunc):

@@ -425,7 +425,7 @@ class Scale(SQLiteDecimalToFloatMixin, GeoFunc):
]
if z != 0.0:
expressions.append(self._handle_param(z, 'z', NUMERIC_TYPES))
super(Scale, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)


class SnapToGrid(SQLiteDecimalToFloatMixin, GeoFunc):

@@ -446,7 +446,7 @@ class SnapToGrid(SQLiteDecimalToFloatMixin, GeoFunc):
)
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.')
super(SnapToGrid, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)


class SymDifference(OracleToleranceMixin, GeoFuncWithGeoParam):

@@ -461,7 +461,7 @@ class Transform(GeoFunc):
]
if 'output_field' not in extra:
extra['output_field'] = GeometryField(srid=srid)
super(Transform, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)

@property
def srid(self):

@@ -474,7 +474,7 @@ class Translate(Scale):
if len(self.source_expressions) < 4:
# Always provide the z parameter for ST_Translate
self.source_expressions.append(Value(0))
return super(Translate, self).as_sqlite(compiler, connection)
return super().as_sqlite(compiler, connection)


class Union(OracleToleranceMixin, GeoFuncWithGeoParam):
@@ -22,7 +22,7 @@ class GISLookup(Lookup):
band_lhs = None

def __init__(self, *args, **kwargs):
super(GISLookup, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.template_params = {}

@classmethod

@@ -100,7 +100,7 @@ class GISLookup(Lookup):
def process_rhs(self, compiler, connection):
if isinstance(self.rhs, Query):
# If rhs is some Query, don't touch it.
return super(GISLookup, self).process_rhs(compiler, connection)
return super().process_rhs(compiler, connection)

geom = self.rhs
if isinstance(self.rhs, Col):

@@ -124,7 +124,7 @@ class GISLookup(Lookup):
elif isinstance(self.lhs, RasterBandTransform):
self.process_band_indices(only_lhs=True)

rhs, rhs_params = super(GISLookup, self).process_rhs(compiler, connection)
rhs, rhs_params = super().process_rhs(compiler, connection)
rhs = connection.ops.get_geom_placeholder(self.lhs.output_field, geom, compiler)
return rhs, rhs_params

@@ -390,7 +390,7 @@ class RelateLookup(GISLookup):
pattern = value[1]
if not isinstance(pattern, str) or not self.pattern_regex.match(pattern):
raise ValueError('Invalid intersection matrix pattern "%s".' % pattern)
return super(RelateLookup, self).get_db_prep_lookup(value, connection)
return super().get_db_prep_lookup(value, connection)


gis_lookups['relate'] = RelateLookup
@@ -16,7 +16,7 @@ class SpatialProxy(DeferredAttribute):
"""
self._field = field
self._klass = klass
super(SpatialProxy, self).__init__(field.attname, klass)
super().__init__(field.attname, klass)

def __get__(self, instance, cls=None):
"""

@@ -33,7 +33,7 @@ class SpatialProxy(DeferredAttribute):
try:
geo_value = instance.__dict__[self._field.attname]
except KeyError:
geo_value = super(SpatialProxy, self).__get__(instance, cls)
geo_value = super().__get__(instance, cls)

if isinstance(geo_value, self._klass):
geo_obj = geo_value
@@ -82,46 +82,46 @@ class GeoFeedMixin:
# ### SyndicationFeed subclasses ###
class GeoRSSFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super(GeoRSSFeed, self).rss_attributes()
attrs = super().rss_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs

def add_item_elements(self, handler, item):
super(GeoRSSFeed, self).add_item_elements(handler, item)
super().add_item_elements(handler, item)
self.add_georss_element(handler, item)

def add_root_elements(self, handler):
super(GeoRSSFeed, self).add_root_elements(handler)
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed)


class GeoAtom1Feed(Atom1Feed, GeoFeedMixin):
def root_attributes(self):
attrs = super(GeoAtom1Feed, self).root_attributes()
attrs = super().root_attributes()
attrs['xmlns:georss'] = 'http://www.georss.org/georss'
return attrs

def add_item_elements(self, handler, item):
super(GeoAtom1Feed, self).add_item_elements(handler, item)
super().add_item_elements(handler, item)
self.add_georss_element(handler, item)

def add_root_elements(self, handler):
super(GeoAtom1Feed, self).add_root_elements(handler)
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed)


class W3CGeoFeed(Rss201rev2Feed, GeoFeedMixin):
def rss_attributes(self):
attrs = super(W3CGeoFeed, self).rss_attributes()
attrs = super().rss_attributes()
attrs['xmlns:geo'] = 'http://www.w3.org/2003/01/geo/wgs84_pos#'
return attrs

def add_item_elements(self, handler, item):
super(W3CGeoFeed, self).add_item_elements(handler, item)
super().add_item_elements(handler, item)
self.add_georss_element(handler, item, w3c_geo=True)

def add_root_elements(self, handler):
super(W3CGeoFeed, self).add_root_elements(handler)
super().add_root_elements(handler)
self.add_georss_element(handler, self.feed, w3c_geo=True)
@@ -27,7 +27,7 @@ class GeometryField(forms.Field):
# defaults (e.g., allow None).
self.srid = kwargs.pop('srid', None)
self.geom_type = kwargs.pop('geom_type', self.geom_type)
super(GeometryField, self).__init__(**kwargs)
super().__init__(**kwargs)
self.widget.attrs['geom_type'] = self.geom_type

def to_python(self, value):

@@ -58,7 +58,7 @@ class GeometryField(forms.Field):
object (which is returned). A ValidationError is raised if
the value cannot be instantiated as a Geometry.
"""
geom = super(GeometryField, self).clean(value)
geom = super().clean(value)
if geom is None:
return geom

@@ -103,7 +103,7 @@ class OSMWidget(OpenLayersWidget):
map_srid = 3857

def __init__(self, attrs=None):
super(OSMWidget, self).__init__()
super().__init__()
for key in ('default_lon', 'default_lat'):
self.attrs[key] = getattr(self, key)
if attrs:
@@ -502,7 +502,7 @@ class Point(OGRGeometry):

def _geos_ptr(self):
from django.contrib.gis import geos
return geos.Point._create_empty() if self.empty else super(Point, self)._geos_ptr()
return geos.Point._create_empty() if self.empty else super()._geos_ptr()

@classmethod
def _create_empty(cls):

@@ -37,7 +37,7 @@ class GeometryCollection(GEOSGeometry):

# Creating the geometry pointer array.
collection = self._create_collection(len(init_geoms), iter(init_geoms))
super(GeometryCollection, self).__init__(collection, **kwargs)
super().__init__(collection, **kwargs)

def __iter__(self):
"Iterates over each Geometry in the Collection."

@@ -89,7 +89,7 @@ class GeometryCollection(GEOSGeometry):
for geom in self
],
})
return super(GeometryCollection, self).json
return super().json
geojson = json

@property

@@ -118,7 +118,7 @@ class MultiLineString(LinearGeometryMixin, GeometryCollection):
def closed(self):
if geos_version_info()['version'] < '3.5':
raise GEOSException("MultiLineString.closed requires GEOS >= 3.5.0.")
return super(MultiLineString, self).closed
return super().closed


class MultiPolygon(GeometryCollection):

@@ -15,10 +15,10 @@ __all__ = ['WKBWriter', 'WKTWriter', 'WKBReader', 'WKTReader']
class WKBReader(_WKBReader):
def read(self, wkb):
"Returns a GEOSGeometry for the given WKB buffer."
return GEOSGeometry(super(WKBReader, self).read(wkb))
return GEOSGeometry(super().read(wkb))


class WKTReader(_WKTReader):
def read(self, wkt):
"Returns a GEOSGeometry for the given WKT string."
return GEOSGeometry(super(WKTReader, self).read(wkt))
return GEOSGeometry(super().read(wkt))
@@ -37,7 +37,7 @@ class LineString(LinearGeometryMixin, GEOSGeometry):

ncoords = len(coords)
if not ncoords:
super(LineString, self).__init__(self._init_func(None), srid=srid)
super().__init__(self._init_func(None), srid=srid)
return

if ncoords < self._minlength:

@@ -86,7 +86,7 @@ class LineString(LinearGeometryMixin, GEOSGeometry):

# Calling the base geometry initialization with the returned pointer
# from the function.
super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)
super().__init__(self._init_func(cs.ptr), srid=srid)

def __iter__(self):
"Allows iteration over this LineString."

@@ -67,7 +67,7 @@ class ListMixin:
self._set_single = self._set_single_rebuild
self._assign_extended_slice = self._assign_extended_slice_rebuild

super(ListMixin, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

def __getitem__(self, index):
"Get the item(s) at the specified index/slice."

@@ -38,10 +38,10 @@ class Point(GEOSGeometry):

# Initializing using the address returned from the GEOS
# createPoint factory.
super(Point, self).__init__(point, srid=srid)
super().__init__(point, srid=srid)

def _ogr_ptr(self):
return gdal.geometries.Point._create_empty() if self.empty else super(Point, self)._ogr_ptr()
return gdal.geometries.Point._create_empty() if self.empty else super()._ogr_ptr()

@classmethod
def _create_empty(cls):
@@ -27,7 +27,7 @@ class Polygon(GEOSGeometry):
... ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
"""
if not args:
super(Polygon, self).__init__(self._create_polygon(0, None), **kwargs)
super().__init__(self._create_polygon(0, None), **kwargs)
return

# Getting the ext_ring and init_holes parameters from the argument list

@@ -45,7 +45,7 @@ class Polygon(GEOSGeometry):
n_holes = len(init_holes)

polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes)
super(Polygon, self).__init__(polygon, **kwargs)
super().__init__(polygon, **kwargs)

def __iter__(self):
"Iterates over each ring in the polygon."

@@ -48,7 +48,7 @@ class CsOperation(GEOSFuncFactory):
self.argtypes = [CS_PTR, c_uint, c_uint, dbl_param]
else:
self.argtypes = [CS_PTR, c_uint, dbl_param]
return super(CsOperation, self).get_func()
return super().get_func()


class CsOutput(GEOSFuncFactory):

@@ -56,7 +56,7 @@ class CsOutput(GEOSFuncFactory):

def get_func(self, argtypes):
self.argtypes = argtypes
return super(CsOutput, self).get_func()
return super().get_func()

@staticmethod
def errcheck(result, func, cargs):

@@ -43,7 +43,7 @@ class GeomOutput(GEOSFuncFactory):

def get_func(self, argtypes):
self.argtypes = argtypes
return super(GeomOutput, self).get_func()
return super().get_func()


class IntFromGeom(GEOSFuncFactory):

@@ -56,7 +56,7 @@ class IntFromGeom(GEOSFuncFactory):
self.errcheck = check_zero
else:
self.errcheck = check_minus_one
return super(IntFromGeom, self).get_func()
return super().get_func()


class StringFromGeom(GEOSFuncFactory):

@@ -165,7 +165,7 @@ class WKTWriter(IOBase):
_precision = None

def __init__(self, dim=2, trim=False, precision=None):
super(WKTWriter, self).__init__()
super().__init__()
if bool(trim) != self._trim:
self.trim = trim
if precision is not None:

@@ -215,7 +215,7 @@ class WKBWriter(IOBase):
destructor = wkb_writer_destroy

def __init__(self, dim=2):
super(WKBWriter, self).__init__()
super().__init__()
self.outdim = dim

def _handle_empty_point(self, geom):

@@ -23,7 +23,7 @@ class DblFromGeom(GEOSFuncFactory):
argtypes = [GEOM_PTR for i in range(num_geom)]
argtypes += [POINTER(c_double)]
self.argtypes = argtypes
return super(DblFromGeom, self).get_func()
return super().get_func()


# ### ctypes prototypes ###
@@ -6,7 +6,7 @@ class Command(InspectDBCommand):
db_module = 'django.contrib.gis.db'

def get_field_type(self, connection, table_name, row):
field_type, field_params, field_notes = super(Command, self).get_field_type(connection, table_name, row)
field_type, field_params, field_notes = super().get_field_type(connection, table_name, row)
if field_type == 'GeometryField':
geo_col = row[0]
# Getting a more specific field type and any additional parameters

@@ -11,7 +11,7 @@ class Serializer(JSONSerializer):
Convert a queryset to GeoJSON, http://geojson.org/
"""
def _init_options(self):
super(Serializer, self)._init_options()
super()._init_options()
self.geometry_field = self.json_kwargs.pop('geometry_field', None)
self.srid = self.json_kwargs.pop('srid', 4326)
if (self.selected_fields is not None and self.geometry_field is not None and

@@ -29,7 +29,7 @@ class Serializer(JSONSerializer):
self.stream.write(']}')

def start_object(self, obj):
super(Serializer, self).start_object(obj)
super().start_object(obj)
self._geometry = None
if self.geometry_field is None:
# Find the first declared geometry field

@@ -62,7 +62,7 @@ class Serializer(JSONSerializer):
if field.name == self.geometry_field:
self._geometry = field.value_from_object(obj)
else:
super(Serializer, self).handle_field(obj, field)
super().handle_field(obj, field)


class Deserializer:

@@ -64,7 +64,7 @@ class BaseStorage:
self._queued_messages = []
self.used = False
self.added_new = False
super(BaseStorage, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

def __len__(self):
return len(self._loaded_messages) + len(self._queued_messages)
@@ -21,7 +21,7 @@ class MessageEncoder(json.JSONEncoder):
if obj.extra_tags:
message.append(obj.extra_tags)
return message
return super(MessageEncoder, self).default(obj)
return super().default(obj)


class MessageDecoder(json.JSONDecoder):

@@ -45,7 +45,7 @@ class MessageDecoder(json.JSONDecoder):
return obj

def decode(self, s, **kwargs):
decoded = super(MessageDecoder, self).decode(s, **kwargs)
decoded = super().decode(s, **kwargs)
return self.process_messages(decoded)

@@ -11,7 +11,7 @@ class FallbackStorage(BaseStorage):
storage_classes = (CookieStorage, SessionStorage)

def __init__(self, *args, **kwargs):
super(FallbackStorage, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.storages = [storage_class(*args, **kwargs)
for storage_class in self.storage_classes]
self._used_storages = set()

@@ -18,7 +18,7 @@ class SessionStorage(BaseStorage):
"message storage requires session middleware to be installed, "\
"and come before the message middleware in the "\
"MIDDLEWARE%s list." % ("_CLASSES" if settings.MIDDLEWARE is None else "")
super(SessionStorage, self).__init__(request, *args, **kwargs)
super().__init__(request, *args, **kwargs)

def _get(self, *args, **kwargs):
"""

@@ -8,7 +8,7 @@ class SuccessMessageMixin:
success_message = ''

def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
response = super().form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
@@ -47,7 +47,7 @@ class StringAgg(Aggregate):

def __init__(self, expression, delimiter, distinct=False, **extra):
distinct = 'DISTINCT ' if distinct else ''
super(StringAgg, self).__init__(expression, delimiter=delimiter, distinct=distinct, **extra)
super().__init__(expression, delimiter=delimiter, distinct=distinct, **extra)

def convert_value(self, value, expression, connection, context):
if not value:

@@ -11,7 +11,7 @@ class StatAggregate(Aggregate):
def __init__(self, y, x, output_field=FloatField()):
if not x or not y:
raise ValueError('Both y and x must be provided.')
super(StatAggregate, self).__init__(y=y, x=x, output_field=output_field)
super().__init__(y=y, x=x, output_field=output_field)
self.x = x
self.y = y
self.source_expressions = self._parse_expressions(self.y, self.x)

@@ -23,7 +23,7 @@ class StatAggregate(Aggregate):
self.y, self.x = exprs

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
return super(Aggregate, self).resolve_expression(query, allow_joins, reuse, summarize)
return super().resolve_expression(query, allow_joins, reuse, summarize)


class Corr(StatAggregate):

@@ -33,7 +33,7 @@ class Corr(StatAggregate):
class CovarPop(StatAggregate):
def __init__(self, y, x, sample=False):
self.function = 'COVAR_SAMP' if sample else 'COVAR_POP'
super(CovarPop, self).__init__(y, x)
super().__init__(y, x)


class RegrAvgX(StatAggregate):

@@ -48,7 +48,7 @@ class RegrCount(StatAggregate):
function = 'REGR_COUNT'

def __init__(self, y, x):
super(RegrCount, self).__init__(y=y, x=x, output_field=IntegerField())
super().__init__(y=y, x=x, output_field=IntegerField())

def convert_value(self, value, expression, connection, context):
if value is None:
@@ -31,7 +31,7 @@ class ArrayField(Field):
# implements it.
if hasattr(self.base_field, 'from_db_value'):
self.from_db_value = self._from_db_value
super(ArrayField, self).__init__(**kwargs)
super().__init__(**kwargs)

@property
def model(self):

@@ -46,7 +46,7 @@ class ArrayField(Field):
self.base_field.model = model

def check(self, **kwargs):
errors = super(ArrayField, self).check(**kwargs)
errors = super().check(**kwargs)
if self.base_field.remote_field:
errors.append(
checks.Error(

@@ -70,7 +70,7 @@ class ArrayField(Field):
return errors

def set_attributes_from_name(self, name):
super(ArrayField, self).set_attributes_from_name(name)
super().set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)

@property

@@ -87,7 +87,7 @@ class ArrayField(Field):
return value

def deconstruct(self):
name, path, args, kwargs = super(ArrayField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
if path == 'django.contrib.postgres.fields.array.ArrayField':
path = 'django.contrib.postgres.fields.ArrayField'
kwargs.update({

@@ -125,7 +125,7 @@ class ArrayField(Field):
return json.dumps(values)

def get_transform(self, name):
transform = super(ArrayField, self).get_transform(name)
transform = super().get_transform(name)
if transform:
return transform
if '_' not in name:

@@ -146,7 +146,7 @@ class ArrayField(Field):
return SliceTransformFactory(start, end)

def validate(self, value, model_instance):
super(ArrayField, self).validate(value, model_instance)
super().validate(value, model_instance)
for index, part in enumerate(value):
try:
self.base_field.validate(part, model_instance)

@@ -165,7 +165,7 @@ class ArrayField(Field):
)

def run_validators(self, value):
super(ArrayField, self).run_validators(value)
super().run_validators(value)
for index, part in enumerate(value):
try:
self.base_field.run_validators(part)

@@ -184,13 +184,13 @@ class ArrayField(Field):
'max_length': self.size,
}
defaults.update(kwargs)
return super(ArrayField, self).formfield(**defaults)
return super().formfield(**defaults)


@ArrayField.register_lookup
class ArrayContains(lookups.DataContains):
def as_sql(self, qn, connection):
sql, params = super(ArrayContains, self).as_sql(qn, connection)
sql, params = super().as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params

@@ -198,7 +198,7 @@ class ArrayContains(lookups.DataContains):
@ArrayField.register_lookup
class ArrayContainedBy(lookups.ContainedBy):
def as_sql(self, qn, connection):
sql, params = super(ArrayContainedBy, self).as_sql(qn, connection)
sql, params = super().as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params

@@ -206,7 +206,7 @@ class ArrayContainedBy(lookups.ContainedBy):
@ArrayField.register_lookup
class ArrayExact(Exact):
def as_sql(self, qn, connection):
sql, params = super(ArrayExact, self).as_sql(qn, connection)
sql, params = super().as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params

@@ -214,7 +214,7 @@ class ArrayExact(Exact):
@ArrayField.register_lookup
class ArrayOverlap(lookups.Overlap):
def as_sql(self, qn, connection):
sql, params = super(ArrayOverlap, self).as_sql(qn, connection)
sql, params = super().as_sql(qn, connection)
sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))
return sql, params

@@ -236,7 +236,7 @@ class ArrayLenTransform(Transform):
@ArrayField.register_lookup
class ArrayInLookup(In):
def get_prep_lookup(self):
values = super(ArrayInLookup, self).get_prep_lookup()
values = super().get_prep_lookup()
# In.process_rhs() expects values to be hashable, so convert lists
# to tuples.
prepared_values = []

@@ -251,7 +251,7 @@ class ArrayInLookup(In):
class IndexTransform(Transform):

def __init__(self, index, base_field, *args, **kwargs):
super(IndexTransform, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.index = index
self.base_field = base_field

@@ -277,7 +277,7 @@ class IndexTransformFactory:
class SliceTransform(Transform):

def __init__(self, start, end, *args, **kwargs):
super(SliceTransform, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.start = start
self.end = end
@@ -21,13 +21,13 @@ class HStoreField(Field):
return 'hstore'

def get_transform(self, name):
transform = super(HStoreField, self).get_transform(name)
transform = super().get_transform(name)
if transform:
return transform
return KeyTransformFactory(name)

def validate(self, value, model_instance):
super(HStoreField, self).validate(value, model_instance)
super().validate(value, model_instance)
for key, val in value.items():
if not isinstance(val, str) and val is not None:
raise exceptions.ValidationError(

@@ -49,10 +49,10 @@ class HStoreField(Field):
'form_class': forms.HStoreField,
}
defaults.update(kwargs)
return super(HStoreField, self).formfield(**defaults)
return super().formfield(**defaults)

def get_prep_value(self, value):
value = super(HStoreField, self).get_prep_value(value)
value = super().get_prep_value(value)

if isinstance(value, dict):
prep_value = {}

@@ -80,7 +80,7 @@ class KeyTransform(Transform):
output_field = TextField()

def __init__(self, key_name, *args, **kwargs):
super(KeyTransform, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.key_name = key_name

def as_sql(self, compiler, connection):
@@ -18,7 +18,7 @@ class JsonAdapter(Json):
"""
def __init__(self, adapted, dumps=None, encoder=None):
self.encoder = encoder
super(JsonAdapter, self).__init__(adapted, dumps=dumps)
super().__init__(adapted, dumps=dumps)

def dumps(self, obj):
options = {'cls': self.encoder} if self.encoder else {}

@@ -36,19 +36,19 @@ class JSONField(Field):
if encoder and not callable(encoder):
raise ValueError("The encoder parameter must be a callable object.")
self.encoder = encoder
super(JSONField, self).__init__(verbose_name, name, **kwargs)
super().__init__(verbose_name, name, **kwargs)

def db_type(self, connection):
return 'jsonb'

def deconstruct(self):
name, path, args, kwargs = super(JSONField, self).deconstruct()
name, path, args, kwargs = super().deconstruct()
if self.encoder is not None:
kwargs['encoder'] = self.encoder
return name, path, args, kwargs

def get_transform(self, name):
transform = super(JSONField, self).get_transform(name)
transform = super().get_transform(name)
if transform:
return transform
return KeyTransformFactory(name)

@@ -59,7 +59,7 @@ class JSONField(Field):
return value

def validate(self, value, model_instance):
super(JSONField, self).validate(value, model_instance)
super().validate(value, model_instance)
options = {'cls': self.encoder} if self.encoder else {}
try:
json.dumps(value, **options)

@@ -76,7 +76,7 @@ class JSONField(Field):
def formfield(self, **kwargs):
defaults = {'form_class': forms.JSONField}
defaults.update(kwargs)
return super(JSONField, self).formfield(**defaults)
return super().formfield(**defaults)


JSONField.register_lookup(lookups.DataContains)

@@ -91,7 +91,7 @@ class KeyTransform(Transform):
nested_operator = '#>'

def __init__(self, key_name, *args, **kwargs):
super(KeyTransform, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.key_name = key_name

def as_sql(self, compiler, connection):

@@ -129,7 +129,7 @@ class KeyTransformTextLookupMixin:
key_text_transform = KeyTextTransform(
key_transform.key_name, *key_transform.source_expressions, **key_transform.extra
)
super(KeyTransformTextLookupMixin, self).__init__(key_text_transform, *args, **kwargs)
super().__init__(key_text_transform, *args, **kwargs)


class KeyTransformIExact(KeyTransformTextLookupMixin, builtin_lookups.IExact):
@@ -20,7 +20,7 @@ class RangeField(models.Field):
# Initializing base_field here ensures that its model matches the model for self.
if hasattr(self, 'base_field'):
self.base_field = self.base_field()
super(RangeField, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)

@property
def model(self):

@@ -56,7 +56,7 @@ class RangeField(models.Field):
return value

def set_attributes_from_name(self, name):
super(RangeField, self).set_attributes_from_name(name)
super().set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)

def value_to_string(self, obj):

@@ -78,7 +78,7 @@ class RangeField(models.Field):

def formfield(self, **kwargs):
kwargs.setdefault('form_class', self.form_field)
return super(RangeField, self).formfield(**kwargs)
return super().formfield(**kwargs)


class IntegerRangeField(RangeField):
@@ -20,7 +20,7 @@ class SimpleArrayField(forms.CharField):
def __init__(self, base_field, delimiter=',', max_length=None, min_length=None, *args, **kwargs):
self.base_field = base_field
self.delimiter = delimiter
super(SimpleArrayField, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
if min_length is not None:
self.min_length = min_length
self.validators.append(ArrayMinLengthValidator(int(min_length)))

@@ -57,7 +57,7 @@ class SimpleArrayField(forms.CharField):
return values

def validate(self, value):
super(SimpleArrayField, self).validate(value)
super().validate(value)
errors = []
for index, item in enumerate(value):
try:

@@ -73,7 +73,7 @@ class SimpleArrayField(forms.CharField):
raise ValidationError(errors)

def run_validators(self, value):
super(SimpleArrayField, self).run_validators(value)
super().run_validators(value)
errors = []
for index, item in enumerate(value):
try:

@@ -94,7 +94,7 @@ class SplitArrayWidget(forms.Widget):
def __init__(self, widget, size, **kwargs):
self.widget = widget() if isinstance(widget, type) else widget
self.size = size
super(SplitArrayWidget, self).__init__(**kwargs)
super().__init__(**kwargs)

@property
def is_hidden(self):

@@ -141,7 +141,7 @@ class SplitArrayWidget(forms.Widget):
return self.widget.media

def __deepcopy__(self, memo):
obj = super(SplitArrayWidget, self).__deepcopy__(memo)
obj = super().__deepcopy__(memo)
obj.widget = copy.deepcopy(self.widget)
return obj

@@ -161,7 +161,7 @@ class SplitArrayField(forms.Field):
self.remove_trailing_nulls = remove_trailing_nulls
widget = SplitArrayWidget(widget=base_field.widget, size=size)
kwargs.setdefault('widget', widget)
super(SplitArrayField, self).__init__(**kwargs)
super().__init__(**kwargs)

def clean(self, value):
cleaned_data = []
@@ -55,4 +55,4 @@ class HStoreField(forms.CharField):
# the same as an empty dict, if the data or initial value we get
# is None, replace it w/ {}.
initial_value = self.to_python(initial)
return super(HStoreField, self).has_changed(initial_value, data)
return super().has_changed(initial_value, data)

@@ -21,7 +21,7 @@ class BaseRangeField(forms.MultiValueField):
kwargs['fields'] = [self.base_field(required=False), self.base_field(required=False)]
kwargs.setdefault('required', False)
kwargs.setdefault('require_all_fields', False)
super(BaseRangeField, self).__init__(**kwargs)
super().__init__(**kwargs)

def prepare_value(self, value):
lower_base, upper_base = self.fields

@@ -84,7 +84,7 @@ class DateRangeField(BaseRangeField):
class RangeWidget(MultiWidget):
def __init__(self, base_widget, attrs=None):
widgets = (base_widget, base_widget)
super(RangeWidget, self).__init__(widgets, attrs)
super().__init__(widgets, attrs)

def decompress(self, value):
if value:

@@ -7,4 +7,4 @@ class TransactionNow(Func):
def __init__(self, output_field=None, **extra):
if output_field is None:
output_field = DateTimeField()
super(TransactionNow, self).__init__(output_field=output_field, **extra)
super().__init__(output_field=output_field, **extra)
@@ -10,7 +10,7 @@ class BrinIndex(Index):
if pages_per_range is not None and pages_per_range <= 0:
raise ValueError('pages_per_range must be None or a positive integer')
self.pages_per_range = pages_per_range
super(BrinIndex, self).__init__(fields, name)
super().__init__(fields, name)

def __repr__(self):
if self.pages_per_range is not None:

@@ -20,15 +20,15 @@ class BrinIndex(Index):
'pages_per_range': self.pages_per_range,
}
else:
return super(BrinIndex, self).__repr__()
return super().__repr__()

def deconstruct(self):
path, args, kwargs = super(BrinIndex, self).deconstruct()
path, args, kwargs = super().deconstruct()
kwargs['pages_per_range'] = self.pages_per_range
return path, args, kwargs

def get_sql_create_template_values(self, model, schema_editor, using):
parameters = super(BrinIndex, self).get_sql_create_template_values(model, schema_editor, using=' USING brin')
parameters = super().get_sql_create_template_values(model, schema_editor, using=' USING brin')
if self.pages_per_range is not None:
parameters['extra'] = ' WITH (pages_per_range={})'.format(
schema_editor.quote_value(self.pages_per_range)) + parameters['extra']

@@ -39,4 +39,4 @@ class GinIndex(Index):
suffix = 'gin'

def create_sql(self, model, schema_editor):
return super(GinIndex, self).create_sql(model, schema_editor, using=' USING gin')
return super().create_sql(model, schema_editor, using=' USING gin')

@@ -58,7 +58,7 @@ class SearchLookup(SearchVectorExact):
def process_lhs(self, qn, connection):
if not isinstance(self.lhs.output_field, SearchVectorField):
self.lhs = SearchVector(self.lhs)
lhs, lhs_params = super(SearchLookup, self).process_lhs(qn, connection)
lhs, lhs_params = super().process_lhs(qn, connection)
return lhs, lhs_params
@@ -41,7 +41,7 @@ class HStoreExtension(CreateExtension):
self.name = 'hstore'

def database_forwards(self, app_label, schema_editor, from_state, to_state):
super(HStoreExtension, self).database_forwards(app_label, schema_editor, from_state, to_state)
super().database_forwards(app_label, schema_editor, from_state, to_state)
# Register hstore straight away as it cannot be done before the
# extension is installed, a subsequent data migration would use the
# same connection
@@ -11,7 +11,7 @@ class SearchVectorExact(Lookup):
if not hasattr(self.rhs, 'resolve_expression'):
config = getattr(self.lhs, 'config', None)
self.rhs = SearchQuery(self.rhs, config=config)
rhs, rhs_params = super(SearchVectorExact, self).process_rhs(qn, connection)
rhs, rhs_params = super().process_rhs(qn, connection)
return rhs, rhs_params

def as_sql(self, qn, connection):

@@ -51,7 +51,7 @@ class SearchVector(SearchVectorCombinable, Func):
config = None

def __init__(self, *expressions, **extra):
super(SearchVector, self).__init__(*expressions, **extra)
super().__init__(*expressions, **extra)
self.source_expressions = [
Coalesce(expression, Value('')) for expression in self.source_expressions
]

@@ -62,7 +62,7 @@ class SearchVector(SearchVectorCombinable, Func):
self.weight = weight

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
resolved = super(SearchVector, self).resolve_expression(query, allow_joins, reuse, summarize, for_save)
resolved = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
if self.config:
if not hasattr(self.config, 'resolve_expression'):
resolved.config = Value(self.config).resolve_expression(query, allow_joins, reuse, summarize, for_save)

@@ -78,7 +78,7 @@ class SearchVector(SearchVectorCombinable, Func):
template = "%(function)s({}::regconfig, %(expressions)s)".format(config_sql.replace('%', '%%'))
else:
template = self.template
sql, params = super(SearchVector, self).as_sql(compiler, connection, function=function, template=template)
sql, params = super().as_sql(compiler, connection, function=function, template=template)
extra_params = []
if self.weight:
weight_sql, extra_params = compiler.compile(self.weight)

@@ -89,7 +89,7 @@ class SearchVector(SearchVectorCombinable, Func):
class CombinedSearchVector(SearchVectorCombinable, CombinedExpression):
def __init__(self, lhs, connector, rhs, config, output_field=None):
self.config = config
super(CombinedSearchVector, self).__init__(lhs, connector, rhs, output_field)
super().__init__(lhs, connector, rhs, output_field)


class SearchQueryCombinable:

@@ -132,10 +132,10 @@ class SearchQuery(SearchQueryCombinable, Value):
def __init__(self, value, output_field=None, **extra):
self.config = extra.pop('config', self.config)
self.invert = extra.pop('invert', self.invert)
super(SearchQuery, self).__init__(value, output_field=output_field)
super().__init__(value, output_field=output_field)

def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
resolved = super(SearchQuery, self).resolve_expression(query, allow_joins, reuse, summarize, for_save)
resolved = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
if self.config:
if not hasattr(self.config, 'resolve_expression'):
resolved.config = Value(self.config).resolve_expression(query, allow_joins, reuse, summarize, for_save)

@@ -156,7 +156,7 @@ class SearchQuery(SearchQueryCombinable, Value):
return template, params

def _combine(self, other, connector, reversed, node=None):
combined = super(SearchQuery, self)._combine(other, connector, reversed, node)
combined = super()._combine(other, connector, reversed, node)
combined.output_field = SearchQueryField()
return combined

@@ -171,7 +171,7 @@ class SearchQuery(SearchQueryCombinable, Value):
class CombinedSearchQuery(SearchQueryCombinable, CombinedExpression):
def __init__(self, lhs, connector, rhs, config, output_field=None):
self.config = config
super(CombinedSearchQuery, self).__init__(lhs, connector, rhs, output_field)
super().__init__(lhs, connector, rhs, output_field)


class SearchRank(Func):

@@ -187,7 +187,7 @@ class SearchRank(Func):
if weights is not None and not hasattr(weights, 'resolve_expression'):
weights = Value(weights)
self.weights = weights
super(SearchRank, self).__init__(vector, query, **extra)
super().__init__(vector, query, **extra)

def as_sql(self, compiler, connection, function=None, template=None):
extra_params = []

@@ -197,7 +197,7 @@ class SearchRank(Func):
template = '%(function)s(%(weights)s, %(expressions)s)'
weight_sql, extra_params = compiler.compile(self.weights)
extra_context['weights'] = weight_sql
sql, params = super(SearchRank, self).as_sql(
sql, params = super().as_sql(
compiler, connection,
function=function, template=template, **extra_context
)

@@ -211,7 +211,7 @@ class TrigramBase(Func):
def __init__(self, expression, string, **extra):
if not hasattr(string, 'resolve_expression'):
string = Value(string)
super(TrigramBase, self).__init__(expression, string, output_field=FloatField(), **extra)
super().__init__(expression, string, output_field=FloatField(), **extra)


class TrigramSimilarity(TrigramBase):
@@ -18,7 +18,7 @@ class RedirectFallbackMiddleware(MiddlewareMixin):
                "You cannot use RedirectFallbackMiddleware when "
                "django.contrib.sites is not installed."
            )
        super(RedirectFallbackMiddleware, self).__init__(get_response)
        super().__init__(get_response)

    def process_response(self, request, response):
        # No need to check for a redirect for non-404 responses.

@@ -15,7 +15,7 @@ class SessionStore(SessionBase):

    def __init__(self, session_key=None):
        self._cache = caches[settings.SESSION_CACHE_ALIAS]
        super(SessionStore, self).__init__(session_key)
        super().__init__(session_key)

    @property
    def cache_key(self):

@@ -22,7 +22,7 @@ class SessionStore(DBStore):

    def __init__(self, session_key=None):
        self._cache = caches[settings.SESSION_CACHE_ALIAS]
        super(SessionStore, self).__init__(session_key)
        super().__init__(session_key)

    @property
    def cache_key(self):

@@ -57,14 +57,14 @@ class SessionStore(DBStore):
    def exists(self, session_key):
        if session_key and (self.cache_key_prefix + session_key) in self._cache:
            return True
        return super(SessionStore, self).exists(session_key)
        return super().exists(session_key)

    def save(self, must_create=False):
        super(SessionStore, self).save(must_create)
        super().save(must_create)
        self._cache.set(self.cache_key, self._session, self.get_expiry_age())

    def delete(self, session_key=None):
        super(SessionStore, self).delete(session_key)
        super().delete(session_key)
        if session_key is None:
            if self.session_key is None:
                return

@@ -15,7 +15,7 @@ class SessionStore(SessionBase):
    Implements database session store.
    """
    def __init__(self, session_key=None):
        super(SessionStore, self).__init__(session_key)
        super().__init__(session_key)

    @classmethod
    def get_model_class(cls):

@@ -21,7 +21,7 @@ class SessionStore(SessionBase):
    def __init__(self, session_key=None):
        self.storage_path = type(self)._get_storage_path()
        self.file_prefix = settings.SESSION_COOKIE_NAME
        super(SessionStore, self).__init__(session_key)
        super().__init__(session_key)

    @classmethod
    def _get_storage_path(cls):

@@ -10,11 +10,11 @@ class CurrentSiteManager(models.Manager):
    use_in_migrations = True

    def __init__(self, field_name=None):
        super(CurrentSiteManager, self).__init__()
        super().__init__()
        self.__field_name = field_name

    def check(self, **kwargs):
        errors = super(CurrentSiteManager, self).check(**kwargs)
        errors = super().check(**kwargs)
        errors.extend(self._check_field_name())
        return errors

@@ -57,5 +57,4 @@ class CurrentSiteManager(models.Manager):
        return self.__field_name

    def get_queryset(self):
        return super(CurrentSiteManager, self).get_queryset().filter(
            **{self._get_field_name() + '__id': settings.SITE_ID})
        return super().get_queryset().filter(**{self._get_field_name() + '__id': settings.SITE_ID})

@@ -71,7 +71,7 @@ class FileSystemFinder(BaseFinder):
            filesystem_storage = FileSystemStorage(location=root)
            filesystem_storage.prefix = prefix
            self.storages[root] = filesystem_storage
        super(FileSystemFinder, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def find(self, path, all=False):
        """

@@ -137,7 +137,7 @@ class AppDirectoriesFinder(BaseFinder):
                self.storages[app_config.name] = app_storage
                if app_config.name not in self.apps:
                    self.apps.append(app_config.name)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """

@@ -194,7 +194,7 @@ class BaseStorageFinder(BaseFinder):
        # Make sure we have an storage instance here.
        if not isinstance(self.storage, (Storage, LazyObject)):
            self.storage = self.storage()
        super(BaseStorageFinder, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def find(self, path, all=False):
        """

@@ -229,7 +229,7 @@ class DefaultStorageFinder(BaseStorageFinder):
    storage = default_storage

    def __init__(self, *args, **kwargs):
        super(DefaultStorageFinder, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        base_location = getattr(self.storage, 'base_location', empty)
        if not base_location:
            raise ImproperlyConfigured("The storage backend of the "

@@ -19,7 +19,7 @@ class StaticFilesHandler(WSGIHandler):
    def __init__(self, application):
        self.application = application
        self.base_url = urlparse(self.get_base_url())
        super(StaticFilesHandler, self).__init__()
        super().__init__()

    def get_base_url(self):
        utils.check_settings()

@@ -57,9 +57,9 @@ class StaticFilesHandler(WSGIHandler):
            if settings.DEBUG:
                from django.views import debug
                return debug.technical_404_response(request, e)
        return super(StaticFilesHandler, self).get_response(request)
        return super().get_response(request)

    def __call__(self, environ, start_response):
        if not self._should_handle(get_path_info(environ)):
            return self.application(environ, start_response)
        return super(StaticFilesHandler, self).__call__(environ, start_response)
        return super().__call__(environ, start_response)

@@ -20,7 +20,7 @@ class Command(BaseCommand):
    requires_system_checks = False

    def __init__(self, *args, **kwargs):
        super(Command, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        self.copied_files = []
        self.symlinked_files = []
        self.unmodified_files = []

@@ -10,7 +10,7 @@ class Command(LabelCommand):
    label = 'staticfile'

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        super().add_arguments(parser)
        parser.add_argument(
            '--first', action='store_false', dest='all',
            default=True,

@@ -8,7 +8,7 @@ class Command(RunserverCommand):
    help = "Starts a lightweight Web server for development and also serves static files."

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        super().add_arguments(parser)
        parser.add_argument(
            '--nostatic', action="store_false", dest='use_static_handler', default=True,
            help='Tells Django to NOT automatically serve static files at STATIC_URL.',

@@ -24,7 +24,7 @@ class Command(RunserverCommand):
        if static files should be served. Otherwise just returns the default
        handler.
        """
        handler = super(Command, self).get_handler(*args, **options)
        handler = super().get_handler(*args, **options)
        use_static_handler = options['use_static_handler']
        insecure_serving = options['insecure_serving']
        if use_static_handler and (settings.DEBUG or insecure_serving):

@@ -31,8 +31,7 @@ class StaticFilesStorage(FileSystemStorage):
        if base_url is None:
            base_url = settings.STATIC_URL
        check_settings(base_url)
        super(StaticFilesStorage, self).__init__(location, base_url,
                                                 *args, **kwargs)
        super().__init__(location, base_url, *args, **kwargs)
        # FileSystemStorage fallbacks to MEDIA_ROOT when location
        # is empty, so we restore the empty value.
        if not location:

@@ -44,7 +43,7 @@ class StaticFilesStorage(FileSystemStorage):
            raise ImproperlyConfigured("You're using the staticfiles app "
                                       "without having set the STATIC_ROOT "
                                       "setting to a filesystem path.")
        return super(StaticFilesStorage, self).path(name)
        return super().path(name)


class HashedFilesMixin:

@@ -58,7 +57,7 @@ class HashedFilesMixin:
    )

    def __init__(self, *args, **kwargs):
        super(HashedFilesMixin, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        self._patterns = OrderedDict()
        self.hashed_files = {}
        for extension, patterns in self.patterns:

@@ -134,7 +133,7 @@ class HashedFilesMixin:
                args += (hashed_files,)
            hashed_name = hashed_name_func(*args)

        final_url = super(HashedFilesMixin, self).url(hashed_name)
        final_url = super().url(hashed_name)

        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax

@@ -376,7 +375,7 @@ class ManifestFilesMixin(HashedFilesMixin):
    manifest_strict = True

    def __init__(self, *args, **kwargs):
        super(ManifestFilesMixin, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        self.hashed_files = self.load_manifest()

    def read_manifest(self):

@@ -403,8 +402,7 @@ class ManifestFilesMixin(HashedFilesMixin):

    def post_process(self, *args, **kwargs):
        self.hashed_files = OrderedDict()
        all_post_processed = super(ManifestFilesMixin,
                                   self).post_process(*args, **kwargs)
        all_post_processed = super().post_process(*args, **kwargs)
        for post_processed in all_post_processed:
            yield post_processed
        self.save_manifest()

@@ -465,7 +463,7 @@ class _MappingCache:

class CachedFilesMixin(HashedFilesMixin):
    def __init__(self, *args, **kwargs):
        super(CachedFilesMixin, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        try:
            self.hashed_files = _MappingCache(caches['staticfiles'])
        except InvalidCacheBackendError:

@@ -17,7 +17,7 @@ class FileBasedCache(BaseCache):
    cache_suffix = '.djcache'

    def __init__(self, dir, params):
        super(FileBasedCache, self).__init__(params)
        super().__init__(params)
        self._dir = os.path.abspath(dir)
        self._createdir()

@@ -12,7 +12,7 @@ from django.utils.functional import cached_property

class BaseMemcachedCache(BaseCache):
    def __init__(self, server, params, library, value_not_found_exception):
        super(BaseMemcachedCache, self).__init__(params)
        super().__init__(params)
        if isinstance(server, str):
            self._servers = re.split('[;,]', server)
        else:

@@ -154,9 +154,7 @@ class MemcachedCache(BaseMemcachedCache):
    "An implementation of a cache binding using python-memcached"
    def __init__(self, server, params):
        import memcache
        super(MemcachedCache, self).__init__(server, params,
                                             library=memcache,
                                             value_not_found_exception=ValueError)
        super().__init__(server, params, library=memcache, value_not_found_exception=ValueError)

    @property
    def _cache(self):

@@ -171,9 +169,7 @@ class PyLibMCCache(BaseMemcachedCache):
    "An implementation of a cache binding using pylibmc"
    def __init__(self, server, params):
        import pylibmc
        super(PyLibMCCache, self).__init__(server, params,
                                           library=pylibmc,
                                           value_not_found_exception=pylibmc.NotFound)
        super().__init__(server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound)

    # The contents of `OPTIONS` was formerly only used to set the behaviors
    # attribute, but is now passed directly to the Client constructor. As such,

@@ -54,24 +54,24 @@ class CheckMessage:

class Debug(CheckMessage):
    def __init__(self, *args, **kwargs):
        super(Debug, self).__init__(DEBUG, *args, **kwargs)
        super().__init__(DEBUG, *args, **kwargs)


class Info(CheckMessage):
    def __init__(self, *args, **kwargs):
        super(Info, self).__init__(INFO, *args, **kwargs)
        super().__init__(INFO, *args, **kwargs)


class Warning(CheckMessage):
    def __init__(self, *args, **kwargs):
        super(Warning, self).__init__(WARNING, *args, **kwargs)
        super().__init__(WARNING, *args, **kwargs)


class Error(CheckMessage):
    def __init__(self, *args, **kwargs):
        super(Error, self).__init__(ERROR, *args, **kwargs)
        super().__init__(ERROR, *args, **kwargs)


class Critical(CheckMessage):
    def __init__(self, *args, **kwargs):
        super(Critical, self).__init__(CRITICAL, *args, **kwargs)
        super().__init__(CRITICAL, *args, **kwargs)

@@ -109,7 +109,7 @@ class ValidationError(Exception):
        """

        # PY2 can't pickle naive exception: http://bugs.python.org/issue1692335.
        super(ValidationError, self).__init__(message, code, params)
        super().__init__(message, code, params)

        if isinstance(message, ValidationError):
            if hasattr(message, 'error_dict'):

@@ -137,7 +137,7 @@ class ContentFile(File):
    """
    def __init__(self, content, name=None):
        stream_class = StringIO if isinstance(content, str) else BytesIO
        super(ContentFile, self).__init__(stream_class(content), name=name)
        super().__init__(stream_class(content), name=name)
        self.size = len(content)

    def __str__(self):

@@ -24,7 +24,7 @@ class UploadedFile(File):
    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10

    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
        super(UploadedFile, self).__init__(file, name)
        super().__init__(file, name)
        self.size = size
        self.content_type = content_type
        self.charset = charset

@@ -59,7 +59,7 @@ class TemporaryUploadedFile(UploadedFile):
    """
    def __init__(self, name, content_type, size, charset, content_type_extra=None):
        file = tempfile.NamedTemporaryFile(suffix='.upload', dir=settings.FILE_UPLOAD_TEMP_DIR)
        super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
        super().__init__(file, name, content_type, size, charset, content_type_extra)

    def temporary_file_path(self):
        """

@@ -82,7 +82,7 @@ class InMemoryUploadedFile(UploadedFile):
    A file uploaded into memory (i.e. stream-to-memory).
    """
    def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None):
        super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
        super().__init__(file, name, content_type, size, charset, content_type_extra)
        self.field_name = field_name

    def open(self, mode=None):

@@ -103,8 +103,7 @@ class SimpleUploadedFile(InMemoryUploadedFile):
    """
    def __init__(self, name, content, content_type='text/plain'):
        content = content or b''
        super(SimpleUploadedFile, self).__init__(BytesIO(content), None, name,
                                                 content_type, len(content), None, None)
        super().__init__(BytesIO(content), None, name, content_type, len(content), None, None)

    @classmethod
    def from_dict(cls, file_dict):

@@ -133,13 +133,13 @@ class TemporaryFileUploadHandler(FileUploadHandler):
    Upload handler that streams data into a temporary file.
    """
    def __init__(self, *args, **kwargs):
        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def new_file(self, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super(TemporaryFileUploadHandler, self).new_file(*args, **kwargs)
        super().new_file(*args, **kwargs)
        self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):

@@ -168,7 +168,7 @@ class MemoryFileUploadHandler(FileUploadHandler):
        self.activated = True

    def new_file(self, *args, **kwargs):
        super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
        super().new_file(*args, **kwargs)
        if self.activated:
            self.file = BytesIO()
            raise StopFutureHandlers()

@@ -141,7 +141,7 @@ class WSGIHandler(base.BaseHandler):
    request_class = WSGIRequest

    def __init__(self, *args, **kwargs):
        super(WSGIHandler, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        self.load_middleware()

    def __call__(self, environ, start_response):

@@ -11,7 +11,7 @@ class EmailBackend(BaseEmailBackend):
    def __init__(self, *args, **kwargs):
        self.stream = kwargs.pop('stream', sys.stdout)
        self._lock = threading.RLock()
        super(EmailBackend, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def write_message(self, message):
        msg = message.message()

@@ -40,7 +40,7 @@ class EmailBackend(ConsoleEmailBackend):
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs['stream'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)

    def write_message(self, message):
        self.stream.write(message.message().as_bytes() + b'\n')

@@ -15,7 +15,7 @@ class EmailBackend(BaseEmailBackend):
    The dummy outbox is accessible through the outbox instance attribute.
    """
    def __init__(self, *args, **kwargs):
        super(EmailBackend, self).__init__(*args, **kwargs)
        super().__init__(*args, **kwargs)
        if not hasattr(mail, 'outbox'):
            mail.outbox = []

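For reference, a minimal runnable sketch (hypothetical classes, not taken from this commit) of the pattern applied throughout the diff: under Python 3, the zero-argument super() resolves the enclosing class and instance automatically, so it behaves the same as the explicit super(ClassName, self) spelling while remaining correct if the class is later renamed.

class Base:
    def __init__(self, name):
        self.name = name


class OldStyle(Base):
    def __init__(self, name):
        # Old spelling: pass the class and instance explicitly.
        super(OldStyle, self).__init__(name)


class NewStyle(Base):
    def __init__(self, name):
        # New spelling: Python 3 supplies __class__ and self implicitly.
        super().__init__(name)


assert OldStyle('x').name == NewStyle('x').name == 'x'
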
Some files were not shown because too many files have changed in this diff.