Fixed #28982 -- Simplified code with and/or.

parent c2d0f8c084
commit d7b2aa24f7
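Every hunk below applies one of two short-circuit idioms: `x = x or default` replaces an `if not x:` assignment block, and `x = x and f(x)` replaces an `if x:` guard. A minimal standalone sketch of the `or` form (the names and the default value are illustrative, not taken from the diff):

DEFAULT_CHUNK_SIZE = 64 * 2 ** 10  # hypothetical default


def read_size(chunk_size=None):
    # Before: an explicit branch assigns the fallback.
    if not chunk_size:
        chunk_size = DEFAULT_CHUNK_SIZE
    return chunk_size


def read_size_simplified(chunk_size=None):
    # After: `or` evaluates to its left operand when that operand is truthy,
    # and to the right operand otherwise -- equivalent to the branch above.
    chunk_size = chunk_size or DEFAULT_CHUNK_SIZE
    return chunk_size


assert read_size() == read_size_simplified() == DEFAULT_CHUNK_SIZE
assert read_size(1024) == read_size_simplified(1024) == 1024

Both spellings rely on the truthiness of the left operand, so the rewrite is only valid where any falsy value, not just None, should trigger the fallback.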
@@ -94,9 +94,7 @@ class AdminSite:
 
         If a model is abstract, raise ImproperlyConfigured.
         """
-        if not admin_class:
-            admin_class = ModelAdmin
-
+        admin_class = admin_class or ModelAdmin
         if isinstance(model_or_iterable, ModelBase):
             model_or_iterable = [model_or_iterable]
         for model in model_or_iterable:
@@ -86,8 +86,7 @@ class ChangeList:
         """
         Return all params except IGNORED_PARAMS.
         """
-        if not params:
-            params = self.params
+        params = params or self.params
         lookup_params = params.copy()  # a dictionary of the query string
         # Remove all the parameters that are globally and systematically
         # ignored.
@@ -71,8 +71,7 @@ def parse_rst(text, default_reference_context, thing_being_parsed=None):
         'raw_enabled': False,
         'file_insertion_enabled': False,
     }
-    if thing_being_parsed:
-        thing_being_parsed = force_bytes("<%s>" % thing_being_parsed)
+    thing_being_parsed = thing_being_parsed and force_bytes('<%s>' % thing_being_parsed)
     # Wrap ``text`` in some reST that sets the default role to ``cmsreference``,
     # then restores it.
     source = """
@@ -75,10 +75,8 @@ class TemplateTagIndexView(BaseAdminDocsView):
             for module_name, library in builtin_libs + app_libs:
                 for tag_name, tag_func in library.tags.items():
                     title, body, metadata = utils.parse_docstring(tag_func.__doc__)
-                    if title:
-                        title = utils.parse_rst(title, 'tag', _('tag:') + tag_name)
-                    if body:
-                        body = utils.parse_rst(body, 'tag', _('tag:') + tag_name)
+                    title = title and utils.parse_rst(title, 'tag', _('tag:') + tag_name)
+                    body = body and utils.parse_rst(body, 'tag', _('tag:') + tag_name)
                     for key in metadata:
                         metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name)
                     tag_library = module_name.split('.')[-1]
@@ -108,10 +106,8 @@ class TemplateFilterIndexView(BaseAdminDocsView):
             for module_name, library in builtin_libs + app_libs:
                 for filter_name, filter_func in library.filters.items():
                     title, body, metadata = utils.parse_docstring(filter_func.__doc__)
-                    if title:
-                        title = utils.parse_rst(title, 'filter', _('filter:') + filter_name)
-                    if body:
-                        body = utils.parse_rst(body, 'filter', _('filter:') + filter_name)
+                    title = title and utils.parse_rst(title, 'filter', _('filter:') + filter_name)
+                    body = body and utils.parse_rst(body, 'filter', _('filter:') + filter_name)
                    for key in metadata:
                        metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name)
                    tag_library = module_name.split('.')[-1]
@@ -174,10 +170,8 @@ class ViewDetailView(BaseAdminDocsView):
         if view_func is None:
             raise Http404
         title, body, metadata = utils.parse_docstring(view_func.__doc__)
-        if title:
-            title = utils.parse_rst(title, 'view', _('view:') + view)
-        if body:
-            body = utils.parse_rst(body, 'view', _('view:') + view)
+        title = title and utils.parse_rst(title, 'view', _('view:') + view)
+        body = body and utils.parse_rst(body, 'view', _('view:') + view)
         for key in metadata:
             metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
         return super().get_context_data(**{
@@ -215,10 +209,8 @@ class ModelDetailView(BaseAdminDocsView):
         opts = model._meta
 
         title, body, metadata = utils.parse_docstring(model.__doc__)
-        if title:
-            title = utils.parse_rst(title, 'model', _('model:') + model_name)
-        if body:
-            body = utils.parse_rst(body, 'model', _('model:') + model_name)
+        title = title and utils.parse_rst(title, 'model', _('model:') + model_name)
+        body = body and utils.parse_rst(body, 'model', _('model:') + model_name)
 
         # Gather fields/field descriptions.
         fields = []
@@ -275,8 +267,9 @@ class ModelDetailView(BaseAdminDocsView):
                 except StopIteration:
                     continue
                 verbose = func.__doc__
-                if verbose:
-                    verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
+                verbose = verbose and (
+                    utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
+                )
                 # If a method has no arguments, show it as a 'field', otherwise
                 # as a 'method with arguments'.
                 if func_has_no_args(func) and not func_accepts_kwargs(func) and not func_accepts_var_args(func):
@@ -88,10 +88,8 @@ class UserAdmin(admin.ModelAdmin):
         ] + super().get_urls()
 
     def lookup_allowed(self, lookup, value):
-        # See #20078: we don't want to allow any lookups involving passwords.
-        if lookup.startswith('password'):
-            return False
-        return super().lookup_allowed(lookup, value)
+        # Don't allow lookups involving passwords.
+        return not lookup.startswith('password') and super().lookup_allowed(lookup, value)
 
     @sensitive_post_parameters_m
     @csrf_protect_m
@@ -82,9 +82,7 @@ class ModelBackend:
         return user_obj._perm_cache
 
     def has_perm(self, user_obj, perm, obj=None):
-        if not user_obj.is_active:
-            return False
-        return perm in self.get_all_permissions(user_obj, obj)
+        return user_obj.is_active and perm in self.get_all_permissions(user_obj, obj)
 
     def has_module_perms(self, user_obj, app_label):
         """
@@ -73,10 +73,7 @@ def make_password(password, salt=None, hasher='default'):
     if password is None:
         return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH)
     hasher = get_hasher(hasher)
-
-    if not salt:
-        salt = hasher.salt()
-
+    salt = salt or hasher.salt()
     return hasher.encode(password, salt)
 
 
@@ -244,8 +241,7 @@ class PBKDF2PasswordHasher(BasePasswordHasher):
     def encode(self, password, salt, iterations=None):
         assert password is not None
         assert salt and '$' not in salt
-        if not iterations:
-            iterations = self.iterations
+        iterations = iterations or self.iterations
         hash = pbkdf2(password, salt, iterations, digest=self.digest)
         hash = base64.b64encode(hash).decode('ascii').strip()
         return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
@@ -164,9 +164,7 @@ def logout_then_login(request, login_url=None):
     """
     Log out the user if they are logged in. Then redirect to the login page.
     """
-    if not login_url:
-        login_url = settings.LOGIN_URL
-    login_url = resolve_url(login_url)
+    login_url = resolve_url(login_url or settings.LOGIN_URL)
     return LogoutView.as_view(next_page=login_url)(request)
 
 
@@ -186,15 +186,9 @@ class GDALBand(GDALRasterBase):
 
        Allowed input data types are bytes, memoryview, list, tuple, and array.
        """
-        if not offset:
-            offset = (0, 0)
-
-        if not size:
-            size = (self.width - offset[0], self.height - offset[1])
-
-        if not shape:
-            shape = size
-
+        offset = offset or (0, 0)
+        size = size or (self.width - offset[0], self.height - offset[1])
+        shape = shape or size
         if any(x <= 0 for x in size):
             raise ValueError('Offset too big for this raster.')
 
@@ -73,10 +73,9 @@ class GeoIP2:
             raise GeoIP2Exception('Invalid GeoIP caching option: %s' % cache)
 
         # Getting the GeoIP data path.
-        if not path:
-            path = GEOIP_SETTINGS['GEOIP_PATH']
-            if not path:
-                raise GeoIP2Exception('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
+        path = path or GEOIP_SETTINGS['GEOIP_PATH']
+        if not path:
+            raise GeoIP2Exception('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
         if not isinstance(path, str):
             raise TypeError('Invalid path type: %s' % type(path).__name__)
 
@@ -115,10 +114,7 @@ class GeoIP2:
 
     @property
     def _reader(self):
-        if self._country:
-            return self._country
-        else:
-            return self._city
+        return self._country or self._city
 
     @property
     def _country_or_city(self):
@@ -300,8 +300,7 @@ thread_context = ThreadLocalIO()
 # These module-level routines return the I/O object that is local to the
 # thread. If the I/O object does not exist yet it will be initialized.
 def wkt_r():
-    if not thread_context.wkt_r:
-        thread_context.wkt_r = _WKTReader()
+    thread_context.wkt_r = thread_context.wkt_r or _WKTReader()
     return thread_context.wkt_r
 
 
@@ -316,8 +315,7 @@ def wkt_w(dim=2, trim=False, precision=None):
 
 
 def wkb_r():
-    if not thread_context.wkb_r:
-        thread_context.wkb_r = _WKBReader()
+    thread_context.wkb_r = thread_context.wkb_r or _WKBReader()
     return thread_context.wkb_r
 
 
@@ -41,8 +41,7 @@ class GEOSFunc:
 
     def __call__(self, *args):
         # Create a context handle if one doesn't exist for this thread.
-        if not self.thread_context.handle:
-            self.thread_context.handle = GEOSContextHandle()
+        self.thread_context.handle = self.thread_context.handle or GEOSContextHandle()
         # Call the threaded GEOS routine with the pointer of the context handle
         # as the first argument.
         return self.cfunc(self.thread_context.handle.ptr, *args)
@@ -32,8 +32,7 @@ def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
    of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
    is 'default').
    """
-    if not database:
-        database = DEFAULT_DB_ALIAS
+    database = database or DEFAULT_DB_ALIAS
     connection = connections[database]
 
     if not hasattr(connection.ops, 'spatial_version'):
@@ -240,8 +240,7 @@ class SessionBase:
 
         if isinstance(expiry, datetime):
             return expiry
-        if not expiry:  # Checks both None and 0 cases
-            expiry = settings.SESSION_COOKIE_AGE
+        expiry = expiry or settings.SESSION_COOKIE_AGE  # Checks both None and 0 cases
         return modification + timedelta(seconds=expiry)
 
     def set_expiry(self, value):
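The comment relocated in the hunk above ("Checks both None and 0 cases") names the main caveat of the idiom: `or` folds every falsy value (None, 0, '', empty collections) into the default, which is exactly what this call site wants. A sketch of that behavior (the setting value here is illustrative):

SESSION_COOKIE_AGE = 1209600  # hypothetical value of the setting

expiry = 0  # an explicit zero should fall back, just like None
expiry = expiry or SESSION_COOKIE_AGE
assert expiry == 1209600

Where a falsy value is meaningful and only None should fall back, this rewrite would change behavior, so an explicit `if x is None:` check has to stay.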
@@ -41,9 +41,7 @@ class SessionStore(DBStore):
         return data
 
     def exists(self, session_key):
-        if session_key and (self.cache_key_prefix + session_key) in self._cache:
-            return True
-        return super().exists(session_key)
+        return session_key and (self.cache_key_prefix + session_key) in self._cache or super().exists(session_key)
 
     def save(self, must_create=False):
         super().save(must_create)
@@ -27,10 +27,7 @@ class SessionStore(SessionBase):
         try:
             return cls._storage_path
         except AttributeError:
-            storage_path = getattr(settings, "SESSION_FILE_PATH", None)
-            if not storage_path:
-                storage_path = tempfile.gettempdir()
-
+            storage_path = getattr(settings, 'SESSION_FILE_PATH', None) or tempfile.gettempdir()
             # Make sure the storage path is valid.
             if not os.path.isdir(storage_path):
                 raise ImproperlyConfigured(
@@ -85,9 +85,7 @@ class HashedFilesMixin:
         # `name` is the base name to construct the new hashed filename from.
         parsed_name = urlsplit(unquote(name))
         clean_name = parsed_name.path.strip()
-        if filename:
-            filename = urlsplit(unquote(filename)).path.strip()
-        filename = filename or clean_name
+        filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
         opened = False
         if content is None:
             if not self.exists(filename):
@@ -98,9 +98,7 @@ class LocMemCache(BaseCache):
 
     def _has_expired(self, key):
         exp = self._expire_info.get(key, -1)
-        if exp is None or exp > time.time():
-            return False
-        return True
+        return exp is not None and exp <= time.time()
 
     def _cull(self):
         if self._cull_frequency == 0:
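The `_has_expired()` hunk above is the boolean-return variant of the cleanup: an `if`/`else` that only returns True or False collapses, by De Morgan's laws, into a single expression (`not (A or B)` equals `not A and not B`). A standalone sketch, with an illustrative expiry table:

import time

_expire_info = {'k': time.time() + 30}  # key 'k' expires 30 seconds from now


def has_expired(key):
    # Collapses:
    #     if exp is None or exp > time.time():
    #         return False
    #     return True
    exp = _expire_info.get(key, -1)
    return exp is not None and exp <= time.time()


assert has_expired('k') is False        # still 30 seconds left
assert has_expired('missing') is True   # exp == -1 is already in the past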
@@ -59,9 +59,7 @@ class File(FileProxyMixin):
         Read the file and yield chunks of ``chunk_size`` bytes (defaults to
         ``UploadedFile.DEFAULT_CHUNK_SIZE``).
         """
-        if not chunk_size:
-            chunk_size = self.DEFAULT_CHUNK_SIZE
-
+        chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE
         try:
             self.seek(0)
         except (AttributeError, UnsupportedOperation):
@@ -81,9 +79,7 @@ class File(FileProxyMixin):
         always return ``False`` -- there's no good reason to read from memory in
         chunks.
         """
-        if not chunk_size:
-            chunk_size = self.DEFAULT_CHUNK_SIZE
-        return self.size > chunk_size
+        return self.size > (chunk_size or self.DEFAULT_CHUNK_SIZE)
 
     def __iter__(self):
         # Iterate over this file-like object by newlines
@@ -176,9 +176,7 @@ def get_script_name(environ):
     # rewrites. Unfortunately not every Web server (lighttpd!) passes this
     # information through all the time, so FORCE_SCRIPT_NAME, above, is still
     # needed.
-    script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '')
-    if not script_url:
-        script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')
+    script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') or get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')
 
     if script_url:
         if b'//' in script_url:
@@ -309,11 +309,7 @@ class EmailMessage:
             self.attachments.append(filename)
         else:
             assert content is not None
-
-            if not mimetype:
-                mimetype, _ = mimetypes.guess_type(filename)
-                if not mimetype:
-                    mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
+            mimetype = mimetype or mimetypes.guess_type(filename)[0] or DEFAULT_ATTACHMENT_MIME_TYPE
             basetype, subtype = mimetype.split('/', 1)
 
             if basetype == 'text':
@@ -19,9 +19,7 @@ def supports_color():
 
     # isatty is not always implemented, #6223.
     is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
-    if not supported_platform or not is_a_tty:
-        return False
-    return True
+    return supported_platform and is_a_tty
 
 
 class Style:
@@ -502,8 +502,7 @@ class Command(BaseCommand):
                        if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
                            locale_dir = path
                            break
-                    if not locale_dir:
-                        locale_dir = self.default_locale_path or NO_LOCALE_DIR
+                    locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR
                    all_files.append(self.translatable_file_class(dirpath, filename, locale_dir))
         return sorted(all_files)
 
@@ -96,8 +96,7 @@ class Serializer:
                     self.handle_m2m_field(obj, field)
             self.end_object(obj)
             progress_bar.update(count)
-            if self.first:
-                self.first = False
+            self.first = self.first and False
         self.end_serialization()
         return self.getvalue()
 
@@ -61,9 +61,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
                 field.get_internal_type() == 'ForeignKey' and
                 field.db_constraint):
             return False
-        if self._is_limited_data_type(field):
-            return False
-        return create_index
+        return not self._is_limited_data_type(field) and create_index
 
     def _delete_composed_index(self, model, fields, *args):
         """
@@ -16,9 +16,7 @@ class DatabaseCreation(BaseDatabaseCreation):
             suffix += " ENCODING '{}'".format(encoding)
         if template:
             suffix += " TEMPLATE {}".format(self._quote_name(template))
-        if suffix:
-            suffix = "WITH" + suffix
-        return suffix
+        return suffix and "WITH" + suffix
 
     def sql_table_creation_suffix(self):
         test_settings = self.connection.settings_dict['TEST']
@@ -131,11 +131,9 @@ class DatabaseOperations(BaseDatabaseOperations):
         sql = []
         for sequence_info in sequences:
             table_name = sequence_info['table']
-            column_name = sequence_info['column']
-            if not column_name:
-                # This will be the case if it's an m2m using an autogenerated
-                # intermediate table (see BaseDatabaseIntrospection.sequence_list)
-                column_name = 'id'
+            # 'id' will be the case if it's an m2m using an autogenerated
+            # intermediate table (see BaseDatabaseIntrospection.sequence_list).
+            column_name = sequence_info['column'] or 'id'
             sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % (
                 style.SQL_KEYWORD('SELECT'),
                 style.SQL_TABLE(self.quote_name(table_name)),
@@ -31,15 +31,13 @@ class Aggregate(Func):
         return source_expressions
 
     def set_source_expressions(self, exprs):
-        if self.filter:
-            self.filter = exprs.pop()
+        self.filter = self.filter and exprs.pop()
         return super().set_source_expressions(exprs)
 
     def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
         # Aggregates are not allowed in UPDATE queries, so ignore for_save
         c = super().resolve_expression(query, allow_joins, reuse, summarize)
-        if c.filter:
-            c.filter = c.filter.resolve_expression(query, allow_joins, reuse, summarize)
+        c.filter = c.filter and c.filter.resolve_expression(query, allow_joins, reuse, summarize)
         if not summarize:
             # Call Aggregate.get_source_expressions() to avoid
             # returning self.filter and including that in this loop.
@@ -549,8 +549,7 @@ class Model(metaclass=ModelBase):
         self.__dict__.update(state)
 
     def _get_pk_val(self, meta=None):
-        if not meta:
-            meta = self._meta
+        meta = meta or self._meta
         return getattr(self, meta.pk.attname)
 
     def _set_pk_val(self, value):
@@ -852,7 +851,8 @@ class Model(metaclass=ModelBase):
             # exists.
             return update_fields is not None or filtered.exists()
         if self._meta.select_on_save and not forced_update:
-            if filtered.exists():
+            return (
+                filtered.exists() and
                 # It may happen that the object is deleted from the DB right after
                 # this check, causing the subsequent UPDATE to return zero matching
                 # rows. The same result can occur in some rare cases when the
@@ -860,9 +860,8 @@ class Model(metaclass=ModelBase):
                 # successfully (a row is matched and updated). In order to
                 # distinguish these two cases, the object's existence in the
                 # database is again checked for if the UPDATE query returns 0.
-                return filtered._update(values) > 0 or filtered.exists()
-            else:
-                return False
+                (filtered._update(values) > 0 or filtered.exists())
+            )
         return filtered._update(values) > 0
 
     def _do_insert(self, manager, using, fields, update_pk, raw):
@@ -694,8 +694,7 @@ class Field(RegisterLookupMixin):
         return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
 
     def set_attributes_from_name(self, name):
-        if not self.name:
-            self.name = name
+        self.name = self.name or name
         self.attname, self.column = self.get_attname_column()
         self.concrete = self.column is not None
         if self.verbose_name is None and self.name:
@@ -314,9 +314,7 @@ class FileField(Field):
         if data is not None:
             # This value will be converted to str and stored in the
             # database, so leaving False as-is is not acceptable.
-            if not data:
-                data = ''
-            setattr(instance, self.name, data)
+            setattr(instance, self.name, data or '')
 
     def formfield(self, **kwargs):
         return super().formfield(**{
@@ -177,9 +177,7 @@ class FieldGetDbPrepValueMixin:
     def get_db_prep_lookup(self, value, connection):
         # For relational fields, use the output_field of the 'field' attribute.
         field = getattr(self.lhs.output_field, 'field', None)
-        get_db_prep_value = getattr(field, 'get_db_prep_value', None)
-        if not get_db_prep_value:
-            get_db_prep_value = self.lhs.output_field.get_db_prep_value
+        get_db_prep_value = getattr(field, 'get_db_prep_value', None) or self.lhs.output_field.get_db_prep_value
         return (
             '%s',
             [get_db_prep_value(v, connection, prepared=True) for v in value]
@@ -107,8 +107,7 @@ class BaseManager:
         })
 
     def contribute_to_class(self, model, name):
-        if not self.name:
-            self.name = name
+        self.name = self.name or name
         self.model = model
 
         setattr(model, name, ManagerDescriptor(self))
@@ -521,8 +521,7 @@ class SQLCompiler:
             if grouping:
                 if distinct_fields:
                     raise NotImplementedError('annotate() + distinct(fields) is not implemented.')
-                if not order_by:
-                    order_by = self.connection.ops.force_no_ordering()
+                order_by = order_by or self.connection.ops.force_no_ordering()
                 result.append('GROUP BY %s' % ', '.join(grouping))
 
             if having:
@@ -588,8 +587,7 @@ class SQLCompiler:
         if opts is None:
             opts = self.query.get_meta()
         only_load = self.deferred_to_columns()
-        if not start_alias:
-            start_alias = self.query.get_initial_alias()
+        start_alias = start_alias or self.query.get_initial_alias()
         # The 'seen_models' is used to optimize checking the needed parent
         # alias for a given field. This also includes None -> start_alias to
         # be used by local fields.
@@ -657,8 +655,7 @@ class SQLCompiler:
         # of the field is specified.
         if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name:
             # Firstly, avoid infinite loops.
-            if not already_seen:
-                already_seen = set()
+            already_seen = already_seen or set()
             join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins)
             if join_tuple in already_seen:
                 raise FieldError('Infinite loop caused by ordering.')
@@ -680,8 +677,7 @@ class SQLCompiler:
         same input, as the prefixes of get_ordering() and get_distinct() must
         match. Executing SQL where this is not true is an error.
         """
-        if not alias:
-            alias = self.query.get_initial_alias()
+        alias = alias or self.query.get_initial_alias()
         field, targets, opts, joins, path = self.query.setup_joins(
             pieces, opts, alias)
         alias = joins[-1]
@@ -1037,8 +1033,7 @@ class SQLCompiler:
         is needed, as the filters describe an empty set. In that case, None is
         returned, to avoid any unnecessary database interaction.
         """
-        if not result_type:
-            result_type = NO_RESULTS
+        result_type = result_type or NO_RESULTS
         try:
             sql, params = self.as_sql()
             if not sql:
@@ -805,9 +805,9 @@ class Query:
         if isinstance(self.group_by, tuple):
             self.group_by = tuple([col.relabeled_clone(change_map) for col in self.group_by])
         self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
-        if self._annotations:
-            self._annotations = OrderedDict(
-                (key, col.relabeled_clone(change_map)) for key, col in self._annotations.items())
+        self._annotations = self._annotations and OrderedDict(
+            (key, col.relabeled_clone(change_map)) for key, col in self._annotations.items()
+        )
 
         # 2. Rename the alias in the internal table/alias datastructures.
         for old_alias, new_alias in change_map.items():
@@ -1061,9 +1061,7 @@ class Query:
        and get_transform().
        """
        # __exact is the default lookup if one isn't given.
-        if not lookups:
-            lookups = ['exact']
-
+        lookups = lookups or ['exact']
        for name in lookups[:-1]:
            lhs = self.try_transform(lhs, name)
        # First try get_lookup() so that the lookup takes precedence if the lhs
@@ -2050,10 +2048,10 @@ class Query:
         # used. The proper fix would be to defer all decisions where
         # is_nullable() is needed to the compiler stage, but that is not easy
         # to do currently.
-        if connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed:
-            return True
-        else:
-            return field.null
+        return (
+            connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
+            field.empty_strings_allowed
+        ) or field.null
 
 
 def get_order_dir(field, default='ASC'):
@@ -79,12 +79,9 @@ class BoundField:
         attributes passed as attrs. If a widget isn't specified, use the
         field's default widget.
         """
-        if not widget:
-            widget = self.field.widget
-
+        widget = widget or self.field.widget
         if self.field.localize:
             widget.is_localized = True
 
         attrs = attrs or {}
         attrs = self.build_widget_attrs(attrs, widget)
         if self.auto_id and 'id' not in widget.attrs:
@@ -219,8 +216,7 @@ class BoundField:
         return data
 
     def build_widget_attrs(self, attrs, widget=None):
-        if not widget:
-            widget = self.field.widget
+        widget = widget or self.field.widget
         attrs = dict(attrs)  # Copy attrs to avoid modifying the argument.
         if widget.use_required_attribute(self.initial) and self.field.required and self.form.use_required_attribute:
             attrs['required'] = True
@@ -591,11 +591,7 @@ class FileField(Field):
         return data
 
     def has_changed(self, initial, data):
-        if self.disabled:
-            return False
-        if data is None:
-            return False
-        return True
+        return not self.disabled and data is not None
 
 
 class ImageField(FileField):
@@ -376,19 +376,17 @@ class QueryDict(MultiValueDict):
 
     def __init__(self, query_string=None, mutable=False, encoding=None):
         super().__init__()
-        if not encoding:
-            encoding = settings.DEFAULT_CHARSET
-        self.encoding = encoding
+        self.encoding = encoding or settings.DEFAULT_CHARSET
         query_string = query_string or ''
         parse_qsl_kwargs = {
             'keep_blank_values': True,
             'fields_limit': settings.DATA_UPLOAD_MAX_NUMBER_FIELDS,
-            'encoding': encoding,
+            'encoding': self.encoding,
         }
         if isinstance(query_string, bytes):
             # query_string normally contains URL-encoded data, a subset of ASCII.
             try:
-                query_string = query_string.decode(encoding)
+                query_string = query_string.decode(self.encoding)
             except UnicodeDecodeError:
                 # ... but some user agents are misbehaving :-(
                 query_string = query_string.decode('iso-8859-1')
@@ -23,8 +23,7 @@ class ConditionalGetMiddleware(MiddlewareMixin):
 
         etag = response.get('ETag')
         last_modified = response.get('Last-Modified')
-        if last_modified:
-            last_modified = parse_http_date_safe(last_modified)
+        last_modified = last_modified and parse_http_date_safe(last_modified)
 
         if etag or last_modified:
             return get_conditional_response(
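The `and` guard in the hunk above is the complement of the `or` default: the parse runs only when the header is present, and a missing (None) header simply propagates. A runnable sketch of that idiom, using the same helper this hunk touches:

from django.utils.http import parse_http_date_safe

# Absent header: `and` short-circuits, the call never runs, None propagates.
last_modified = None
last_modified = last_modified and parse_http_date_safe(last_modified)
assert last_modified is None

# Present header: the right operand runs and its result is assigned.
last_modified = 'Wed, 21 Oct 2015 07:28:00 GMT'
last_modified = last_modified and parse_http_date_safe(last_modified)
assert isinstance(last_modified, int)  # seconds since the epoch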
@@ -236,8 +236,7 @@ def encode_file(boundary, key, file):
 
     if content_type is None:
         content_type = 'application/octet-stream'
-    if not filename:
-        filename = key
+    filename = filename or key
     return [
         to_bytes('--%s' % boundary),
         to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
@@ -69,9 +69,7 @@ class Element:
                 other_value = other_attr
             if attr != other_attr or value != other_value:
                 return False
-        if self.children != element.children:
-            return False
-        return True
+        return self.children == element.children
 
     def __hash__(self):
         return hash((self.name,) + tuple(a for a in self.attributes))
@@ -50,9 +50,7 @@ class Approximate:
         return repr(self.val)
 
     def __eq__(self, other):
-        if self.val == other:
-            return True
-        return round(abs(self.val - other), self.places) == 0
+        return self.val == other or round(abs(self.val - other), self.places) == 0
 
 
 class ContextList(list):
@@ -300,9 +298,7 @@ def teardown_databases(old_config, verbosity, parallel=0, keepdb=False):
 
 
 def get_runner(settings, test_runner_class=None):
-    if not test_runner_class:
-        test_runner_class = settings.TEST_RUNNER
-
+    test_runner_class = test_runner_class or settings.TEST_RUNNER
     test_path = test_runner_class.split('.')
     # Allow for relative paths
     if len(test_path) > 1:
@@ -393,9 +393,7 @@ class URLResolver:
         warnings = []
         for pattern in self.url_patterns:
             warnings.extend(check_resolver(pattern))
-        if not warnings:
-            warnings = self.pattern.check()
-        return warnings
+        return warnings or self.pattern.check()
 
     def _populate(self):
         # Short-circuit if called recursively in this thread to prevent
@@ -142,12 +142,10 @@ def get_conditional_response(request, etag=None, last_modified=None, response=None):
     # Get HTTP request headers.
     if_match_etags = parse_etags(request.META.get('HTTP_IF_MATCH', ''))
     if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
-    if if_unmodified_since:
-        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
+    if_unmodified_since = if_unmodified_since and parse_http_date_safe(if_unmodified_since)
     if_none_match_etags = parse_etags(request.META.get('HTTP_IF_NONE_MATCH', ''))
     if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
-    if if_modified_since:
-        if_modified_since = parse_http_date_safe(if_modified_since)
+    if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since)
 
     # Step 1 of section 6 of RFC 7232: Test the If-Match precondition.
     if if_match_etags and not _if_match_passes(etag, if_match_etags):
@@ -78,8 +78,7 @@ def pbkdf2(password, salt, iterations, dklen=0, digest=None):
     """Return the hash of password using pbkdf2."""
     if digest is None:
         digest = hashlib.sha256
-    if not dklen:
-        dklen = None
+    dklen = dklen or None
     password = force_bytes(password)
     salt = force_bytes(salt)
     return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dklen)
@@ -89,8 +89,7 @@ def parse_time(value):
     match = time_re.match(value)
     if match:
         kw = match.groupdict()
-        if kw['microsecond']:
-            kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+        kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0')
         kw = {k: int(v) for k, v in kw.items() if v is not None}
         return datetime.time(**kw)
 
@@ -107,8 +106,7 @@ def parse_datetime(value):
     match = datetime_re.match(value)
     if match:
         kw = match.groupdict()
-        if kw['microsecond']:
-            kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+        kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0')
         tzinfo = kw.pop('tzinfo')
         if tzinfo == 'Z':
             tzinfo = utc
@@ -88,8 +88,7 @@ class MiddlewareMixin:
         response = None
         if hasattr(self, 'process_request'):
             response = self.process_request(request)
-        if not response:
-            response = self.get_response(request)
+        response = response or self.get_response(request)
         if hasattr(self, 'process_response'):
             response = self.process_response(request, response)
         return response
@@ -86,8 +86,7 @@ class SyndicationFeed:
                  feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs):
         def to_str(s):
             return str(s) if s is not None else s
-        if categories:
-            categories = [str(c) for c in categories]
+        categories = categories and [str(c) for c in categories]
         self.feed = {
             'title': to_str(title),
             'link': iri_to_uri(link),
@@ -117,8 +116,7 @@ class SyndicationFeed:
         """
         def to_str(s):
             return str(s) if s is not None else s
-        if categories:
-            categories = [to_str(c) for c in categories]
+        categories = categories and [to_str(c) for c in categories]
         self.items.append({
             'title': to_str(title),
             'link': iri_to_uri(link),
@@ -42,8 +42,7 @@ def format(number, decimal_sep, decimal_pos=None, grouping=0, thousand_sep='',
         int_part, dec_part = str_number, ''
     if decimal_pos is not None:
         dec_part = dec_part + ('0' * (decimal_pos - len(dec_part)))
-    if dec_part:
-        dec_part = decimal_sep + dec_part
+    dec_part = dec_part and decimal_sep + dec_part
     # grouping
     if use_grouping:
         try:
@@ -35,8 +35,7 @@ def timesince(d, now=None, reversed=False):
     if now and not isinstance(now, datetime.datetime):
         now = datetime.datetime(now.year, now.month, now.day)
 
-    if not now:
-        now = datetime.datetime.now(utc if is_aware(d) else None)
+    now = now or datetime.datetime.now(utc if is_aware(d) else None)
 
     if reversed:
         d, now = now, d
@@ -35,8 +35,7 @@ def set_language(request):
     if ((next or not request.is_ajax()) and
             not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure())):
         next = request.META.get('HTTP_REFERER')
-        if next:
-            next = unquote(next)  # HTTP_REFERER may be encoded.
+        next = next and unquote(next)  # HTTP_REFERER may be encoded.
         if not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure()):
             next = '/'
     response = HttpResponseRedirect(next) if next else HttpResponse(status=204)
@@ -339,9 +339,7 @@ class SimpleRowlevelBackend:
         return False
 
     def has_module_perms(self, user, app_label):
-        if not user.is_anonymous and not user.is_active:
-            return False
-        return app_label == "app1"
+        return (user.is_anonymous or user.is_active) and app_label == 'app1'
 
     def get_all_permissions(self, user, obj=None):
         if not obj:
@@ -51,8 +51,7 @@ class DumpDataAssertMixin:
                          natural_foreign_keys=False, natural_primary_keys=False,
                         use_base_manager=False, exclude_list=[], primary_keys=''):
         new_io = StringIO()
-        if filename:
-            filename = os.path.join(tempfile.gettempdir(), filename)
+        filename = filename and os.path.join(tempfile.gettempdir(), filename)
         management.call_command('dumpdata', *args, **{'format': format,
                                                       'stdout': new_io,
                                                       'stderr': new_io,
@@ -62,10 +62,7 @@ class TestGeom(TestObj):
         self.coords = tuplize(coords)
         if centroid:
             self.centroid = tuple(centroid)
-        if ext_ring_cs:
-            ext_ring_cs = tuplize(ext_ring_cs)
-        self.ext_ring_cs = ext_ring_cs
-
+        self.ext_ring_cs = ext_ring_cs and tuplize(ext_ring_cs)
         super().__init__(**kwargs)
 
 
@@ -43,9 +43,7 @@ class AuthRouter:
 
     def allow_relation(self, obj1, obj2, **hints):
         "Allow any relation if a model in Auth is involved"
-        if obj1._meta.app_label == 'auth' or obj2._meta.app_label == 'auth':
-            return True
-        return None
+        return obj1._meta.app_label == 'auth' or obj2._meta.app_label == 'auth' or None
 
     def allow_migrate(self, db, app_label, **hints):
         "Make sure the auth app only appears on the 'other' db"