Fixed #28982 -- Simplified code with and/or.
parent c2d0f8c084
commit d7b2aa24f7

@@ -94,9 +94,7 @@ class AdminSite:
 
         If a model is abstract, raise ImproperlyConfigured.
         """
-        if not admin_class:
-            admin_class = ModelAdmin
-
+        admin_class = admin_class or ModelAdmin
         if isinstance(model_or_iterable, ModelBase):
             model_or_iterable = [model_or_iterable]
         for model in model_or_iterable:

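Review note: every hunk in this commit leans on the same Python idiom — `x or y` evaluates to `x` when `x` is truthy and to `y` otherwise, so `x = x or default` is equivalent to the removed `if not x: x = default` form. A minimal standalone sketch (the function name and sentinel value are illustrative, not Django's):

    def register(admin_class=None, default='ModelAdmin-sentinel'):
        # a falsy argument (None here) falls through to the default
        admin_class = admin_class or default
        return admin_class

    assert register() == 'ModelAdmin-sentinel'
    assert register('CustomAdmin') == 'CustomAdmin'
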
@@ -86,8 +86,7 @@ class ChangeList:
         """
         Return all params except IGNORED_PARAMS.
         """
-        if not params:
-            params = self.params
+        params = params or self.params
         lookup_params = params.copy()  # a dictionary of the query string
         # Remove all the parameters that are globally and systematically
         # ignored.

@@ -71,8 +71,7 @@ def parse_rst(text, default_reference_context, thing_being_parsed=None):
         'raw_enabled': False,
         'file_insertion_enabled': False,
     }
-    if thing_being_parsed:
-        thing_being_parsed = force_bytes("<%s>" % thing_being_parsed)
+    thing_being_parsed = thing_being_parsed and force_bytes('<%s>' % thing_being_parsed)
     # Wrap ``text`` in some reST that sets the default role to ``cmsreference``,
     # then restores it.
     source = """

@@ -75,10 +75,8 @@ class TemplateTagIndexView(BaseAdminDocsView):
         for module_name, library in builtin_libs + app_libs:
             for tag_name, tag_func in library.tags.items():
                 title, body, metadata = utils.parse_docstring(tag_func.__doc__)
-                if title:
-                    title = utils.parse_rst(title, 'tag', _('tag:') + tag_name)
-                if body:
-                    body = utils.parse_rst(body, 'tag', _('tag:') + tag_name)
+                title = title and utils.parse_rst(title, 'tag', _('tag:') + tag_name)
+                body = body and utils.parse_rst(body, 'tag', _('tag:') + tag_name)
                 for key in metadata:
                     metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name)
                 tag_library = module_name.split('.')[-1]

@@ -108,10 +106,8 @@ class TemplateFilterIndexView(BaseAdminDocsView):
         for module_name, library in builtin_libs + app_libs:
             for filter_name, filter_func in library.filters.items():
                 title, body, metadata = utils.parse_docstring(filter_func.__doc__)
-                if title:
-                    title = utils.parse_rst(title, 'filter', _('filter:') + filter_name)
-                if body:
-                    body = utils.parse_rst(body, 'filter', _('filter:') + filter_name)
+                title = title and utils.parse_rst(title, 'filter', _('filter:') + filter_name)
+                body = body and utils.parse_rst(body, 'filter', _('filter:') + filter_name)
                 for key in metadata:
                     metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name)
                 tag_library = module_name.split('.')[-1]

@@ -174,10 +170,8 @@ class ViewDetailView(BaseAdminDocsView):
         if view_func is None:
             raise Http404
         title, body, metadata = utils.parse_docstring(view_func.__doc__)
-        if title:
-            title = utils.parse_rst(title, 'view', _('view:') + view)
-        if body:
-            body = utils.parse_rst(body, 'view', _('view:') + view)
+        title = title and utils.parse_rst(title, 'view', _('view:') + view)
+        body = body and utils.parse_rst(body, 'view', _('view:') + view)
         for key in metadata:
             metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
         return super().get_context_data(**{

@@ -215,10 +209,8 @@ class ModelDetailView(BaseAdminDocsView):
         opts = model._meta
 
         title, body, metadata = utils.parse_docstring(model.__doc__)
-        if title:
-            title = utils.parse_rst(title, 'model', _('model:') + model_name)
-        if body:
-            body = utils.parse_rst(body, 'model', _('model:') + model_name)
+        title = title and utils.parse_rst(title, 'model', _('model:') + model_name)
+        body = body and utils.parse_rst(body, 'model', _('model:') + model_name)
 
         # Gather fields/field descriptions.
         fields = []

@@ -275,8 +267,9 @@ class ModelDetailView(BaseAdminDocsView):
                 except StopIteration:
                     continue
                 verbose = func.__doc__
-                if verbose:
-                    verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
+                verbose = verbose and (
+                    utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
+                )
                 # If a method has no arguments, show it as a 'field', otherwise
                 # as a 'method with arguments'.
                 if func_has_no_args(func) and not func_accepts_kwargs(func) and not func_accepts_var_args(func):

@@ -88,10 +88,8 @@ class UserAdmin(admin.ModelAdmin):
         ] + super().get_urls()
 
     def lookup_allowed(self, lookup, value):
-        # See #20078: we don't want to allow any lookups involving passwords.
-        if lookup.startswith('password'):
-            return False
-        return super().lookup_allowed(lookup, value)
+        # Don't allow lookups involving passwords.
+        return not lookup.startswith('password') and super().lookup_allowed(lookup, value)
 
     @sensitive_post_parameters_m
     @csrf_protect_m

@@ -82,9 +82,7 @@ class ModelBackend:
         return user_obj._perm_cache
 
     def has_perm(self, user_obj, perm, obj=None):
-        if not user_obj.is_active:
-            return False
-        return perm in self.get_all_permissions(user_obj, obj)
+        return user_obj.is_active and perm in self.get_all_permissions(user_obj, obj)
 
     def has_module_perms(self, user_obj, app_label):
         """

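Review note: `and` short-circuits, so `get_all_permissions()` is never called for inactive users — the same early exit the removed `return False` provided. Since `is_active` and the `in` test are both booleans, the result is always a bool. A sketch with stand-in values (not Django's API):

    def has_perm(is_active, perm, perms):
        return is_active and perm in perms

    assert has_perm(False, 'vote', {'vote'}) is False  # short-circuits
    assert has_perm(True, 'vote', {'vote'}) is True
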
@@ -73,10 +73,7 @@ def make_password(password, salt=None, hasher='default'):
     if password is None:
         return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH)
     hasher = get_hasher(hasher)
-
-    if not salt:
-        salt = hasher.salt()
-
+    salt = salt or hasher.salt()
     return hasher.encode(password, salt)
 
 

@@ -244,8 +241,7 @@ class PBKDF2PasswordHasher(BasePasswordHasher):
     def encode(self, password, salt, iterations=None):
         assert password is not None
         assert salt and '$' not in salt
-        if not iterations:
-            iterations = self.iterations
+        iterations = iterations or self.iterations
         hash = pbkdf2(password, salt, iterations, digest=self.digest)
         hash = base64.b64encode(hash).decode('ascii').strip()
         return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)

@@ -164,9 +164,7 @@ def logout_then_login(request, login_url=None):
     """
     Log out the user if they are logged in. Then redirect to the login page.
     """
-    if not login_url:
-        login_url = settings.LOGIN_URL
-    login_url = resolve_url(login_url)
+    login_url = resolve_url(login_url or settings.LOGIN_URL)
     return LogoutView.as_view(next_page=login_url)(request)
 
 

@@ -186,15 +186,9 @@ class GDALBand(GDALRasterBase):
 
         Allowed input data types are bytes, memoryview, list, tuple, and array.
         """
-        if not offset:
-            offset = (0, 0)
-
-        if not size:
-            size = (self.width - offset[0], self.height - offset[1])
-
-        if not shape:
-            shape = size
-
+        offset = offset or (0, 0)
+        size = size or (self.width - offset[0], self.height - offset[1])
+        shape = shape or size
         if any(x <= 0 for x in size):
             raise ValueError('Offset too big for this raster.')
 

@@ -73,10 +73,9 @@ class GeoIP2:
             raise GeoIP2Exception('Invalid GeoIP caching option: %s' % cache)
 
         # Getting the GeoIP data path.
+        path = path or GEOIP_SETTINGS['GEOIP_PATH']
         if not path:
-            path = GEOIP_SETTINGS['GEOIP_PATH']
-            if not path:
-                raise GeoIP2Exception('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
+            raise GeoIP2Exception('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
         if not isinstance(path, str):
             raise TypeError('Invalid path type: %s' % type(path).__name__)
 

@@ -115,10 +114,7 @@ class GeoIP2:
 
     @property
     def _reader(self):
-        if self._country:
-            return self._country
-        else:
-            return self._city
+        return self._country or self._city
 
     @property
    def _country_or_city(self):

@@ -300,8 +300,7 @@ thread_context = ThreadLocalIO()
 # These module-level routines return the I/O object that is local to the
 # thread. If the I/O object does not exist yet it will be initialized.
 def wkt_r():
-    if not thread_context.wkt_r:
-        thread_context.wkt_r = _WKTReader()
+    thread_context.wkt_r = thread_context.wkt_r or _WKTReader()
     return thread_context.wkt_r
 
 

@@ -316,8 +315,7 @@ def wkt_w(dim=2, trim=False, precision=None):
 
 
 def wkb_r():
-    if not thread_context.wkb_r:
-        thread_context.wkb_r = _WKBReader()
+    thread_context.wkb_r = thread_context.wkb_r or _WKBReader()
     return thread_context.wkb_r
 
 

@@ -41,8 +41,7 @@ class GEOSFunc:
 
     def __call__(self, *args):
         # Create a context handle if one doesn't exist for this thread.
-        if not self.thread_context.handle:
-            self.thread_context.handle = GEOSContextHandle()
+        self.thread_context.handle = self.thread_context.handle or GEOSContextHandle()
         # Call the threaded GEOS routine with the pointer of the context handle
         # as the first argument.
         return self.cfunc(self.thread_context.handle.ptr, *args)

@@ -32,8 +32,7 @@ def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
     of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
     is 'default').
     """
-    if not database:
-        database = DEFAULT_DB_ALIAS
+    database = database or DEFAULT_DB_ALIAS
     connection = connections[database]
 
     if not hasattr(connection.ops, 'spatial_version'):

@@ -240,8 +240,7 @@ class SessionBase:
 
         if isinstance(expiry, datetime):
             return expiry
-        if not expiry:  # Checks both None and 0 cases
-            expiry = settings.SESSION_COOKIE_AGE
+        expiry = expiry or settings.SESSION_COOKIE_AGE  # Checks both None and 0 cases
         return modification + timedelta(seconds=expiry)
 
     def set_expiry(self, value):

@@ -41,9 +41,7 @@ class SessionStore(DBStore):
         return data
 
     def exists(self, session_key):
-        if session_key and (self.cache_key_prefix + session_key) in self._cache:
-            return True
-        return super().exists(session_key)
+        return session_key and (self.cache_key_prefix + session_key) in self._cache or super().exists(session_key)
 
     def save(self, must_create=False):
         super().save(must_create)

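Review note: `and` binds tighter than `or`, so this parses as `(session_key and key_in_cache) or super().exists(...)` — the database check runs whenever the cache probe is falsy, matching the old control flow. An illustration of the precedence only:

    probe, fallback = False, True
    assert (probe and 'cached' or fallback) is True      # falls through
    assert ('key' and 'cached' or fallback) == 'cached'  # cache hit wins
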
@@ -27,10 +27,7 @@ class SessionStore(SessionBase):
         try:
             return cls._storage_path
         except AttributeError:
-            storage_path = getattr(settings, "SESSION_FILE_PATH", None)
-            if not storage_path:
-                storage_path = tempfile.gettempdir()
-
+            storage_path = getattr(settings, 'SESSION_FILE_PATH', None) or tempfile.gettempdir()
             # Make sure the storage path is valid.
             if not os.path.isdir(storage_path):
                 raise ImproperlyConfigured(

@@ -85,9 +85,7 @@ class HashedFilesMixin:
         # `name` is the base name to construct the new hashed filename from.
         parsed_name = urlsplit(unquote(name))
         clean_name = parsed_name.path.strip()
-        if filename:
-            filename = urlsplit(unquote(filename)).path.strip()
-        filename = filename or clean_name
+        filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
         opened = False
         if content is None:
             if not self.exists(filename):

@@ -98,9 +98,7 @@ class LocMemCache(BaseCache):
 
     def _has_expired(self, key):
         exp = self._expire_info.get(key, -1)
-        if exp is None or exp > time.time():
-            return False
-        return True
+        return exp is not None and exp <= time.time()
 
     def _cull(self):
         if self._cull_frequency == 0:

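Review note: this one is De Morgan's law rather than a plain `and`/`or` shuffle: `not (exp is None or exp > now)` rewrites to `exp is not None and exp <= now`. A quick check of the equivalence:

    import time

    now = time.time()
    for exp in (None, now - 1, now + 60):
        assert (not (exp is None or exp > now)) == (exp is not None and exp <= now)
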
@@ -59,9 +59,7 @@ class File(FileProxyMixin):
         Read the file and yield chunks of ``chunk_size`` bytes (defaults to
         ``UploadedFile.DEFAULT_CHUNK_SIZE``).
         """
-        if not chunk_size:
-            chunk_size = self.DEFAULT_CHUNK_SIZE
-
+        chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE
         try:
             self.seek(0)
         except (AttributeError, UnsupportedOperation):

@@ -81,9 +79,7 @@ class File(FileProxyMixin):
         always return ``False`` -- there's no good reason to read from memory in
         chunks.
         """
-        if not chunk_size:
-            chunk_size = self.DEFAULT_CHUNK_SIZE
-        return self.size > chunk_size
+        return self.size > (chunk_size or self.DEFAULT_CHUNK_SIZE)
 
     def __iter__(self):
         # Iterate over this file-like object by newlines

@@ -176,9 +176,7 @@ def get_script_name(environ):
     # rewrites. Unfortunately not every Web server (lighttpd!) passes this
     # information through all the time, so FORCE_SCRIPT_NAME, above, is still
     # needed.
-    script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '')
-    if not script_url:
-        script_url = get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')
+    script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') or get_bytes_from_wsgi(environ, 'REDIRECT_URL', '')
 
     if script_url:
         if b'//' in script_url:

@@ -309,11 +309,7 @@ class EmailMessage:
             self.attachments.append(filename)
         else:
             assert content is not None
-
-            if not mimetype:
-                mimetype, _ = mimetypes.guess_type(filename)
-                if not mimetype:
-                    mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
+            mimetype = mimetype or mimetypes.guess_type(filename)[0] or DEFAULT_ATTACHMENT_MIME_TYPE
             basetype, subtype = mimetype.split('/', 1)
 
             if basetype == 'text':

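Review note: the chained `or` collapses the nested fallbacks into "first truthy candidate wins". `mimetypes.guess_type()` returns a `(type, encoding)` pair whose first element may be None, hence the `[0]` and the final default. A standalone check (the default string stands in for DEFAULT_ATTACHMENT_MIME_TYPE):

    import mimetypes

    DEFAULT = 'application/octet-stream'
    assert (None or mimetypes.guess_type('a.pdf')[0] or DEFAULT) == 'application/pdf'
    assert (None or mimetypes.guess_type('noext')[0] or DEFAULT) == DEFAULT
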
@@ -19,9 +19,7 @@ def supports_color():
 
     # isatty is not always implemented, #6223.
     is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
-    if not supported_platform or not is_a_tty:
-        return False
-    return True
+    return supported_platform and is_a_tty
 
 
 class Style:

@@ -502,8 +502,7 @@ class Command(BaseCommand):
                     if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
                         locale_dir = path
                         break
-                if not locale_dir:
-                    locale_dir = self.default_locale_path or NO_LOCALE_DIR
+                locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR
                 all_files.append(self.translatable_file_class(dirpath, filename, locale_dir))
         return sorted(all_files)
 

@@ -96,8 +96,7 @@ class Serializer:
                     self.handle_m2m_field(obj, field)
             self.end_object(obj)
             progress_bar.update(count)
-            if self.first:
-                self.first = False
+            self.first = self.first and False
         self.end_serialization()
         return self.getvalue()
 

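Review note: a behavioral near-miss worth flagging: `self.first = self.first and False` assigns on every iteration, whereas the removed guard assigned only when `self.first` was truthy. The result is identical here only because `self.first` is always a bool; a non-bool falsy value would be preserved rather than normalized:

    assert (True and False) is False
    assert (False and False) is False
    assert ('' and False) == ''  # non-bool falsy values pass through unchanged
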
@@ -61,9 +61,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
                 field.get_internal_type() == 'ForeignKey' and
                 field.db_constraint):
             return False
-        if self._is_limited_data_type(field):
-            return False
-        return create_index
+        return not self._is_limited_data_type(field) and create_index
 
     def _delete_composed_index(self, model, fields, *args):
         """

@@ -16,9 +16,7 @@ class DatabaseCreation(BaseDatabaseCreation):
             suffix += " ENCODING '{}'".format(encoding)
         if template:
             suffix += " TEMPLATE {}".format(self._quote_name(template))
-        if suffix:
-            suffix = "WITH" + suffix
-        return suffix
+        return suffix and "WITH" + suffix
 
     def sql_table_creation_suffix(self):
         test_settings = self.connection.settings_dict['TEST']

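Review note: `suffix and "WITH" + suffix` returns the empty string untouched when no options accumulated, and the prefixed string otherwise — the same values the removed branch produced:

    assert ('' and 'WITH' + '') == ''
    opts = " ENCODING 'UTF8'"
    assert (opts and 'WITH' + opts) == "WITH ENCODING 'UTF8'"
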
@@ -131,11 +131,9 @@ class DatabaseOperations(BaseDatabaseOperations):
             sql = []
             for sequence_info in sequences:
                 table_name = sequence_info['table']
-                column_name = sequence_info['column']
-                if not column_name:
-                    # This will be the case if it's an m2m using an autogenerated
-                    # intermediate table (see BaseDatabaseIntrospection.sequence_list)
-                    column_name = 'id'
+                # 'id' will be the case if it's an m2m using an autogenerated
+                # intermediate table (see BaseDatabaseIntrospection.sequence_list).
+                column_name = sequence_info['column'] or 'id'
                 sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % (
                     style.SQL_KEYWORD('SELECT'),
                     style.SQL_TABLE(self.quote_name(table_name)),

@@ -31,15 +31,13 @@ class Aggregate(Func):
         return source_expressions
 
     def set_source_expressions(self, exprs):
-        if self.filter:
-            self.filter = exprs.pop()
+        self.filter = self.filter and exprs.pop()
         return super().set_source_expressions(exprs)
 
     def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
         # Aggregates are not allowed in UPDATE queries, so ignore for_save
         c = super().resolve_expression(query, allow_joins, reuse, summarize)
-        if c.filter:
-            c.filter = c.filter.resolve_expression(query, allow_joins, reuse, summarize)
+        c.filter = c.filter and c.filter.resolve_expression(query, allow_joins, reuse, summarize)
         if not summarize:
             # Call Aggregate.get_source_expressions() to avoid
             # returning self.filter and including that in this loop.

@@ -549,8 +549,7 @@ class Model(metaclass=ModelBase):
         self.__dict__.update(state)
 
     def _get_pk_val(self, meta=None):
-        if not meta:
-            meta = self._meta
+        meta = meta or self._meta
         return getattr(self, meta.pk.attname)
 
     def _set_pk_val(self, value):

@@ -852,7 +851,8 @@ class Model(metaclass=ModelBase):
             # exists.
             return update_fields is not None or filtered.exists()
         if self._meta.select_on_save and not forced_update:
-            if filtered.exists():
+            return (
+                filtered.exists() and
                 # It may happen that the object is deleted from the DB right after
                 # this check, causing the subsequent UPDATE to return zero matching
                 # rows. The same result can occur in some rare cases when the

@@ -860,9 +860,8 @@ class Model(metaclass=ModelBase):
                 # successfully (a row is matched and updated). In order to
                 # distinguish these two cases, the object's existence in the
                 # database is again checked for if the UPDATE query returns 0.
-                return filtered._update(values) > 0 or filtered.exists()
-            else:
-                return False
+                (filtered._update(values) > 0 or filtered.exists())
+            )
         return filtered._update(values) > 0
 
     def _do_insert(self, manager, using, fields, update_pk, raw):

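Review note: the outer `filtered.exists() and (...)` short-circuits to False when no row matches, which is exactly what the removed `else: return False` branch did; when a row does match, the parenthesized re-check runs unchanged. A control-flow sketch with plain booleans (not the ORM API):

    def do_update(row_exists, rows_updated, exists_recheck):
        return row_exists and (rows_updated > 0 or exists_recheck)

    assert do_update(False, 0, False) is False  # the old else-branch
    assert do_update(True, 0, True) is True     # deleted-after-check re-probe
    assert do_update(True, 1, False) is True
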
@@ -694,8 +694,7 @@ class Field(RegisterLookupMixin):
         return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
 
     def set_attributes_from_name(self, name):
-        if not self.name:
-            self.name = name
+        self.name = self.name or name
         self.attname, self.column = self.get_attname_column()
         self.concrete = self.column is not None
         if self.verbose_name is None and self.name:

@@ -314,9 +314,7 @@ class FileField(Field):
         if data is not None:
             # This value will be converted to str and stored in the
             # database, so leaving False as-is is not acceptable.
-            if not data:
-                data = ''
-            setattr(instance, self.name, data)
+            setattr(instance, self.name, data or '')
 
     def formfield(self, **kwargs):
         return super().formfield(**{

@@ -177,9 +177,7 @@ class FieldGetDbPrepValueMixin:
     def get_db_prep_lookup(self, value, connection):
         # For relational fields, use the output_field of the 'field' attribute.
         field = getattr(self.lhs.output_field, 'field', None)
-        get_db_prep_value = getattr(field, 'get_db_prep_value', None)
-        if not get_db_prep_value:
-            get_db_prep_value = self.lhs.output_field.get_db_prep_value
+        get_db_prep_value = getattr(field, 'get_db_prep_value', None) or self.lhs.output_field.get_db_prep_value
         return (
             '%s',
             [get_db_prep_value(v, connection, prepared=True) for v in value]

@@ -107,8 +107,7 @@ class BaseManager:
         })
 
     def contribute_to_class(self, model, name):
-        if not self.name:
-            self.name = name
+        self.name = self.name or name
         self.model = model
 
         setattr(model, name, ManagerDescriptor(self))

@@ -521,8 +521,7 @@ class SQLCompiler:
             if grouping:
                 if distinct_fields:
                     raise NotImplementedError('annotate() + distinct(fields) is not implemented.')
-                if not order_by:
-                    order_by = self.connection.ops.force_no_ordering()
+                order_by = order_by or self.connection.ops.force_no_ordering()
                 result.append('GROUP BY %s' % ', '.join(grouping))
 
             if having:

@@ -588,8 +587,7 @@ class SQLCompiler:
         if opts is None:
             opts = self.query.get_meta()
         only_load = self.deferred_to_columns()
-        if not start_alias:
-            start_alias = self.query.get_initial_alias()
+        start_alias = start_alias or self.query.get_initial_alias()
         # The 'seen_models' is used to optimize checking the needed parent
         # alias for a given field. This also includes None -> start_alias to
         # be used by local fields.

@@ -657,8 +655,7 @@ class SQLCompiler:
         # of the field is specified.
         if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name:
             # Firstly, avoid infinite loops.
-            if not already_seen:
-                already_seen = set()
+            already_seen = already_seen or set()
             join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins)
             if join_tuple in already_seen:
                 raise FieldError('Infinite loop caused by ordering.')

@@ -680,8 +677,7 @@ class SQLCompiler:
         same input, as the prefixes of get_ordering() and get_distinct() must
         match. Executing SQL where this is not true is an error.
         """
-        if not alias:
-            alias = self.query.get_initial_alias()
+        alias = alias or self.query.get_initial_alias()
         field, targets, opts, joins, path = self.query.setup_joins(
             pieces, opts, alias)
         alias = joins[-1]

@@ -1037,8 +1033,7 @@ class SQLCompiler:
         is needed, as the filters describe an empty set. In that case, None is
         returned, to avoid any unnecessary database interaction.
         """
-        if not result_type:
-            result_type = NO_RESULTS
+        result_type = result_type or NO_RESULTS
         try:
             sql, params = self.as_sql()
             if not sql:

@@ -805,9 +805,9 @@ class Query:
         if isinstance(self.group_by, tuple):
             self.group_by = tuple([col.relabeled_clone(change_map) for col in self.group_by])
         self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
-        if self._annotations:
-            self._annotations = OrderedDict(
-                (key, col.relabeled_clone(change_map)) for key, col in self._annotations.items())
+        self._annotations = self._annotations and OrderedDict(
+            (key, col.relabeled_clone(change_map)) for key, col in self._annotations.items()
+        )
 
         # 2. Rename the alias in the internal table/alias datastructures.
         for old_alias, new_alias in change_map.items():

@@ -1061,9 +1061,7 @@ class Query:
         and get_transform().
         """
         # __exact is the default lookup if one isn't given.
-        if not lookups:
-            lookups = ['exact']
-
+        lookups = lookups or ['exact']
         for name in lookups[:-1]:
             lhs = self.try_transform(lhs, name)
         # First try get_lookup() so that the lookup takes precedence if the lhs

@@ -2050,10 +2048,10 @@ class Query:
         # used. The proper fix would be to defer all decisions where
         # is_nullable() is needed to the compiler stage, but that is not easy
         # to do currently.
-        if connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed:
-            return True
-        else:
-            return field.null
+        return (
+            connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
+            field.empty_strings_allowed
+        ) or field.null
 
 
 def get_order_dir(field, default='ASC'):

@@ -79,12 +79,9 @@ class BoundField:
         attributes passed as attrs. If a widget isn't specified, use the
         field's default widget.
         """
-        if not widget:
-            widget = self.field.widget
-
+        widget = widget or self.field.widget
         if self.field.localize:
             widget.is_localized = True
 
         attrs = attrs or {}
         attrs = self.build_widget_attrs(attrs, widget)
         if self.auto_id and 'id' not in widget.attrs:

@@ -219,8 +216,7 @@ class BoundField:
         return data
 
     def build_widget_attrs(self, attrs, widget=None):
-        if not widget:
-            widget = self.field.widget
+        widget = widget or self.field.widget
         attrs = dict(attrs)  # Copy attrs to avoid modifying the argument.
         if widget.use_required_attribute(self.initial) and self.field.required and self.form.use_required_attribute:
             attrs['required'] = True

@@ -591,11 +591,7 @@ class FileField(Field):
         return data
 
     def has_changed(self, initial, data):
-        if self.disabled:
-            return False
-        if data is None:
-            return False
-        return True
+        return not self.disabled and data is not None
 
 
 class ImageField(FileField):

@@ -376,19 +376,17 @@ class QueryDict(MultiValueDict):
 
     def __init__(self, query_string=None, mutable=False, encoding=None):
         super().__init__()
-        if not encoding:
-            encoding = settings.DEFAULT_CHARSET
-        self.encoding = encoding
+        self.encoding = encoding or settings.DEFAULT_CHARSET
         query_string = query_string or ''
         parse_qsl_kwargs = {
             'keep_blank_values': True,
             'fields_limit': settings.DATA_UPLOAD_MAX_NUMBER_FIELDS,
-            'encoding': encoding,
+            'encoding': self.encoding,
         }
         if isinstance(query_string, bytes):
             # query_string normally contains URL-encoded data, a subset of ASCII.
             try:
-                query_string = query_string.decode(encoding)
+                query_string = query_string.decode(self.encoding)
             except UnicodeDecodeError:
                 # ... but some user agents are misbehaving :-(
                 query_string = query_string.decode('iso-8859-1')

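Review note: besides folding the default, this hunk switches later reads from the local `encoding` to `self.encoding`, so the decode path sees the resolved charset rather than a possibly-None argument:

    encoding = None
    resolved = encoding or 'utf-8'  # stand-in for settings.DEFAULT_CHARSET
    assert b'a=1&b=2'.decode(resolved) == 'a=1&b=2'
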
@@ -23,8 +23,7 @@ class ConditionalGetMiddleware(MiddlewareMixin):
 
         etag = response.get('ETag')
         last_modified = response.get('Last-Modified')
-        if last_modified:
-            last_modified = parse_http_date_safe(last_modified)
+        last_modified = last_modified and parse_http_date_safe(last_modified)
 
         if etag or last_modified:
             return get_conditional_response(

@@ -236,8 +236,7 @@ def encode_file(boundary, key, file):
 
     if content_type is None:
         content_type = 'application/octet-stream'
-    if not filename:
-        filename = key
+    filename = filename or key
     return [
         to_bytes('--%s' % boundary),
         to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'

@@ -69,9 +69,7 @@ class Element:
                 other_value = other_attr
             if attr != other_attr or value != other_value:
                 return False
-        if self.children != element.children:
-            return False
-        return True
+        return self.children == element.children
 
     def __hash__(self):
         return hash((self.name,) + tuple(a for a in self.attributes))

@@ -50,9 +50,7 @@ class Approximate:
         return repr(self.val)
 
     def __eq__(self, other):
-        if self.val == other:
-            return True
-        return round(abs(self.val - other), self.places) == 0
+        return self.val == other or round(abs(self.val - other), self.places) == 0
 
 
 class ContextList(list):

@@ -300,9 +298,7 @@ def teardown_databases(old_config, verbosity, parallel=0, keepdb=False):
 
 
 def get_runner(settings, test_runner_class=None):
-    if not test_runner_class:
-        test_runner_class = settings.TEST_RUNNER
-
+    test_runner_class = test_runner_class or settings.TEST_RUNNER
     test_path = test_runner_class.split('.')
     # Allow for relative paths
     if len(test_path) > 1:

@@ -393,9 +393,7 @@ class URLResolver:
         warnings = []
         for pattern in self.url_patterns:
             warnings.extend(check_resolver(pattern))
-        if not warnings:
-            warnings = self.pattern.check()
-        return warnings
+        return warnings or self.pattern.check()
 
     def _populate(self):
         # Short-circuit if called recursively in this thread to prevent

@@ -142,12 +142,10 @@ def get_conditional_response(request, etag=None, last_modified=None, response=None):
     # Get HTTP request headers.
     if_match_etags = parse_etags(request.META.get('HTTP_IF_MATCH', ''))
     if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
-    if if_unmodified_since:
-        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
+    if_unmodified_since = if_unmodified_since and parse_http_date_safe(if_unmodified_since)
     if_none_match_etags = parse_etags(request.META.get('HTTP_IF_NONE_MATCH', ''))
     if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
-    if if_modified_since:
-        if_modified_since = parse_http_date_safe(if_modified_since)
+    if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since)
 
     # Step 1 of section 6 of RFC 7232: Test the If-Match precondition.
     if if_match_etags and not _if_match_passes(etag, if_match_etags):

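Review note: `x = x and f(x)` is the mirror idiom to `x or default`: the parser runs only for truthy headers, and a missing header (None or '') passes through unchanged. A sketch with a stub parser (hypothetical, not the real parse_http_date_safe):

    def parse_stub(value):
        raise AssertionError('must not be reached for falsy input')

    header = None
    header = header and parse_stub(header)  # short-circuits before the call
    assert header is None
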
@@ -78,8 +78,7 @@ def pbkdf2(password, salt, iterations, dklen=0, digest=None):
     """Return the hash of password using pbkdf2."""
     if digest is None:
         digest = hashlib.sha256
-    if not dklen:
-        dklen = None
+    dklen = dklen or None
     password = force_bytes(password)
     salt = force_bytes(salt)
     return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dklen)

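Review note: `dklen or None` maps the `0` default to None, which `hashlib.pbkdf2_hmac()` interprets as "use the digest's full output size":

    import hashlib

    out = hashlib.pbkdf2_hmac('sha256', b'pw', b'salt', 1, 0 or None)
    assert len(out) == hashlib.sha256().digest_size  # 32 bytes
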
@@ -89,8 +89,7 @@ def parse_time(value):
     match = time_re.match(value)
     if match:
         kw = match.groupdict()
-        if kw['microsecond']:
-            kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+        kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0')
         kw = {k: int(v) for k, v in kw.items() if v is not None}
         return datetime.time(**kw)

@@ -107,8 +106,7 @@ def parse_datetime(value):
     match = datetime_re.match(value)
     if match:
         kw = match.groupdict()
-        if kw['microsecond']:
-            kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+        kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0')
         tzinfo = kw.pop('tzinfo')
         if tzinfo == 'Z':
             tzinfo = utc

@@ -88,8 +88,7 @@ class MiddlewareMixin:
         response = None
         if hasattr(self, 'process_request'):
             response = self.process_request(request)
-        if not response:
-            response = self.get_response(request)
+        response = response or self.get_response(request)
         if hasattr(self, 'process_response'):
             response = self.process_response(request, response)
         return response

@@ -86,8 +86,7 @@ class SyndicationFeed:
                  feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs):
         def to_str(s):
             return str(s) if s is not None else s
-        if categories:
-            categories = [str(c) for c in categories]
+        categories = categories and [str(c) for c in categories]
         self.feed = {
             'title': to_str(title),
             'link': iri_to_uri(link),

@@ -117,8 +116,7 @@ class SyndicationFeed:
         """
         def to_str(s):
             return str(s) if s is not None else s
-        if categories:
-            categories = [to_str(c) for c in categories]
+        categories = categories and [to_str(c) for c in categories]
         self.items.append({
             'title': to_str(title),
             'link': iri_to_uri(link),

@@ -42,8 +42,7 @@ def format(number, decimal_sep, decimal_pos=None, grouping=0, thousand_sep='',
         int_part, dec_part = str_number, ''
     if decimal_pos is not None:
         dec_part = dec_part + ('0' * (decimal_pos - len(dec_part)))
-    if dec_part:
-        dec_part = decimal_sep + dec_part
+    dec_part = dec_part and decimal_sep + dec_part
     # grouping
     if use_grouping:
         try:

@@ -35,8 +35,7 @@ def timesince(d, now=None, reversed=False):
     if now and not isinstance(now, datetime.datetime):
         now = datetime.datetime(now.year, now.month, now.day)
 
-    if not now:
-        now = datetime.datetime.now(utc if is_aware(d) else None)
+    now = now or datetime.datetime.now(utc if is_aware(d) else None)
 
     if reversed:
         d, now = now, d

@@ -35,8 +35,7 @@ def set_language(request):
     if ((next or not request.is_ajax()) and
             not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure())):
         next = request.META.get('HTTP_REFERER')
-        if next:
-            next = unquote(next)  # HTTP_REFERER may be encoded.
+        next = next and unquote(next)  # HTTP_REFERER may be encoded.
         if not is_safe_url(url=next, allowed_hosts={request.get_host()}, require_https=request.is_secure()):
             next = '/'
     response = HttpResponseRedirect(next) if next else HttpResponse(status=204)

@@ -339,9 +339,7 @@ class SimpleRowlevelBackend:
         return False
 
     def has_module_perms(self, user, app_label):
-        if not user.is_anonymous and not user.is_active:
-            return False
-        return app_label == "app1"
+        return (user.is_anonymous or user.is_active) and app_label == 'app1'
 
     def get_all_permissions(self, user, obj=None):
         if not obj:

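Review note: De Morgan again — `not (not anonymous and not active)` becomes `anonymous or active`, so the early `return False` for authenticated-but-inactive users is preserved:

    def has_module_perms(is_anonymous, is_active, app_label):
        return (is_anonymous or is_active) and app_label == 'app1'

    assert has_module_perms(False, False, 'app1') is False  # inactive, not anonymous
    assert has_module_perms(True, False, 'app1') is True    # anonymous passes
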
@@ -51,8 +51,7 @@ class DumpDataAssertMixin:
                              natural_foreign_keys=False, natural_primary_keys=False,
                              use_base_manager=False, exclude_list=[], primary_keys=''):
         new_io = StringIO()
-        if filename:
-            filename = os.path.join(tempfile.gettempdir(), filename)
+        filename = filename and os.path.join(tempfile.gettempdir(), filename)
         management.call_command('dumpdata', *args, **{'format': format,
                                                       'stdout': new_io,
                                                       'stderr': new_io,

@@ -62,10 +62,7 @@ class TestGeom(TestObj):
         self.coords = tuplize(coords)
         if centroid:
             self.centroid = tuple(centroid)
-        if ext_ring_cs:
-            ext_ring_cs = tuplize(ext_ring_cs)
-        self.ext_ring_cs = ext_ring_cs
-
+        self.ext_ring_cs = ext_ring_cs and tuplize(ext_ring_cs)
         super().__init__(**kwargs)
 

@@ -43,9 +43,7 @@ class AuthRouter:
 
     def allow_relation(self, obj1, obj2, **hints):
         "Allow any relation if a model in Auth is involved"
-        if obj1._meta.app_label == 'auth' or obj2._meta.app_label == 'auth':
-            return True
-        return None
+        return obj1._meta.app_label == 'auth' or obj2._meta.app_label == 'auth' or None
 
     def allow_migrate(self, db, app_label, **hints):
         "Make sure the auth app only appears on the 'other' db"

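Review note: the trailing `or None` keeps the router's three-way contract — True when either model lives in auth, None (defer to other routers) otherwise, never False:

    assert (False or False or None) is None  # defer, don't veto
    assert (True or False or None) is True
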