mirror of https://github.com/django/django.git

Refs #34233 -- Used str.removeprefix()/removesuffix().

commit 23e8868862
parent fd21f82aa8

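Most hunks below replace a manual startswith()/endswith() check or a slice with the
str.removeprefix()/str.removesuffix() methods added in Python 3.9. As a reader's aid
(not part of the commit), a minimal sketch of the equivalence, using invented sample
values::

    # Both methods are no-ops when the affix is absent, so the explicit
    # startswith()/endswith() guard around the slice can be dropped.
    def strip_prefix_old(value, prefix):
        # Pre-3.9 idiom used on the removed lines.
        if value.startswith(prefix):
            value = value[len(prefix):]
        return value

    def strip_prefix_new(value, prefix):
        # Python 3.9+ spelling used on the added lines.
        return value.removeprefix(prefix)

    for value in ("-created", "created"):
        assert strip_prefix_old(value, "-") == strip_prefix_new(value, "-") == "created"

    # removesuffix() mirrors this for trailing affixes.
    assert "author_id".removesuffix("_id") == "author"
    assert "author".removesuffix("_id") == "author"
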
@@ -261,7 +261,7 @@ class Apps:
         candidates = []
         for app_config in self.app_configs.values():
             if object_name.startswith(app_config.name):
-                subpath = object_name[len(app_config.name) :]
+                subpath = object_name.removeprefix(app_config.name)
                 if subpath == "" or subpath[0] == ".":
                     candidates.append(app_config)
         if candidates:

@@ -727,8 +727,7 @@ class BaseModelAdminChecks:
             # this format would be nice, but it's a little fiddly).
             return []
         else:
-            if field_name.startswith("-"):
-                field_name = field_name[1:]
+            field_name = field_name.removeprefix("-")
             if field_name == "pk":
                 return []
             try:

@@ -1109,11 +1109,11 @@ class ModelAdmin(BaseModelAdmin):
         # Apply keyword searches.
         def construct_search(field_name):
             if field_name.startswith("^"):
-                return "%s__istartswith" % field_name[1:]
+                return "%s__istartswith" % field_name.removeprefix("^")
             elif field_name.startswith("="):
-                return "%s__iexact" % field_name[1:]
+                return "%s__iexact" % field_name.removeprefix("=")
             elif field_name.startswith("@"):
-                return "%s__search" % field_name[1:]
+                return "%s__search" % field_name.removeprefix("@")
             # Use field_name if it includes a lookup.
             opts = queryset.model._meta
             lookup_fields = field_name.split(LOOKUP_SEP)

@@ -1,4 +1,3 @@
-import re
 from functools import update_wrapper
 from weakref import WeakSet
 

@@ -126,7 +125,7 @@ class AdminSite:
                 msg = "The model %s is already registered " % model.__name__
                 if registered_admin.endswith(".ModelAdmin"):
                     # Most likely registered without a ModelAdmin subclass.
-                    msg += "in app %r." % re.sub(r"\.ModelAdmin$", "", registered_admin)
+                    msg += "in app %r." % registered_admin.removesuffix(".ModelAdmin")
                 else:
                     msg += "with %r." % registered_admin
                 raise AlreadyRegistered(msg)

@@ -375,8 +375,8 @@ class ChangeList:
                             order_field.desc() if pfx == "-" else order_field.asc()
                         )
                     # reverse order if order_field has already "-" as prefix
-                    elif order_field.startswith("-") and pfx == "-":
-                        ordering.append(order_field[1:])
+                    elif pfx == "-" and order_field.startswith(pfx):
+                        ordering.append(order_field.removeprefix(pfx))
                     else:
                         ordering.append(pfx + order_field)
                 except (IndexError, ValueError):

@@ -474,7 +474,7 @@ class ChangeList:
                     else:
                         continue
                 elif field.startswith("-"):
-                    field = field[1:]
+                    field = field.removeprefix("-")
                     order_type = "desc"
                 else:
                     order_type = "asc"

@@ -808,8 +808,8 @@ class UnsaltedMD5PasswordHasher(BasePasswordHasher):
         }
 
     def verify(self, password, encoded):
-        if len(encoded) == 37 and encoded.startswith("md5$$"):
-            encoded = encoded[5:]
+        if len(encoded) == 37:
+            encoded = encoded.removeprefix("md5$$")
         encoded_2 = self.encode(password, "")
         return constant_time_compare(encoded, encoded_2)
 

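The hunk above also drops the encoded.startswith("md5$$") test from the guard; this is
safe because removeprefix() leaves the string untouched when the prefix is missing. A
small sanity check, with made-up hash values::

    for encoded in ("md5$$" + "a" * 32, "b" * 37):
        stripped_old = encoded[5:] if encoded.startswith("md5$$") else encoded
        stripped_new = encoded.removeprefix("md5$$")
        assert stripped_old == stripped_new
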
@@ -201,7 +201,7 @@ class SessionStore(SessionBase):
         for session_file in os.listdir(storage_path):
             if not session_file.startswith(file_prefix):
                 continue
-            session_key = session_file[len(file_prefix) :]
+            session_key = session_file.removeprefix(file_prefix)
             session = cls(session_key)
             # When an expired session is loaded, its file is removed, and a
             # new file is immediately created. Prevent this by disabling

@@ -137,7 +137,7 @@ class FileSystemFinder(BaseFinder):
             prefix = "%s%s" % (prefix, os.sep)
             if not path.startswith(prefix):
                 return None
-            path = path[len(prefix) :]
+            path = path.removeprefix(prefix)
         path = safe_join(root, path)
         if os.path.exists(path):
             return path

@@ -42,7 +42,7 @@ class StaticFilesHandlerMixin:
         """
         Return the relative path to the media file on disk for the given URL.
         """
-        relative_url = url[len(self.base_url[2]) :]
+        relative_url = url.removeprefix(self.base_url[2])
         return url2pathname(relative_url)
 
     def serve(self, request):

@@ -231,7 +231,7 @@ class HashedFilesMixin:
             if url_path.startswith("/"):
                 # Otherwise the condition above would have returned prematurely.
                 assert url_path.startswith(settings.STATIC_URL)
-                target_name = url_path[len(settings.STATIC_URL) :]
+                target_name = url_path.removeprefix(settings.STATIC_URL)
             else:
                 # We're using the posixpath module to mix paths and URLs conveniently.
                 source_name = name if os.sep == "/" else name.replace(os.sep, "/")

@@ -155,7 +155,7 @@ class PyLibMCCache(BaseMemcachedCache):
     def client_servers(self):
         output = []
         for server in self._servers:
-            output.append(server[5:] if server.startswith("unix:") else server)
+            output.append(server.removeprefix("unix:"))
         return output
 
     def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):

@@ -41,9 +41,9 @@ class ASGIRequest(HttpRequest):
         self._read_started = False
         self.resolver_match = None
         self.script_name = self.scope.get("root_path", "")
-        if self.script_name and scope["path"].startswith(self.script_name):
+        if self.script_name:
             # TODO: Better is-prefix checking, slash handling?
-            self.path_info = scope["path"][len(self.script_name) :]
+            self.path_info = scope["path"].removeprefix(self.script_name)
         else:
             self.path_info = scope["path"]
         # The Django path is different from ASGI scope path args, it should

@@ -187,7 +187,7 @@ def get_script_name(environ):
             # do the same with script_url before manipulating paths (#17133).
             script_url = _slashes_re.sub(b"/", script_url)
         path_info = get_bytes_from_wsgi(environ, "PATH_INFO", "")
-        script_name = script_url[: -len(path_info)] if path_info else script_url
+        script_name = script_url.removesuffix(path_info)
     else:
         script_name = get_bytes_from_wsgi(environ, "SCRIPT_NAME", "")
 

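In the hunk above, script_url and path_info are bytes; bytes.removesuffix() also exists
since Python 3.9, and removing an empty suffix returns the value unchanged, which covers
the old "if path_info else script_url" branch. A quick illustration with invented WSGI
values, assuming script_url ends with path_info as the surrounding code expects::

    script_url = b"/prefix/app/view/"
    for path_info in (b"/app/view/", b""):
        old = script_url[: -len(path_info)] if path_info else script_url
        new = script_url.removesuffix(path_info)
        assert old == new
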
@@ -275,7 +275,7 @@ class Command(BaseCommand):
 
         if is_relation:
             if new_name.endswith("_id"):
-                new_name = new_name[:-3]
+                new_name = new_name.removesuffix("_id")
             else:
                 field_params["db_column"] = col_name
 

@@ -182,7 +182,7 @@ class TemplateCommand(BaseCommand):
                 )
                 for old_suffix, new_suffix in self.rewrite_template_suffixes:
                     if new_path.endswith(old_suffix):
-                        new_path = new_path[: -len(old_suffix)] + new_suffix
+                        new_path = new_path.removesuffix(old_suffix) + new_suffix
                         break  # Only rewrite once
 
                 if os.path.exists(new_path):

@@ -241,8 +241,7 @@ class TemplateCommand(BaseCommand):
         if template is None:
             return os.path.join(django.__path__[0], "conf", subdir)
         else:
-            if template.startswith("file://"):
-                template = template[7:]
+            template = template.removeprefix("file://")
             expanded_template = os.path.expanduser(template)
             expanded_template = os.path.normpath(expanded_template)
             if os.path.isdir(expanded_template):

@@ -135,7 +135,7 @@ def normalize_path_patterns(patterns):
     for pattern in patterns:
         for dir_suffix in dir_suffixes:
             if pattern.endswith(dir_suffix):
-                norm_patterns.append(pattern[: -len(dir_suffix)])
+                norm_patterns.append(pattern.removesuffix(dir_suffix))
                 break
         else:
             norm_patterns.append(pattern)

@@ -87,7 +87,7 @@ class DjangoJSONEncoder(json.JSONEncoder):
             if o.microsecond:
                 r = r[:23] + r[26:]
             if r.endswith("+00:00"):
-                r = r[:-6] + "Z"
+                r = r.removesuffix("+00:00") + "Z"
             return r
         elif isinstance(o, datetime.date):
             return o.isoformat()

@@ -2109,7 +2109,7 @@ class Model(AltersData, metaclass=ModelBase):
         fields = (f for f in fields if isinstance(f, str) and f != "?")
 
         # Convert "-field" to "field".
-        fields = ((f[1:] if f.startswith("-") else f) for f in fields)
+        fields = (f.removeprefix("-") for f in fields)
 
         # Separate related fields and non-related fields.
         _fields = []

@@ -65,7 +65,7 @@ class Index:
         self.fields = list(fields)
         # A list of 2-tuple with the field name and ordering ('' or 'DESC').
         self.fields_orders = [
-            (field_name[1:], "DESC") if field_name.startswith("-") else (field_name, "")
+            (field_name.removeprefix("-"), "DESC" if field_name.startswith("-") else "")
             for field_name in self.fields
         ]
         self.name = name or ""

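In the Index hunk above, the conditional moves inside the tuple; both spellings yield the
same (field name, ordering) pairs. A short check with invented field names::

    for field_name in ("-created", "title"):
        old = (field_name[1:], "DESC") if field_name.startswith("-") else (field_name, "")
        new = (field_name.removeprefix("-"), "DESC" if field_name.startswith("-") else "")
        assert old == new
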
@@ -2163,8 +2163,7 @@ class Query(BaseExpression):
             if isinstance(item, str):
                 if item == "?":
                     continue
-                if item.startswith("-"):
-                    item = item[1:]
+                item = item.removeprefix("-")
                 if item in self.annotations:
                     continue
                 if self.extra and item in self.extra:

@@ -242,9 +242,7 @@ class HttpRequest:
                 # If location starts with '//' but has no netloc, reuse the
                 # schema and netloc from the current request. Strip the double
                 # slashes and continue as if it wasn't specified.
-                if location.startswith("//"):
-                    location = location[2:]
-                location = self._current_scheme_host + location
+                location = self._current_scheme_host + location.removeprefix("//")
             else:
                 # Join the constructed URL with the provided location, which
                 # allows the provided location to apply query strings to the

@@ -456,7 +454,7 @@ class HttpHeaders(CaseInsensitiveMapping):
     @classmethod
     def parse_header_name(cls, header):
         if header.startswith(cls.HTTP_PREFIX):
-            header = header[len(cls.HTTP_PREFIX) :]
+            header = header.removeprefix(cls.HTTP_PREFIX)
         elif header not in cls.UNPREFIXED_HEADERS:
             return None
         return header.replace("_", "-").title()

@@ -724,7 +722,7 @@ def split_domain_port(host):
     bits = host.rsplit(":", 1)
     domain, port = bits if len(bits) == 2 else (bits[0], "")
     # Remove a trailing dot (if present) from the domain.
-    domain = domain[:-1] if domain.endswith(".") else domain
+    domain = domain.removesuffix(".")
     return domain, port
 
 

@@ -104,7 +104,7 @@ def get_template_tag_modules():
 
         if hasattr(pkg, "__path__"):
             for name in get_package_libraries(pkg):
-                yield name[len(candidate) + 1 :], name
+                yield name.removeprefix(candidate).lstrip("."), name
 
 
 def get_installed_libraries():

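The hunk above is not a one-for-one swap: the old slice skipped len(candidate) + 1
characters to drop the joining dot, while the new code removes the prefix and then strips
the leading dot. An illustration with made-up module names::

    candidate = "myapp.templatetags"
    name = "myapp.templatetags.custom_tags"
    assert name[len(candidate) + 1:] == "custom_tags"
    assert name.removeprefix(candidate).lstrip(".") == "custom_tags"
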
@@ -87,7 +87,7 @@ def do_cache(parser, token):
     if len(tokens) < 3:
         raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % tokens[0])
     if len(tokens) > 3 and tokens[-1].startswith("using="):
-        cache_name = parser.compile_filter(tokens[-1][len("using=") :])
+        cache_name = parser.compile_filter(tokens[-1].removeprefix("using="))
         tokens = tokens[:-1]
     else:
         cache_name = None

@@ -1524,7 +1524,7 @@ class FSFilesHandler(WSGIHandler):
 
     def file_path(self, url):
         """Return the relative path to the file on disk for the given URL."""
-        relative_url = url[len(self.base_url[2]) :]
+        relative_url = url.removeprefix(self.base_url[2])
         return url2pathname(relative_url)
 
     def get_response(self, request):

@@ -359,7 +359,7 @@ class LocalePrefixPattern:
     def match(self, path):
         language_prefix = self.language_prefix
         if path.startswith(language_prefix):
-            return path[len(language_prefix) :], (), {}
+            return path.removeprefix(language_prefix), (), {}
         return None
 
     def check(self):

@@ -542,8 +542,7 @@ class URLResolver:
             language_code = get_language()
             for url_pattern in reversed(self.url_patterns):
                 p_pattern = url_pattern.pattern.regex.pattern
-                if p_pattern.startswith("^"):
-                    p_pattern = p_pattern[1:]
+                p_pattern = p_pattern.removeprefix("^")
                 if isinstance(url_pattern, URLPattern):
                     self._callback_strs.add(url_pattern.lookup_str)
                     bits = normalize(url_pattern.pattern.regex.pattern)

@@ -645,8 +644,7 @@ class URLResolver:
         """Join two routes, without the starting ^ in the second route."""
         if not route1:
             return route2
-        if route2.startswith("^"):
-            route2 = route2[1:]
+        route2 = route2.removeprefix("^")
         return route1 + route2
 
     def _is_callback(self, name):

@@ -276,8 +276,7 @@ class DictWrapper(dict):
         before returning, otherwise return the raw value.
         """
         use_func = key.startswith(self.prefix)
-        if use_func:
-            key = key[len(self.prefix) :]
+        key = key.removeprefix(self.prefix)
         value = super().__getitem__(key)
         if use_func:
             return self.func(value)

@@ -343,7 +343,7 @@ class Urlizer:
             # Trim wrapping punctuation.
             for opening, closing in self.wrapping_punctuation:
                 if middle.startswith(opening):
-                    middle = middle[len(opening) :]
+                    middle = middle.removeprefix(opening)
                     lead += opening
                     trimmed_something = True
                 # Keep parentheses at the end only if they're balanced.

@@ -351,7 +351,7 @@ class Urlizer:
                     middle.endswith(closing)
                     and middle.count(closing) == middle.count(opening) + 1
                 ):
-                    middle = middle[: -len(closing)]
+                    middle = middle.removesuffix(closing)
                     trail = closing + trail
                     trimmed_something = True
             # Trim trailing punctuation (after trimming wrapping punctuation,

@@ -307,7 +307,7 @@ def escape_leading_slashes(url):
     redirecting to another host.
     """
     if url.startswith("//"):
-        url = "/%2F{}".format(url[2:])
+        url = "/%2F{}".format(url.removeprefix("//"))
     return url
 
 

@@ -312,7 +312,7 @@ would override ``get_lookup`` with something like::
     def get_lookup(self, lookup_name):
         if lookup_name.startswith('x'):
             try:
-                dimension = int(lookup_name[1:])
+                dimension = int(lookup_name.removeprefix("x"))
             except ValueError:
                 pass
             else:

@@ -9,9 +9,7 @@ PATH = os.path.dirname(os.path.abspath(__file__))
 
 def fix_os_paths(x):
     if isinstance(x, str):
-        if x.startswith(PATH):
-            x = x[len(PATH) :]
-        return x.replace("\\", "/")
+        return x.removeprefix(PATH).replace("\\", "/")
     elif isinstance(x, tuple):
         return tuple(fix_os_paths(list(x)))
     elif isinstance(x, list):