commit 049eebc070

Merge branch 'master' into lookups_3

Conflicts:
    django/db/models/fields/__init__.py
    django/db/models/sql/compiler.py
    django/db/models/sql/query.py
    tests/null_queries/tests.py
@@ -34,11 +34,8 @@ class LazySettings(LazyObject):
is used the first time we need any settings at all, if the user has not
previously configured the settings manually.
"""
try:
settings_module = os.environ[ENVIRONMENT_VARIABLE]
if not settings_module: # If it's set but is an empty string.
raise KeyError
except KeyError:
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
if not settings_module:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
@@ -179,9 +179,9 @@ class RelatedFieldListFilter(FieldListFilter):
self.title = self.lookup_title

def has_output(self):
if (isinstance(self.field, models.related.RelatedObject)
and self.field.field.null or hasattr(self.field, 'rel')
and self.field.null):
if (isinstance(self.field, models.related.RelatedObject) and
self.field.field.null or hasattr(self.field, 'rel') and
self.field.null):
extra = 1
else:
extra = 0

@@ -206,9 +206,9 @@ class RelatedFieldListFilter(FieldListFilter):
}, [self.lookup_kwarg_isnull]),
'display': val,
}
if (isinstance(self.field, models.related.RelatedObject)
and self.field.field.null or hasattr(self.field, 'rel')
and self.field.null):
if (isinstance(self.field, models.related.RelatedObject) and
self.field.field.null or hasattr(self.field, 'rel') and
self.field.null):
yield {
'selected': bool(self.lookup_val_isnull),
'query_string': cl.get_query_string({
@@ -30,7 +30,7 @@ checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)

class AdminForm(object):
def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None):
self.form, self.fieldsets = form, normalize_fieldsets(fieldsets)
self.form, self.fieldsets = form, fieldsets
self.prepopulated_fields = [{
'field': form[field_name],
'dependencies': [form[f] for f in dependencies]

@@ -42,7 +42,8 @@ class AdminForm(object):

def __iter__(self):
for name, options in self.fieldsets:
yield Fieldset(self.form, name,
yield Fieldset(
self.form, name,
readonly_fields=self.readonly_fields,
model_admin=self.model_admin,
**options

@@ -328,32 +329,11 @@ class AdminErrorList(forms.utils.ErrorList):
Stores all errors for the form/formsets in an add/change stage view.
"""
def __init__(self, form, inline_formsets):
super(AdminErrorList, self).__init__()

if form.is_bound:
self.extend(list(six.itervalues(form.errors)))
for inline_formset in inline_formsets:
self.extend(inline_formset.non_form_errors())
for errors_in_inline_form in inline_formset.errors:
self.extend(list(six.itervalues(errors_in_inline_form)))


def normalize_fieldsets(fieldsets):
"""
Make sure the keys in fieldset dictionaries are strings. Returns the
normalized data.
"""
result = []
for name, options in fieldsets:
result.append((name, normalize_dictionary(options)))
return result


def normalize_dictionary(data_dict):
"""
Converts all the keys in "data_dict" to strings. The keys must be
convertible using str().
"""
for key, value in data_dict.items():
if not isinstance(key, str):
del data_dict[key]
data_dict[str(key)] = value
return data_dict
@@ -1520,7 +1520,8 @@ class ModelAdmin(BaseModelAdmin):
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', cl.result_count)

context = dict(self.admin_site.each_context(),
context = dict(
self.admin_site.each_context(),
module_name=force_text(opts.verbose_name_plural),
selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
selection_note_all=selection_note_all % {'total_count': cl.result_count},

@@ -1587,7 +1588,8 @@ class ModelAdmin(BaseModelAdmin):
else:
title = _("Are you sure?")

context = dict(self.admin_site.each_context(),
context = dict(
self.admin_site.each_context(),
title=title,
object_name=object_name,
object=obj,
@@ -398,7 +398,8 @@ class AdminSite(object):
for app in app_list:
app['models'].sort(key=lambda x: x['name'])

context = dict(self.each_context(),
context = dict(
self.each_context(),
title=self.index_title,
app_list=app_list,
)
@@ -9,7 +9,7 @@
<a href="{% url 'admin:index' %}">{% trans 'Home' %}</a>
›
{% for app in app_list %}
{% blocktrans with app.name as name %}{{ name }}{% endblocktrans %}
{{ app.name }}
{% endfor %}
</div>
{% endblock %}
@@ -17,9 +17,7 @@
<div class="app-{{ app.app_label }} module">
<table>
<caption>
<a href="{{ app.app_url }}" class="section" title="{% blocktrans with name=app.name %}Models in the {{ name }} application{% endblocktrans %}">
{% blocktrans with name=app.name %}{{ name }}{% endblocktrans %}
</a>
<a href="{{ app.app_url }}" class="section" title="{% blocktrans with name=app.name %}Models in the {{ name }} application{% endblocktrans %}">{{ app.name }}</a>
</caption>
{% for model in app.models %}
<tr class="model-{{ model.object_name|lower }}">
@@ -95,7 +95,8 @@ def result_headers(cl):
"""
ordering_field_columns = cl.get_ordering_field_columns()
for i, field_name in enumerate(cl.list_display):
text, attr = label_for_field(field_name, cl.model,
text, attr = label_for_field(
field_name, cl.model,
model_admin=cl.model_admin,
return_attr=True
)
@@ -32,8 +32,7 @@ def submit_row(context):
save_as = context['save_as']
ctx = {
'opts': opts,
'show_delete_link': (not is_popup and context['has_delete_permission']
and change and context.get('show_delete', True)),
'show_delete_link': not is_popup and context['has_delete_permission'] and change and context.get('show_delete', True),
'show_save_as_new': not is_popup and change and save_as,
'show_save_and_add_another': context['has_add_permission'] and not is_popup and (not save_as or context['add']),
'show_save_and_continue': not is_popup and context['has_change_permission'],
@@ -1,3 +1,4 @@
from django.core.apps import app_cache
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.db.models.fields import FieldDoesNotExist

@@ -15,9 +16,9 @@ __all__ = ['BaseValidator', 'InlineValidator']

class BaseValidator(object):
def __init__(self):
# Before we can introspect models, they need to be fully loaded so that
# inter-relations are set up correctly. We force that here.
models.get_apps()
# Before we can introspect models, they need the app cache to be fully
# loaded so that inter-relations are set up correctly.
app_cache.populate()

def validate(self, cls, model):
for m in dir(self):
@@ -9,7 +9,7 @@ from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.utils import six
from django.utils.deprecation import RenameMethodsBase
from django.utils.encoding import force_str, force_text
from django.utils.encoding import force_text
from django.utils.translation import ugettext, ugettext_lazy
from django.utils.http import urlencode

@@ -142,14 +142,7 @@ class ChangeList(six.with_metaclass(RenameChangeListMethods)):
lookup_params = self.get_filters_params()
use_distinct = False

# Normalize the types of keys
for key, value in lookup_params.items():
if not isinstance(key, str):
# 'key' will be used as a keyword argument later, so Python
# requires it to be a string.
del lookup_params[key]
lookup_params[force_str(key)] = value

if not self.model_admin.lookup_allowed(key, value):
raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)

@@ -224,7 +217,7 @@ class ChangeList(six.with_metaclass(RenameChangeListMethods)):
# Perform a slight optimization:
# full_result_count is equal to paginator.count if no filters
# were applied
if self.get_filters_params():
if self.get_filters_params() or self.params.get(SEARCH_VAR):
full_result_count = self.root_queryset.count()
else:
full_result_count = result_count
@@ -153,10 +153,13 @@ class ForeignKeyRawIdWidget(forms.TextInput):
extra = []
if rel_to in self.admin_site._registry:
# The related object is registered with the same AdminSite
related_url = reverse('admin:%s_%s_changelist' %
(rel_to._meta.app_label,
rel_to._meta.model_name),
current_app=self.admin_site.name)
related_url = reverse(
'admin:%s_%s_changelist' % (
rel_to._meta.app_label,
rel_to._meta.model_name,
),
current_app=self.admin_site.name,
)

params = self.url_parameters()
if params:

@@ -167,10 +170,10 @@ class ForeignKeyRawIdWidget(forms.TextInput):
attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook.
# TODO: "lookup_id_" is hard-coded here. This should instead use
# the correct API to determine the ID dynamically.
extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> '
% (related_url, url, name))
extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>'
% (static('admin/img/selector-search.gif'), _('Lookup')))
extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' %
(related_url, url, name))
extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>' %
(static('admin/img/selector-search.gif'), _('Lookup')))
output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra
if value:
output.append(self.label_for_value(value))
@@ -1 +0,0 @@
# Empty models.py to allow for specifying admindocs as a test label.
@@ -15,15 +15,12 @@
{% block content %}
<h1>{% blocktrans %}Template: "{{ name }}"{% endblocktrans %}</h1>

{% regroup templates|dictsort:"site_id" by site as templates_by_site %}
{% for group in templates_by_site %}
<h2>{% blocktrans with group.grouper as grouper %}Search path for template "{{ name }}" on {{ grouper }}:{% endblocktrans %}</h2>
<ol>
{% for template in group.list|dictsort:"order" %}
<h2>{% blocktrans %}Search path for template "{{ name }}":{% endblocktrans %}</h2>
<ol>
{% for template in templates|dictsort:"order" %}
<li><code>{{ template.file }}</code>{% if not template.exists %} <em>{% trans '(does not exist)' %}</em>{% endif %}</li>
{% endfor %}
</ol>
{% endfor %}
</ol>

<p class="small"><a href="{% url 'django-admindocs-docroot' %}">‹ {% trans 'Back to Documentation' %}</a></p>
{% endblock %}
@@ -15,29 +15,40 @@

<h1>{% trans 'View documentation' %}</h1>

{% regroup views|dictsort:"site_id" by site as views_by_site %}
{% regroup views|dictsort:'namespace' by namespace as views_by_ns %}

<div id="content-related" class="sidebar">
<div class="module">
<h2>{% trans 'Jump to site' %}</h2>
<h2>{% trans 'Jump to namespace' %}</h2>
<ul>
{% for site_views in views_by_site %}
<li><a href="#site{{ site_views.grouper.id }}">{{ site_views.grouper.name }}</a></li>
{% endfor %}
{% for ns_views in views_by_ns %}
<li><a href="#ns|{{ ns_views.grouper }}">
{% if ns_views.grouper %}{{ ns_views.grouper }}
{% else %}{% trans "Empty namespace" %}{% endif %}
</a></li>
{% endfor %}
</ul>
</div>
</div>

<div id="content-main">

{% for site_views in views_by_site %}
{% for ns_views in views_by_ns %}
<div class="module">
<h2 id="site{{ site_views.grouper.id }}">{% blocktrans with site_views.grouper.name as name %}Views by URL on {{ name }}{% endblocktrans %}</h2>
<h2 id="ns|{{ ns_views.grouper }}">
{% if ns_views.grouper %}
{% blocktrans with ns_views.grouper as name %}Views by namespace {{ name }}{% endblocktrans %}
{% else %}
{% blocktrans %}Views by empty namespace{% endblocktrans %}
{% endif %}
</h2>

{% for view in site_views.list|dictsort:"url" %}
{% for view in ns_views.list|dictsort:"url" %}
{% ifchanged %}
<h3><a href="{% url 'django-admindocs-views-detail' view=view.full_name %}">{{ view.url }}</a></h3>
<p class="small quiet">{% blocktrans with view.full_name as name %}View function: {{ name }}{% endblocktrans %}</p>
<p class="small quiet">{% blocktrans with view.full_name as full_name and view.url_name as url_name %}
View function: <code>{{ full_name }}</code>. Name: <code>{{ url_name }}</code>.
{% endblocktrans %}</p>
<p>{{ view.title }}</p>
<hr />
{% endifchanged %}
@@ -21,7 +21,8 @@ class TestFieldType(unittest.TestCase):
pass

def test_field_name(self):
self.assertRaises(AttributeError,
self.assertRaises(
AttributeError,
views.get_readable_field_data_type, "NotAField"
)
@@ -4,38 +4,29 @@ from django.contrib.admindocs import views
urlpatterns = patterns('',
url('^$',
views.BaseAdminDocsView.as_view(template_name='admin_doc/index.html'),
name='django-admindocs-docroot'
),
name='django-admindocs-docroot'),
url('^bookmarklets/$',
views.BookmarkletsView.as_view(),
name='django-admindocs-bookmarklets'
),
name='django-admindocs-bookmarklets'),
url('^tags/$',
views.TemplateTagIndexView.as_view(),
name='django-admindocs-tags'
),
name='django-admindocs-tags'),
url('^filters/$',
views.TemplateFilterIndexView.as_view(),
name='django-admindocs-filters'
),
name='django-admindocs-filters'),
url('^views/$',
views.ViewIndexView.as_view(),
name='django-admindocs-views-index'
),
name='django-admindocs-views-index'),
url('^views/(?P<view>[^/]+)/$',
views.ViewDetailView.as_view(),
name='django-admindocs-views-detail'
),
name='django-admindocs-views-detail'),
url('^models/$',
views.ModelIndexView.as_view(),
name='django-admindocs-models-index'
),
name='django-admindocs-models-index'),
url('^models/(?P<app_label>[^\.]+)\.(?P<model_name>[^/]+)/$',
views.ModelDetailView.as_view(),
name='django-admindocs-models-detail'
),
name='django-admindocs-models-detail'),
url('^templates/(?P<template>.*)/$',
views.TemplateDetailView.as_view(),
name='django-admindocs-templates'
),
name='django-admindocs-templates'),
)
@@ -2,17 +2,18 @@ from importlib import import_module
import inspect
import os
import re
import warnings

from django import template
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.core.apps import app_cache
from django.db import models
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.core.exceptions import ViewDoesNotExist
from django.http import Http404
from django.core import urlresolvers
from django.contrib.admindocs import utils
from django.contrib.sites.models import Site
from django.utils.decorators import method_decorator
from django.utils._os import upath
from django.utils import six

@@ -22,10 +23,10 @@ from django.views.generic import TemplateView
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')


class GenericSite(object):
domain = 'example.com'
name = 'my site'
if getattr(settings, 'ADMIN_FOR', None):
warnings.warn('The ADMIN_FOR setting has been removed, you can remove '
'this setting from your configuration.', DeprecationWarning,
stacklevel=2)


class BaseAdminDocsView(TemplateView):
@@ -128,25 +129,16 @@ class ViewIndexView(BaseAdminDocsView):
template_name = 'admin_doc/view_index.html'

def get_context_data(self, **kwargs):
if settings.ADMIN_FOR:
settings_modules = [import_module(m) for m in settings.ADMIN_FOR]
else:
settings_modules = [settings]

views = []
for settings_mod in settings_modules:
urlconf = import_module(settings_mod.ROOT_URLCONF)
urlconf = import_module(settings.ROOT_URLCONF)
view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
if Site._meta.installed:
site_obj = Site.objects.get(pk=settings_mod.SITE_ID)
else:
site_obj = GenericSite()
for (func, regex) in view_functions:
for (func, regex, namespace, name) in view_functions:
views.append({
'full_name': '%s.%s' % (func.__module__, getattr(func, '__name__', func.__class__.__name__)),
'site_id': settings_mod.SITE_ID,
'site': site_obj,
'url': simplify_regex(regex),
'url_name': ':'.join((namespace or []) + (name and [name] or [])),
'namespace': ':'.join((namespace or [])),
'name': name,
})
kwargs.update({'views': views})
return super(ViewIndexView, self).get_context_data(**kwargs)
@@ -182,7 +174,7 @@ class ModelIndexView(BaseAdminDocsView):
template_name = 'admin_doc/model_index.html'

def get_context_data(self, **kwargs):
m_list = [m._meta for m in models.get_models()]
m_list = [m._meta for m in app_cache.get_models()]
kwargs.update({'models': m_list})
return super(ModelIndexView, self).get_context_data(**kwargs)
@@ -193,17 +185,12 @@ class ModelDetailView(BaseAdminDocsView):
def get_context_data(self, **kwargs):
# Get the model class.
try:
app_mod = models.get_app(self.kwargs['app_label'])
except ImproperlyConfigured:
raise Http404(_("App %r not found") % self.kwargs['app_label'])
model = None
for m in models.get_models(app_mod):
if m._meta.model_name == self.kwargs['model_name']:
model = m
break
app_cache.get_app_config(self.kwargs['app_label'])
except LookupError:
raise Http404(_("App %(app_label)r not found") % self.kwargs)
model = app_cache.get_model(self.kwargs['app_label'], self.kwargs['model_name'])
if model is None:
raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % {
'model_name': self.kwargs['model_name'], 'app_label': self.kwargs['app_label']})
raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs)

opts = model._meta
@@ -296,21 +283,13 @@ class TemplateDetailView(BaseAdminDocsView):
def get_context_data(self, **kwargs):
template = self.kwargs['template']
templates = []
for site_settings_module in settings.ADMIN_FOR:
settings_mod = import_module(site_settings_module)
if Site._meta.installed:
site_obj = Site.objects.get(pk=settings_mod.SITE_ID)
else:
site_obj = GenericSite()
for dir in settings_mod.TEMPLATE_DIRS:
for dir in settings.TEMPLATE_DIRS:
template_file = os.path.join(dir, template)
templates.append({
'file': template_file,
'exists': os.path.exists(template_file),
'contents': lambda: open(template_file).read() if os.path.exists(template_file) else '',
'site_id': settings_mod.SITE_ID,
'site': site_obj,
'order': list(settings_mod.TEMPLATE_DIRS).index(dir),
'order': list(settings.TEMPLATE_DIRS).index(dir),
})
kwargs.update({
'name': template,
@@ -360,7 +339,7 @@ def get_readable_field_data_type(field):
return field.description % field.__dict__


def extract_views_from_urlpatterns(urlpatterns, base=''):
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
"""
Return a list of views from a list of urlpatterns.
@@ -373,10 +352,15 @@ def extract_views_from_urlpatterns(urlpatterns, base=''):
patterns = p.url_patterns
except ImportError:
continue
views.extend(extract_views_from_urlpatterns(patterns, base + p.regex.pattern))
views.extend(extract_views_from_urlpatterns(
patterns,
base + p.regex.pattern,
(namespace or []) + (p.namespace and [p.namespace] or [])
))
elif hasattr(p, 'callback'):
try:
views.append((p.callback, base + p.regex.pattern))
views.append((p.callback, base + p.regex.pattern,
namespace, p.name))
except ViewDoesNotExist:
continue
else:
@ -105,7 +105,15 @@ def logout(request):
|
|||
user = None
|
||||
user_logged_out.send(sender=user.__class__, request=request, user=user)
|
||||
|
||||
# remember language choice saved to session
|
||||
# for backwards compatibility django_language is also checked (remove in 1.8)
|
||||
language = request.session.get('_language', request.session.get('django_language'))
|
||||
|
||||
request.session.flush()
|
||||
|
||||
if language is not None:
|
||||
request.session['_language'] = language
|
||||
|
||||
if hasattr(request, 'user'):
|
||||
from django.contrib.auth.models import AnonymousUser
|
||||
request.user = AnonymousUser()
|
||||
|
@ -115,13 +123,13 @@ def get_user_model():
|
|||
"""
|
||||
Returns the User model that is active in this project.
|
||||
"""
|
||||
from django.db.models import get_model
|
||||
from django.core.apps import app_cache
|
||||
|
||||
try:
|
||||
app_label, model_name = settings.AUTH_USER_MODEL.split('.')
|
||||
except ValueError:
|
||||
raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
|
||||
user_model = get_model(app_label, model_name)
|
||||
user_model = app_cache.get_model(app_label, model_name)
|
||||
if user_model is None:
|
||||
raise ImproperlyConfigured("AUTH_USER_MODEL refers to model '%s' that has not been installed" % settings.AUTH_USER_MODEL)
|
||||
return user_model
|
||||
|
|
|
@ -48,8 +48,8 @@ class UserAdmin(admin.ModelAdmin):
|
|||
add_fieldsets = (
|
||||
(None, {
|
||||
'classes': ('wide',),
|
||||
'fields': ('username', 'password1', 'password2')}
|
||||
),
|
||||
'fields': ('username', 'password1', 'password2'),
|
||||
}),
|
||||
)
|
||||
form = UserChangeForm
|
||||
add_form = UserCreationForm
|
||||
|
|
|
@ -57,7 +57,7 @@ def check_password(password, encoded, setter=None, preferred='default'):
|
|||
|
||||
must_update = hasher.algorithm != preferred.algorithm
|
||||
if not must_update:
|
||||
must_update = hasher.must_update(encoded)
|
||||
must_update = preferred.must_update(encoded)
|
||||
is_correct = hasher.verify(password, encoded)
|
||||
if setter and is_correct and must_update:
|
||||
setter(password)
|
||||
|
|
|
@ -8,10 +8,11 @@ import unicodedata
|
|||
|
||||
from django.contrib.auth import (models as auth_app, get_permission_codename,
|
||||
get_user_model)
|
||||
from django.core.apps import app_cache, UnavailableApp
|
||||
from django.core import exceptions
|
||||
from django.core.management.base import CommandError
|
||||
from django.db import DEFAULT_DB_ALIAS, router
|
||||
from django.db.models import get_model, get_models, signals, UnavailableApp
|
||||
from django.db.models import signals
|
||||
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
|
||||
from django.utils import six
|
||||
from django.utils.six.moves import input
|
||||
|
@ -61,7 +62,7 @@ def _check_permission_clashing(custom, builtin, ctype):
|
|||
|
||||
def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kwargs):
|
||||
try:
|
||||
get_model('auth', 'Permission')
|
||||
app_cache.get_model('auth', 'Permission')
|
||||
except UnavailableApp:
|
||||
return
|
||||
|
||||
|
@ -70,7 +71,7 @@ def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kw
|
|||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
app_models = get_models(app)
|
||||
app_models = app_cache.get_models(app)
|
||||
|
||||
# This will hold the permissions we're looking for as
|
||||
# (content_type, (codename, name))
|
||||
|
@ -119,7 +120,7 @@ def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kw
|
|||
|
||||
def create_superuser(app, created_models, verbosity, db, **kwargs):
|
||||
try:
|
||||
get_model('auth', 'Permission')
|
||||
app_cache.get_model('auth', 'Permission')
|
||||
UserModel = get_user_model()
|
||||
except UnavailableApp:
|
||||
return
|
||||
|
|
|
@ -130,7 +130,8 @@ class BasicTestCase(TestCase):
|
|||
"Check the operation of the createsuperuser management command"
|
||||
# We can use the management command to create a superuser
|
||||
new_io = StringIO()
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe",
|
||||
email="joe@somewhere.org",
|
||||
|
@ -146,7 +147,8 @@ class BasicTestCase(TestCase):
|
|||
|
||||
# We can supress output on the management command
|
||||
new_io = StringIO()
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe2",
|
||||
email="joe2@somewhere.org",
|
||||
|
@ -159,7 +161,8 @@ class BasicTestCase(TestCase):
|
|||
self.assertEqual(u.email, 'joe2@somewhere.org')
|
||||
self.assertFalse(u.has_usable_password())
|
||||
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe+admin@somewhere.org",
|
||||
email="joe@somewhere.org",
|
||||
|
@ -182,7 +185,8 @@ class BasicTestCase(TestCase):
|
|||
locale.getdefaultlocale = lambda: (None, None)
|
||||
|
||||
# Call the command in this new environment
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=True,
|
||||
username="nolocale@somewhere.org",
|
||||
email="nolocale@somewhere.org",
|
||||
|
@ -212,7 +216,8 @@ class BasicTestCase(TestCase):
|
|||
username_field.verbose_name = ulazy('uživatel')
|
||||
new_io = StringIO()
|
||||
try:
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=True,
|
||||
stdout=new_io
|
||||
)
|
||||
|
|
|
@ -133,7 +133,7 @@ class AuthenticationFormTest(TestCase):
|
|||
[force_text(form.error_messages['inactive'])])
|
||||
|
||||
def test_custom_login_allowed_policy(self):
|
||||
# The user is inactive, but our custom form policy allows him to log in.
|
||||
# The user is inactive, but our custom form policy allows them to log in.
|
||||
data = {
|
||||
'username': 'inactive',
|
||||
'password': 'password',
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import unittest
|
||||
from unittest import skipUnless
|
||||
|
||||
from django.conf.global_settings import PASSWORD_HASHERS as default_hashers
|
||||
from django.contrib.auth.hashers import (is_password_usable, BasePasswordHasher,
|
||||
check_password, make_password, PBKDF2PasswordHasher, load_hashers, PBKDF2SHA1PasswordHasher,
|
||||
get_hasher, identify_hasher, UNUSABLE_PASSWORD_PREFIX, UNUSABLE_PASSWORD_SUFFIX_LENGTH)
|
||||
from django.test import SimpleTestCase
|
||||
from django.utils import six
|
||||
|
||||
|
||||
|
@ -22,7 +22,11 @@ except ImportError:
|
|||
bcrypt = None
|
||||
|
||||
|
||||
class TestUtilsHashPass(unittest.TestCase):
|
||||
class PBKDF2SingleIterationHasher(PBKDF2PasswordHasher):
|
||||
iterations = 1
|
||||
|
||||
|
||||
class TestUtilsHashPass(SimpleTestCase):
|
||||
|
||||
def setUp(self):
|
||||
load_hashers(password_hashers=default_hashers)
|
||||
|
@ -279,6 +283,34 @@ class TestUtilsHashPass(unittest.TestCase):
|
|||
finally:
|
||||
hasher.iterations = old_iterations
|
||||
|
||||
def test_pbkdf2_upgrade_new_hasher(self):
|
||||
self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm)
|
||||
hasher = get_hasher('default')
|
||||
self.assertNotEqual(hasher.iterations, 1)
|
||||
|
||||
state = {'upgraded': False}
|
||||
|
||||
def setter(password):
|
||||
state['upgraded'] = True
|
||||
|
||||
with self.settings(PASSWORD_HASHERS=[
|
||||
'django.contrib.auth.tests.test_hashers.PBKDF2SingleIterationHasher']):
|
||||
encoded = make_password('letmein')
|
||||
algo, iterations, salt, hash = encoded.split('$', 3)
|
||||
self.assertEqual(iterations, '1')
|
||||
|
||||
# Check that no upgrade is triggerd
|
||||
self.assertTrue(check_password('letmein', encoded, setter))
|
||||
self.assertFalse(state['upgraded'])
|
||||
|
||||
# Revert to the old iteration count and check if the password would get
|
||||
# updated to the new iteration count.
|
||||
with self.settings(PASSWORD_HASHERS=[
|
||||
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
|
||||
'django.contrib.auth.tests.test_hashers.PBKDF2SingleIterationHasher']):
|
||||
self.assertTrue(check_password('letmein', encoded, setter))
|
||||
self.assertTrue(state['upgraded'])
|
||||
|
||||
def test_load_library_no_algorithm(self):
|
||||
with self.assertRaises(ValueError) as e:
|
||||
BasePasswordHasher()._load_library()
|
||||
|
|
|
@ -8,11 +8,11 @@ from django.contrib.auth.models import User
|
|||
from django.contrib.auth.tests.custom_user import CustomUser
|
||||
from django.contrib.auth.tests.utils import skipIfCustomUser
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.apps import app_cache
|
||||
from django.core import exceptions
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
from django.core.management.validation import get_validation_errors
|
||||
from django.db.models.loading import get_app
|
||||
from django.test import TestCase
|
||||
from django.test.utils import override_settings
|
||||
from django.utils import six
|
||||
|
@ -91,7 +91,8 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
|
|||
"Check the operation of the createsuperuser management command"
|
||||
# We can use the management command to create a superuser
|
||||
new_io = StringIO()
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe",
|
||||
email="joe@somewhere.org",
|
||||
|
@ -108,7 +109,8 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
|
|||
def test_verbosity_zero(self):
|
||||
# We can supress output on the management command
|
||||
new_io = StringIO()
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe2",
|
||||
email="joe2@somewhere.org",
|
||||
|
@ -123,7 +125,8 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
|
|||
|
||||
def test_email_in_username(self):
|
||||
new_io = StringIO()
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe+admin@somewhere.org",
|
||||
email="joe@somewhere.org",
|
||||
|
@ -140,7 +143,8 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
|
|||
# We skip validation because the temporary substitution of the
|
||||
# swappable User model messes with validation.
|
||||
new_io = StringIO()
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
email="joe@somewhere.org",
|
||||
date_of_birth="1976-04-01",
|
||||
|
@ -163,7 +167,8 @@ class CreatesuperuserManagementCommandTestCase(TestCase):
|
|||
# swappable User model messes with validation.
|
||||
new_io = StringIO()
|
||||
with self.assertRaises(CommandError):
|
||||
call_command("createsuperuser",
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
interactive=False,
|
||||
username="joe@somewhere.org",
|
||||
stdout=new_io,
|
||||
|
@ -179,21 +184,21 @@ class CustomUserModelValidationTestCase(TestCase):
|
|||
def test_required_fields_is_list(self):
|
||||
"REQUIRED_FIELDS should be a list."
|
||||
new_io = StringIO()
|
||||
get_validation_errors(new_io, get_app('auth'))
|
||||
get_validation_errors(new_io, app_cache.get_app_config('auth').models_module)
|
||||
self.assertIn("The REQUIRED_FIELDS must be a list or tuple.", new_io.getvalue())
|
||||
|
||||
@override_settings(AUTH_USER_MODEL='auth.CustomUserBadRequiredFields')
|
||||
def test_username_not_in_required_fields(self):
|
||||
"USERNAME_FIELD should not appear in REQUIRED_FIELDS."
|
||||
new_io = StringIO()
|
||||
get_validation_errors(new_io, get_app('auth'))
|
||||
get_validation_errors(new_io, app_cache.get_app_config('auth').models_module)
|
||||
self.assertIn("The field named as the USERNAME_FIELD should not be included in REQUIRED_FIELDS on a swappable User model.", new_io.getvalue())
|
||||
|
||||
@override_settings(AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername')
|
||||
def test_username_non_unique(self):
|
||||
"A non-unique USERNAME_FIELD should raise a model validation error."
|
||||
new_io = StringIO()
|
||||
get_validation_errors(new_io, get_app('auth'))
|
||||
get_validation_errors(new_io, app_cache.get_app_config('auth').models_module)
|
||||
self.assertIn("The USERNAME_FIELD must be unique. Add unique=True to the field parameters.", new_io.getvalue())
|
||||
|
||||
|
||||
|
|
|
@ -69,9 +69,11 @@ class UserManagerTestCase(TestCase):
|
|||
self.assertEqual(returned, 'email\ with_whitespace@d.com')
|
||||
|
||||
def test_empty_username(self):
|
||||
self.assertRaisesMessage(ValueError,
|
||||
self.assertRaisesMessage(
|
||||
ValueError,
|
||||
'The given username must be set',
|
||||
User.objects.create_user, username='')
|
||||
User.objects.create_user, username=''
|
||||
)
|
||||
|
||||
|
||||
class AbstractUserTestCase(TestCase):
|
||||
|
|
|
@ -3,6 +3,7 @@ from datetime import datetime
|
|||
from django.conf import settings
|
||||
from django.contrib.auth import authenticate
|
||||
from django.contrib.auth.backends import RemoteUserBackend
|
||||
from django.contrib.auth.middleware import RemoteUserMiddleware
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.auth.tests.utils import skipIfCustomUser
|
||||
from django.test import TestCase
|
||||
|
@ -15,6 +16,7 @@ class RemoteUserTest(TestCase):
|
|||
urls = 'django.contrib.auth.tests.urls'
|
||||
middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware'
|
||||
backend = 'django.contrib.auth.backends.RemoteUserBackend'
|
||||
header = 'REMOTE_USER'
|
||||
|
||||
# Usernames to be passed in REMOTE_USER for the test_known_user test case.
|
||||
known_user = 'knownuser'
|
||||
|
@ -37,11 +39,11 @@ class RemoteUserTest(TestCase):
|
|||
self.assertTrue(response.context['user'].is_anonymous())
|
||||
self.assertEqual(User.objects.count(), num_users)
|
||||
|
||||
response = self.client.get('/remote_user/', REMOTE_USER=None)
|
||||
response = self.client.get('/remote_user/', **{self.header: None})
|
||||
self.assertTrue(response.context['user'].is_anonymous())
|
||||
self.assertEqual(User.objects.count(), num_users)
|
||||
|
||||
response = self.client.get('/remote_user/', REMOTE_USER='')
|
||||
response = self.client.get('/remote_user/', **{self.header: ''})
|
||||
self.assertTrue(response.context['user'].is_anonymous())
|
||||
self.assertEqual(User.objects.count(), num_users)
|
||||
|
||||
|
@ -51,13 +53,13 @@ class RemoteUserTest(TestCase):
|
|||
as a User.
|
||||
"""
|
||||
num_users = User.objects.count()
|
||||
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
|
||||
response = self.client.get('/remote_user/', **{self.header: 'newuser'})
|
||||
self.assertEqual(response.context['user'].username, 'newuser')
|
||||
self.assertEqual(User.objects.count(), num_users + 1)
|
||||
User.objects.get(username='newuser')
|
||||
|
||||
# Another request with same user should not create any new users.
|
||||
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
|
||||
response = self.client.get('/remote_user/', **{self.header: 'newuser'})
|
||||
self.assertEqual(User.objects.count(), num_users + 1)
|
||||
|
||||
def test_known_user(self):
|
||||
|
@ -67,12 +69,14 @@ class RemoteUserTest(TestCase):
|
|||
User.objects.create(username='knownuser')
|
||||
User.objects.create(username='knownuser2')
|
||||
num_users = User.objects.count()
|
||||
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
|
||||
response = self.client.get('/remote_user/',
|
||||
**{self.header: self.known_user})
|
||||
self.assertEqual(response.context['user'].username, 'knownuser')
|
||||
self.assertEqual(User.objects.count(), num_users)
|
||||
# Test that a different user passed in the headers causes the new user
|
||||
# to be logged in.
|
||||
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user2)
|
||||
response = self.client.get('/remote_user/',
|
||||
**{self.header: self.known_user2})
|
||||
self.assertEqual(response.context['user'].username, 'knownuser2')
|
||||
self.assertEqual(User.objects.count(), num_users)
|
||||
|
||||
|
@ -89,13 +93,15 @@ class RemoteUserTest(TestCase):
|
|||
user.last_login = default_login
|
||||
user.save()
|
||||
|
||||
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
|
||||
response = self.client.get('/remote_user/',
|
||||
**{self.header: self.known_user})
|
||||
self.assertNotEqual(default_login, response.context['user'].last_login)
|
||||
|
||||
user = User.objects.get(username='knownuser')
|
||||
user.last_login = default_login
|
||||
user.save()
|
||||
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
|
||||
response = self.client.get('/remote_user/',
|
||||
**{self.header: self.known_user})
|
||||
self.assertEqual(default_login, response.context['user'].last_login)
|
||||
|
||||
def test_header_disappears(self):
|
||||
|
@ -105,7 +111,8 @@ class RemoteUserTest(TestCase):
|
|||
"""
|
||||
User.objects.create(username='knownuser')
|
||||
# Known user authenticates
|
||||
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
|
||||
response = self.client.get('/remote_user/',
|
||||
**{self.header: self.known_user})
|
||||
self.assertEqual(response.context['user'].username, 'knownuser')
|
||||
# During the session, the REMOTE_USER header disappears. Should trigger logout.
|
||||
response = self.client.get('/remote_user/')
|
||||
|
@ -140,7 +147,7 @@ class RemoteUserNoCreateTest(RemoteUserTest):
|
|||
|
||||
def test_unknown_user(self):
|
||||
num_users = User.objects.count()
|
||||
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
|
||||
response = self.client.get('/remote_user/', **{self.header: 'newuser'})
|
||||
self.assertTrue(response.context['user'].is_anonymous())
|
||||
self.assertEqual(User.objects.count(), num_users)
|
||||
|
||||
|
@ -194,3 +201,22 @@ class RemoteUserCustomTest(RemoteUserTest):
|
|||
super(RemoteUserCustomTest, self).test_unknown_user()
|
||||
newuser = User.objects.get(username='newuser')
|
||||
self.assertEqual(newuser.email, 'user@example.com')
|
||||
|
||||
|
||||
class CustomHeaderMiddleware(RemoteUserMiddleware):
|
||||
"""
|
||||
Middleware that overrides custom HTTP auth user header.
|
||||
"""
|
||||
header = 'HTTP_AUTHUSER'
|
||||
|
||||
|
||||
@skipIfCustomUser
|
||||
class CustomHeaderRemoteUserTest(RemoteUserTest):
|
||||
"""
|
||||
Tests a custom RemoteUserMiddleware subclass with custom HTTP auth user
|
||||
header.
|
||||
"""
|
||||
middleware = (
|
||||
'django.contrib.auth.tests.test_remote_user.CustomHeaderMiddleware'
|
||||
)
|
||||
header = 'HTTP_AUTHUSER'
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from importlib import import_module
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
|
@ -710,6 +711,18 @@ class LogoutTest(AuthViewsTestCase):
|
|||
"%s should be allowed" % good_url)
|
||||
self.confirm_logged_out()
|
||||
|
||||
def test_logout_preserve_language(self):
|
||||
"""Check that language stored in session is preserved after logout"""
|
||||
# Create a new session with language
|
||||
engine = import_module(settings.SESSION_ENGINE)
|
||||
session = engine.SessionStore()
|
||||
session['_language'] = 'pl'
|
||||
session.save()
|
||||
self.client.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
|
||||
|
||||
self.client.get('/logout/')
|
||||
self.assertEqual(self.client.session['_language'], 'pl')
|
||||
|
||||
|
||||
@skipIfCustomUser
|
||||
@override_settings(
|
||||
|
|
|
@ -98,7 +98,7 @@ def logout(request, next_page=None,
|
|||
|
||||
def logout_then_login(request, login_url=None, current_app=None, extra_context=None):
|
||||
"""
|
||||
Logs out the user if he is logged in. Then redirects to the log-in page.
|
||||
Logs out the user if they are logged in. Then redirects to the log-in page.
|
||||
"""
|
||||
if not login_url:
|
||||
login_url = settings.LOGIN_URL
|
||||
|
|
|
@ -3,6 +3,7 @@ from django.conf import settings
|
|||
from django.contrib import comments
|
||||
from django.contrib.comments import signals
|
||||
from django.contrib.comments.views.utils import next_redirect, confirmation_view
|
||||
from django.core.apps import app_cache
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.db import models
|
||||
from django.shortcuts import render_to_response
|
||||
|
@ -48,7 +49,7 @@ def post_comment(request, next=None, using=None):
|
|||
if ctype is None or object_pk is None:
|
||||
return CommentPostBadRequest("Missing content_type or object_pk field.")
|
||||
try:
|
||||
model = models.get_model(*ctype.split(".", 1))
|
||||
model = app_cache.get_model(*ctype.split(".", 1))
|
||||
target = model._default_manager.using(using).get(pk=object_pk)
|
||||
except TypeError:
|
||||
return CommentPostBadRequest(
|
||||
|
|
|
@ -453,9 +453,10 @@ class BaseGenericInlineFormSet(BaseModelFormSet):
|
|||
@classmethod
|
||||
def get_default_prefix(cls):
|
||||
opts = cls.model._meta
|
||||
return '-'.join((opts.app_label, opts.model_name,
|
||||
cls.ct_field.name, cls.ct_fk_field.name,
|
||||
))
|
||||
return '-'.join(
|
||||
(opts.app_label, opts.model_name,
|
||||
cls.ct_field.name, cls.ct_fk_field.name)
|
||||
)
|
||||
|
||||
def save_new(self, form, commit=True):
|
||||
setattr(form.instance, self.ct_field.get_attname(),
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.apps import app_cache, UnavailableApp
|
||||
from django.db import DEFAULT_DB_ALIAS, router
|
||||
from django.db.models import get_apps, get_model, get_models, signals, UnavailableApp
|
||||
from django.db.models import signals
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils import six
|
||||
from django.utils.six.moves import input
|
||||
|
@ -12,7 +13,7 @@ def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, *
|
|||
entries that no longer have a matching model class.
|
||||
"""
|
||||
try:
|
||||
get_model('contenttypes', 'ContentType')
|
||||
app_cache.get_model('contenttypes', 'ContentType')
|
||||
except UnavailableApp:
|
||||
return
|
||||
|
||||
|
@ -20,7 +21,7 @@ def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, *
|
|||
return
|
||||
|
||||
ContentType.objects.clear_cache()
|
||||
app_models = get_models(app)
|
||||
app_models = app_cache.get_models(app)
|
||||
if not app_models:
|
||||
return
|
||||
# They all have the same app_label, get the first one.
|
||||
|
@ -85,8 +86,8 @@ If you're unsure, answer 'no'.
|
|||
|
||||
|
||||
def update_all_contenttypes(verbosity=2, **kwargs):
|
||||
for app in get_apps():
|
||||
update_contenttypes(app, None, verbosity, **kwargs)
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
update_contenttypes(app_config.models_module, None, verbosity, **kwargs)
|
||||
|
||||
signals.post_migrate.connect(update_contenttypes)
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from django.core.apps import app_cache
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.encoding import smart_text, force_text
|
||||
|
@ -156,7 +157,7 @@ class ContentType(models.Model):
|
|||
|
||||
def model_class(self):
|
||||
"Returns the Python model class for this type of content."
|
||||
return models.get_model(self.app_label, self.model,
|
||||
return app_cache.get_model(self.app_label, self.model,
|
||||
only_installed=False)
|
||||
|
||||
def get_object_for_this_type(self, **kwargs):
|
||||
|
|
|
@ -52,12 +52,13 @@ class FooWithBrokenAbsoluteUrl(FooWithoutUrl):
|
|||
|
||||
|
||||
class ContentTypesTests(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.old_Site_meta_installed = Site._meta.installed
|
||||
self._old_installed = Site._meta.app_config.installed
|
||||
ContentType.objects.clear_cache()
|
||||
|
||||
def tearDown(self):
|
||||
Site._meta.installed = self.old_Site_meta_installed
|
||||
Site._meta.app_config.installed = self._old_installed
|
||||
ContentType.objects.clear_cache()
|
||||
|
||||
def test_lookup_cache(self):
|
||||
|
@ -222,12 +223,12 @@ class ContentTypesTests(TestCase):
|
|||
user_ct = ContentType.objects.get_for_model(FooWithUrl)
|
||||
obj = FooWithUrl.objects.create(name="john")
|
||||
|
||||
if Site._meta.installed:
|
||||
Site._meta.app_config.installed = True
|
||||
response = shortcut(request, user_ct.id, obj.id)
|
||||
self.assertEqual("http://%s/users/john/" % get_current_site(request).domain,
|
||||
response._headers.get("location")[1])
|
||||
|
||||
Site._meta.installed = False
|
||||
Site._meta.app_config.installed = False
|
||||
response = shortcut(request, user_ct.id, obj.id)
|
||||
self.assertEqual("http://Example.com/users/john/",
|
||||
response._headers.get("location")[1])
|
||||
|
|
|
@ -1 +1,2 @@
|
|||
""" models.py (even empty) currently required by the runtests.py to enable unit tests """
|
||||
# This file is required to pretend formtools has models.
|
||||
# Otherwise test models cannot be registered.
|
||||
|
|
|
@ -48,7 +48,7 @@ class CustomKwargsStep1(Step1):
|
|||
|
||||
def __init__(self, test=None, *args, **kwargs):
|
||||
self.test = test
|
||||
return super(CustomKwargsStep1, self).__init__(*args, **kwargs)
|
||||
super(CustomKwargsStep1, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class TestModel(models.Model):
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
from django.conf import settings
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
class PostGISSchemaEditor(DatabaseSchemaEditor):
|
||||
|
|
|
@ -13,7 +13,7 @@ class SpatiaLiteCreation(DatabaseCreation):
|
|||
database already exists. Returns the name of the test database created.
|
||||
|
||||
This method is overloaded to load up the SpatiaLite initialization
|
||||
SQL prior to calling the `syncdb` command.
|
||||
SQL prior to calling the `migrate` command.
|
||||
"""
|
||||
# Don't import django.core.management if it isn't needed.
|
||||
from django.core.management import call_command
|
||||
|
@ -31,13 +31,13 @@ class SpatiaLiteCreation(DatabaseCreation):
|
|||
self.connection.close()
|
||||
self.connection.settings_dict["NAME"] = test_database_name
|
||||
|
||||
# Need to load the SpatiaLite initialization SQL before running `syncdb`.
|
||||
# Need to load the SpatiaLite initialization SQL before running `migrate`.
|
||||
self.load_spatialite_sql()
|
||||
|
||||
# Report syncdb messages at one level lower than that requested.
|
||||
# Report migrate messages at one level lower than that requested.
|
||||
# This ensures we don't get flooded with messages during testing
|
||||
# (unless you really ask to be flooded)
|
||||
call_command('syncdb',
|
||||
call_command('migrate',
|
||||
verbosity=max(verbosity - 1, 0),
|
||||
interactive=False,
|
||||
database=self.connection.alias,
|
||||
|
@ -47,7 +47,7 @@ class SpatiaLiteCreation(DatabaseCreation):
|
|||
# custom SQL has been removed. The only test data should come from
|
||||
# test fixtures, or autogenerated from post_migrate triggers.
|
||||
# This has the side effect of loading initial data (which was
|
||||
# intentionally skipped in the syncdb).
|
||||
# intentionally skipped in the migrate).
|
||||
call_command('flush',
|
||||
verbosity=max(verbosity - 1, 0),
|
||||
interactive=False,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
The GeometryColumns and SpatialRefSys models for the SpatiaLite backend.
|
||||
"""
|
||||
from django.db import models
|
||||
from django.db import connection, models
|
||||
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
|
||||
from django.utils.encoding import python_2_unicode_compatible
|
||||
|
||||
|
@ -53,9 +53,13 @@ class SpatialRefSys(models.Model, SpatialRefSysMixin):
|
|||
auth_srid = models.IntegerField()
|
||||
ref_sys_name = models.CharField(max_length=256)
|
||||
proj4text = models.CharField(max_length=2048)
|
||||
if connection.ops.spatial_version[0] >= 4:
|
||||
srtext = models.CharField(max_length=2048)
|
||||
|
||||
@property
|
||||
def wkt(self):
|
||||
if hasattr(self, 'srtext'):
|
||||
return self.srtext
|
||||
from django.contrib.gis.gdal import SpatialReference
|
||||
return SpatialReference(self.proj4text).wkt
|
||||
|
||||
|
|
|
@ -114,9 +114,9 @@ class GeometryField(Field):
|
|||
kwargs['srid'] = self.srid
|
||||
if self.dim != 2:
|
||||
kwargs['dim'] = self.dim
|
||||
if self.spatial_index != True:
|
||||
if self.spatial_index is not True:
|
||||
kwargs['spatial_index'] = self.spatial_index
|
||||
if self.geography != False:
|
||||
if self.geography is not False:
|
||||
kwargs['geography'] = self.geography
|
||||
return name, path, args, kwargs
|
||||
|
||||
|
|
|
@ -362,9 +362,11 @@ class GeoQuerySet(QuerySet):
|
|||
relative = int(bool(relative))
|
||||
if not isinstance(precision, six.integer_types):
|
||||
raise TypeError('SVG precision keyword argument must be an integer.')
|
||||
s = {'desc': 'SVG',
|
||||
s = {
|
||||
'desc': 'SVG',
|
||||
'procedure_fmt': '%(geo_col)s,%(rel)s,%(precision)s',
|
||||
'procedure_args': {'rel': relative,
|
||||
'procedure_args': {
|
||||
'rel': relative,
|
||||
'precision': precision,
|
||||
}
|
||||
}
|
||||
|
@ -746,7 +748,8 @@ class GeoQuerySet(QuerySet):
|
|||
for geometry set-like operations (e.g., intersection, difference,
|
||||
union, sym_difference).
|
||||
"""
|
||||
s = {'geom_args': ('geom',),
|
||||
s = {
|
||||
'geom_args': ('geom',),
|
||||
'select_field': GeomField(),
|
||||
'procedure_fmt': '%(geo_col)s,%(geom)s',
|
||||
'procedure_args': {'geom': geom},
|
||||
|
|
|
@ -44,10 +44,16 @@ class GeometryField(forms.Field):
|
|||
if not isinstance(value, GEOSGeometry):
|
||||
try:
|
||||
value = GEOSGeometry(value)
|
||||
if not value.srid:
|
||||
value.srid = self.widget.map_srid
|
||||
except (GEOSException, ValueError, TypeError):
|
||||
raise forms.ValidationError(self.error_messages['invalid_geom'], code='invalid_geom')
|
||||
|
||||
# Try to set the srid
|
||||
if not value.srid:
|
||||
try:
|
||||
value.srid = self.widget.map_srid
|
||||
except AttributeError:
|
||||
if self.srid:
|
||||
value.srid = self.srid
|
||||
return value
|
||||
|
||||
def clean(self, value):
|
||||
|
@ -66,15 +72,12 @@ class GeometryField(forms.Field):
|
|||
raise forms.ValidationError(self.error_messages['invalid_geom_type'], code='invalid_geom_type')
|
||||
|
||||
# Transforming the geometry if the SRID was set.
|
||||
if self.srid:
|
||||
if not geom.srid:
|
||||
# Should match that of the field if not given.
|
||||
geom.srid = self.srid
|
||||
elif self.srid != -1 and self.srid != geom.srid:
|
||||
if self.srid and self.srid != -1 and self.srid != geom.srid:
|
||||
try:
|
||||
geom.transform(self.srid)
|
||||
except GEOSException:
|
||||
raise forms.ValidationError(self.error_messages['transform_error'], code='transform_error')
|
||||
raise forms.ValidationError(
|
||||
self.error_messages['transform_error'], code='transform_error')
|
||||
|
||||
return geom
|
||||
|
||||
|
|
|
@ -66,7 +66,8 @@ class BaseGeometryWidget(Widget):
|
|||
value.srid, self.map_srid, err)
|
||||
)
|
||||
|
||||
context = self.build_attrs(attrs,
|
||||
context = self.build_attrs(
|
||||
attrs,
|
||||
name=name,
|
||||
module='geodjango_%s' % name.replace('-', '_'), # JS-safe
|
||||
serialized=self.serialize(value),
|
||||
|
@ -102,6 +103,13 @@ class OSMWidget(BaseGeometryWidget):
|
|||
'gis/js/OLMapWidget.js',
|
||||
)
|
||||
|
||||
def __init__(self, attrs=None):
|
||||
super(OSMWidget, self).__init__()
|
||||
for key in ('default_lon', 'default_lat'):
|
||||
self.attrs[key] = getattr(self, key)
|
||||
if attrs:
|
||||
self.attrs.update(attrs)
|
||||
|
||||
@property
|
||||
def map_srid(self):
|
||||
# Use the official spherical mercator projection SRID on versions
|
||||
|
@ -110,12 +118,3 @@ class OSMWidget(BaseGeometryWidget):
|
|||
return 3857
|
||||
else:
|
||||
return 900913
|
||||
|
||||
def render(self, name, value, attrs=None):
|
||||
default_attrs = {
|
||||
'default_lon': self.default_lon,
|
||||
'default_lat': self.default_lat,
|
||||
}
|
||||
if attrs:
|
||||
default_attrs.update(attrs)
|
||||
return super(OSMWidget, self).render(name, value, default_attrs)
|
||||
|
|
|
@ -25,7 +25,7 @@ if HAS_GEOS:
|
|||
|
||||
|
||||
@skipUnless(HAS_GEOIP and getattr(settings, "GEOIP_PATH", None),
|
||||
"GeoIP is required along with the GEOIP_DATA setting.")
|
||||
"GeoIP is required along with the GEOIP_PATH setting.")
|
||||
class GeoIPTest(unittest.TestCase):
|
||||
|
||||
def test01_init(self):
|
||||
|
|
|
@ -48,9 +48,11 @@ if lib_names:
|
|||
|
||||
# No GEOS library could be found.
|
||||
if lib_path is None:
|
||||
raise ImportError('Could not find the GEOS library (tried "%s"). '
|
||||
raise ImportError(
|
||||
'Could not find the GEOS library (tried "%s"). '
|
||||
'Try setting GEOS_LIBRARY_PATH in your settings.' %
|
||||
'", "'.join(lib_names))
|
||||
'", "'.join(lib_names)
|
||||
)
|
||||
|
||||
# Getting the GEOS C library. The C interface (CDLL) is used for
|
||||
# both *NIX and Windows.
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from django.core.apps import app_cache
|
||||
from django.core import urlresolvers
|
||||
from django.contrib.sitemaps import Sitemap
|
||||
from django.contrib.gis.db.models.fields import GeometryField
|
||||
|
@ -25,7 +26,7 @@ class KMLSitemap(Sitemap):
|
|||
"""
|
||||
kml_sources = []
|
||||
if sources is None:
|
||||
sources = models.get_models()
|
||||
sources = app_cache.get_models()
|
||||
for source in sources:
|
||||
if isinstance(source, models.base.ModelBase):
|
||||
for field in source._meta.fields:
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import warnings
|
||||
|
||||
from django.core.apps import app_cache
|
||||
from django.http import HttpResponse, Http404
|
||||
from django.template import loader
|
||||
from django.contrib.sites.models import get_current_site
|
||||
|
@ -7,7 +10,6 @@ from django.core import urlresolvers
|
|||
from django.core.paginator import EmptyPage, PageNotAnInteger
|
||||
from django.contrib.gis.db.models.fields import GeometryField
|
||||
from django.db import connections, DEFAULT_DB_ALIAS
|
||||
from django.db.models import get_model
|
||||
from django.db.models.fields import FieldDoesNotExist
|
||||
from django.utils import six
|
||||
from django.utils.translation import ugettext as _
|
||||
|
@ -20,6 +22,8 @@ def index(request, sitemaps):
|
|||
This view generates a sitemap index that uses the proper view
|
||||
for resolving geographic section sitemap URLs.
|
||||
"""
|
||||
warnings.warn("Geo Sitemaps are deprecated. Use plain sitemaps from "
|
||||
"django.contrib.sitemaps instead", DeprecationWarning, stacklevel=2)
|
||||
current_site = get_current_site(request)
|
||||
sites = []
|
||||
protocol = request.scheme
|
||||
|
@ -43,6 +47,8 @@ def sitemap(request, sitemaps, section=None):
|
|||
This view generates a sitemap with additional geographic
|
||||
elements defined by Google.
|
||||
"""
|
||||
warnings.warn("Geo Sitemaps are deprecated. Use plain sitemaps from "
|
||||
"django.contrib.sitemaps instead", DeprecationWarning, stacklevel=2)
|
||||
maps, urls = [], []
|
||||
if section is not None:
|
||||
if section not in sitemaps:
|
||||
|
@ -75,7 +81,7 @@ def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB
|
|||
must be that of a geographic field.
|
||||
"""
|
||||
placemarks = []
|
||||
klass = get_model(label, model)
|
||||
klass = app_cache.get_model(label, model)
|
||||
if not klass:
|
||||
raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))
|
||||
|
||||
|
|
|
@ -20,11 +20,11 @@ class GeoFeedTest(TestCase):
|
|||
|
||||
def setUp(self):
|
||||
Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
|
||||
self.old_Site_meta_installed = Site._meta.installed
|
||||
Site._meta.installed = True
|
||||
self._old_installed = Site._meta.app_config.installed
|
||||
Site._meta.app_config.installed = True
|
||||
|
||||
def tearDown(self):
|
||||
Site._meta.installed = self.old_Site_meta_installed
|
||||
Site._meta.app_config.installed = self._old_installed
|
||||
|
||||
def assertChildNodes(self, elem, expected):
|
||||
"Taken from syndication/tests.py."
|
||||
|
|
|
@ -33,7 +33,8 @@ class GeoRegressionTests(TestCase):
|
|||
def test_kmz(self):
|
||||
"Testing `render_to_kmz` with non-ASCII data. See #11624."
|
||||
name = "Åland Islands"
|
||||
places = [{'name': name,
|
||||
places = [{
|
||||
'name': name,
|
||||
'description': name,
|
||||
'kml': '<Point><coordinates>5.0,23.0</coordinates></Point>'
|
||||
}]
|
||||
|
|
|
@ -11,6 +11,7 @@ from django.contrib.gis.geos import HAS_GEOS
|
|||
from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
|
||||
from django.contrib.sites.models import Site
|
||||
from django.test import TestCase
|
||||
from django.test.utils import IgnoreDeprecationWarningsMixin
|
||||
from django.utils._os import upath
|
||||
|
||||
if HAS_GEOS:
|
||||
|
@ -18,17 +19,19 @@ if HAS_GEOS:
|
|||
|
||||
|
||||
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
|
||||
class GeoSitemapTest(TestCase):
|
||||
class GeoSitemapTest(IgnoreDeprecationWarningsMixin, TestCase):
|
||||
|
||||
urls = 'django.contrib.gis.tests.geoapp.urls'
|
||||
|
||||
def setUp(self):
|
||||
super(GeoSitemapTest, self).setUp()
|
||||
Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
|
||||
self.old_Site_meta_installed = Site._meta.installed
|
||||
Site._meta.installed = True
|
||||
self._old_installed = Site._meta.app_config.installed
|
||||
Site._meta.app_config.installed = True
|
||||
|
||||
def tearDown(self):
|
||||
Site._meta.installed = self.old_Site_meta_installed
|
||||
Site._meta.app_config.installed = self._old_installed
|
||||
super(GeoSitemapTest, self).tearDown()
|
||||
|
||||
def assertChildNodes(self, elem, expected):
|
||||
"Taken from syndication/tests.py."
|
||||
|
|
|
@ -76,6 +76,19 @@ class GeometryFieldTest(SimpleTestCase):
|
|||
for wkt in ('POINT(5)', 'MULTI POLYGON(((0 0, 0 1, 1 1, 1 0, 0 0)))', 'BLAH(0 0, 1 1)'):
|
||||
self.assertRaises(forms.ValidationError, fld.to_python, wkt)
|
||||
|
||||
def test_field_with_text_widget(self):
|
||||
class PointForm(forms.Form):
|
||||
pt = forms.PointField(srid=4326, widget=forms.TextInput)
|
||||
|
||||
form = PointForm()
|
||||
cleaned_pt = form.fields['pt'].clean('POINT(5 23)')
|
||||
self.assertEqual(cleaned_pt, GEOSGeometry('POINT(5 23)'))
|
||||
self.assertEqual(4326, cleaned_pt.srid)
|
||||
|
||||
point = GEOSGeometry('SRID=4326;POINT(5 23)')
|
||||
form = PointForm(data={'pt': 'POINT(5 23)'}, initial={'pt': point})
|
||||
self.assertFalse(form.has_changed())
|
||||
|
||||
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
|
||||
"SpecializedFieldTest needs gdal support and a spatial database")
|
||||
|
@ -244,6 +257,15 @@ class SpecializedFieldTest(SimpleTestCase):
|
|||
for invalid in [geo for key, geo in self.geometries.items() if key != 'geometrycollection']:
|
||||
self.assertFalse(GeometryForm(data={'g': invalid.wkt}).is_valid())
|
||||
|
||||
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
|
||||
"OSMWidgetTest needs gdal support and a spatial database")
|
||||
class OSMWidgetTest(SimpleTestCase):
|
||||
def setUp(self):
|
||||
self.geometries = {
|
||||
'point': GEOSGeometry("SRID=4326;POINT(9.052734375 42.451171875)"),
|
||||
}
|
||||
|
||||
def test_osm_widget(self):
|
||||
class PointForm(forms.Form):
|
||||
p = forms.PointField(widget=forms.OSMWidget)
|
||||
|
@ -251,9 +273,32 @@ class SpecializedFieldTest(SimpleTestCase):
|
|||
geom = self.geometries['point']
|
||||
form = PointForm(data={'p': geom})
|
||||
rendered = form.as_p()
|
||||
|
||||
self.assertIn("OpenStreetMap (Mapnik)", rendered)
|
||||
self.assertIn("id: 'id_p',", rendered)
|
||||
|
||||
def test_default_lat_lon(self):
|
||||
class PointForm(forms.Form):
|
||||
p = forms.PointField(
|
||||
widget=forms.OSMWidget(attrs={
|
||||
'default_lon': 20, 'default_lat': 30
|
||||
}),
|
||||
)
|
||||
|
||||
form = PointForm()
|
||||
rendered = form.as_p()
|
||||
|
||||
self.assertIn("options['default_lon'] = 20;", rendered)
|
||||
self.assertIn("options['default_lat'] = 30;", rendered)
|
||||
if forms.OSMWidget.default_lon != 20:
|
||||
self.assertNotIn(
|
||||
"options['default_lon'] = %d;" % forms.OSMWidget.default_lon,
|
||||
rendered)
|
||||
if forms.OSMWidget.default_lat != 30:
|
||||
self.assertNotIn(
|
||||
"options['default_lat'] = %d;" % forms.OSMWidget.default_lat,
|
||||
rendered)
|
||||
|
||||
|
||||
@skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
|
||||
"CustomGeometryWidgetTest needs gdal support and a spatial database")
|
||||
|
|
|
@ -12,7 +12,7 @@ test_srs = ({'srid': 4326,
|
|||
# Only the beginning, because there are differences depending on installed libs
|
||||
'srtext': 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84"',
|
||||
# +ellps=WGS84 has been removed in the 4326 proj string in proj-4.8
|
||||
'proj4_re': r'\+proj=longlat (\+ellps=WGS84 )?\+datum=WGS84 \+no_defs ',
|
||||
'proj4_re': r'\+proj=longlat (\+ellps=WGS84 )?(\+datum=WGS84 |\+towgs84=0,0,0,0,0,0,0 )\+no_defs ',
|
||||
'spheroid': 'WGS 84', 'name': 'WGS 84',
|
||||
'geographic': True, 'projected': False, 'spatialite': True,
|
||||
'ellipsoid': (6378137.0, 6356752.3, 298.257223563), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
|
||||
|
|
|
@ -28,7 +28,7 @@ now = datetime.datetime(2012, 3, 9, 22, 30)
|
|||
|
||||
class MockDateTime(datetime.datetime):
|
||||
@classmethod
|
||||
def now(self, tz=None):
|
||||
def now(cls, tz=None):
|
||||
if tz is None or tz.utcoffset(now) is None:
|
||||
return now
|
||||
else:
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
# Models module required so tests are discovered.
|
|
@ -164,8 +164,7 @@ class BaseTests(object):
|
|||
response = self.client.post(add_url, data, follow=True)
|
||||
self.assertRedirects(response, show_url)
|
||||
self.assertTrue('messages' in response.context)
|
||||
messages = [Message(self.levels[level], msg) for msg in
|
||||
data['messages']]
|
||||
messages = [Message(self.levels[level], msg) for msg in data['messages']]
|
||||
self.assertEqual(list(response.context['messages']), messages)
|
||||
for msg in data['messages']:
|
||||
self.assertContains(response, msg)
|
||||
|
@ -209,8 +208,7 @@ class BaseTests(object):
|
|||
show_url = reverse('django.contrib.messages.tests.urls.show')
|
||||
messages = []
|
||||
for level in ('debug', 'info', 'success', 'warning', 'error'):
|
||||
messages.extend([Message(self.levels[level], msg) for msg in
|
||||
data['messages']])
|
||||
messages.extend([Message(self.levels[level], msg) for msg in data['messages']])
|
||||
add_url = reverse('django.contrib.messages.tests.urls.add',
|
||||
args=(level,))
|
||||
self.client.post(add_url, data)
|
||||
|
|
|
@ -1 +1,2 @@
|
|||
# This file intentionally left blank
|
||||
# This file is required to pretend sitemaps has models.
|
||||
# Otherwise test models cannot be registered.
|
||||
|
|
|
@ -25,10 +25,10 @@ class SitemapTestsBase(TestCase):
|
|||
|
||||
def setUp(self):
|
||||
self.base_url = '%s://%s' % (self.protocol, self.domain)
|
||||
self.old_Site_meta_installed = Site._meta.installed
|
||||
self._old_installed = Site._meta.app_config.installed
|
||||
cache.clear()
|
||||
# Create an object for sitemap content.
|
||||
TestModel.objects.create(name='Test Object')
|
||||
|
||||
def tearDown(self):
|
||||
Site._meta.installed = self.old_Site_meta_installed
|
||||
Site._meta.app_config.installed = self._old_installed
|
||||
|
|
|
@ -107,8 +107,9 @@ class HTTPSitemapTests(SitemapTestsBase):
|
|||
|
||||
def test_requestsite_sitemap(self):
|
||||
# Make sure hitting the flatpages sitemap without the sites framework
|
||||
# installed doesn't raise an exception
|
||||
Site._meta.installed = False
|
||||
# installed doesn't raise an exception.
|
||||
# Reset by SitemapTestsBase.tearDown().
|
||||
Site._meta.app_config.installed = False
|
||||
response = self.client.get('/simple/sitemap.xml')
|
||||
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
|
||||
|
@ -133,7 +134,8 @@ class HTTPSitemapTests(SitemapTestsBase):
|
|||
Sitemap.get_urls if Site objects exists, but the sites framework is not
|
||||
actually installed.
|
||||
"""
|
||||
Site._meta.installed = False
|
||||
# Reset by SitemapTestsBase.tearDown().
|
||||
Site._meta.app_config.installed = False
|
||||
self.assertRaises(ImproperlyConfigured, Sitemap().get_urls)
|
||||
|
||||
def test_sitemap_item(self):
|
||||
|
|
|
@ -12,11 +12,11 @@ class SitesFrameworkTests(TestCase):
|
|||
|
||||
def setUp(self):
|
||||
Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
|
||||
self.old_Site_meta_installed = Site._meta.installed
|
||||
Site._meta.installed = True
|
||||
self._old_installed = Site._meta.app_config.installed
|
||||
Site._meta.app_config.installed = True
|
||||
|
||||
def tearDown(self):
|
||||
Site._meta.installed = self.old_Site_meta_installed
|
||||
Site._meta.app_config.installed = self._old_installed
|
||||
|
||||
def test_save_another(self):
|
||||
# Regression for #17415
|
||||
|
@ -67,7 +67,7 @@ class SitesFrameworkTests(TestCase):
|
|||
self.assertRaises(ObjectDoesNotExist, get_current_site, request)
|
||||
|
||||
# A RequestSite is returned if the sites framework is not installed
|
||||
Site._meta.installed = False
|
||||
Site._meta.app_config.installed = False
|
||||
site = get_current_site(request)
|
||||
self.assertTrue(isinstance(site, RequestSite))
|
||||
self.assertEqual(site.name, "example.com")
|
||||
|
|
|
@ -12,18 +12,11 @@ class StaticFilesHandler(WSGIHandler):
|
|||
WSGI middleware that intercepts calls to the static files directory, as
|
||||
defined by the STATIC_URL setting, and serves those files.
|
||||
"""
|
||||
def __init__(self, application, base_dir=None):
|
||||
def __init__(self, application):
|
||||
self.application = application
|
||||
if base_dir:
|
||||
self.base_dir = base_dir
|
||||
else:
|
||||
self.base_dir = self.get_base_dir()
|
||||
self.base_url = urlparse(self.get_base_url())
|
||||
super(StaticFilesHandler, self).__init__()
|
||||
|
||||
def get_base_dir(self):
|
||||
return settings.STATIC_ROOT
|
||||
|
||||
def get_base_url(self):
|
||||
utils.check_settings()
|
||||
return settings.STATIC_URL
|
||||
|
|
|
@ -294,12 +294,6 @@ Type 'yes' to continue, or 'no' to cancel: """
|
|||
self.log("Pretending to copy '%s'" % source_path, level=1)
|
||||
else:
|
||||
self.log("Copying '%s'" % source_path, level=1)
|
||||
if self.local:
|
||||
full_path = self.storage.path(prefixed_path)
|
||||
try:
|
||||
os.makedirs(os.path.dirname(full_path))
|
||||
except OSError:
|
||||
pass
|
||||
with source_storage.open(path) as source_file:
|
||||
self.storage.save(prefixed_path, source_file)
|
||||
if not prefixed_path in self.copied_files:
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
from .cache import app_cache, UnavailableApp # NOQA
|
|
@ -0,0 +1,47 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
from django.utils._os import upath
|
||||
|
||||
|
||||
class AppConfig(object):
|
||||
"""
|
||||
Class representing a Django application and its configuration.
|
||||
"""
|
||||
|
||||
def __init__(self, name, app_module, models_module):
|
||||
# Full Python path to the application eg. 'django.contrib.admin'.
|
||||
# This is the value that appears in INSTALLED_APPS.
|
||||
self.name = name
|
||||
|
||||
# Last component of the Python path to the application eg. 'admin'.
|
||||
# This value must be unique across a Django project.
|
||||
self.label = name.rpartition(".")[2]
|
||||
|
||||
# Root module eg. <module 'django.contrib.admin' from
|
||||
# 'django/contrib/admin/__init__.pyc'>.
|
||||
self.app_module = app_module
|
||||
|
||||
# Module containing models eg. <module 'django.contrib.admin.models'
|
||||
# from 'django/contrib/admin/models.pyc'>. None if the application
|
||||
# doesn't have a models module.
|
||||
self.models_module = models_module
|
||||
|
||||
# Mapping of lower case model names to model classes.
|
||||
# Populated by calls to AppCache.register_model().
|
||||
self.models = OrderedDict()
|
||||
|
||||
# Whether the app is in INSTALLED_APPS or was automatically created
|
||||
# when one of its models was imported.
|
||||
self.installed = app_module is not None
|
||||
|
||||
# Filesystem path to the application directory eg.
|
||||
# u'/usr/lib/python2.7/dist-packages/django/contrib/admin'.
|
||||
# This is a unicode object on Python 2 and a str on Python 3.
|
||||
self.path = upath(app_module.__path__[0]) if app_module is not None else None
|
||||
|
||||
@classmethod
|
||||
def _stub(cls, label):
|
||||
return cls(label, None, None)
|
||||
|
||||
def __repr__(self):
|
||||
return '<AppConfig: %s>' % self.label
|
|
@ -0,0 +1,395 @@
|
|||
"Utilities for loading models and the modules that contain them."
|
||||
|
||||
from collections import OrderedDict
|
||||
import imp
|
||||
from importlib import import_module
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.utils.module_loading import module_has_submodule
|
||||
from django.utils._os import upath
|
||||
from django.utils import six
|
||||
|
||||
from .base import AppConfig
|
||||
|
||||
|
||||
MODELS_MODULE_NAME = 'models'
|
||||
|
||||
|
||||
class UnavailableApp(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class AppCache(object):
|
||||
"""
|
||||
A cache that stores installed applications and their models. Used to
|
||||
provide reverse-relations and for app introspection.
|
||||
"""
|
||||
|
||||
def __init__(self, master=False):
|
||||
# Only one master of the app-cache may exist at a given time, and it
|
||||
# shall be the app_cache variable defined at the end of this module.
|
||||
if master and hasattr(sys.modules[__name__], 'app_cache'):
|
||||
raise RuntimeError("You may create only one master app cache.")
|
||||
|
||||
# When master is set to False, the app cache isn't populated from
|
||||
# INSTALLED_APPS and ignores the only_installed arguments to
|
||||
# get_model[s].
|
||||
self.master = master
|
||||
|
||||
# Mapping of labels to AppConfig instances for installed apps.
|
||||
self.app_configs = OrderedDict()
|
||||
|
||||
# Pending lookups for lazy relations
|
||||
self.pending_lookups = {}
|
||||
|
||||
# Set of app names. Allows restricting the set of installed apps.
|
||||
# Used by TransactionTestCase.available_apps for performance reasons.
|
||||
self.available_apps = None
|
||||
|
||||
# -- Everything below here is only used when populating the cache --
|
||||
self.loaded = False
|
||||
self.handled = set()
|
||||
self.postponed = []
|
||||
self.nesting_level = 0
|
||||
self._get_models_cache = {}
|
||||
|
||||
def populate(self):
|
||||
"""
|
||||
Fill in all the cache information. This method is threadsafe, in the
|
||||
sense that every caller will see the same state upon return, and if the
|
||||
cache is already initialised, it does no work.
|
||||
"""
|
||||
if self.loaded:
|
||||
return
|
||||
if not self.master:
|
||||
self.loaded = True
|
||||
return
|
||||
# Note that we want to use the import lock here - the app loading is
|
||||
# in many cases initiated implicitly by importing, and thus it is
|
||||
# possible to end up in deadlock when one thread initiates loading
|
||||
# without holding the importer lock and another thread then tries to
|
||||
# import something which also launches the app loading. For details of
|
||||
# this situation see #18251.
|
||||
imp.acquire_lock()
|
||||
try:
|
||||
if self.loaded:
|
||||
return
|
||||
for app_name in settings.INSTALLED_APPS:
|
||||
if app_name in self.handled:
|
||||
continue
|
||||
self.load_app(app_name, can_postpone=True)
|
||||
if not self.nesting_level:
|
||||
for app_name in self.postponed:
|
||||
self.load_app(app_name)
|
||||
self.loaded = True
|
||||
finally:
|
||||
imp.release_lock()
|
||||
|
||||
def load_app(self, app_name, can_postpone=False):
|
||||
"""
|
||||
Loads the app with the provided fully qualified name, and returns the
|
||||
model module.
|
||||
"""
|
||||
app_module = import_module(app_name)
|
||||
self.handled.add(app_name)
|
||||
self.nesting_level += 1
|
||||
try:
|
||||
models_module = import_module('%s.%s' % (app_name, MODELS_MODULE_NAME))
|
||||
except ImportError:
|
||||
# If the app doesn't have a models module, we can just swallow the
|
||||
# ImportError and return no models for this app.
|
||||
if not module_has_submodule(app_module, MODELS_MODULE_NAME):
|
||||
models_module = None
|
||||
# But if the app does have a models module, we need to figure out
|
||||
# whether to suppress or propagate the error. If can_postpone is
|
||||
# True then it may be that the package is still being imported by
|
||||
# Python and the models module isn't available yet. So we add the
|
||||
# app to the postponed list and we'll try it again after all the
|
||||
# recursion has finished (in populate). If can_postpone is False
|
||||
# then it's time to raise the ImportError.
|
||||
else:
|
||||
if can_postpone:
|
||||
self.postponed.append(app_name)
|
||||
return
|
||||
else:
|
||||
raise
|
||||
finally:
|
||||
self.nesting_level -= 1
|
||||
|
||||
app_config = AppConfig(
|
||||
name=app_name, app_module=app_module, models_module=models_module)
|
||||
# If a stub config existed for this app, preserve models registry.
|
||||
old_app_config = self.app_configs.get(app_config.label)
|
||||
if old_app_config is not None:
|
||||
app_config.models = old_app_config.models
|
||||
self.app_configs[app_config.label] = app_config
|
||||
|
||||
return models_module
|
||||
|
||||
def app_cache_ready(self):
|
||||
"""
|
||||
Returns true if the model cache is fully populated.
|
||||
|
||||
Useful for code that wants to cache the results of get_models() for
|
||||
themselves once it is safe to do so.
|
||||
"""
|
||||
return self.loaded
|
||||
|
||||
def get_app_configs(self, only_installed=True, only_with_models_module=False):
|
||||
"""
|
||||
Return an iterable of application configurations.
|
||||
|
||||
If only_installed is True (default), only applications explicitly
|
||||
listed in INSTALLED_APPS are considered.
|
||||
|
||||
If only_with_models_module in True (non-default), only applications
|
||||
containing a models module are considered.
|
||||
"""
|
||||
self.populate()
|
||||
for app_config in self.app_configs.values():
|
||||
if only_installed and not app_config.installed:
|
||||
continue
|
||||
if only_with_models_module and app_config.models_module is None:
|
||||
continue
|
||||
if self.available_apps is not None and app_config.name not in self.available_apps:
|
||||
continue
|
||||
yield app_config
|
||||
|
||||
def get_app_config(self, app_label, only_installed=True, only_with_models_module=False):
|
||||
"""
|
||||
Returns the application configuration for the given app_label.
|
||||
|
||||
Raises LookupError if no application exists with this app_label.
|
||||
|
||||
Raises UnavailableApp when set_available_apps() disables the
|
||||
application with this app_label.
|
||||
|
||||
If only_installed is True (default), only applications explicitly
|
||||
listed in INSTALLED_APPS are considered.
|
||||
|
||||
If only_with_models_module in True (non-default), only applications
|
||||
containing a models module are considered.
|
||||
"""
|
||||
self.populate()
|
||||
app_config = self.app_configs.get(app_label)
|
||||
if app_config is None:
|
||||
raise LookupError("No app with label %r." % app_label)
|
||||
if only_installed and not app_config.installed:
|
||||
raise LookupError("App with label %r isn't in INSTALLED_APPS." % app_label)
|
||||
if only_with_models_module and app_config.models_module is None:
|
||||
raise LookupError("App with label %r doesn't have a models module." % app_label)
|
||||
if self.available_apps is not None and app_config.name not in self.available_apps:
|
||||
raise UnavailableApp("App with label %r isn't available." % app_label)
|
||||
return app_config
|
||||
|
||||
def get_models(self, app_mod=None,
|
||||
include_auto_created=False, include_deferred=False,
|
||||
only_installed=True, include_swapped=False):
|
||||
"""
|
||||
Given a module containing models, returns a list of the models.
|
||||
Otherwise returns a list of all installed models.
|
||||
|
||||
By default, auto-created models (i.e., m2m models without an
|
||||
explicit intermediate table) are not included. However, if you
|
||||
specify include_auto_created=True, they will be.
|
||||
|
||||
By default, models created to satisfy deferred attribute
|
||||
queries are *not* included in the list of models. However, if
|
||||
you specify include_deferred, they will be.
|
||||
|
||||
By default, models that aren't part of installed apps will *not*
|
||||
be included in the list of models. However, if you specify
|
||||
only_installed=False, they will be. If you're using a non-default
|
||||
AppCache, this argument does nothing - all models will be included.
|
||||
|
||||
By default, models that have been swapped out will *not* be
|
||||
included in the list of models. However, if you specify
|
||||
include_swapped, they will be.
|
||||
"""
|
||||
if not self.master:
|
||||
only_installed = False
|
||||
cache_key = (app_mod, include_auto_created, include_deferred, only_installed, include_swapped)
|
||||
model_list = None
|
||||
try:
|
||||
model_list = self._get_models_cache[cache_key]
|
||||
if self.available_apps is not None and only_installed:
|
||||
model_list = [
|
||||
m for m in model_list
|
||||
if self.app_configs[m._meta.app_label].name in self.available_apps
|
||||
]
|
||||
return model_list
|
||||
except KeyError:
|
||||
pass
|
||||
self.populate()
|
||||
if app_mod:
|
||||
app_label = app_mod.__name__.split('.')[-2]
|
||||
try:
|
||||
app_config = self.app_configs[app_label]
|
||||
except KeyError:
|
||||
app_list = []
|
||||
else:
|
||||
app_list = [app_config] if app_config.installed else []
|
||||
else:
|
||||
app_list = six.itervalues(self.app_configs)
|
||||
if only_installed:
|
||||
app_list = (app for app in app_list if app.installed)
|
||||
model_list = []
|
||||
for app in app_list:
|
||||
model_list.extend(
|
||||
model for model in app.models.values()
|
||||
if ((not model._deferred or include_deferred) and
|
||||
(not model._meta.auto_created or include_auto_created) and
|
||||
(not model._meta.swapped or include_swapped))
|
||||
)
|
||||
self._get_models_cache[cache_key] = model_list
|
||||
if self.available_apps is not None and only_installed:
|
||||
model_list = [
|
||||
m for m in model_list
|
||||
if self.app_configs[m._meta.app_label].name in self.available_apps
|
||||
]
|
||||
return model_list
|
||||
|
||||
def get_model(self, app_label, model_name,
|
||||
seed_cache=True, only_installed=True):
|
||||
"""
|
||||
Returns the model matching the given app_label and case-insensitive
|
||||
model_name.
|
||||
|
||||
Returns None if no model is found.
|
||||
|
||||
Raises UnavailableApp when set_available_apps() in in effect and
|
||||
doesn't include app_label.
|
||||
"""
|
||||
if not self.master:
|
||||
only_installed = False
|
||||
if seed_cache:
|
||||
self.populate()
|
||||
if only_installed:
|
||||
app_config = self.app_configs.get(app_label)
|
||||
if app_config is not None and not app_config.installed:
|
||||
return None
|
||||
if (self.available_apps is not None
|
||||
and app_config.name not in self.available_apps):
|
||||
raise UnavailableApp("App with label %s isn't available." % app_label)
|
||||
try:
|
||||
return self.app_configs[app_label].models[model_name.lower()]
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def register_model(self, app_label, model):
|
||||
try:
|
||||
app_config = self.app_configs[app_label]
|
||||
except KeyError:
|
||||
app_config = AppConfig._stub(app_label)
|
||||
self.app_configs[app_label] = app_config
|
||||
# Add the model to the app_config's models dictionary.
|
||||
model_name = model._meta.model_name
|
||||
model_dict = app_config.models
|
||||
if model_name in model_dict:
|
||||
# The same model may be imported via different paths (e.g.
|
||||
# appname.models and project.appname.models). We use the source
|
||||
# filename as a means to detect identity.
|
||||
fname1 = os.path.abspath(upath(sys.modules[model.__module__].__file__))
|
||||
fname2 = os.path.abspath(upath(sys.modules[model_dict[model_name].__module__].__file__))
|
||||
# Since the filename extension could be .py the first time and
|
||||
# .pyc or .pyo the second time, ignore the extension when
|
||||
# comparing.
|
||||
if os.path.splitext(fname1)[0] == os.path.splitext(fname2)[0]:
|
||||
return
|
||||
model_dict[model_name] = model
|
||||
self._get_models_cache.clear()
|
||||
|
||||
def set_available_apps(self, available):
|
||||
available = set(available)
|
||||
installed = set(settings.INSTALLED_APPS)
|
||||
if not available.issubset(installed):
|
||||
raise ValueError("Available apps isn't a subset of installed "
|
||||
"apps, extra apps: %s" % ", ".join(available - installed))
|
||||
self.available_apps = available
|
||||
|
||||
def unset_available_apps(self):
|
||||
self.available_apps = None
|
||||
|
||||
### DEPRECATED METHODS GO BELOW THIS LINE ###
|
||||
|
||||
def get_app(self, app_label):
|
||||
"""
|
||||
Returns the module containing the models for the given app_label.
|
||||
|
||||
Raises UnavailableApp when set_available_apps() in in effect and
|
||||
doesn't include app_label.
|
||||
"""
|
||||
warnings.warn(
|
||||
"get_app_config(app_label).models_module supersedes get_app(app_label).",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
try:
|
||||
return self.get_app_config(app_label).models_module
|
||||
except LookupError as exc:
|
||||
# Change the exception type for backwards compatibility.
|
||||
raise ImproperlyConfigured(*exc.args)
|
||||
|
||||
def get_apps(self):
|
||||
"""
|
||||
Returns a list of all installed modules that contain models.
|
||||
"""
|
||||
warnings.warn(
|
||||
"[a.models_module for a in get_app_configs()] supersedes get_apps().",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
return [app_config.models_module for app_config in self.get_app_configs()]
|
||||
|
||||
def _get_app_package(self, app):
|
||||
return '.'.join(app.__name__.split('.')[:-1])
|
||||
|
||||
def get_app_package(self, app_label):
|
||||
warnings.warn(
|
||||
"get_app_config(label).name supersedes get_app_package(label).",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
return self._get_app_package(self.get_app(app_label))
|
||||
|
||||
def _get_app_path(self, app):
|
||||
if hasattr(app, '__path__'): # models/__init__.py package
|
||||
app_path = app.__path__[0]
|
||||
else: # models.py module
|
||||
app_path = app.__file__
|
||||
return os.path.dirname(upath(app_path))
|
||||
|
||||
def get_app_path(self, app_label):
|
||||
warnings.warn(
|
||||
"get_app_config(label).path supersedes get_app_path(label).",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
return self._get_app_path(self.get_app(app_label))
|
||||
|
||||
def get_app_paths(self):
|
||||
"""
|
||||
Returns a list of paths to all installed apps.
|
||||
|
||||
Useful for discovering files at conventional locations inside apps
|
||||
(static files, templates, etc.)
|
||||
"""
|
||||
warnings.warn(
|
||||
"[a.path for a in get_app_configs()] supersedes get_app_paths().",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
|
||||
self.populate()
|
||||
|
||||
app_paths = []
|
||||
for app in self.get_apps():
|
||||
app_paths.append(self._get_app_path(app))
|
||||
return app_paths
|
||||
|
||||
def register_models(self, app_label, *models):
|
||||
"""
|
||||
Register a set of models as belonging to an app.
|
||||
"""
|
||||
warnings.warn(
|
||||
"register_models(app_label, models) is deprecated.",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
for model in models:
|
||||
self.register_model(app_label, model)
|
||||
|
||||
|
||||
app_cache = AppCache(master=True)
|
|
@ -1,5 +1,6 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from django.core.apps import app_cache
|
||||
from django.db import models
|
||||
|
||||
|
||||
|
@ -31,7 +32,7 @@ def check_boolean_field_default_value():
|
|||
warns the user that the default has changed from False to Null.
|
||||
"""
|
||||
fields = []
|
||||
for cls in models.get_models():
|
||||
for cls in app_cache.get_models():
|
||||
opts = cls._meta
|
||||
for f in opts.local_fields:
|
||||
if isinstance(f, models.BooleanField) and not f.has_default():
|
||||
|
|
|
@ -4,6 +4,7 @@ Global Django exception and warning classes.
|
|||
from functools import reduce
|
||||
import operator
|
||||
|
||||
from django.utils import six
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
|
||||
|
@ -77,64 +78,89 @@ class ValidationError(Exception):
|
|||
"""An error while validating data."""
|
||||
def __init__(self, message, code=None, params=None):
|
||||
"""
|
||||
ValidationError can be passed any object that can be printed (usually
|
||||
a string), a list of objects or a dictionary.
|
||||
The `message` argument can be a single error, a list of errors, or a
|
||||
dictionary that maps field names to lists of errors. What we define as
|
||||
an "error" can be either a simple string or an instance of
|
||||
ValidationError with its message attribute set, and what we define as
|
||||
list or dictionary can be an actual `list` or `dict` or an instance
|
||||
of ValidationError with its `error_list` or `error_dict` attribute set.
|
||||
"""
|
||||
if isinstance(message, dict):
|
||||
self.error_dict = message
|
||||
elif isinstance(message, list):
|
||||
self.error_list = message
|
||||
|
||||
# PY2 can't pickle naive exception: http://bugs.python.org/issue1692335.
|
||||
super(ValidationError, self).__init__(message, code, params)
|
||||
|
||||
if isinstance(message, ValidationError):
|
||||
if hasattr(message, 'error_dict'):
|
||||
message = message.error_dict
|
||||
# PY2 has a `message` property which is always there so we can't
|
||||
# duck-type on it. It was introduced in Python 2.5 and already
|
||||
# deprecated in Python 2.6.
|
||||
elif not hasattr(message, 'message' if six.PY3 else 'code'):
|
||||
message = message.error_list
|
||||
else:
|
||||
message, code, params = message.message, message.code, message.params
|
||||
|
||||
if isinstance(message, dict):
|
||||
self.error_dict = {}
|
||||
for field, messages in message.items():
|
||||
if not isinstance(messages, ValidationError):
|
||||
messages = ValidationError(messages)
|
||||
self.error_dict[field] = messages.error_list
|
||||
|
||||
elif isinstance(message, list):
|
||||
self.error_list = []
|
||||
for message in message:
|
||||
# Normalize plain strings to instances of ValidationError.
|
||||
if not isinstance(message, ValidationError):
|
||||
message = ValidationError(message)
|
||||
self.error_list.extend(message.error_list)
|
||||
|
||||
else:
|
||||
self.message = message
|
||||
self.code = code
|
||||
self.params = params
|
||||
self.message = message
|
||||
self.error_list = [self]
|
||||
|
||||
@property
|
||||
def message_dict(self):
|
||||
message_dict = {}
|
||||
for field, messages in self.error_dict.items():
|
||||
message_dict[field] = []
|
||||
for message in messages:
|
||||
if isinstance(message, ValidationError):
|
||||
message_dict[field].extend(message.messages)
|
||||
else:
|
||||
message_dict[field].append(force_text(message))
|
||||
return message_dict
|
||||
# Trigger an AttributeError if this ValidationError
|
||||
# doesn't have an error_dict.
|
||||
getattr(self, 'error_dict')
|
||||
|
||||
return dict(self)
|
||||
|
||||
@property
|
||||
def messages(self):
|
||||
if hasattr(self, 'error_dict'):
|
||||
message_list = reduce(operator.add, self.error_dict.values())
|
||||
else:
|
||||
message_list = self.error_list
|
||||
|
||||
messages = []
|
||||
for message in message_list:
|
||||
if isinstance(message, ValidationError):
|
||||
params = message.params
|
||||
message = message.message
|
||||
if params:
|
||||
message %= params
|
||||
message = force_text(message)
|
||||
messages.append(message)
|
||||
return messages
|
||||
|
||||
def __str__(self):
|
||||
if hasattr(self, 'error_dict'):
|
||||
return repr(self.message_dict)
|
||||
return repr(self.messages)
|
||||
|
||||
def __repr__(self):
|
||||
return 'ValidationError(%s)' % self
|
||||
return reduce(operator.add, dict(self).values())
|
||||
return list(self)
|
||||
|
||||
def update_error_dict(self, error_dict):
|
||||
if hasattr(self, 'error_dict'):
|
||||
if error_dict:
|
||||
for k, v in self.error_dict.items():
|
||||
error_dict.setdefault(k, []).extend(v)
|
||||
for field, errors in self.error_dict.items():
|
||||
error_dict.setdefault(field, []).extend(errors)
|
||||
else:
|
||||
error_dict = self.error_dict
|
||||
else:
|
||||
error_dict[NON_FIELD_ERRORS] = self.error_list
|
||||
return error_dict
|
||||
|
||||
def __iter__(self):
|
||||
if hasattr(self, 'error_dict'):
|
||||
for field, errors in self.error_dict.items():
|
||||
yield field, list(ValidationError(errors))
|
||||
else:
|
||||
for error in self.error_list:
|
||||
message = error.message
|
||||
if error.params:
|
||||
message %= error.params
|
||||
yield force_text(message)
|
||||
|
||||
def __str__(self):
|
||||
if hasattr(self, 'error_dict'):
|
||||
return repr(dict(self))
|
||||
return repr(list(self))
|
||||
|
||||
def __repr__(self):
|
||||
return 'ValidationError(%s)' % self
|
||||
|
|
|
@ -68,8 +68,8 @@ def file_move_safe(old_file_name, new_file_name, chunk_size=1024 * 64, allow_ove
|
|||
# first open the old file, so that it won't go away
|
||||
with open(old_file_name, 'rb') as old_file:
|
||||
# now open the new file, not forgetting allow_overwrite
|
||||
fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
|
||||
(os.O_EXCL if not allow_overwrite else 0))
|
||||
fd = os.open(new_file_name, (os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
|
||||
(os.O_EXCL if not allow_overwrite else 0)))
|
||||
try:
|
||||
locks.lock(fd, locks.LOCK_EX)
|
||||
current_chunk = None
|
||||
|
|
|
@ -149,7 +149,8 @@ class FileSystemStorage(Storage):
|
|||
Standard filesystem storage
|
||||
"""
|
||||
|
||||
def __init__(self, location=None, base_url=None, file_permissions_mode=None):
|
||||
def __init__(self, location=None, base_url=None, file_permissions_mode=None,
|
||||
directory_permissions_mode=None):
|
||||
if location is None:
|
||||
location = settings.MEDIA_ROOT
|
||||
self.base_location = location
|
||||
|
@ -161,6 +162,10 @@ class FileSystemStorage(Storage):
|
|||
file_permissions_mode if file_permissions_mode is not None
|
||||
else settings.FILE_UPLOAD_PERMISSIONS
|
||||
)
|
||||
self.directory_permissions_mode = (
|
||||
directory_permissions_mode if directory_permissions_mode is not None
|
||||
else settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
|
||||
)
|
||||
|
||||
def _open(self, name, mode='rb'):
|
||||
return File(open(self.path(name), mode))
|
||||
|
@ -175,12 +180,12 @@ class FileSystemStorage(Storage):
|
|||
directory = os.path.dirname(full_path)
|
||||
if not os.path.exists(directory):
|
||||
try:
|
||||
if settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS is not None:
|
||||
if self.directory_permissions_mode is not None:
|
||||
# os.makedirs applies the global umask, so we reset it,
|
||||
# for consistency with FILE_UPLOAD_PERMISSIONS behavior.
|
||||
# for consistency with file_permissions_mode behavior.
|
||||
old_umask = os.umask(0)
|
||||
try:
|
||||
os.makedirs(directory, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS)
|
||||
os.makedirs(directory, self.directory_permissions_mode)
|
||||
finally:
|
||||
os.umask(old_umask)
|
||||
else:
|
||||
|
|
|
@ -118,8 +118,7 @@ def get_commands():
|
|||
for app_name in apps:
|
||||
try:
|
||||
path = find_management_module(app_name)
|
||||
_commands.update(dict((name, app_name)
|
||||
for name in find_commands(path)))
|
||||
_commands.update(dict((name, app_name) for name in find_commands(path)))
|
||||
except ImportError:
|
||||
pass # No management module - ignore this app
|
||||
|
||||
|
|
|
@ -11,7 +11,6 @@ import sys
|
|||
from optparse import make_option, OptionParser
|
||||
|
||||
import django
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.management.color import color_style, no_style
|
||||
from django.utils.encoding import force_str
|
||||
from django.utils.six import StringIO
|
||||
|
@ -342,16 +341,20 @@ class AppCommand(BaseCommand):
|
|||
args = '<appname appname ...>'
|
||||
|
||||
def handle(self, *app_labels, **options):
|
||||
from django.db import models
|
||||
from django.core.apps import app_cache
|
||||
if not app_labels:
|
||||
raise CommandError('Enter at least one appname.')
|
||||
try:
|
||||
app_list = [models.get_app(app_label) for app_label in app_labels]
|
||||
except (ImproperlyConfigured, ImportError) as e:
|
||||
app_configs = [app_cache.get_app_config(app_label) for app_label in app_labels]
|
||||
except (LookupError, ImportError) as e:
|
||||
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
|
||||
output = []
|
||||
for app in app_list:
|
||||
app_output = self.handle_app(app, **options)
|
||||
for app_config in app_configs:
|
||||
if app_config.models_module is None:
|
||||
raise CommandError(
|
||||
"AppCommand cannot handle app %r because it doesn't have "
|
||||
"a models module." % app_config.label)
|
||||
app_output = self.handle_app(app_config.models_module, **options)
|
||||
if app_output:
|
||||
output.append(app_output)
|
||||
return '\n'.join(output)
|
||||
|
|
|
@ -13,10 +13,12 @@ def supports_color():
|
|||
Returns True if the running system's terminal supports color, and False
|
||||
otherwise.
|
||||
"""
|
||||
unsupported_platform = (sys.platform in ('win32', 'Pocket PC'))
|
||||
plat = sys.platform
|
||||
supported_platform = plat != 'Pocket PC' and (plat != 'win32' or
|
||||
'ANSICON' in os.environ)
|
||||
# isatty is not always implemented, #6223.
|
||||
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
|
||||
if unsupported_platform or not is_a_tty:
|
||||
if not supported_platform or not is_a_tty:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
|
|
@ -3,7 +3,6 @@ import warnings
|
|||
from collections import OrderedDict
|
||||
from optparse import make_option
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core import serializers
|
||||
from django.db import router, DEFAULT_DB_ALIAS
|
||||
|
@ -38,7 +37,7 @@ class Command(BaseCommand):
|
|||
args = '[appname appname.ModelName ...]'
|
||||
|
||||
def handle(self, *app_labels, **options):
|
||||
from django.db.models import get_app, get_apps, get_model
|
||||
from django.core.apps import app_cache
|
||||
|
||||
format = options.get('format')
|
||||
indent = options.get('indent')
|
||||
|
@ -64,21 +63,24 @@ class Command(BaseCommand):
|
|||
for exclude in excludes:
|
||||
if '.' in exclude:
|
||||
app_label, model_name = exclude.split('.', 1)
|
||||
model_obj = get_model(app_label, model_name)
|
||||
model_obj = app_cache.get_model(app_label, model_name)
|
||||
if not model_obj:
|
||||
raise CommandError('Unknown model in excludes: %s' % exclude)
|
||||
excluded_models.add(model_obj)
|
||||
else:
|
||||
try:
|
||||
app_obj = get_app(exclude)
|
||||
app_obj = app_cache.get_app_config(exclude).models_module
|
||||
if app_obj is not None:
|
||||
excluded_apps.add(app_obj)
|
||||
except ImproperlyConfigured:
|
||||
except LookupError:
|
||||
raise CommandError('Unknown app in excludes: %s' % exclude)
|
||||
|
||||
if len(app_labels) == 0:
|
||||
if primary_keys:
|
||||
raise CommandError("You can only use --pks option with one model")
|
||||
app_list = OrderedDict((app, None) for app in get_apps() if app not in excluded_apps)
|
||||
app_list = OrderedDict((app_config.models_module, None)
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True)
|
||||
if app_config.models_module not in excluded_apps)
|
||||
else:
|
||||
if len(app_labels) > 1 and primary_keys:
|
||||
raise CommandError("You can only use --pks option with one model")
|
||||
|
@ -87,12 +89,12 @@ class Command(BaseCommand):
|
|||
try:
|
||||
app_label, model_label = label.split('.')
|
||||
try:
|
||||
app = get_app(app_label)
|
||||
except ImproperlyConfigured:
|
||||
app = app_cache.get_app_config(app_label).models_module
|
||||
except LookupError:
|
||||
raise CommandError("Unknown application: %s" % app_label)
|
||||
if app in excluded_apps:
|
||||
if app is None or app in excluded_apps:
|
||||
continue
|
||||
model = get_model(app_label, model_label)
|
||||
model = app_cache.get_model(app_label, model_label)
|
||||
if model is None:
|
||||
raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
|
||||
|
||||
|
@ -107,10 +109,10 @@ class Command(BaseCommand):
|
|||
# This is just an app - no model qualifier
|
||||
app_label = label
|
||||
try:
|
||||
app = get_app(app_label)
|
||||
except ImproperlyConfigured:
|
||||
app = app_cache.get_app_config(app_label).models_module
|
||||
except LookupError:
|
||||
raise CommandError("Unknown application: %s" % app_label)
|
||||
if app in excluded_apps:
|
||||
if app is None or app in excluded_apps:
|
||||
continue
|
||||
app_list[app] = None
|
||||
|
||||
|
@ -160,13 +162,13 @@ def sort_dependencies(app_list):
|
|||
is serialized before a normal model, and any model with a natural key
|
||||
dependency has it's dependencies serialized first.
|
||||
"""
|
||||
from django.db.models import get_model, get_models
|
||||
from django.core.apps import app_cache
|
||||
# Process the list of models, and get the list of dependencies
|
||||
model_dependencies = []
|
||||
models = set()
|
||||
for app, model_list in app_list:
|
||||
if model_list is None:
|
||||
model_list = get_models(app)
|
||||
model_list = app_cache.get_models(app)
|
||||
|
||||
for model in model_list:
|
||||
models.add(model)
|
||||
|
@ -174,7 +176,7 @@ def sort_dependencies(app_list):
|
|||
if hasattr(model, 'natural_key'):
|
||||
deps = getattr(model.natural_key, 'dependencies', [])
|
||||
if deps:
|
||||
deps = [get_model(*d.split('.')) for d in deps]
|
||||
deps = [app_cache.get_model(*d.split('.')) for d in deps]
|
||||
else:
|
||||
deps = []
|
||||
|
||||
|
|
|
@ -3,7 +3,8 @@ from importlib import import_module
|
|||
from optparse import make_option
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
|
||||
from django.core.apps import app_cache
|
||||
from django.db import connections, router, transaction, DEFAULT_DB_ALIAS
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
from django.core.management.color import no_style
|
||||
|
@ -93,6 +94,6 @@ Are you sure you want to do this?
|
|||
# Emit the post migrate signal. This allows individual applications to
|
||||
# respond as if the database had been migrated from scratch.
|
||||
all_models = []
|
||||
for app in models.get_apps():
|
||||
all_models.extend(router.get_migratable_models(app, database, include_auto_created=True))
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
all_models.extend(router.get_migratable_models(app_config.models_module, database, include_auto_created=True))
|
||||
emit_post_migrate_signal(set(all_models), verbosity, interactive, database)
|
||||
|
|
|
@ -8,12 +8,12 @@ import zipfile
|
|||
from optparse import make_option
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.apps import app_cache
|
||||
from django.core import serializers
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.color import no_style
|
||||
from django.db import (connections, router, transaction, DEFAULT_DB_ALIAS,
|
||||
IntegrityError, DatabaseError)
|
||||
from django.db.models import get_app_paths
|
||||
from django.utils import lru_cache
|
||||
from django.utils.encoding import force_text
|
||||
from django.utils.functional import cached_property
|
||||
|
@ -178,11 +178,15 @@ class Command(BaseCommand):
|
|||
if self.verbosity >= 2:
|
||||
self.stdout.write("Loading '%s' fixtures..." % fixture_name)
|
||||
|
||||
if os.path.sep in fixture_name:
|
||||
if os.path.isabs(fixture_name):
|
||||
fixture_dirs = [os.path.dirname(fixture_name)]
|
||||
fixture_name = os.path.basename(fixture_name)
|
||||
else:
|
||||
fixture_dirs = self.fixture_dirs
|
||||
if os.path.sep in fixture_name:
|
||||
fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name))
|
||||
for dir_ in fixture_dirs]
|
||||
fixture_name = os.path.basename(fixture_name)
|
||||
|
||||
suffixes = ('.'.join(ext for ext in combo if ext)
|
||||
for combo in product(databases, ser_fmts, cmp_fmts))
|
||||
|
@ -226,8 +230,8 @@ class Command(BaseCommand):
|
|||
current directory.
|
||||
"""
|
||||
dirs = []
|
||||
for path in get_app_paths():
|
||||
d = os.path.join(path, 'fixtures')
|
||||
for app_config in app_cache.get_app_configs():
|
||||
d = os.path.join(app_config.path, 'fixtures')
|
||||
if os.path.isdir(d):
|
||||
dirs.append(d)
|
||||
dirs.extend(list(settings.FIXTURE_DIRS))
|
||||
|
|
|
@ -29,25 +29,28 @@ def check_programs(*programs):
|
|||
|
||||
@total_ordering
|
||||
class TranslatableFile(object):
|
||||
def __init__(self, dirpath, file_name):
|
||||
def __init__(self, dirpath, file_name, locale_dir):
|
||||
self.file = file_name
|
||||
self.dirpath = dirpath
|
||||
self.locale_dir = locale_dir
|
||||
|
||||
def __repr__(self):
|
||||
return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.dirpath == other.dirpath and self.file == other.file
|
||||
return self.path == other.path
|
||||
|
||||
def __lt__(self, other):
|
||||
if self.dirpath == other.dirpath:
|
||||
return self.file < other.file
|
||||
return self.dirpath < other.dirpath
|
||||
return self.path < other.path
|
||||
|
||||
def process(self, command, potfile, domain, keep_pot=False):
|
||||
@property
|
||||
def path(self):
|
||||
return os.path.join(self.dirpath, self.file)
|
||||
|
||||
def process(self, command, domain):
|
||||
"""
|
||||
Extract translatable literals from self.file for :param domain:
|
||||
creating or updating the :param potfile: POT file.
|
||||
Extract translatable literals from self.file for :param domain:,
|
||||
creating or updating the POT file.
|
||||
|
||||
Uses the xgettext GNU gettext utility.
|
||||
"""
|
||||
|
@ -127,8 +130,6 @@ class TranslatableFile(object):
|
|||
if status != STATUS_OK:
|
||||
if is_templatized:
|
||||
os.unlink(work_file)
|
||||
if not keep_pot and os.path.exists(potfile):
|
||||
os.unlink(potfile)
|
||||
raise CommandError(
|
||||
"errors happened while running xgettext on %s\n%s" %
|
||||
(self.file, errors))
|
||||
|
@ -136,6 +137,8 @@ class TranslatableFile(object):
|
|||
# Print warnings
|
||||
command.stdout.write(errors)
|
||||
if msgs:
|
||||
# Write/append messages to pot file
|
||||
potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
|
||||
if is_templatized:
|
||||
# Remove '.py' suffix
|
||||
if os.name == 'nt':
|
||||
|
@ -147,6 +150,7 @@ class TranslatableFile(object):
|
|||
new = '#: ' + orig_file[2:]
|
||||
msgs = msgs.replace(old, new)
|
||||
write_pot_file(potfile, msgs)
|
||||
|
||||
if is_templatized:
|
||||
os.unlink(work_file)
|
||||
|
||||
|
@ -242,64 +246,94 @@ class Command(NoArgsCommand):
|
|||
% get_text_list(list(self.extensions), 'and'))
|
||||
|
||||
self.invoked_for_django = False
|
||||
self.locale_paths = []
|
||||
self.default_locale_path = None
|
||||
if os.path.isdir(os.path.join('conf', 'locale')):
|
||||
localedir = os.path.abspath(os.path.join('conf', 'locale'))
|
||||
self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
|
||||
self.default_locale_path = self.locale_paths[0]
|
||||
self.invoked_for_django = True
|
||||
# Ignoring all contrib apps
|
||||
self.ignore_patterns += ['contrib/*']
|
||||
elif os.path.isdir('locale'):
|
||||
localedir = os.path.abspath('locale')
|
||||
else:
|
||||
raise CommandError("This script should be run from the Django Git "
|
||||
"tree or your project or app tree. If you did indeed run it "
|
||||
"from the Git checkout or your project or application, "
|
||||
"maybe you are just missing the conf/locale (in the django "
|
||||
"tree) or locale (for project and application) directory? It "
|
||||
"is not created automatically, you have to create it by hand "
|
||||
"if you want to enable i18n for your project or application.")
|
||||
self.locale_paths.extend(list(settings.LOCALE_PATHS))
|
||||
# Allow to run makemessages inside an app dir
|
||||
if os.path.isdir('locale'):
|
||||
self.locale_paths.append(os.path.abspath('locale'))
|
||||
if self.locale_paths:
|
||||
self.default_locale_path = self.locale_paths[0]
|
||||
if not os.path.exists(self.default_locale_path):
|
||||
os.makedirs(self.default_locale_path)
|
||||
|
||||
check_programs('xgettext')
|
||||
|
||||
potfile = self.build_pot_file(localedir)
|
||||
|
||||
# Build po files for each selected locale
|
||||
# Build locale list
|
||||
locales = []
|
||||
if locale is not None:
|
||||
locales = locale
|
||||
elif process_all:
|
||||
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir))
|
||||
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
|
||||
locales = [os.path.basename(l) for l in locale_dirs]
|
||||
|
||||
if locales:
|
||||
check_programs('msguniq', 'msgmerge', 'msgattrib')
|
||||
|
||||
check_programs('xgettext')
|
||||
|
||||
try:
|
||||
potfiles = self.build_potfiles()
|
||||
|
||||
# Build po files for each selected locale
|
||||
for locale in locales:
|
||||
if self.verbosity > 0:
|
||||
self.stdout.write("processing locale %s\n" % locale)
|
||||
for potfile in potfiles:
|
||||
self.write_po_file(potfile, locale)
|
||||
finally:
|
||||
if not self.keep_pot and os.path.exists(potfile):
|
||||
os.unlink(potfile)
|
||||
if not self.keep_pot:
|
||||
self.remove_potfiles()
|
||||
|
||||
def build_pot_file(self, localedir):
|
||||
def build_potfiles(self):
|
||||
"""
|
||||
Build pot files and apply msguniq to them.
|
||||
"""
|
||||
file_list = self.find_files(".")
|
||||
|
||||
potfile = os.path.join(localedir, '%s.pot' % str(self.domain))
|
||||
if os.path.exists(potfile):
|
||||
# Remove a previous undeleted potfile, if any
|
||||
os.unlink(potfile)
|
||||
|
||||
self.remove_potfiles()
|
||||
for f in file_list:
|
||||
try:
|
||||
f.process(self, potfile, self.domain, self.keep_pot)
|
||||
f.process(self, self.domain)
|
||||
except UnicodeDecodeError:
|
||||
self.stdout.write("UnicodeDecodeError: skipped file %s in %s" % (f.file, f.dirpath))
|
||||
return potfile
|
||||
|
||||
potfiles = []
|
||||
for path in self.locale_paths:
|
||||
potfile = os.path.join(path, '%s.pot' % str(self.domain))
|
||||
if not os.path.exists(potfile):
|
||||
continue
|
||||
args = ['msguniq', '--to-code=utf-8']
|
||||
if self.wrap:
|
||||
args.append(self.wrap)
|
||||
if self.location:
|
||||
args.append(self.location)
|
||||
args.append(potfile)
|
||||
msgs, errors, status = popen_wrapper(args)
|
||||
if errors:
|
||||
if status != STATUS_OK:
|
||||
raise CommandError(
|
||||
"errors happened while running msguniq\n%s" % errors)
|
||||
elif self.verbosity > 0:
|
||||
self.stdout.write(errors)
|
||||
with open(potfile, 'w') as fp:
|
||||
fp.write(msgs)
|
||||
potfiles.append(potfile)
|
||||
return potfiles
|
||||
|
||||
def remove_potfiles(self):
|
||||
for path in self.locale_paths:
|
||||
pot_path = os.path.join(path, '%s.pot' % str(self.domain))
|
||||
if os.path.exists(pot_path):
|
||||
os.unlink(pot_path)
|
||||
|
||||
def find_files(self, root):
|
||||
"""
|
||||
Helper method to get all files in the given root.
|
||||
Helper method to get all files in the given root. Also check that there
|
||||
is a matching locale dir for each file.
|
||||
"""
|
||||
|
||||
def is_ignored(path, ignore_patterns):
|
||||
|
@ -319,12 +353,26 @@ class Command(NoArgsCommand):
|
|||
dirnames.remove(dirname)
|
||||
if self.verbosity > 1:
|
||||
self.stdout.write('ignoring directory %s\n' % dirname)
|
||||
elif dirname == 'locale':
|
||||
dirnames.remove(dirname)
|
||||
self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
|
||||
for filename in filenames:
|
||||
if is_ignored(os.path.normpath(os.path.join(dirpath, filename)), self.ignore_patterns):
|
||||
file_path = os.path.normpath(os.path.join(dirpath, filename))
|
||||
if is_ignored(file_path, self.ignore_patterns):
|
||||
if self.verbosity > 1:
|
||||
self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath))
|
||||
else:
|
||||
all_files.append(TranslatableFile(dirpath, filename))
|
||||
locale_dir = None
|
||||
for path in self.locale_paths:
|
||||
if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
|
||||
locale_dir = path
|
||||
break
|
||||
if not locale_dir:
|
||||
locale_dir = self.default_locale_path
|
||||
if not locale_dir:
|
||||
raise CommandError(
|
||||
"Unable to find a locale path to store translations for file %s" % file_path)
|
||||
all_files.append(TranslatableFile(dirpath, filename, locale_dir))
|
||||
return sorted(all_files)
|
||||
|
||||
def write_po_file(self, potfile, locale):
|
||||
|
@ -332,30 +380,14 @@ class Command(NoArgsCommand):
|
|||
Creates or updates the PO file for self.domain and :param locale:.
|
||||
Uses contents of the existing :param potfile:.
|
||||
|
||||
Uses mguniq, msgmerge, and msgattrib GNU gettext utilities.
|
||||
Uses msgmerge, and msgattrib GNU gettext utilities.
|
||||
"""
|
||||
args = ['msguniq', '--to-code=utf-8']
|
||||
if self.wrap:
|
||||
args.append(self.wrap)
|
||||
if self.location:
|
||||
args.append(self.location)
|
||||
args.append(potfile)
|
||||
msgs, errors, status = popen_wrapper(args)
|
||||
if errors:
|
||||
if status != STATUS_OK:
|
||||
raise CommandError(
|
||||
"errors happened while running msguniq\n%s" % errors)
|
||||
elif self.verbosity > 0:
|
||||
self.stdout.write(errors)
|
||||
|
||||
basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
|
||||
if not os.path.isdir(basedir):
|
||||
os.makedirs(basedir)
|
||||
pofile = os.path.join(basedir, '%s.po' % str(self.domain))
|
||||
|
||||
if os.path.exists(pofile):
|
||||
with open(potfile, 'w') as fp:
|
||||
fp.write(msgs)
|
||||
args = ['msgmerge', '-q']
|
||||
if self.wrap:
|
||||
args.append(self.wrap)
|
||||
|
@ -369,7 +401,10 @@ class Command(NoArgsCommand):
|
|||
"errors happened while running msgmerge\n%s" % errors)
|
||||
elif self.verbosity > 0:
|
||||
self.stdout.write(errors)
|
||||
elif not self.invoked_for_django:
|
||||
else:
|
||||
with open(potfile, 'r') as fp:
|
||||
msgs = fp.read()
|
||||
if not self.invoked_for_django:
|
||||
msgs = self.copy_plural_forms(msgs, locale)
|
||||
msgs = msgs.replace(
|
||||
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
|
||||
|
|
|
@ -1,38 +1,44 @@
|
|||
import sys
|
||||
import os
|
||||
import operator
|
||||
from optparse import make_option
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import connections, DEFAULT_DB_ALIAS
|
||||
from django.core.apps import app_cache
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db import connections, DEFAULT_DB_ALIAS, migrations
|
||||
from django.db.migrations.loader import MigrationLoader
|
||||
from django.db.migrations.autodetector import MigrationAutodetector, InteractiveMigrationQuestioner
|
||||
from django.db.migrations.autodetector import MigrationAutodetector
|
||||
from django.db.migrations.questioner import MigrationQuestioner, InteractiveMigrationQuestioner
|
||||
from django.db.migrations.state import ProjectState
|
||||
from django.db.migrations.writer import MigrationWriter
|
||||
from django.db.models.loading import cache
|
||||
from django.utils.six.moves import reduce
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--empty', action='store_true', dest='empty', default=False,
|
||||
help='Make a blank migration.'),
|
||||
make_option('--dry-run', action='store_true', dest='dry_run', default=False,
|
||||
help="Just show what migrations would be made; don't actually write them."),
|
||||
make_option('--merge', action='store_true', dest='merge', default=False,
|
||||
help="Enable fixing of migration conflicts."),
|
||||
)
|
||||
|
||||
help = "Creates new migration(s) for apps."
|
||||
usage_str = "Usage: ./manage.py makemigrations [--empty] [app [app ...]]"
|
||||
usage_str = "Usage: ./manage.py makemigrations [--dry-run] [app [app ...]]"
|
||||
|
||||
def handle(self, *app_labels, **options):
|
||||
|
||||
self.verbosity = int(options.get('verbosity'))
|
||||
self.interactive = options.get('interactive')
|
||||
self.dry_run = options.get('dry_run', False)
|
||||
self.merge = options.get('merge', False)
|
||||
|
||||
# Make sure the app they asked for exists
|
||||
app_labels = set(app_labels)
|
||||
bad_app_labels = set()
|
||||
for app_label in app_labels:
|
||||
try:
|
||||
cache.get_app(app_label)
|
||||
except ImproperlyConfigured:
|
||||
app_cache.get_app_config(app_label)
|
||||
except LookupError:
|
||||
bad_app_labels.add(app_label)
|
||||
if bad_app_labels:
|
||||
for app_label in bad_app_labels:
|
||||
|
@ -43,10 +49,30 @@ class Command(BaseCommand):
|
|||
# (makemigrations doesn't look at the database state).
|
||||
loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
|
||||
|
||||
# Before anything else, see if there's conflicting apps and drop out
|
||||
# hard if there are any and they don't want to merge
|
||||
conflicts = loader.detect_conflicts()
|
||||
if conflicts and not self.merge:
|
||||
name_str = "; ".join(
|
||||
"%s in %s" % (", ".join(names), app)
|
||||
for app, names in conflicts.items()
|
||||
)
|
||||
raise CommandError("Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str)
|
||||
|
||||
# If they want to merge and there's nothing to merge, then politely exit
|
||||
if self.merge and not conflicts:
|
||||
self.stdout.write("No conflicts detected to merge.")
|
||||
return
|
||||
|
||||
# If they want to merge and there is something to merge, then
|
||||
# divert into the merge code
|
||||
if self.merge and conflicts:
|
||||
return self.handle_merge(loader, conflicts)
|
||||
|
||||
# Detect changes
|
||||
autodetector = MigrationAutodetector(
|
||||
loader.graph.project_state(),
|
||||
ProjectState.from_app_cache(cache),
|
||||
ProjectState.from_app_cache(app_cache),
|
||||
InteractiveMigrationQuestioner(specified_apps=app_labels),
|
||||
)
|
||||
changes = autodetector.changes(graph=loader.graph, trim_to_apps=app_labels or None)
|
||||
|
@ -62,10 +88,10 @@ class Command(BaseCommand):
|
|||
return
|
||||
|
||||
directory_created = {}
|
||||
for app_label, migrations in changes.items():
|
||||
for app_label, app_migrations in changes.items():
|
||||
if self.verbosity >= 1:
|
||||
self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label) + "\n")
|
||||
for migration in migrations:
|
||||
for migration in app_migrations:
|
||||
# Describe the migration
|
||||
writer = MigrationWriter(migration)
|
||||
if self.verbosity >= 1:
|
||||
|
@ -73,6 +99,7 @@ class Command(BaseCommand):
|
|||
for operation in migration.operations:
|
||||
self.stdout.write(" - %s\n" % operation.describe())
|
||||
# Write it
|
||||
if not self.dry_run:
|
||||
migrations_directory = os.path.dirname(writer.path)
|
||||
if not directory_created.get(app_label, False):
|
||||
if not os.path.isdir(migrations_directory):
|
||||
|
@ -85,3 +112,68 @@ class Command(BaseCommand):
|
|||
migration_string = writer.as_string()
|
||||
with open(writer.path, "wb") as fh:
|
||||
fh.write(migration_string)
|
||||
|
||||
def handle_merge(self, loader, conflicts):
|
||||
"""
|
||||
Handles merging together conflicted migrations interactively,
|
||||
if it's safe; otherwise, advises on how to fix it.
|
||||
"""
|
||||
if self.interactive:
|
||||
questioner = InteractiveMigrationQuestioner()
|
||||
else:
|
||||
questioner = MigrationQuestioner()
|
||||
for app_label, migration_names in conflicts.items():
|
||||
# Grab out the migrations in question, and work out their
|
||||
# common ancestor.
|
||||
merge_migrations = []
|
||||
for migration_name in migration_names:
|
||||
migration = loader.get_migration(app_label, migration_name)
|
||||
migration.ancestry = loader.graph.forwards_plan((app_label, migration_name))
|
||||
merge_migrations.append(migration)
|
||||
common_ancestor = None
|
||||
for level in zip(*[m.ancestry for m in merge_migrations]):
|
||||
if reduce(operator.eq, level):
|
||||
common_ancestor = level[0]
|
||||
else:
|
||||
break
|
||||
if common_ancestor is None:
|
||||
raise ValueError("Could not find common ancestor of %s" % migration_names)
|
||||
# Now work out the operations along each divergent branch
|
||||
for migration in merge_migrations:
|
||||
migration.branch = migration.ancestry[
|
||||
(migration.ancestry.index(common_ancestor) + 1):
|
||||
]
|
||||
migration.merged_operations = []
|
||||
for node_app, node_name in migration.branch:
|
||||
migration.merged_operations.extend(
|
||||
loader.get_migration(node_app, node_name).operations
|
||||
)
|
||||
# In future, this could use some of the Optimizer code
|
||||
# (can_optimize_through) to automatically see if they're
|
||||
# mergeable. For now, we always just prompt the user.
|
||||
if self.verbosity > 0:
|
||||
self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label))
|
||||
for migration in merge_migrations:
|
||||
self.stdout.write(self.style.MIGRATE_LABEL(" Branch %s" % migration.name))
|
||||
for operation in migration.merged_operations:
|
||||
self.stdout.write(" - %s\n" % operation.describe())
|
||||
if questioner.ask_merge(app_label):
|
||||
# If they still want to merge it, then write out an empty
|
||||
# file depending on the migrations needing merging.
|
||||
numbers = [
|
||||
MigrationAutodetector.parse_number(migration.name)
|
||||
for migration in merge_migrations
|
||||
]
|
||||
try:
|
||||
biggest_number = max([x for x in numbers if x is not None])
|
||||
except ValueError:
|
||||
biggest_number = 1
|
||||
subclass = type("Migration", (migrations.Migration, ), {
|
||||
"dependencies": [(app_label, migration.name) for migration in merge_migrations],
|
||||
})
|
||||
new_migration = subclass("%04i_merge" % (biggest_number + 1), app_label)
|
||||
writer = MigrationWriter(new_migration)
|
||||
with open(writer.path, "wb") as fh:
|
||||
fh.write(writer.as_string())
|
||||
if self.verbosity > 0:
|
||||
self.stdout.write("\nCreated new merge migration %s" % writer.path)
|
||||
|
|
|
@ -7,13 +7,16 @@ import itertools
|
|||
import traceback
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.apps import app_cache
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.color import no_style
|
||||
from django.core.management.sql import custom_sql_for_model, emit_post_migrate_signal, emit_pre_migrate_signal
|
||||
from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS
|
||||
from django.db import connections, router, transaction, DEFAULT_DB_ALIAS
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from django.db.migrations.loader import MigrationLoader, AmbiguityError
|
||||
from django.db.migrations.state import ProjectState
|
||||
from django.db.migrations.autodetector import MigrationAutodetector
|
||||
from django.utils.module_loading import module_has_submodule
|
||||
|
||||
|
||||
|
@ -59,6 +62,16 @@ class Command(BaseCommand):
|
|||
# Work out which apps have migrations and which do not
|
||||
executor = MigrationExecutor(connection, self.migration_progress_callback)
|
||||
|
||||
# Before anything else, see if there's conflicting apps and drop out
|
||||
# hard if there are any
|
||||
conflicts = executor.loader.detect_conflicts()
|
||||
if conflicts:
|
||||
name_str = "; ".join(
|
||||
"%s in %s" % (", ".join(names), app)
|
||||
for app, names in conflicts.items()
|
||||
)
|
||||
raise CommandError("Conflicting migrations detected (%s).\nTo fix them run 'python manage.py makemigrations --merge'" % name_str)
|
||||
|
||||
# If they supplied command line arguments, work out what they mean.
|
||||
run_syncdb = False
|
||||
target_app_labels_only = True
|
||||
|
@ -120,6 +133,15 @@ class Command(BaseCommand):
|
|||
if not plan:
|
||||
if self.verbosity >= 1:
|
||||
self.stdout.write(" No migrations needed.")
|
||||
# If there's changes that aren't in migrations yet, tell them how to fix it.
|
||||
autodetector = MigrationAutodetector(
|
||||
executor.loader.graph.project_state(),
|
||||
ProjectState.from_app_cache(app_cache),
|
||||
)
|
||||
changes = autodetector.changes(graph=executor.loader.graph)
|
||||
if changes:
|
||||
self.stdout.write(self.style.NOTICE(" Your models have changes that are not yet reflected in a migration, and so won't be applied."))
|
||||
self.stdout.write(self.style.NOTICE(" Run 'manage.py makemigrations' to make new migrations, and then re-run 'manage.py migrate' to apply them."))
|
||||
else:
|
||||
executor.migrate(targets, plan, fake=options.get("fake", False))
|
||||
|
||||
|
@ -158,9 +180,10 @@ class Command(BaseCommand):
|
|||
|
||||
# Build the manifest of apps and models that are to be synchronized
|
||||
all_models = [
|
||||
(app.__name__.split('.')[-2],
|
||||
router.get_migratable_models(app, connection.alias, include_auto_created=True))
|
||||
for app in models.get_apps() if app.__name__.split('.')[-2] in apps
|
||||
(app_config.label,
|
||||
router.get_migratable_models(app_config.models_module, connection.alias, include_auto_created=True))
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True)
|
||||
if app_config.label in apps
|
||||
]
|
||||
|
||||
def model_installed(model):
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from optparse import make_option
|
||||
from datetime import datetime
|
||||
import errno
|
||||
|
@ -8,7 +10,10 @@ import socket
|
|||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.servers.basehttp import run, get_internal_wsgi_application
|
||||
from django.db import connections, DEFAULT_DB_ALIAS
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from django.utils import autoreload
|
||||
from django.utils import six
|
||||
|
||||
naiveip_re = re.compile(r"""^(?:
|
||||
(?P<addr>
|
||||
|
@ -96,13 +101,17 @@ class Command(BaseCommand):
|
|||
|
||||
self.stdout.write("Validating models...\n\n")
|
||||
self.validate(display_num_errors=True)
|
||||
self.check_migrations()
|
||||
now = datetime.now().strftime('%B %d, %Y - %X')
|
||||
if six.PY2:
|
||||
now = now.decode('utf-8')
|
||||
self.stdout.write((
|
||||
"%(started_at)s\n"
|
||||
"Django version %(version)s, using settings %(settings)r\n"
|
||||
"Starting development server at http://%(addr)s:%(port)s/\n"
|
||||
"Quit the server with %(quit_command)s.\n"
|
||||
) % {
|
||||
"started_at": datetime.now().strftime('%B %d, %Y - %X'),
|
||||
"started_at": now,
|
||||
"version": self.get_version(),
|
||||
"settings": settings.SETTINGS_MODULE,
|
||||
"addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
|
||||
|
@ -137,6 +146,16 @@ class Command(BaseCommand):
|
|||
self.stdout.write(shutdown_message)
|
||||
sys.exit(0)
|
||||
|
||||
def check_migrations(self):
|
||||
"""
|
||||
Checks to see if the set of migrations on disk matches the
|
||||
migrations in the database. Prints a warning if they don't match.
|
||||
"""
|
||||
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
|
||||
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
|
||||
if plan:
|
||||
self.stdout.write(self.style.NOTICE("\nYou have unapplied migrations; your app may not work properly until they are applied."))
|
||||
self.stdout.write(self.style.NOTICE("Run 'python manage.py migrate' to apply them.\n"))
|
||||
|
||||
# Kept for backward compatibility
|
||||
BaseRunserverCommand = Command
|
||||
|
|
|
@ -66,8 +66,8 @@ class Command(NoArgsCommand):
|
|||
def handle_noargs(self, **options):
|
||||
# XXX: (Temporary) workaround for ticket #1796: force early loading of all
|
||||
# models from installed apps.
|
||||
from django.db.models.loading import get_models
|
||||
get_models()
|
||||
from django.core.apps import app_cache
|
||||
app_cache.get_models()
|
||||
|
||||
use_plain = options.get('plain', False)
|
||||
no_startup = options.get('no_startup', False)
|
||||
|
|
|
@ -2,8 +2,9 @@ from __future__ import unicode_literals
|
|||
|
||||
from optparse import make_option
|
||||
|
||||
from django.core.apps import app_cache
|
||||
from django.core.management.base import AppCommand
|
||||
from django.db import connections, models, DEFAULT_DB_ALIAS
|
||||
from django.db import connections, DEFAULT_DB_ALIAS
|
||||
|
||||
|
||||
class Command(AppCommand):
|
||||
|
@ -20,4 +21,4 @@ class Command(AppCommand):
|
|||
|
||||
def handle_app(self, app, **options):
|
||||
connection = connections[options.get('database')]
|
||||
return '\n'.join(connection.ops.sequence_reset_sql(self.style, models.get_models(app, include_auto_created=True)))
|
||||
return '\n'.join(connection.ops.sequence_reset_sql(self.style, app_cache.get_models(app, include_auto_created=True)))
|
||||
|
|
|
@ -37,7 +37,8 @@ class Command(BaseCommand):
|
|||
# multiple times.
|
||||
shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
|
||||
use_threading = connection.features.test_db_allows_multiple_connections
|
||||
call_command('runserver',
|
||||
call_command(
|
||||
'runserver',
|
||||
addrport=addrport,
|
||||
shutdown_message=shutdown_message,
|
||||
use_reloader=False,
|
||||
|
|
|
@ -6,6 +6,7 @@ import re
|
|||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.apps import app_cache
|
||||
from django.core.management.base import CommandError
|
||||
from django.db import models, router
|
||||
|
||||
|
@ -24,7 +25,7 @@ def sql_create(app, style, connection):
|
|||
# We trim models from the current app so that the sqlreset command does not
|
||||
# generate invalid SQL (leaving models out of known_models is harmless, so
|
||||
# we can be conservative).
|
||||
app_models = models.get_models(app, include_auto_created=True)
|
||||
app_models = app_cache.get_models(app, include_auto_created=True)
|
||||
final_output = []
|
||||
tables = connection.introspection.table_names()
|
||||
known_models = set(model for model in connection.introspection.installed_models(tables) if model not in app_models)
|
||||
|
@ -168,7 +169,7 @@ def _split_statements(content):
|
|||
def custom_sql_for_model(model, style, connection):
|
||||
opts = model._meta
|
||||
app_dirs = []
|
||||
app_dir = models.get_app_path(model._meta.app_label)
|
||||
app_dir = app_cache.get_app_config(model._meta.app_label).path
|
||||
app_dirs.append(os.path.normpath(os.path.join(app_dir, 'sql')))
|
||||
|
||||
# Deprecated location -- remove in Django 1.9
|
||||
|
@ -206,11 +207,12 @@ def custom_sql_for_model(model, style, connection):
|
|||
|
||||
def emit_pre_migrate_signal(create_models, verbosity, interactive, db):
|
||||
# Emit the pre_migrate signal for every application.
|
||||
for app in models.get_apps():
|
||||
app_name = app.__name__.split('.')[-2]
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
if verbosity >= 2:
|
||||
print("Running pre-migrate handlers for application %s" % app_name)
|
||||
models.signals.pre_migrate.send(sender=app, app=app,
|
||||
print("Running pre-migrate handlers for application %s" % app_config.label)
|
||||
models.signals.pre_migrate.send(
|
||||
sender=app_config.models_module,
|
||||
app=app_config.models_module,
|
||||
create_models=create_models,
|
||||
verbosity=verbosity,
|
||||
interactive=interactive,
|
||||
|
@ -219,10 +221,13 @@ def emit_pre_migrate_signal(create_models, verbosity, interactive, db):
|
|||
|
||||
def emit_post_migrate_signal(created_models, verbosity, interactive, db):
|
||||
# Emit the post_migrate signal for every application.
|
||||
for app in models.get_apps():
|
||||
app_name = app.__name__.split('.')[-2]
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
if verbosity >= 2:
|
||||
print("Running post-migrate handlers for application %s" % app_name)
|
||||
models.signals.post_migrate.send(sender=app, app=app,
|
||||
created_models=created_models, verbosity=verbosity,
|
||||
interactive=interactive, db=db)
|
||||
print("Running post-migrate handlers for application %s" % app_config.label)
|
||||
models.signals.post_migrate.send(
|
||||
sender=app_config.models_module,
|
||||
app=app_config.models_module,
|
||||
created_models=created_models,
|
||||
verbosity=verbosity,
|
||||
interactive=interactive,
|
||||
db=db)
|
||||
|
|
|
@ -26,16 +26,13 @@ def get_validation_errors(outfile, app=None):
|
|||
validates all models of all installed apps. Writes errors, if any, to outfile.
|
||||
Returns number of errors.
|
||||
"""
|
||||
from django.core.apps import app_cache
|
||||
from django.db import connection, models
|
||||
from django.db.models.loading import get_app_errors
|
||||
from django.db.models.deletion import SET_NULL, SET_DEFAULT
|
||||
|
||||
e = ModelErrorCollection(outfile)
|
||||
|
||||
for (app_name, error) in get_app_errors().items():
|
||||
e.add(app_name, error)
|
||||
|
||||
for cls in models.get_models(app, include_swapped=True):
|
||||
for cls in app_cache.get_models(app, include_swapped=True):
|
||||
opts = cls._meta
|
||||
|
||||
# Check swappable attribute.
|
||||
|
@ -45,7 +42,7 @@ def get_validation_errors(outfile, app=None):
|
|||
except ValueError:
|
||||
e.add(opts, "%s is not of the form 'app_label.app_name'." % opts.swappable)
|
||||
continue
|
||||
if not models.get_model(app_label, model_name):
|
||||
if not app_cache.get_model(app_label, model_name):
|
||||
e.add(opts, "Model has been swapped out for '%s' which has not been installed or is abstract." % opts.swapped)
|
||||
# No need to perform any other validation checks on a swapped model.
|
||||
continue
|
||||
|
@ -155,7 +152,7 @@ def get_validation_errors(outfile, app=None):
|
|||
# Check to see if the related field will clash with any existing
|
||||
# fields, m2m fields, m2m related objects or related objects
|
||||
if f.rel:
|
||||
if f.rel.to not in models.get_models():
|
||||
if f.rel.to not in app_cache.get_models():
|
||||
# If the related model is swapped, provide a hint;
|
||||
# otherwise, the model just hasn't been installed.
|
||||
if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
|
||||
|
@ -210,7 +207,7 @@ def get_validation_errors(outfile, app=None):
|
|||
# Check to see if the related m2m field will clash with any
|
||||
# existing fields, m2m fields, m2m related objects or related
|
||||
# objects
|
||||
if f.rel.to not in models.get_models():
|
||||
if f.rel.to not in app_cache.get_models():
|
||||
# If the related model is swapped, provide a hint;
|
||||
# otherwise, the model just hasn't been installed.
|
||||
if not isinstance(f.rel.to, six.string_types) and f.rel.to._meta.swapped:
|
||||
|
@ -268,10 +265,9 @@ def get_validation_errors(outfile, app=None):
|
|||
)
|
||||
else:
|
||||
seen_to = True
|
||||
if f.rel.through not in models.get_models(include_auto_created=True):
|
||||
if f.rel.through not in app_cache.get_models(include_auto_created=True):
|
||||
e.add(opts, "'%s' specifies an m2m relation through model "
|
||||
"%s, which has not been installed." % (f.name, f.rel.through)
|
||||
)
|
||||
"%s, which has not been installed." % (f.name, f.rel.through))
|
||||
signature = (f.rel.to, cls, f.rel.through)
|
||||
if signature in seen_intermediary_signatures:
|
||||
e.add(opts, "The model %s has two manually-defined m2m "
|
||||
|
@ -295,13 +291,14 @@ def get_validation_errors(outfile, app=None):
|
|||
if not seen_related_fk or not seen_this_fk:
|
||||
e.add(opts, "'%s' is a manually-defined m2m relation "
|
||||
"through model %s, which does not have foreign keys "
|
||||
"to %s and %s" % (f.name, f.rel.through._meta.object_name,
|
||||
f.rel.to._meta.object_name, cls._meta.object_name)
|
||||
"to %s and %s" % (
|
||||
f.name, f.rel.through._meta.object_name,
|
||||
f.rel.to._meta.object_name, cls._meta.object_name
|
||||
)
|
||||
)
|
||||
elif isinstance(f.rel.through, six.string_types):
|
||||
e.add(opts, "'%s' specifies an m2m relation through model %s, "
|
||||
"which has not been installed" % (f.name, f.rel.through)
|
||||
)
|
||||
"which has not been installed" % (f.name, f.rel.through))
|
||||
|
||||
rel_opts = f.rel.to._meta
|
||||
rel_name = f.related.get_accessor_name()
|
||||
|
|
|
@ -3,6 +3,7 @@ Module for abstract serializer/unserializer base classes.
|
|||
"""
|
||||
import warnings
|
||||
|
||||
from django.core.apps import app_cache
|
||||
from django.db import models
|
||||
from django.utils import six
|
||||
|
||||
|
@ -136,10 +137,9 @@ class Deserializer(six.Iterator):
|
|||
self.stream = six.StringIO(stream_or_string)
|
||||
else:
|
||||
self.stream = stream_or_string
|
||||
# hack to make sure that the models have all been loaded before
|
||||
# deserialization starts (otherwise subclass calls to get_model()
|
||||
# and friends might fail...)
|
||||
models.get_apps()
|
||||
# Make sure the app cache is loaded before deserialization starts
|
||||
# (otherwise subclass calls to get_model() and friends might fail...)
|
||||
app_cache.populate()
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
|
|
@ -6,6 +6,7 @@ other serializers.
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.apps import app_cache
|
||||
from django.core.serializers import base
|
||||
from django.db import models, DEFAULT_DB_ALIAS
|
||||
from django.utils.encoding import smart_text, is_protected_type
|
||||
|
@ -87,7 +88,8 @@ def Deserializer(object_list, **options):
|
|||
db = options.pop('using', DEFAULT_DB_ALIAS)
|
||||
ignore = options.pop('ignorenonexistent', False)
|
||||
|
||||
models.get_apps()
|
||||
app_cache.populate()
|
||||
|
||||
for d in object_list:
|
||||
# Look up the model and starting build a dict of data for it.
|
||||
Model = _get_model(d["model"])
|
||||
|
@ -153,7 +155,7 @@ def _get_model(model_identifier):
|
|||
Helper to look up a model from an "app_label.model_name" string.
|
||||
"""
|
||||
try:
|
||||
Model = models.get_model(*model_identifier.split("."))
|
||||
Model = app_cache.get_model(*model_identifier.split("."))
|
||||
except TypeError:
|
||||
Model = None
|
||||
if Model is None:
|
||||
|
|
|
@ -5,6 +5,7 @@ XML serializer.
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.apps import app_cache
|
||||
from django.core.serializers import base
|
||||
from django.db import models, DEFAULT_DB_ALIAS
|
||||
from django.utils.xmlutils import SimplerXMLGenerator
|
||||
|
@ -276,7 +277,7 @@ class Deserializer(base.Deserializer):
|
|||
"<%s> node is missing the required '%s' attribute"
|
||||
% (node.nodeName, attr))
|
||||
try:
|
||||
Model = models.get_model(*model_identifier.split("."))
|
||||
Model = app_cache.get_model(*model_identifier.split("."))
|
||||
except TypeError:
|
||||
Model = None
|
||||
if Model is None:
|
||||
|
|
|
@ -131,8 +131,7 @@ def get_ns_resolver(ns_pattern, resolver):
|
|||
# Build a namespaced resolver for the given parent urlconf pattern.
|
||||
# This makes it possible to have captured parameters in the parent
|
||||
# urlconf pattern.
|
||||
ns_resolver = RegexURLResolver(ns_pattern,
|
||||
resolver.url_patterns)
|
||||
ns_resolver = RegexURLResolver(ns_pattern, resolver.url_patterns)
|
||||
return RegexURLResolver(r'^/', [ns_resolver])
|
||||
|
||||
|
||||
|
|
|
@ -1218,8 +1218,7 @@ class BaseDatabaseOperations(object):
|
|||
|
||||
# Structure returned by the DB-API cursor.description interface (PEP 249)
|
||||
FieldInfo = namedtuple('FieldInfo',
|
||||
'name type_code display_size internal_size precision scale null_ok'
|
||||
)
|
||||
'name type_code display_size internal_size precision scale null_ok')
|
||||
|
||||
|
||||
class BaseDatabaseIntrospection(object):
|
||||
|
@ -1272,10 +1271,11 @@ class BaseDatabaseIntrospection(object):
|
|||
If only_existing is True, the resulting list will only include the tables
|
||||
that actually exist in the database.
|
||||
"""
|
||||
from django.db import models, router
|
||||
from django.core.apps import app_cache
|
||||
from django.db import router
|
||||
tables = set()
|
||||
for app in models.get_apps():
|
||||
for model in router.get_migratable_models(app, self.connection.alias):
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
for model in router.get_migratable_models(app_config.models_module, self.connection.alias):
|
||||
if not model._meta.managed:
|
||||
continue
|
||||
tables.add(model._meta.db_table)
|
||||
|
@ -1292,10 +1292,11 @@ class BaseDatabaseIntrospection(object):
|
|||
|
||||
def installed_models(self, tables):
|
||||
"Returns a set of all models represented by the provided list of table names."
|
||||
from django.db import models, router
|
||||
from django.core.apps import app_cache
|
||||
from django.db import router
|
||||
all_models = []
|
||||
for app in models.get_apps():
|
||||
all_models.extend(router.get_migratable_models(app, self.connection.alias))
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
all_models.extend(router.get_migratable_models(app_config.models_module, self.connection.alias))
|
||||
tables = list(map(self.table_name_converter, tables))
|
||||
return set([
|
||||
m for m in all_models
|
||||
|
@ -1304,13 +1305,13 @@ class BaseDatabaseIntrospection(object):
|
|||
|
||||
def sequence_list(self):
|
||||
"Returns a list of information about all DB sequences for all models in all apps."
|
||||
from django.core.apps import app_cache
|
||||
from django.db import models, router
|
||||
|
||||
apps = models.get_apps()
|
||||
sequence_list = []
|
||||
|
||||
for app in apps:
|
||||
for model in router.get_migratable_models(app, self.connection.alias):
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
for model in router.get_migratable_models(app_config.models_module, self.connection.alias):
|
||||
if not model._meta.managed:
|
||||
continue
|
||||
if model._meta.swapped:
|
||||
|
|
|
@ -332,13 +332,15 @@ class DatabaseOperations(BaseDatabaseOperations):
|
|||
# Truncate already resets the AUTO_INCREMENT field from
|
||||
# MySQL version 5.0.13 onwards. Refs #16961.
|
||||
if self.connection.mysql_version < (5, 0, 13):
|
||||
return ["%s %s %s %s %s;" %
|
||||
(style.SQL_KEYWORD('ALTER'),
|
||||
return [
|
||||
"%s %s %s %s %s;" % (
|
||||
style.SQL_KEYWORD('ALTER'),
|
||||
style.SQL_KEYWORD('TABLE'),
|
||||
style.SQL_TABLE(self.quote_name(sequence['table'])),
|
||||
style.SQL_KEYWORD('AUTO_INCREMENT'),
|
||||
style.SQL_FIELD('= 1'),
|
||||
) for sequence in sequences]
|
||||
) for sequence in sequences
|
||||
]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
|
|
@ -390,9 +390,11 @@ WHEN (new.%(col_name)s IS NULL)
|
|||
sequence_name = self._get_sequence_name(sequence_info['table'])
|
||||
table_name = self.quote_name(sequence_info['table'])
|
||||
column_name = self.quote_name(sequence_info['column'] or 'id')
|
||||
query = _get_sequence_reset_sql() % {'sequence': sequence_name,
|
||||
query = _get_sequence_reset_sql() % {
|
||||
'sequence': sequence_name,
|
||||
'table': table_name,
|
||||
'column': column_name}
|
||||
'column': column_name,
|
||||
}
|
||||
sql.append(query)
|
||||
return sql
|
||||
|
||||
|
@ -880,12 +882,10 @@ class FormatStylePlaceholderCursor(object):
|
|||
def fetchmany(self, size=None):
|
||||
if size is None:
|
||||
size = self.arraysize
|
||||
return tuple(_rowfactory(r, self.cursor)
|
||||
for r in self.cursor.fetchmany(size))
|
||||
return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchmany(size))
|
||||
|
||||
def fetchall(self):
|
||||
return tuple(_rowfactory(r, self.cursor)
|
||||
for r in self.cursor.fetchall())
|
||||
return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchall())
|
||||
|
||||
def var(self, *args):
|
||||
return VariableWrapper(self.cursor.var(*args))
|
||||
|
|
|
@ -168,7 +168,8 @@ class DatabaseWrapper(BaseDatabaseWrapper):
|
|||
# notification. If we don't set self.connection to None, the error
|
||||
# will occur a every request.
|
||||
self.connection = None
|
||||
logger.warning('psycopg2 error while closing the connection.',
|
||||
logger.warning(
|
||||
'psycopg2 error while closing the connection.',
|
||||
exc_info=sys.exc_info()
|
||||
)
|
||||
raise
|
||||
|
|
|
@ -2,4 +2,57 @@ from django.db.backends.schema import BaseDatabaseSchemaEditor
|
|||
|
||||
|
||||
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
||||
pass
|
||||
|
||||
sql_create_sequence = "CREATE SEQUENCE %(sequence)s"
|
||||
sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
|
||||
sql_set_sequence_max = "SELECT setval('%(sequence)s', MAX(%(column)s)) FROM %(table)s"
|
||||
|
||||
def _alter_column_type_sql(self, table, column, type):
|
||||
"""
|
||||
Makes ALTER TYPE with SERIAL make sense.
|
||||
"""
|
||||
if type.lower() == "serial":
|
||||
sequence_name = "%s_%s_seq" % (table, column)
|
||||
return (
|
||||
(
|
||||
self.sql_alter_column_type % {
|
||||
"column": self.quote_name(column),
|
||||
"type": "integer",
|
||||
},
|
||||
[],
|
||||
),
|
||||
[
|
||||
(
|
||||
self.sql_delete_sequence % {
|
||||
"sequence": sequence_name,
|
||||
},
|
||||
[],
|
||||
),
|
||||
(
|
||||
self.sql_create_sequence % {
|
||||
"sequence": sequence_name,
|
||||
},
|
||||
[],
|
||||
),
|
||||
(
|
||||
self.sql_alter_column % {
|
||||
"table": table,
|
||||
"changes": self.sql_alter_column_default % {
|
||||
"column": column,
|
||||
"default": "nextval('%s')" % sequence_name,
|
||||
}
|
||||
},
|
||||
[],
|
||||
),
|
||||
(
|
||||
self.sql_set_sequence_max % {
|
||||
"table": table,
|
||||
"column": column,
|
||||
"sequence": sequence_name,
|
||||
},
|
||||
[],
|
||||
),
|
||||
],
|
||||
)
|
||||
else:
|
||||
return super(DatabaseSchemaEditor, self)._alter_column_type_sql(table, column, type)
|
||||
|
|
|
@ -90,7 +90,7 @@ class BaseDatabaseSchemaEditor(object):
|
|||
# Log the command we're running, then run it
|
||||
logger.debug("%s; (params %r)" % (sql, params))
|
||||
if self.collect_sql:
|
||||
self.collected_sql.append((sql % list(map(self.connection.ops.quote_parameter, params))) + ";")
|
||||
self.collected_sql.append((sql % tuple(map(self.connection.ops.quote_parameter, params))) + ";")
|
||||
else:
|
||||
cursor.execute(sql, params)
|
||||
|
||||
|
@ -498,6 +498,18 @@ class BaseDatabaseSchemaEditor(object):
|
|||
"name": fk_name,
|
||||
}
|
||||
)
|
||||
# Drop incoming FK constraints if we're a primary key and things are going
|
||||
# to change.
|
||||
if old_field.primary_key and new_field.primary_key and old_type != new_type:
|
||||
for rel in new_field.model._meta.get_all_related_objects():
|
||||
rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
|
||||
for fk_name in rel_fk_names:
|
||||
self.execute(
|
||||
self.sql_delete_fk % {
|
||||
"table": self.quote_name(rel.model._meta.db_table),
|
||||
"name": fk_name,
|
||||
}
|
||||
)
|
||||
# Change check constraints?
|
||||
if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
|
||||
constraint_names = self._constraint_names(model, [old_field.column], check=True)
|
||||
|
@ -524,15 +536,12 @@ class BaseDatabaseSchemaEditor(object):
|
|||
})
|
||||
# Next, start accumulating actions to do
|
||||
actions = []
|
||||
post_actions = []
|
||||
# Type change?
|
||||
if old_type != new_type:
|
||||
actions.append((
|
||||
self.sql_alter_column_type % {
|
||||
"column": self.quote_name(new_field.column),
|
||||
"type": new_type,
|
||||
},
|
||||
[],
|
||||
))
|
||||
fragment, other_actions = self._alter_column_type_sql(model._meta.db_table, new_field.column, new_type)
|
||||
actions.append(fragment)
|
||||
post_actions.extend(other_actions)
|
||||
# Default change?
|
||||
old_default = self.effective_default(old_field)
|
||||
new_default = self.effective_default(new_field)
|
||||
|
@ -596,6 +605,9 @@ class BaseDatabaseSchemaEditor(object):
|
|||
},
|
||||
params,
|
||||
)
|
||||
if post_actions:
|
||||
for sql, params in post_actions:
|
||||
self.execute(sql, params)
|
||||
# Added a unique?
|
||||
if not old_field.unique and new_field.unique:
|
||||
self.execute(
|
||||
|
@ -615,6 +627,11 @@ class BaseDatabaseSchemaEditor(object):
|
|||
"extra": "",
|
||||
}
|
||||
)
|
||||
# Type alteration on primary key? Then we need to alter the column
|
||||
# referring to us.
|
||||
rels_to_update = []
|
||||
if old_field.primary_key and new_field.primary_key and old_type != new_type:
|
||||
rels_to_update.extend(new_field.model._meta.get_all_related_objects())
|
||||
# Changed to become primary key?
|
||||
# Note that we don't detect unsetting of a PK, as we assume another field
|
||||
# will always come along and replace it.
|
||||
|
@ -641,6 +658,21 @@ class BaseDatabaseSchemaEditor(object):
|
|||
"columns": self.quote_name(new_field.column),
|
||||
}
|
||||
)
|
||||
# Update all referencing columns
|
||||
rels_to_update.extend(new_field.model._meta.get_all_related_objects())
|
||||
# Handle our type alters on the other end of rels from the PK stuff above
|
||||
for rel in rels_to_update:
|
||||
rel_db_params = rel.field.db_parameters(connection=self.connection)
|
||||
rel_type = rel_db_params['type']
|
||||
self.execute(
|
||||
self.sql_alter_column % {
|
||||
"table": self.quote_name(rel.model._meta.db_table),
|
||||
"changes": self.sql_alter_column_type % {
|
||||
"column": self.quote_name(rel.field.column),
|
||||
"type": rel_type,
|
||||
}
|
||||
}
|
||||
)
|
||||
# Does it have a foreign key?
|
||||
if new_field.rel:
|
||||
self.execute(
|
||||
|
@ -652,6 +684,18 @@ class BaseDatabaseSchemaEditor(object):
|
|||
"to_column": self.quote_name(new_field.rel.get_related_field().column),
|
||||
}
|
||||
)
|
||||
# Rebuild FKs that pointed to us if we previously had to drop them
|
||||
if old_field.primary_key and new_field.primary_key and old_type != new_type:
|
||||
for rel in new_field.model._meta.get_all_related_objects():
|
||||
self.execute(
|
||||
self.sql_create_fk % {
|
||||
"table": self.quote_name(rel.model._meta.db_table),
|
||||
"name": self._create_index_name(rel.model, [rel.field.column], suffix="_fk"),
|
||||
"column": self.quote_name(rel.field.column),
|
||||
"to_table": self.quote_name(model._meta.db_table),
|
||||
"to_column": self.quote_name(new_field.column),
|
||||
}
|
||||
)
|
||||
# Does it have check constraints we need to add?
|
||||
if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
|
||||
self.execute(
|
||||
|
@ -666,6 +710,27 @@ class BaseDatabaseSchemaEditor(object):
|
|||
if self.connection.features.connection_persists_old_columns:
|
||||
self.connection.close()
|
||||
|
||||
def _alter_column_type_sql(self, table, column, type):
|
||||
"""
|
||||
Hook to specialise column type alteration for different backends,
|
||||
for cases when a creation type is different to an alteration type
|
||||
(e.g. SERIAL in PostgreSQL, PostGIS fields).
|
||||
|
||||
Should return two things; an SQL fragment of (sql, params) to insert
|
||||
into an ALTER TABLE statement, and a list of extra (sql, params) tuples
|
||||
to run once the field is altered.
|
||||
"""
|
||||
return (
|
||||
(
|
||||
self.sql_alter_column_type % {
|
||||
"column": self.quote_name(column),
|
||||
"type": type,
|
||||
},
|
||||
[],
|
||||
),
|
||||
[],
|
||||
)
|
||||
|
||||
def _alter_many_to_many(self, model, old_field, new_field, strict):
|
||||
"""
|
||||
Alters M2Ms to repoint their to= endpoints.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from django.core.apps.cache import AppCache
|
||||
from django.db.backends.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.models.fields.related import ManyToManyField
|
||||
from django.db.models.loading import BaseAppCache
|
||||
|
||||
|
||||
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
||||
|
@ -39,7 +39,7 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
|||
del body[field.name]
|
||||
del mapping[field.column]
|
||||
# Work inside a new AppCache
|
||||
app_cache = BaseAppCache()
|
||||
app_cache = AppCache()
|
||||
# Construct a new model for the new state
|
||||
meta_contents = {
|
||||
'app_label': model._meta.app_label,
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
import re
|
||||
import os
|
||||
import sys
|
||||
from django.utils import datetime_safe, importlib
|
||||
from django.utils.six.moves import input
|
||||
import datetime
|
||||
|
||||
from django.db.migrations import operations
|
||||
from django.db.migrations.migration import Migration
|
||||
from django.db.models.loading import cache
|
||||
from django.db.migrations.questioner import MigrationQuestioner
|
||||
|
||||
|
||||
class MigrationAutodetector(object):
|
||||
|
@ -70,7 +68,7 @@ class MigrationAutodetector(object):
|
|||
model_state = self.to_state.models[app_label, model_name]
|
||||
# Are there any relationships out from this model? if so, punt it to the next phase.
|
||||
related_fields = []
|
||||
for field in new_app_cache.get_model(app_label, model_name)._meta.fields:
|
||||
for field in new_app_cache.get_model(app_label, model_name)._meta.local_fields:
|
||||
if field.rel:
|
||||
if field.rel.to:
|
||||
related_fields.append((field.name, field.rel.to._meta.app_label.lower(), field.rel.to._meta.object_name.lower()))
|
||||
|
@ -156,37 +154,64 @@ class MigrationAutodetector(object):
|
|||
)
|
||||
# Changes within models
|
||||
kept_models = set(old_model_keys).intersection(new_model_keys)
|
||||
old_fields = set()
|
||||
new_fields = set()
|
||||
for app_label, model_name in kept_models:
|
||||
old_model_state = self.from_state.models[app_label, model_name]
|
||||
new_model_state = self.to_state.models[app_label, model_name]
|
||||
# Collect field changes for later global dealing with (so AddFields
|
||||
# always come before AlterFields even on separate models)
|
||||
old_fields.update((app_label, model_name, x) for x, y in old_model_state.fields)
|
||||
new_fields.update((app_label, model_name, x) for x, y in new_model_state.fields)
|
||||
# Unique_together changes
|
||||
if old_model_state.options.get("unique_together", set()) != new_model_state.options.get("unique_together", set()):
|
||||
self.add_to_migration(
|
||||
app_label,
|
||||
operations.AlterUniqueTogether(
|
||||
name=model_name,
|
||||
unique_together=new_model_state.options.get("unique_together", set()),
|
||||
)
|
||||
)
|
||||
# New fields
|
||||
old_field_names = set(x for x, y in old_model_state.fields)
|
||||
new_field_names = set(x for x, y in new_model_state.fields)
|
||||
for field_name in new_field_names - old_field_names:
|
||||
for app_label, model_name, field_name in new_fields - old_fields:
|
||||
old_model_state = self.from_state.models[app_label, model_name]
|
||||
new_model_state = self.to_state.models[app_label, model_name]
|
||||
field = new_model_state.get_field_by_name(field_name)
|
||||
# Scan to see if this is actually a rename!
|
||||
field_dec = field.deconstruct()[1:]
|
||||
found_rename = False
|
||||
for removed_field_name in (old_field_names - new_field_names):
|
||||
if old_model_state.get_field_by_name(removed_field_name).deconstruct()[1:] == field_dec:
|
||||
if self.questioner.ask_rename(model_name, removed_field_name, field_name, field):
|
||||
for rem_app_label, rem_model_name, rem_field_name in (old_fields - new_fields):
|
||||
if rem_app_label == app_label and rem_model_name == model_name:
|
||||
if old_model_state.get_field_by_name(rem_field_name).deconstruct()[1:] == field_dec:
|
||||
if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
|
||||
self.add_to_migration(
|
||||
app_label,
|
||||
operations.RenameField(
|
||||
model_name=model_name,
|
||||
old_name=removed_field_name,
|
||||
old_name=rem_field_name,
|
||||
new_name=field_name,
|
||||
)
|
||||
)
|
||||
old_field_names.remove(removed_field_name)
|
||||
new_field_names.remove(field_name)
|
||||
old_fields.remove((rem_app_label, rem_model_name, rem_field_name))
|
||||
new_fields.remove((app_label, model_name, field_name))
|
||||
found_rename = True
|
||||
break
|
||||
if found_rename:
|
||||
continue
|
||||
# You can't just add NOT NULL fields with no default
|
||||
if not field.null and not field.has_default():
|
||||
field = field.clone()
|
||||
field.default = self.questioner.ask_not_null_addition(field_name, model_name)
|
||||
self.add_to_migration(
|
||||
app_label,
|
||||
operations.AddField(
|
||||
model_name=model_name,
|
||||
name=field_name,
|
||||
field=field,
|
||||
preserve_default=False,
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.add_to_migration(
|
||||
app_label,
|
||||
operations.AddField(
|
||||
|
@ -196,7 +221,9 @@ class MigrationAutodetector(object):
|
|||
)
|
||||
)
|
||||
# Old fields
|
||||
for field_name in old_field_names - new_field_names:
|
||||
for app_label, model_name, field_name in old_fields - new_fields:
|
||||
old_model_state = self.from_state.models[app_label, model_name]
|
||||
new_model_state = self.to_state.models[app_label, model_name]
|
||||
self.add_to_migration(
|
||||
app_label,
|
||||
operations.RemoveField(
|
||||
|
@ -205,8 +232,10 @@ class MigrationAutodetector(object):
|
|||
)
|
||||
)
|
||||
# The same fields
|
||||
for field_name in old_field_names.intersection(new_field_names):
|
||||
for app_label, model_name, field_name in old_fields.intersection(new_fields):
|
||||
# Did the field change?
|
||||
old_model_state = self.from_state.models[app_label, model_name]
|
||||
new_model_state = self.to_state.models[app_label, model_name]
|
||||
old_field_dec = old_model_state.get_field_by_name(field_name).deconstruct()
|
||||
new_field_dec = new_model_state.get_field_by_name(field_name).deconstruct()
|
||||
if old_field_dec != new_field_dec:
|
||||
|
@ -218,15 +247,6 @@ class MigrationAutodetector(object):
|
|||
field=new_model_state.get_field_by_name(field_name),
|
||||
)
|
||||
)
|
||||
# unique_together changes
|
||||
if old_model_state.options.get("unique_together", set()) != new_model_state.options.get("unique_together", set()):
|
||||
self.add_to_migration(
|
||||
app_label,
|
||||
operations.AlterUniqueTogether(
|
||||
name=model_name,
|
||||
unique_together=new_model_state.options.get("unique_together", set()),
|
||||
)
|
||||
)
|
||||
# Alright, now add internal dependencies
|
||||
for app_label, migrations in self.migrations.items():
|
||||
for m1, m2 in zip(migrations, migrations[1:]):
|
||||
|
@ -331,8 +351,9 @@ class MigrationAutodetector(object):
|
|||
def suggest_name(cls, ops):
|
||||
"""
|
||||
Given a set of operations, suggests a name for the migration
|
||||
they might represent. Names not guaranteed to be unique; they
|
||||
must be prefixed by a number or date.
|
||||
they might represent. Names are not guaranteed to be unique,
|
||||
but we put some effort in to the fallback name to avoid VCS conflicts
|
||||
if we can.
|
||||
"""
|
||||
if len(ops) == 1:
|
||||
if isinstance(ops[0], operations.CreateModel):
|
||||
|
@ -345,7 +366,7 @@ class MigrationAutodetector(object):
|
|||
return "remove_%s_%s" % (ops[0].model_name.lower(), ops[0].name.lower())
|
||||
elif all(isinstance(o, operations.CreateModel) for o in ops):
|
||||
return "_".join(sorted(o.name.lower() for o in ops))
|
||||
return "auto"
|
||||
return "auto_%s" % datetime.datetime.now().strftime("%Y%m%d_%H%M")
|
||||
|
||||
@classmethod
|
||||
def parse_number(cls, name):
|
||||
|
@ -356,107 +377,3 @@ class MigrationAutodetector(object):
|
|||
if re.match(r"^\d+_", name):
|
||||
return int(name.split("_")[0])
|
||||
return None
|
||||
|
||||
|
||||
class MigrationQuestioner(object):
|
||||
"""
|
||||
Gives the autodetector responses to questions it might have.
|
||||
This base class has a built-in noninteractive mode, but the
|
||||
interactive subclass is what the command-line arguments will use.
|
||||
"""
|
||||
|
||||
def __init__(self, defaults=None):
|
||||
self.defaults = defaults or {}
|
||||
|
||||
def ask_initial(self, app_label):
|
||||
"Should we create an initial migration for the app?"
|
||||
return self.defaults.get("ask_initial", False)
|
||||
|
||||
def ask_not_null_addition(self, field_name, model_name):
|
||||
"Adding a NOT NULL field to a model"
|
||||
# None means quit
|
||||
return None
|
||||
|
||||
def ask_rename(self, model_name, old_name, new_name, field_instance):
|
||||
"Was this field really renamed?"
|
||||
return self.defaults.get("ask_rename", False)
|
||||
|
||||
|
||||
class InteractiveMigrationQuestioner(MigrationQuestioner):
|
||||
|
||||
def __init__(self, specified_apps=set()):
|
||||
self.specified_apps = specified_apps
|
||||
|
||||
def _boolean_input(self, question, default=None):
|
||||
result = input("%s " % question)
|
||||
if not result and default is not None:
|
||||
return default
|
||||
while len(result) < 1 or result[0].lower() not in "yn":
|
||||
result = input("Please answer yes or no: ")
|
||||
return result[0].lower() == "y"
|
||||
|
||||
def _choice_input(self, question, choices):
|
||||
print(question)
|
||||
for i, choice in enumerate(choices):
|
||||
print(" %s) %s" % (i + 1, choice))
|
||||
result = input("Select an option: ")
|
||||
while True:
|
||||
try:
|
||||
value = int(result)
|
||||
if 0 < value <= len(choices):
|
||||
return value
|
||||
except ValueError:
|
||||
pass
|
||||
result = input("Please select a valid option: ")
|
||||
|
||||
def ask_initial(self, app_label):
|
||||
"Should we create an initial migration for the app?"
|
||||
# If it was specified on the command line, definitely true
|
||||
if app_label in self.specified_apps:
|
||||
return True
|
||||
# Otherwise, we look to see if it has a migrations module
|
||||
# without any Python files in it, apart from __init__.py.
|
||||
# Apps from the new app template will have these; the python
|
||||
# file check will ensure we skip South ones.
|
||||
models_module = cache.get_app(app_label)
|
||||
migrations_import_path = "%s.migrations" % models_module.__package__
|
||||
try:
|
||||
migrations_module = importlib.import_module(migrations_import_path)
|
||||
except ImportError:
|
||||
return False
|
||||
else:
|
||||
filenames = os.listdir(os.path.dirname(migrations_module.__file__))
|
||||
return not any(x.endswith(".py") for x in filenames if x != "__init__.py")
|
||||
|
||||
def ask_not_null_addition(self, field_name, model_name):
|
||||
"Adding a NOT NULL field to a model"
|
||||
choice = self._choice_input(
|
||||
"You are trying to add a non-nullable field '%s' to %s without a default;\n" % (field_name, model_name) +
|
||||
"this is not possible. Please select a fix:",
|
||||
[
|
||||
"Provide a one-off default now (will be set on all existing rows)",
|
||||
"Quit, and let me add a default in models.py",
|
||||
]
|
||||
)
|
||||
if choice == 2:
|
||||
sys.exit(3)
|
||||
else:
|
||||
print("Please enter the default value now, as valid Python")
|
||||
print("The datetime module is available, so you can do e.g. datetime.date.today()")
|
||||
while True:
|
||||
code = input(">>> ")
|
||||
if not code:
|
||||
print("Please enter some code, or 'exit' (with no quotes) to exit.")
|
||||
elif code == "exit":
|
||||
sys.exit(1)
|
||||
else:
|
||||
try:
|
||||
return eval(code, {}, {"datetime": datetime_safe})
|
||||
except (SyntaxError, NameError) as e:
|
||||
print("Invalid input: %s" % e)
|
||||
else:
|
||||
break
|
||||
|
||||
def ask_rename(self, model_name, old_name, new_name, field_instance):
|
||||
"Was this field really renamed?"
|
||||
return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)? [y/N]" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False)
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
from importlib import import_module
|
||||
import os
|
||||
import sys
|
||||
from importlib import import_module
|
||||
from django.db.models.loading import cache
|
||||
|
||||
from django.core.apps import app_cache
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
from django.db.migrations.graph import MigrationGraph
|
||||
from django.utils import six
|
||||
|
@ -45,7 +46,7 @@ class MigrationLoader(object):
|
|||
if app_label in settings.MIGRATION_MODULES:
|
||||
return settings.MIGRATION_MODULES[app_label]
|
||||
else:
|
||||
return '%s.migrations' % cache.get_app_package(app_label)
|
||||
return '%s.migrations' % app_cache.get_app_config(app_label).name
|
||||
|
||||
def load_disk(self):
|
||||
"""
|
||||
|
@ -54,10 +55,9 @@ class MigrationLoader(object):
|
|||
self.disk_migrations = {}
|
||||
self.unmigrated_apps = set()
|
||||
self.migrated_apps = set()
|
||||
for app in cache.get_apps():
|
||||
for app_config in app_cache.get_app_configs(only_with_models_module=True):
|
||||
# Get the migrations module directory
|
||||
app_label = app.__name__.split(".")[-2]
|
||||
module_name = self.migrations_module(app_label)
|
||||
module_name = self.migrations_module(app_config.label)
|
||||
was_loaded = module_name in sys.modules
|
||||
try:
|
||||
module = import_module(module_name)
|
||||
|
@ -65,7 +65,7 @@ class MigrationLoader(object):
|
|||
# I hate doing this, but I don't want to squash other import errors.
|
||||
# Might be better to try a directory check directly.
|
||||
if "No module named" in str(e) and "migrations" in str(e):
|
||||
self.unmigrated_apps.add(app_label)
|
||||
self.unmigrated_apps.add(app_config.label)
|
||||
continue
|
||||
raise
|
||||
else:
|
||||
|
@ -78,7 +78,7 @@ class MigrationLoader(object):
|
|||
# Force a reload if it's already loaded (tests need this)
|
||||
if was_loaded:
|
||||
six.moves.reload_module(module)
|
||||
self.migrated_apps.add(app_label)
|
||||
self.migrated_apps.add(app_config.label)
|
||||
directory = os.path.dirname(module.__file__)
|
||||
# Scan for .py[c|o] files
|
||||
migration_names = set()
|
||||
|
@ -99,14 +99,14 @@ class MigrationLoader(object):
|
|||
break
|
||||
raise
|
||||
if not hasattr(migration_module, "Migration"):
|
||||
raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_label))
|
||||
raise BadMigrationError("Migration %s in app %s has no Migration class" % (migration_name, app_config.label))
|
||||
# Ignore South-style migrations
|
||||
if hasattr(migration_module.Migration, "forwards"):
|
||||
south_style_migrations = True
|
||||
break
|
||||
self.disk_migrations[app_label, migration_name] = migration_module.Migration(migration_name, app_label)
|
||||
self.disk_migrations[app_config.label, migration_name] = migration_module.Migration(migration_name, app_config.label)
|
||||
if south_style_migrations:
|
||||
self.unmigrated_apps.add(app_label)
|
||||
self.unmigrated_apps.add(app_config.label)
|
||||
|
||||
def get_migration(self, app_label, name_prefix):
|
||||
"Gets the migration exactly named, or raises KeyError"
|
||||
|
@ -187,6 +187,20 @@ class MigrationLoader(object):
|
|||
for parent in migration.dependencies:
|
||||
self.graph.add_dependency(key, parent)
|
||||
|
||||
def detect_conflicts(self):
|
||||
"""
|
||||
Looks through the loaded graph and detects any conflicts - apps
|
||||
with more than one leaf migration. Returns a dict of the app labels
|
||||
that conflict with the migration names that conflict.
|
||||
"""
|
||||
seen_apps = {}
|
||||
conflicting_apps = set()
|
||||
for app_label, migration_name in self.graph.leaf_nodes():
|
||||
if app_label in seen_apps:
|
||||
conflicting_apps.add(app_label)
|
||||
seen_apps.setdefault(app_label, set()).add(migration_name)
|
||||
return dict((app_label, seen_apps[app_label]) for app_label in conflicting_apps)
|
||||
|
||||
|
||||
class BadMigrationError(Exception):
|
||||
"""
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from django.db import router
|
||||
from django.db.models.fields import NOT_PROVIDED
|
||||
from .base import Operation
|
||||
|
||||
|
||||
|
@ -7,13 +8,20 @@ class AddField(Operation):
|
|||
Adds a field to a model.
|
||||
"""
|
||||
|
||||
def __init__(self, model_name, name, field):
|
||||
def __init__(self, model_name, name, field, preserve_default=True):
|
||||
self.model_name = model_name
|
||||
self.name = name
|
||||
self.field = field
|
||||
self.preserve_default = preserve_default
|
||||
|
||||
def state_forwards(self, app_label, state):
|
||||
state.models[app_label, self.model_name.lower()].fields.append((self.name, self.field))
|
||||
# If preserve default is off, don't use the default for future state
|
||||
if not self.preserve_default:
|
||||
field = self.field.clone()
|
||||
field.default = NOT_PROVIDED
|
||||
else:
|
||||
field = self.field
|
||||
state.models[app_label, self.model_name.lower()].fields.append((self.name, field))
|
||||
|
||||
def database_forwards(self, app_label, schema_editor, from_state, to_state):
|
||||
from_model = from_state.render().get_model(app_label, self.model_name)
|
||||
|
|
|
@ -176,12 +176,14 @@ class MigrationOptimizer(object):
|
|||
Folds a model rename into its create
|
||||
"""
|
||||
if operation.name.lower() == other.old_name.lower():
|
||||
return [migrations.CreateModel(
|
||||
return [
|
||||
migrations.CreateModel(
|
||||
other.new_name,
|
||||
fields=operation.fields,
|
||||
options=operation.options,
|
||||
bases=operation.bases,
|
||||
)]
|
||||
)
|
||||
]
|
||||
|
||||
def reduce_model_rename_self(self, operation, other):
|
||||
"""
|
||||
|
@ -197,16 +199,19 @@ class MigrationOptimizer(object):
|
|||
|
||||
def reduce_create_model_add_field(self, operation, other):
|
||||
if operation.name.lower() == other.model_name.lower():
|
||||
return [migrations.CreateModel(
|
||||
return [
|
||||
migrations.CreateModel(
|
||||
operation.name,
|
||||
fields=operation.fields + [(other.name, other.field)],
|
||||
options=operation.options,
|
||||
bases=operation.bases,
|
||||
)]
|
||||
)
|
||||
]
|
||||
|
||||
def reduce_create_model_alter_field(self, operation, other):
|
||||
if operation.name.lower() == other.model_name.lower():
|
||||
return [migrations.CreateModel(
|
||||
return [
|
||||
migrations.CreateModel(
|
||||
operation.name,
|
||||
fields=[
|
||||
(n, other.field if n == other.name else v)
|
||||
|
@@ -214,11 +219,13 @@ class MigrationOptimizer(object):
                 ],
                 options=operation.options,
                 bases=operation.bases,
-            )]
+                )
+            ]
 
     def reduce_create_model_rename_field(self, operation, other):
         if operation.name.lower() == other.model_name.lower():
-            return [migrations.CreateModel(
+            return [
+                migrations.CreateModel(
                 operation.name,
                 fields=[
                     (other.new_name if n == other.old_name else n, v)

@@ -226,11 +233,13 @@ class MigrationOptimizer(object):
                 ],
                 options=operation.options,
                 bases=operation.bases,
-            )]
+                )
+            ]
 
     def reduce_create_model_remove_field(self, operation, other):
         if operation.name.lower() == other.model_name.lower():
-            return [migrations.CreateModel(
+            return [
+                migrations.CreateModel(
                 operation.name,
                 fields=[
                     (n, v)

@@ -239,15 +248,18 @@ class MigrationOptimizer(object):
                 ],
                 options=operation.options,
                 bases=operation.bases,
-            )]
+                )
+            ]
 
     def reduce_add_field_alter_field(self, operation, other):
         if operation.model_name.lower() == other.model_name.lower() and operation.name.lower() == other.name.lower():
-            return [migrations.AddField(
+            return [
+                migrations.AddField(
                 model_name=operation.model_name,
                 name=operation.name,
                 field=other.field,
-            )]
+                )
+            ]
 
     def reduce_add_field_delete_field(self, operation, other):
         if operation.model_name.lower() == other.model_name.lower() and operation.name.lower() == other.name.lower():

@@ -259,11 +271,13 @@ class MigrationOptimizer(object):
 
     def reduce_add_field_rename_field(self, operation, other):
         if operation.model_name.lower() == other.model_name.lower() and operation.name.lower() == other.old_name.lower():
-            return [migrations.AddField(
+            return [
+                migrations.AddField(
                 model_name=operation.model_name,
                 name=other.new_name,
                 field=operation.field,
-            )]
+                )
+            ]
 
     def reduce_alter_field_rename_field(self, operation, other):
         if operation.model_name.lower() == other.model_name.lower() and operation.name.lower() == other.old_name.lower():
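In the same spirit, an invented example for reduce_add_field_rename_field, assuming RenameField takes the model_name/old_name/new_name attributes referenced above: adding a field and then renaming it reduces to a single AddField under the final name:

from django.db import migrations, models

add = migrations.AddField(
    model_name="Author",
    name="nick",
    field=models.CharField(max_length=30),
)
rename = migrations.RenameField(
    model_name="Author",
    old_name="nick",
    new_name="nickname",
)
# The optimizer replaces [add, rename] with a single operation equivalent to:
reduced = migrations.AddField(
    model_name=add.model_name,
    name=rename.new_name,
    field=add.field,
)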
@@ -0,0 +1,119 @@
+import importlib
+import os
+import sys
+
+from django.core.apps import app_cache
+from django.utils import datetime_safe
+from django.utils.six.moves import input
+
+
+class MigrationQuestioner(object):
+    """
+    Gives the autodetector responses to questions it might have.
+    This base class has a built-in noninteractive mode, but the
+    interactive subclass is what the command-line arguments will use.
+    """
+
+    def __init__(self, defaults=None, specified_apps=None):
+        self.defaults = defaults or {}
+        self.specified_apps = specified_apps or set()
+
+    def ask_initial(self, app_label):
+        "Should we create an initial migration for the app?"
+        # If it was specified on the command line, definitely true
+        if app_label in self.specified_apps:
+            return True
+        # Otherwise, we look to see if it has a migrations module
+        # without any Python files in it, apart from __init__.py.
+        # Apps from the new app template will have these; the python
+        # file check will ensure we skip South ones.
+        try:
+            app_config = app_cache.get_app_config(app_label)
+        except LookupError:  # It's a fake app.
+            return self.defaults.get("ask_initial", False)
+        migrations_import_path = "%s.migrations" % app_config.name
+        try:
+            migrations_module = importlib.import_module(migrations_import_path)
+        except ImportError:
+            return self.defaults.get("ask_initial", False)
+        else:
+            filenames = os.listdir(os.path.dirname(migrations_module.__file__))
+            return not any(x.endswith(".py") for x in filenames if x != "__init__.py")
+
+    def ask_not_null_addition(self, field_name, model_name):
+        "Adding a NOT NULL field to a model"
+        # None means quit
+        return None
+
+    def ask_rename(self, model_name, old_name, new_name, field_instance):
+        "Was this field really renamed?"
+        return self.defaults.get("ask_rename", False)
+
+    def ask_merge(self, app_label):
+        "Do you really want to merge these migrations?"
+        return self.defaults.get("ask_merge", False)
+
+
+class InteractiveMigrationQuestioner(MigrationQuestioner):
+
+    def _boolean_input(self, question, default=None):
+        result = input("%s " % question)
+        if not result and default is not None:
+            return default
+        while len(result) < 1 or result[0].lower() not in "yn":
+            result = input("Please answer yes or no: ")
+        return result[0].lower() == "y"
+
+    def _choice_input(self, question, choices):
+        print(question)
+        for i, choice in enumerate(choices):
+            print(" %s) %s" % (i + 1, choice))
+        result = input("Select an option: ")
+        while True:
+            try:
+                value = int(result)
+                if 0 < value <= len(choices):
+                    return value
+            except ValueError:
+                pass
+            result = input("Please select a valid option: ")
+
+    def ask_not_null_addition(self, field_name, model_name):
+        "Adding a NOT NULL field to a model"
+        choice = self._choice_input(
+            "You are trying to add a non-nullable field '%s' to %s without a default;\n" % (field_name, model_name) +
+            "we can't do that (the database needs something to populate existing rows).\n" +
+            "Please select a fix:",
+            [
+                "Provide a one-off default now (will be set on all existing rows)",
+                "Quit, and let me add a default in models.py",
+            ]
+        )
+        if choice == 2:
+            sys.exit(3)
+        else:
+            print("Please enter the default value now, as valid Python")
+            print("The datetime module is available, so you can do e.g. datetime.date.today()")
+            while True:
+                code = input(">>> ")
+                if not code:
+                    print("Please enter some code, or 'exit' (with no quotes) to exit.")
+                elif code == "exit":
+                    sys.exit(1)
+                else:
+                    try:
+                        return eval(code, {}, {"datetime": datetime_safe})
+                    except (SyntaxError, NameError) as e:
+                        print("Invalid input: %s" % e)
+
+    def ask_rename(self, model_name, old_name, new_name, field_instance):
+        "Was this field really renamed?"
+        return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)? [y/N]" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False)
+
+    def ask_merge(self, app_label):
+        return self._boolean_input(
+            "\nMerging will only work if the operations printed above do not conflict\n" +
+            "with each other (working on different fields or models)\n" +
+            "Do you want to merge these migration branches? [y/N]",
+            False,
+        )
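A brief usage sketch (not part of the commit) of the non-interactive base class defined above; the app label, model and field names are invented:

# Answers come from the defaults dict and the set of app labels that were
# named on the command line.
questioner = MigrationQuestioner(
    defaults={"ask_rename": True},
    specified_apps={"blog"},
)
questioner.ask_initial("blog")                               # True: app explicitly specified
questioner.ask_rename("author", "name", "full_name", None)   # True: from defaults
questioner.ask_merge("blog")                                 # False: no default provided
questioner.ask_not_null_addition("rating", "book")           # None: i.e. "quit"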
Some files were not shown because too many files have changed in this diff.