Merge branch 'master' into schema-alteration

Conflicts:
    django/db/backends/mysql/introspection.py
    django/db/backends/oracle/creation.py
    django/db/backends/postgresql_psycopg2/creation.py
    django/db/models/base.py
    django/db/models/loading.py
commit 03ec3219a0

AUTHORS
@@ -204,6 +204,7 @@ answer newbie questions, and generally made Django that much better:
 Clint Ecker
 Nick Efford <nick@efford.org>
 Marc Egli <frog32@me.com>
+Matt Deacalion Stevens <matt@dirtymonkey.co.uk>
 eibaan@gmail.com
 David Eklund
 Julia Elman

@@ -530,6 +531,7 @@ answer newbie questions, and generally made Django that much better:
 Leo Shklovskii
 jason.sidabras@gmail.com
 Mikołaj Siedlarek <mikolaj.siedlarek@gmail.com>
+Karol Sikora <elektrrrus@gmail.com>
 Brenton Simpson <http://theillustratedlife.com>
 Jozko Skrablin <jozko.skrablin@gmail.com>
 Ben Slavin <benjamin.slavin@gmail.com>

@@ -542,6 +544,7 @@ answer newbie questions, and generally made Django that much better:
 George Song <george@damacy.net>
 sopel
 Leo Soto <leo.soto@gmail.com>
+Thomas Sorrel
 Wiliam Alves de Souza <wiliamsouza83@gmail.com>
 Don Spaulding <donspauldingii@gmail.com>
 Calvin Spealman <ironfroggy@gmail.com>

@@ -184,6 +184,7 @@ EMAIL_PORT = 25
 EMAIL_HOST_USER = ''
 EMAIL_HOST_PASSWORD = ''
 EMAIL_USE_TLS = False
+EMAIL_USE_SSL = False
 
 # List of strings representing installed apps.
 INSTALLED_APPS = ()

@@ -279,7 +279,7 @@ LANG_INFO = {
 'bidi': False,
 'code': 'lt',
 'name': 'Lithuanian',
-'name_local': 'Lithuanian',
+'name_local': 'Lietuviškai',
 },
 'lv': {
 'bidi': False,

@@ -399,7 +399,7 @@ LANG_INFO = {
 'bidi': False,
 'code': 'sq',
 'name': 'Albanian',
-'name_local': 'Albanian',
+'name_local': 'shqip',
 },
 'sr': {
 'bidi': False,

@@ -441,7 +441,7 @@ LANG_INFO = {
 'bidi': False,
 'code': 'th',
 'name': 'Thai',
-'name_local': 'Thai',
+'name_local': 'ภาษาไทย',
 },
 'tr': {
 'bidi': False,

@@ -477,7 +477,7 @@ LANG_INFO = {
 'bidi': False,
 'code': 'vi',
 'name': 'Vietnamese',
-'name_local': 'Vietnamese',
+'name_local': 'Tiếng Việt',
 },
 'zh-cn': {
 'bidi': False,

@@ -4,7 +4,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: Django\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-05-25 14:27+0200\n"
+"POT-Creation-Date: 2013-06-11 18:44+0200\n"
 "PO-Revision-Date: 2010-05-13 15:35+0200\n"
 "Last-Translator: Django team\n"
 "Language-Team: English <en@li.org>\n"

@@ -699,11 +699,22 @@ msgstr ""
 msgid "Enter a list of values."
 msgstr ""
 
-#: forms/forms.py:158
+#. Translators: This is the default suffix added to form field labels
+#: forms/forms.py:90
+msgid ":"
+msgstr ""
+
+#: forms/forms.py:159
 #, python-format
 msgid "(Hidden field %(name)s) %(error)s"
 msgstr ""
 
+#. Translators: If found as last label character, these punctuation
+#. characters will prevent the default label_suffix to be appended to the label
+#: forms/forms.py:525
+msgid ":?.!"
+msgstr ""
+
 #: forms/formsets.py:310
 #, python-format
 msgid "Please submit %d or fewer forms."

@@ -266,10 +266,12 @@ class InlineAdminForm(AdminForm):
 yield InlineFieldset(self.formset, self.form, name,
 self.readonly_fields, model_admin=self.model_admin, **options)
 
-def has_auto_field(self):
-if self.form._meta.model._meta.has_auto_field:
+def needs_explicit_pk_field(self):
+# Auto fields are editable (oddly), so need to check for auto or non-editable pk
+if self.form._meta.model._meta.has_auto_field or not self.form._meta.model._meta.pk.editable:
 return True
-# Also search any parents for an auto field.
+# Also search any parents for an auto field. (The pk info is propagated to child
+# models so that does not need to be checked in parents.)
 for parent in self.form._meta.model._meta.get_parent_list():
 if parent._meta.has_auto_field:
 return True

@@ -771,12 +771,10 @@ class ModelAdmin(BaseModelAdmin):
 Returns the preserved filters querystring.
 """
-
-# FIXME: We can remove that getattr as soon as #20619 is fixed.
-match = getattr(request, 'resolver_match', None)
+match = request.resolver_match
 
 if self.preserve_filters and match:
 opts = self.model._meta
-current_url = '%s:%s' % (match.namespace, match.url_name)
+current_url = '%s:%s' % (match.app_name, match.url_name)
 changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
 if current_url == changelist_url:
 preserved_filters = request.GET.urlencode()

@@ -439,6 +439,7 @@ class AdminSite(object):
 context = {
 'title': _('%s administration') % capfirst(app_label),
 'app_list': [app_dict],
+'app_label': app_label,
 }
 context.update(extra_context or {})
 

@@ -14,6 +14,8 @@ var DateTimeShortcuts = {
 clockDivName: 'clockbox', // name of clock <div> that gets toggled
 clockLinkName: 'clocklink', // name of the link that is used to toggle
 shortCutsClass: 'datetimeshortcuts', // class of the clock and cal shortcuts
+timezoneWarningClass: 'timezonewarning', // class of the warning for timezone mismatch
+timezoneOffset: 0,
 admin_media_prefix: '',
 init: function() {
 // Get admin_media_prefix by grabbing it off the window object. It's

@@ -26,17 +28,77 @@ var DateTimeShortcuts = {
 DateTimeShortcuts.admin_media_prefix = '/missing-admin-media-prefix/';
 }
 
+if (window.__admin_utc_offset__ != undefined) {
+var serverOffset = window.__admin_utc_offset__;
+var localOffset = new Date().getTimezoneOffset() * -60;
+DateTimeShortcuts.timezoneOffset = localOffset - serverOffset;
+}
+
 var inputs = document.getElementsByTagName('input');
 for (i=0; i<inputs.length; i++) {
 var inp = inputs[i];
 if (inp.getAttribute('type') == 'text' && inp.className.match(/vTimeField/)) {
 DateTimeShortcuts.addClock(inp);
+DateTimeShortcuts.addTimezoneWarning(inp);
 }
 else if (inp.getAttribute('type') == 'text' && inp.className.match(/vDateField/)) {
 DateTimeShortcuts.addCalendar(inp);
+DateTimeShortcuts.addTimezoneWarning(inp);
 }
 }
 },
+// Return the current time while accounting for the server timezone.
+now: function() {
+if (window.__admin_utc_offset__ != undefined) {
+var serverOffset = window.__admin_utc_offset__;
+var localNow = new Date();
+var localOffset = localNow.getTimezoneOffset() * -60;
+localNow.setTime(localNow.getTime() + 1000 * (serverOffset - localOffset));
+return localNow;
+} else {
+return new Date();
+}
+},
+// Add a warning when the time zone in the browser and backend do not match.
+addTimezoneWarning: function(inp) {
+var $ = django.jQuery;
+var warningClass = DateTimeShortcuts.timezoneWarningClass;
+var timezoneOffset = DateTimeShortcuts.timezoneOffset / 3600;
+
+// Only warn if there is a time zone mismatch.
+if (!timezoneOffset)
+return;
+
+// Check if warning is already there.
+if ($(inp).siblings('.' + warningClass).length)
+return;
+
+var message;
+if (timezoneOffset > 0) {
+message = ngettext(
+'Note: You are %s hour ahead of server time.',
+'Note: You are %s hours ahead of server time.',
+timezoneOffset
+);
+}
+else {
+timezoneOffset *= -1
+message = ngettext(
+'Note: You are %s hour behind server time.',
+'Note: You are %s hours behind server time.',
+timezoneOffset
+);
+}
+message = interpolate(message, [timezoneOffset]);
+
+var $warning = $('<span>');
+$warning.attr('class', warningClass);
+$warning.text(message);
+
+$(inp).parent()
+.append($('<br>'))
+.append($warning)
+},
 // Add clock widget to a given field
 addClock: function(inp) {
 var num = DateTimeShortcuts.clockInputs.length;

@@ -48,7 +110,7 @@ var DateTimeShortcuts = {
 shortcuts_span.className = DateTimeShortcuts.shortCutsClass;
 inp.parentNode.insertBefore(shortcuts_span, inp.nextSibling);
 var now_link = document.createElement('a');
-now_link.setAttribute('href', "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", new Date().strftime('" + get_format('TIME_INPUT_FORMATS')[0] + "'));");
+now_link.setAttribute('href', "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", -1);");
 now_link.appendChild(document.createTextNode(gettext('Now')));
 var clock_link = document.createElement('a');
 clock_link.setAttribute('href', 'javascript:DateTimeShortcuts.openClock(' + num + ');');

@@ -84,11 +146,10 @@ var DateTimeShortcuts = {
 quickElement('h2', clock_box, gettext('Choose a time'));
 var time_list = quickElement('ul', clock_box, '');
 time_list.className = 'timelist';
-var time_format = get_format('TIME_INPUT_FORMATS')[0];
-quickElement("a", quickElement("li", time_list, ""), gettext("Now"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", new Date().strftime('" + time_format + "'));");
-quickElement("a", quickElement("li", time_list, ""), gettext("Midnight"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", new Date(1970,1,1,0,0,0,0).strftime('" + time_format + "'));");
-quickElement("a", quickElement("li", time_list, ""), gettext("6 a.m."), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", new Date(1970,1,1,6,0,0,0).strftime('" + time_format + "'));");
-quickElement("a", quickElement("li", time_list, ""), gettext("Noon"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", new Date(1970,1,1,12,0,0,0).strftime('" + time_format + "'));");
+quickElement("a", quickElement("li", time_list, ""), gettext("Now"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", -1);");
+quickElement("a", quickElement("li", time_list, ""), gettext("Midnight"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 0);");
+quickElement("a", quickElement("li", time_list, ""), gettext("6 a.m."), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 6);");
+quickElement("a", quickElement("li", time_list, ""), gettext("Noon"), "href", "javascript:DateTimeShortcuts.handleClockQuicklink(" + num + ", 12);");
 
 var cancel_p = quickElement('p', clock_box, '');
 cancel_p.className = 'calendar-cancel';

@@ -128,7 +189,14 @@ var DateTimeShortcuts = {
 removeEvent(document, 'click', DateTimeShortcuts.dismissClockFunc[num]);
 },
 handleClockQuicklink: function(num, val) {
-DateTimeShortcuts.clockInputs[num].value = val;
+var d;
+if (val == -1) {
+d = DateTimeShortcuts.now();
+}
+else {
+d = new Date(1970, 1, 1, val, 0, 0, 0)
+}
+DateTimeShortcuts.clockInputs[num].value = d.strftime(get_format('TIME_INPUT_FORMATS')[0]);
 DateTimeShortcuts.clockInputs[num].focus();
 DateTimeShortcuts.dismissClock(num);
 },

@@ -258,7 +326,7 @@ var DateTimeShortcuts = {
 DateTimeShortcuts.calendars[num].drawNextMonth();
 },
 handleCalendarCallback: function(num) {
-format = get_format('DATE_INPUT_FORMATS')[0];
+var format = get_format('DATE_INPUT_FORMATS')[0];
 // the format needs to be escaped a little
 format = format.replace('\\', '\\\\');
 format = format.replace('\r', '\\r');

@@ -276,7 +344,7 @@ var DateTimeShortcuts = {
 ").style.display='none';}"].join('');
 },
 handleCalendarQuickLink: function(num, offset) {
-var d = new Date();
+var d = DateTimeShortcuts.now();
 d.setDate(d.getDate() + offset)
 DateTimeShortcuts.calendarInputs[num].value = d.strftime(get_format('DATE_INPUT_FORMATS')[0]);
 DateTimeShortcuts.calendarInputs[num].focus();

@@ -55,7 +55,7 @@ function dismissRelatedLookupPopup(win, chosenId) {
 function showAddAnotherPopup(triggeringLink) {
 var name = triggeringLink.id.replace(/^add_/, '');
 name = id_to_windowname(name);
-href = triggeringLink.href
+var href = triggeringLink.href;
 if (href.indexOf('?') == -1) {
 href += '?_popup=1';
 } else {

@@ -73,10 +73,11 @@ function dismissAddAnotherPopup(win, newId, newRepr) {
 newRepr = html_unescape(newRepr);
 var name = windowname_to_id(win.name);
 var elem = document.getElementById(name);
+var o;
 if (elem) {
 var elemName = elem.nodeName.toUpperCase();
 if (elemName == 'SELECT') {
-var o = new Option(newRepr, newId);
+o = new Option(newRepr, newId);
 elem.options[elem.options.length] = o;
 o.selected = true;
 } else if (elemName == 'INPUT') {

@@ -88,8 +89,7 @@ function dismissAddAnotherPopup(win, newId, newRepr) {
 }
 } else {
 var toId = name + "_to";
-elem = document.getElementById(toId);
-var o = new Option(newRepr, newId);
+o = new Option(newRepr, newId);
 SelectBox.add_to_cache(toId, o);
 SelectBox.redisplay(toId);
 }

@@ -1,6 +1,8 @@
 {% extends "admin/index.html" %}
 {% load i18n %}
 
+{% block bodyclass %}app-{{ app_label }} {{ block.super }}{% endblock %}
+
 {% if not is_popup %}
 {% block breadcrumbs %}
 <div class="breadcrumbs">

@@ -7,6 +7,7 @@
 <!--[if lte IE 7]><link rel="stylesheet" type="text/css" href="{% block stylesheet_ie %}{% static "admin/css/ie.css" %}{% endblock %}" /><![endif]-->
 {% if LANGUAGE_BIDI %}<link rel="stylesheet" type="text/css" href="{% block stylesheet_rtl %}{% static "admin/css/rtl.css" %}{% endblock %}" />{% endif %}
 <script type="text/javascript">window.__admin_media_prefix__ = "{% filter escapejs %}{% static "admin/" %}{% endfilter %}";</script>
+<script type="text/javascript">window.__admin_utc_offset__ = "{% filter escapejs %}{% now "Z" %}{% endfilter %}";</script>
 {% block extrahead %}{% endblock %}
 {% block blockbots %}<meta name="robots" content="NONE,NOARCHIVE" />{% endblock %}
 </head>

@@ -10,7 +10,7 @@
 
 {% block coltype %}colM{% endblock %}
 
-{% block bodyclass %}{{ opts.app_label }}-{{ opts.object_name.lower }} change-form{% endblock %}
+{% block bodyclass %}app-{{ opts.app_label }} model-{{ opts.object_name.lower }} change-form{% endblock %}
 
 {% if not is_popup %}
 {% block breadcrumbs %}

@@ -32,7 +32,7 @@
 {% endif %}{% endif %}
 {% endblock %}
 
-{% block bodyclass %}change-list{% endblock %}
+{% block bodyclass %}app-{{ opts.app_label }} model-{{ opts.object_name.lower }} change-list{% endblock %}
 
 {% if not is_popup %}
 {% block breadcrumbs %}

@@ -1,6 +1,8 @@
 {% extends "admin/base_site.html" %}
 {% load i18n admin_urls %}
 
+{% block bodyclass %}app-{{ opts.app_label }} model-{{ opts.object_name.lower }} delete-confirmation{% endblock %}
+
 {% block breadcrumbs %}
 <div class="breadcrumbs">
 <a href="{% url 'admin:index' %}">{% trans 'Home' %}</a>

@@ -1,6 +1,8 @@
 {% extends "admin/base_site.html" %}
 {% load i18n l10n admin_urls %}
 
+{% block bodyclass %}app-{{ opts.app_label }} model-{{ opts.object_name.lower }} delete-confirmation delete-selected-confirmation{% endblock %}
+
 {% block breadcrumbs %}
 <div class="breadcrumbs">
 <a href="{% url 'admin:index' %}">{% trans 'Home' %}</a>

@@ -13,7 +13,7 @@
 {% for fieldset in inline_admin_form %}
 {% include "admin/includes/fieldset.html" %}
 {% endfor %}
-{% if inline_admin_form.has_auto_field %}{{ inline_admin_form.pk_field.field }}{% endif %}
+{% if inline_admin_form.needs_explicit_pk_field %}{{ inline_admin_form.pk_field.field }}{% endif %}
 {{ inline_admin_form.fk_field.field }}
 </div>{% endfor %}
 </div>

@@ -29,7 +29,7 @@
 {% if inline_admin_form.original %} {{ inline_admin_form.original }}{% endif %}
 {% if inline_admin_form.show_url %}<a href="{% url 'admin:view_on_site' inline_admin_form.original_content_type_id inline_admin_form.original.pk %}">{% trans "View on site" %}</a>{% endif %}
 </p>{% endif %}
-{% if inline_admin_form.has_auto_field %}{{ inline_admin_form.pk_field.field }}{% endif %}
+{% if inline_admin_form.needs_explicit_pk_field %}{{ inline_admin_form.pk_field.field }}{% endif %}
 {{ inline_admin_form.fk_field.field }}
 {% spaceless %}
 {% for fieldset in inline_admin_form %}

@@ -38,7 +38,7 @@ def add_preserved_filters(context, url, popup=False):
 except Resolver404:
 pass
 else:
-current_url = '%s:%s' % (match.namespace, match.url_name)
+current_url = '%s:%s' % (match.app_name, match.url_name)
 changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
 if changelist_url == current_url and '_changelist_filters' in preserved_filters:
 preserved_filters = dict(parse_qsl(preserved_filters['_changelist_filters']))

@@ -70,10 +70,7 @@ class UserAdmin(admin.ModelAdmin):
 """
 defaults = {}
 if obj is None:
-defaults.update({
-'form': self.add_form,
-'fields': admin.util.flatten_fieldsets(self.add_fieldsets),
-})
+defaults['form'] = self.add_form
 defaults.update(kwargs)
 return super(UserAdmin, self).get_form(request, obj, **defaults)
 

@@ -5,7 +5,7 @@ from django.contrib.auth.models import Permission
 
 class ModelBackend(object):
 """
-Authenticates against django.contrib.auth.models.User.
+Authenticates against settings.AUTH_USER_MODEL.
 """
 
 def authenticate(self, username=None, password=None, **kwargs):

@@ -22,6 +22,7 @@ UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUS
 HASHERS = None # lazily loaded from PASSWORD_HASHERS
 PREFERRED_HASHER = None # defaults to first item in PASSWORD_HASHERS
 
+
 @receiver(setting_changed)
 def reset_hashers(**kwargs):
 if kwargs['setting'] == 'PASSWORD_HASHERS':

@@ -34,7 +35,7 @@ def is_password_usable(encoded):
 if encoded is None or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
 return False
 try:
-hasher = identify_hasher(encoded)
+identify_hasher(encoded)
 except ValueError:
 return False
 return True

@@ -48,7 +49,7 @@ def check_password(password, encoded, setter=None, preferred='default'):
 If setter is specified, it'll be called when you need to
 regenerate the password.
 """
-if not is_password_usable(encoded):
+if password is None or not is_password_usable(encoded):
 return False
 
 preferred = get_hasher(preferred)

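Note (not part of the diff above): a minimal sketch of the behaviour the check_password()
guard is aiming for, using the public django.contrib.auth.hashers API; the variable names
are illustrative only.

    from django.contrib.auth.hashers import make_password, check_password

    encoded = make_password('lètmein')           # a valid encoded password
    assert check_password('lètmein', encoded)    # the correct plain password still verifies
    # With the added `password is None` guard, passing None short-circuits to False
    # instead of attempting a hashing round-trip against the stored value.
    assert not check_password(None, encoded)
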
@@ -187,6 +187,13 @@ class TestUtilsHashPass(unittest.TestCase):
 # This might fail one day due to a hash collision.
 self.assertNotEqual(encoded, make_password(None), "Random password collision?")
 
+def test_unspecified_password(self):
+"""
+Makes sure specifying no plain password with a valid encoded password
+returns `False`.
+"""
+self.assertFalse(check_password(None, make_password('lètmein')))
+
 def test_bad_algorithm(self):
 with self.assertRaises(ValueError):
 make_password('lètmein', hasher='lolcat')

@@ -66,7 +66,7 @@ class GenericForeignKey(six.with_metaclass(RenameGenericForeignKeyMethods)):
 if obj is not None:
 return ContentType.objects.db_manager(obj._state.db).get_for_model(
 obj, for_concrete_model=self.for_concrete_model)
-elif id:
+elif id is not None:
 return ContentType.objects.db_manager(using).get_for_id(id)
 else:
 # This should never happen. I love comments like this, don't you?

@@ -130,7 +130,7 @@ class GenericForeignKey(six.with_metaclass(RenameGenericForeignKeyMethods)):
 # performance when dealing with GFKs in loops and such.
 f = self.model._meta.get_field(self.ct_field)
 ct_id = getattr(instance, f.get_attname(), None)
-if ct_id:
+if ct_id is not None:
 ct = self.get_content_type(id=ct_id, using=instance._state.db)
 try:
 rel_obj = ct.get_object_for_this_type(pk=getattr(instance, self.fk_field))

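Note (not part of the diff above): the two truthiness tests replaced here are tightened to
explicit `is not None` comparisons so that falsy but legitimate key values (for example an
id of 0) are still looked up. A minimal, Django-independent illustration:

    ct_id = 0  # hypothetical content-type / object id that is falsy but valid
    if ct_id:               # old-style test: silently skips the lookup for 0
        print("looked up via truthiness test")
    if ct_id is not None:   # new-style test: only skips genuinely missing values
        print("looked up via explicit None test")
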
@@ -78,7 +78,7 @@ class OracleOperations(DatabaseOperations, BaseSpatialOperations):
 
 name = 'oracle'
 oracle = True
-valid_aggregates = dict([(a, None) for a in ('Union', 'Extent')])
+valid_aggregates = {'Union', 'Extent'}
 
 Adapter = OracleSpatialAdapter
 Adaptor = Adapter # Backwards-compatibility alias.

@@ -56,6 +56,7 @@ class PostGISSphereDistance(PostGISDistance):
 class PostGISRelate(PostGISFunctionParam):
 "For PostGIS Relate(<geom>, <pattern>) calls."
 pattern_regex = re.compile(r'^[012TF\*]{9}$')
+
 def __init__(self, prefix, pattern):
 if not self.pattern_regex.match(pattern):
 raise ValueError('Invalid intersection matrix pattern "%s".' % pattern)

@@ -68,8 +69,7 @@ class PostGISOperations(DatabaseOperations, BaseSpatialOperations):
 postgis = True
 geom_func_prefix = 'ST_'
 version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)')
-valid_aggregates = dict([(k, None) for k in
-('Collect', 'Extent', 'Extent3D', 'MakeLine', 'Union')])
+valid_aggregates = {'Collect', 'Extent', 'Extent3D', 'MakeLine', 'Union'}
 
 Adapter = PostGISAdapter
 Adaptor = Adapter # Backwards-compatibility alias.

@@ -56,7 +56,7 @@ class SpatiaLiteOperations(DatabaseOperations, BaseSpatialOperations):
 name = 'spatialite'
 spatialite = True
 version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)')
-valid_aggregates = dict([(k, None) for k in ('Extent', 'Union')])
+valid_aggregates = {'Extent', 'Union'}
 
 Adapter = SpatiaLiteAdapter
 Adaptor = Adapter # Backwards-compatibility alias.

@@ -169,6 +169,7 @@ class SpatiaLiteOperations(DatabaseOperations, BaseSpatialOperations):
 Checks if the given aggregate name is supported (that is, if it's
 in `self.valid_aggregates`).
 """
+super(SpatiaLiteOperations, self).check_aggregate_support(aggregate)
 agg_name = aggregate.__class__.__name__
 return agg_name in self.valid_aggregates
 

@@ -140,7 +140,7 @@ class GeoQuerySet(QuerySet):
 Returns a GeoJSON representation of the geomtry field in a `geojson`
 attribute on each element of the GeoQuerySet.
 
-The `crs` and `bbox` keywords may be set to True if the users wants
+The `crs` and `bbox` keywords may be set to True if the user wants
 the coordinate reference system and the bounding box to be included
 in the GeoJSON representation of the geometry.
 """

@@ -198,14 +198,6 @@ class OGRGeometry(GDALBase):
 
 def _get_coord_dim(self):
 "Returns the coordinate dimension of the Geometry."
-if isinstance(self, GeometryCollection) and GDAL_VERSION < (1, 5, 2):
-# On GDAL versions prior to 1.5.2, there exists a bug in which
-# the coordinate dimension of geometry collections is always 2:
-# http://trac.osgeo.org/gdal/ticket/2334
-# Here we workaround by returning the coordinate dimension of the
-# first geometry in the collection instead.
-if len(self):
-return capi.get_coord_dim(capi.get_geom_ref(self.ptr, 0))
 return capi.get_coord_dim(self.ptr)
 
 def _set_coord_dim(self, dim):

@@ -53,7 +53,7 @@ class Command(LabelCommand):
 make_option('--multi-geom', action='store_true', dest='multi_geom', default=False,
 help='Treat the geometry in the data source as a geometry collection.'),
 make_option('--name-field', dest='name_field',
-help='Specifies a field name to return for the `__unicode__` function.'),
+help='Specifies a field name to return for the `__unicode__`/`__str__` function.'),
 make_option('--no-imports', action='store_false', dest='imports', default=True,
 help='Do not include `from django.contrib.gis.db import models` '
 'statement.'),

@@ -134,7 +134,8 @@ class GoogleMap(object):
 @property
 def scripts(self):
 "Returns all <script></script> tags required with Google Maps JavaScript."
-return format_html('%s\n <script type="text/javascript">\n//<![CDATA[\n%s//]]>\n </script>', self.api_script, mark_safe(self.js))
+return format_html('{0}\n <script type="text/javascript">\n//<![CDATA[\n{1}//]]>\n </script>',
+self.api_script, mark_safe(self.js))
 
 @property
 def style(self):

@@ -1,30 +0,0 @@
-def geo_apps():
-"""
-Returns a list of GeoDjango test applications that reside in
-`django.contrib.gis.tests` that can be used with the current
-database and the spatial libraries that are installed.
-"""
-from django.db import connection
-from django.contrib.gis.geos import GEOS_PREPARE
-from django.contrib.gis.gdal import HAS_GDAL
-
-apps = ['geoapp', 'relatedapp']
-
-# No distance queries on MySQL.
-if not connection.ops.mysql:
-apps.append('distapp')
-
-# Test geography support with PostGIS 1.5+.
-if connection.ops.postgis and connection.ops.geography:
-apps.append('geogapp')
-
-# The following GeoDjango test apps depend on GDAL support.
-if HAS_GDAL:
-# Geographic admin, LayerMapping, and ogrinspect test apps
-# all require GDAL.
-apps.extend(['geoadmin', 'layermap', 'inspectapp'])
-
-# 3D apps use LayerMapping, which uses GDAL and require GEOS 3.1+.
-if connection.ops.postgis and GEOS_PREPARE:
-apps.append('geo3d')
-return [('django.contrib.gis.tests', app) for app in apps]

@@ -2,9 +2,10 @@ from __future__ import absolute_import
 
 from unittest import skipUnless
 
-from django.test import TestCase
 from django.contrib.gis.geos import HAS_GEOS
 from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
+from django.test import TestCase
+from django.test.utils import override_settings
 
 if HAS_GEOS and HAS_SPATIAL_DB:
 from django.contrib.gis import admin

@@ -12,6 +13,8 @@ if HAS_GEOS and HAS_SPATIAL_DB:
 
 from .models import City
 
+GOOGLE_MAPS_API_KEY = 'XXXX'
+
 
 @skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
 class GeoAdminTest(TestCase):

@@ -39,7 +42,9 @@ class GeoAdminTest(TestCase):
 result)
 
 def test_olwidget_has_changed(self):
-""" Check that changes are accurately noticed by OpenLayersWidget. """
+"""
+Check that changes are accurately noticed by OpenLayersWidget.
+"""
 geoadmin = admin.site._registry[City]
 form = geoadmin.get_changelist_form(None)()
 has_changed = form.fields['point']._has_changed

@@ -55,3 +60,15 @@ class GeoAdminTest(TestCase):
 self.assertFalse(has_changed(initial, data_same))
 self.assertFalse(has_changed(initial, data_almost_same))
 self.assertTrue(has_changed(initial, data_changed))
+
+@override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)
+def test_google_map_scripts(self):
+"""
+Testing GoogleMap.scripts() output. See #20773.
+"""
+from django.contrib.gis.maps.google.gmap import GoogleMap
+
+google_map = GoogleMap()
+scripts = google_map.scripts
+self.assertIn(GOOGLE_MAPS_API_KEY, scripts)
+self.assertIn("new GMap2", scripts)

@@ -3,6 +3,7 @@ from __future__ import absolute_import
 from io import BytesIO
 from unittest import skipUnless
 from xml.dom import minidom
+import os
 import zipfile
 
 from django.conf import settings

@@ -10,6 +11,7 @@ from django.contrib.gis.geos import HAS_GEOS
 from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
 from django.contrib.sites.models import Site
 from django.test import TestCase
+from django.utils._os import upath
 
 if HAS_GEOS:
 from .models import City, Country

@@ -37,6 +39,10 @@ class GeoSitemapTest(TestCase):
 def test_geositemap_index(self):
 "Tests geographic sitemap index."
 # Getting the geo index.
+from django.contrib import sitemaps
+template_dirs = settings.TEMPLATE_DIRS + (
+os.path.join(os.path.dirname(upath(sitemaps.__file__)), 'templates'),)
+with self.settings(TEMPLATE_DIRS=template_dirs):
 doc = minidom.parseString(self.client.get('/sitemap.xml').content)
 index = doc.firstChild
 self.assertEqual(index.getAttribute('xmlns'), 'http://www.sitemaps.org/schemas/sitemap/0.9')

@@ -8,7 +8,7 @@ from django.db import connection
 from django.contrib.gis import gdal
 from django.contrib.gis.geos import HAS_GEOS
 from django.contrib.gis.tests.utils import (
-no_mysql, no_oracle, no_spatialite,
+HAS_SPATIAL_DB, no_mysql, no_oracle, no_spatialite,
 mysql, oracle, postgis, spatialite)
 from django.test import TestCase
 from django.utils import six

@@ -28,7 +28,7 @@ def postgis_bug_version():
 return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
 
 
-@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
+@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
 class GeoModelTest(TestCase):
 
 def test_fixtures(self):

@@ -203,7 +203,7 @@ class GeoModelTest(TestCase):
 self.assertTrue(isinstance(cities2[0].point, Point))
 
 
-@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
+@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
 class GeoLookupTest(TestCase):
 
 @no_mysql

@@ -389,7 +389,7 @@ class GeoLookupTest(TestCase):
 self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
 
 
-@skipUnless(HAS_GEOS and postgis, "Geos and postgis are required.")
+@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
 class GeoQuerySetTest(TestCase):
 # Please keep the tests in GeoQuerySet method's alphabetic order
 

@@ -433,6 +433,13 @@ class GeoQuerySetTest(TestCase):
 self.assertEqual(c.mpoly.sym_difference(geom), c.sym_difference)
 self.assertEqual(c.mpoly.union(geom), c.union)
 
+@skipUnless(getattr(connection.ops, 'envelope', False), 'Database does not support envelope operation')
+def test_envelope(self):
+"Testing the `envelope` GeoQuerySet method."
+countries = Country.objects.all().envelope()
+for country in countries:
+self.assertIsInstance(country.envelope, Polygon)
+
 @no_mysql
 @no_spatialite # SpatiaLite does not have an Extent function
 def test_extent(self):

@@ -23,7 +23,7 @@ test_srs = ({'srid' : 4326,
 'auth_srid' : 32140,
 'srtext' : 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980"',
 'proj4_re' : r'\+proj=lcc \+lat_1=30.28333333333333 \+lat_2=28.38333333333333 \+lat_0=27.83333333333333 '
-r'\+lon_0=-99 \+x_0=600000 \+y_0=4000000 \+ellps=GRS80 '
+r'\+lon_0=-99 \+x_0=600000 \+y_0=4000000 (\+ellps=GRS80 )?'
 r'(\+datum=NAD83 |\+towgs84=0,0,0,0,0,0,0)?\+units=m \+no_defs ',
 'spheroid' : 'GRS 1980', 'name' : 'NAD83 / Texas South Central',
 'geographic' : False, 'projected' : True, 'spatialite' : False,

@@ -89,7 +89,7 @@ def ogrinspect(*args, **kwargs):
 `multi_geom` => Boolean (default: False) - specify as multigeometry.
 
 `name_field` => String - specifies a field name to return for the
-`__unicode__` function (which will be generated if specified).
+`__unicode__`/`__str__` function (which will be generated if specified).
 
 `imports` => Boolean (default: True) - set to False to omit the
 `from django.contrib.gis.db import models` code from the

@@ -221,4 +221,5 @@ def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=Non
 
 if name_field:
 yield ''
-yield ' def __str__(self): return self.%s' % name_field
+yield ' def __%s__(self): return self.%s' % (
+'str' if six.PY3 else 'unicode', name_field)

@@ -43,9 +43,9 @@ class Feed(object):
 raise Http404('Feed object does not exist.')
 feedgen = self.get_feed(obj, request)
 response = HttpResponse(content_type=feedgen.mime_type)
-if hasattr(self, 'item_pubdate'):
-# if item_pubdate is defined for the feed, set header so as
-# ConditionalGetMiddleware is able to send 304 NOT MODIFIED
+if hasattr(self, 'item_pubdate') or hasattr(self, 'item_updateddate'):
+# if item_pubdate or item_updateddate is defined for the feed, set
+# header so as ConditionalGetMiddleware is able to send 304 NOT MODIFIED
 response['Last-Modified'] = http_date(
 timegm(feedgen.latest_post_date().utctimetuple()))
 feedgen.write(response, 'utf-8')

@@ -191,6 +191,11 @@ class Feed(object):
 ltz = tzinfo.LocalTimezone(pubdate)
 pubdate = pubdate.replace(tzinfo=ltz)
 
+updateddate = self.__get_dynamic_attr('item_updateddate', item)
+if updateddate and is_naive(updateddate):
+ltz = tzinfo.LocalTimezone(updateddate)
+updateddate = updateddate.replace(tzinfo=ltz)
+
 feed.add_item(
 title = title,
 link = link,

@@ -200,6 +205,7 @@ class Feed(object):
 'item_guid_is_permalink', item),
 enclosure = enc,
 pubdate = pubdate,
+updateddate = updateddate,
 author_name = author_name,
 author_email = author_email,
 author_link = author_link,

@ -9,16 +9,17 @@ except ImportError:
|
||||||
from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
|
from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
|
||||||
from django.utils.synch import RWLock
|
from django.utils.synch import RWLock
|
||||||
|
|
||||||
|
|
||||||
# Global in-memory store of cache data. Keyed by name, to provide
|
# Global in-memory store of cache data. Keyed by name, to provide
|
||||||
# multiple named local memory caches.
|
# multiple named local memory caches.
|
||||||
_caches = {}
|
_caches = {}
|
||||||
_expire_info = {}
|
_expire_info = {}
|
||||||
_locks = {}
|
_locks = {}
|
||||||
|
|
||||||
|
|
||||||
class LocMemCache(BaseCache):
|
class LocMemCache(BaseCache):
|
||||||
def __init__(self, name, params):
|
def __init__(self, name, params):
|
||||||
BaseCache.__init__(self, params)
|
BaseCache.__init__(self, params)
|
||||||
global _caches, _expire_info, _locks
|
|
||||||
self._cache = _caches.setdefault(name, {})
|
self._cache = _caches.setdefault(name, {})
|
||||||
self._expire_info = _expire_info.setdefault(name, {})
|
self._expire_info = _expire_info.setdefault(name, {})
|
||||||
self._lock = _locks.setdefault(name, RWLock())
|
self._lock = _locks.setdefault(name, RWLock())
|
||||||
|
@ -26,28 +27,31 @@ class LocMemCache(BaseCache):
|
||||||
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
|
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
|
||||||
key = self.make_key(key, version=version)
|
key = self.make_key(key, version=version)
|
||||||
self.validate_key(key)
|
self.validate_key(key)
|
||||||
|
try:
|
||||||
|
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
|
||||||
|
except pickle.PickleError:
|
||||||
|
return False
|
||||||
with self._lock.writer():
|
with self._lock.writer():
|
||||||
exp = self._expire_info.get(key)
|
exp = self._expire_info.get(key)
|
||||||
if exp is None or exp <= time.time():
|
if exp is None or exp <= time.time():
|
||||||
try:
|
|
||||||
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
|
|
||||||
self._set(key, pickled, timeout)
|
self._set(key, pickled, timeout)
|
||||||
return True
|
return True
|
||||||
except pickle.PickleError:
|
|
||||||
pass
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def get(self, key, default=None, version=None):
|
def get(self, key, default=None, version=None):
|
||||||
key = self.make_key(key, version=version)
|
key = self.make_key(key, version=version)
|
||||||
self.validate_key(key)
|
self.validate_key(key)
|
||||||
|
pickled = None
|
||||||
with self._lock.reader():
|
with self._lock.reader():
|
||||||
exp = self._expire_info.get(key, 0)
|
exp = self._expire_info.get(key, 0)
|
||||||
if exp is None or exp > time.time():
|
if exp is None or exp > time.time():
|
||||||
try:
|
|
||||||
pickled = self._cache[key]
|
pickled = self._cache[key]
|
||||||
|
if pickled is not None:
|
||||||
|
try:
|
||||||
return pickle.loads(pickled)
|
return pickle.loads(pickled)
|
||||||
except pickle.PickleError:
|
except pickle.PickleError:
|
||||||
return default
|
return default
|
||||||
|
|
||||||
with self._lock.writer():
|
with self._lock.writer():
|
||||||
try:
|
try:
|
||||||
del self._cache[key]
|
del self._cache[key]
|
||||||
|
@ -68,12 +72,13 @@ class LocMemCache(BaseCache):
|
||||||
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
|
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
|
||||||
key = self.make_key(key, version=version)
|
key = self.make_key(key, version=version)
|
||||||
self.validate_key(key)
|
self.validate_key(key)
|
||||||
with self._lock.writer():
|
|
||||||
try:
|
try:
|
||||||
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
|
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
|
||||||
self._set(key, pickled, timeout)
|
|
||||||
except pickle.PickleError:
|
except pickle.PickleError:
|
||||||
pass
|
pass
|
||||||
|
else:
|
||||||
|
with self._lock.writer():
|
||||||
|
self._set(key, pickled, timeout)
|
||||||
|
|
||||||
def incr(self, key, delta=1, version=None):
|
def incr(self, key, delta=1, version=None):
|
||||||
value = self.get(key, version=version)
|
value = self.get(key, version=version)
|
||||||
|
@ -81,12 +86,13 @@ class LocMemCache(BaseCache):
|
||||||
raise ValueError("Key '%s' not found" % key)
|
raise ValueError("Key '%s' not found" % key)
|
||||||
new_value = value + delta
|
new_value = value + delta
|
||||||
key = self.make_key(key, version=version)
|
key = self.make_key(key, version=version)
|
||||||
with self._lock.writer():
|
|
||||||
try:
|
try:
|
||||||
pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
|
pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
|
||||||
self._cache[key] = pickled
|
|
||||||
except pickle.PickleError:
|
except pickle.PickleError:
|
||||||
pass
|
pass
|
||||||
|
else:
|
||||||
|
with self._lock.writer():
|
||||||
|
self._cache[key] = pickled
|
||||||
return new_value
|
return new_value
|
||||||
|
|
||||||
def has_key(self, key, version=None):
|
def has_key(self, key, version=None):
|
||||||
|
@ -135,6 +141,7 @@ class LocMemCache(BaseCache):
|
||||||
self._cache.clear()
|
self._cache.clear()
|
||||||
self._expire_info.clear()
|
self._expire_info.clear()
|
||||||
|
|
||||||
|
|
||||||
# For backwards compatibility
|
# For backwards compatibility
|
||||||
class CacheClass(LocMemCache):
|
class CacheClass(LocMemCache):
|
||||||
pass
|
pass
|
||||||

@@ -86,7 +86,7 @@ class BaseMemcachedCache(BaseCache):
 self._cache.delete(key)

 def get_many(self, keys, version=None):
-new_keys = map(lambda x: self.make_key(x, version=version), keys)
+new_keys = [self.make_key(x, version=version) for x in keys]
 ret = self._cache.get_multi(new_keys)
 if ret:
 _ = {}
@@ -23,11 +23,12 @@ class UploadedFile(File):
 """
 DEFAULT_CHUNK_SIZE = 64 * 2**10

-def __init__(self, file=None, name=None, content_type=None, size=None, charset=None):
+def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
 super(UploadedFile, self).__init__(file, name)
 self.size = size
 self.content_type = content_type
 self.charset = charset
+self.content_type_extra = content_type_extra

 def __repr__(self):
 return force_str("<%s: %s (%s)>" % (
@@ -55,13 +56,13 @@ class TemporaryUploadedFile(UploadedFile):
 """
 A file uploaded to a temporary location (i.e. stream-to-disk).
 """
-def __init__(self, name, content_type, size, charset):
+def __init__(self, name, content_type, size, charset, content_type_extra):
 if settings.FILE_UPLOAD_TEMP_DIR:
 file = tempfile.NamedTemporaryFile(suffix='.upload',
 dir=settings.FILE_UPLOAD_TEMP_DIR)
 else:
 file = tempfile.NamedTemporaryFile(suffix='.upload')
-super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset)
+super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)

 def temporary_file_path(self):
 """
@@ -83,8 +84,8 @@ class InMemoryUploadedFile(UploadedFile):
 """
 A file uploaded into memory (i.e. stream-to-memory).
 """
-def __init__(self, file, field_name, name, content_type, size, charset):
-super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset)
+def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra):
+super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
 self.field_name = field_name

 def open(self, mode=None):
@@ -109,7 +110,7 @@ class SimpleUploadedFile(InMemoryUploadedFile):
 def __init__(self, name, content, content_type='text/plain'):
 content = content or b''
 super(SimpleUploadedFile, self).__init__(BytesIO(content), None, name,
-content_type, len(content), None)
+content_type, len(content), None, None)

 def from_dict(cls, file_dict):
 """

@@ -64,6 +64,7 @@ class FileUploadHandler(object):
 self.content_type = None
 self.content_length = None
 self.charset = None
+self.content_type_extra = None
 self.request = request

 def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
@@ -84,7 +85,7 @@ class FileUploadHandler(object):
 """
 pass

-def new_file(self, field_name, file_name, content_type, content_length, charset=None):
+def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
 """
 Signal that a new file has been started.

@@ -96,6 +97,7 @@ class FileUploadHandler(object):
 self.content_type = content_type
 self.content_length = content_length
 self.charset = charset
+self.content_type_extra = content_type_extra

 def receive_data_chunk(self, raw_data, start):
 """
@@ -132,7 +134,7 @@ class TemporaryFileUploadHandler(FileUploadHandler):
 Create the file object to append to as data is coming in.
 """
 super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
-self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset)
+self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)

 def receive_data_chunk(self, raw_data, start):
 self.file.write(raw_data)
@@ -187,7 +189,8 @@ class MemoryFileUploadHandler(FileUploadHandler):
 name = self.file_name,
 content_type = self.content_type,
 size = file_size,
-charset = self.charset
+charset = self.charset,
+content_type_extra = self.content_type_extra
 )

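The upload hunks above thread a new content_type_extra attribute (the extra parameters of the multipart Content-Type header) through the upload handlers and uploaded-file classes. A minimal sketch of a custom handler that reads it; the handler name and the print are illustrative only, not part of the patch:

    from django.core.files.uploadhandler import FileUploadHandler

    class ContentTypeExtraEchoHandler(FileUploadHandler):
        # Hypothetical handler: only demonstrates where content_type_extra shows up.
        def new_file(self, field_name, file_name, content_type, content_length,
                     charset=None, content_type_extra=None):
            super(ContentTypeExtraEchoHandler, self).new_file(
                field_name, file_name, content_type, content_length,
                charset, content_type_extra)
            print(self.content_type_extra)  # e.g. extra Content-Type parameters sent by the client

        def receive_data_chunk(self, raw_data, start):
            return raw_data  # pass the chunk through to the next handler

        def file_complete(self, file_size):
            return None  # let a later handler build the UploadedFile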

@@ -16,6 +16,7 @@ class EmailBackend(BaseEmailBackend):
 """Write all messages to the stream in a thread-safe way."""
 if not email_messages:
 return
+msg_count = 0
 with self._lock:
 try:
 stream_created = self.open()
@@ -24,9 +25,10 @@ class EmailBackend(BaseEmailBackend):
 self.stream.write('-' * 79)
 self.stream.write('\n')
 self.stream.flush() # flush after each message
+msg_count += 1
 if stream_created:
 self.close()
 except:
 if not self.fail_silently:
 raise
-return len(email_messages)
+return msg_count
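With msg_count, send_messages() reports how many messages were actually written rather than how many were passed in. A minimal sketch of that behaviour (addresses are placeholders):

    from django.core.mail import EmailMessage, get_connection

    connection = get_connection('django.core.mail.backends.console.EmailBackend')
    sent = connection.send_messages([
        EmailMessage('Subject', 'Body', 'from@example.com', ['to@example.com']),
    ])
    assert sent == 1  # one message written to the stream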

@@ -6,4 +6,4 @@ from django.core.mail.backends.base import BaseEmailBackend

 class EmailBackend(BaseEmailBackend):
 def send_messages(self, email_messages):
-return len(email_messages)
+return len(list(email_messages))

@@ -20,7 +20,9 @@ class EmailBackend(BaseEmailBackend):

 def send_messages(self, messages):
 """Redirect messages to the dummy outbox"""
+msg_count = 0
 for message in messages: # .message() triggers header validation
 message.message()
+msg_count += 1
 mail.outbox.extend(messages)
-return len(messages)
+return msg_count

@@ -15,22 +15,18 @@ class EmailBackend(BaseEmailBackend):
 A wrapper that manages the SMTP network connection.
 """
 def __init__(self, host=None, port=None, username=None, password=None,
-use_tls=None, fail_silently=False, **kwargs):
+use_tls=None, fail_silently=False, use_ssl=None, **kwargs):
 super(EmailBackend, self).__init__(fail_silently=fail_silently)
 self.host = host or settings.EMAIL_HOST
 self.port = port or settings.EMAIL_PORT
-if username is None:
-self.username = settings.EMAIL_HOST_USER
-else:
-self.username = username
-if password is None:
-self.password = settings.EMAIL_HOST_PASSWORD
-else:
-self.password = password
-if use_tls is None:
-self.use_tls = settings.EMAIL_USE_TLS
-else:
-self.use_tls = use_tls
+self.username = settings.EMAIL_HOST_USER if username is None else username
+self.password = settings.EMAIL_HOST_PASSWORD if password is None else password
+self.use_tls = settings.EMAIL_USE_TLS if use_tls is None else use_tls
+self.use_ssl = settings.EMAIL_USE_SSL if use_ssl is None else use_ssl
+if self.use_ssl and self.use_tls:
+raise ValueError(
+"EMAIL_USE_TLS/EMAIL_USE_SSL are mutually exclusive, so only set "
+"one of those settings to True.")
 self.connection = None
 self._lock = threading.RLock()

@@ -45,8 +41,14 @@ class EmailBackend(BaseEmailBackend):
 try:
 # If local_hostname is not specified, socket.getfqdn() gets used.
 # For performance, we use the cached FQDN for local_hostname.
+if self.use_ssl:
+self.connection = smtplib.SMTP_SSL(self.host, self.port,
+local_hostname=DNS_NAME.get_fqdn())
+else:
 self.connection = smtplib.SMTP(self.host, self.port,
 local_hostname=DNS_NAME.get_fqdn())
+# TLS/SSL are mutually exclusive, so only attempt TLS over
+# non-secure connections.
 if self.use_tls:
 self.connection.ehlo()
 self.connection.starttls()
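The two hunks above introduce EMAIL_USE_SSL (implicit TLS via smtplib.SMTP_SSL) alongside EMAIL_USE_TLS. A minimal sketch of how a project might opt in; host and port are placeholders, and the two settings must not both be True:

    # settings.py -- illustrative values
    EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
    EMAIL_HOST = 'smtp.example.com'
    EMAIL_PORT = 465        # conventional port for SMTP over SSL
    EMAIL_USE_SSL = True    # implicit TLS; leave EMAIL_USE_TLS at False

    # application code is unchanged
    from django.core.mail import send_mail
    send_mail('Subject', 'Body', 'from@example.com', ['to@example.com'])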

@@ -3,7 +3,6 @@ from __future__ import unicode_literals
 import mimetypes
 import os
 import random
-import sys
 import time
 from email import charset as Charset, encoders as Encoders
 from email.generator import Generator
@@ -139,9 +138,6 @@ class SafeMIMEText(MIMEText):
 """
 fp = six.StringIO()
 g = Generator(fp, mangle_from_ = False)
-if sys.version_info < (2, 6, 6) and isinstance(self._payload, six.text_type):
-# Workaround for http://bugs.python.org/issue1368247
-self._payload = self._payload.encode(self._charset.output_charset)
 g.flatten(self, unixfrom=unixfrom)
 return fp.getvalue()


@@ -183,6 +183,10 @@ class TimestampSigner(Signer):
 return super(TimestampSigner, self).sign(value)

 def unsign(self, value, max_age=None):
+"""
+Retrieve original value and check it wasn't signed more
+than max_age seconds ago.
+"""
 result = super(TimestampSigner, self).unsign(value)
 value, timestamp = result.rsplit(self.sep, 1)
 timestamp = baseconv.base62.decode(timestamp)
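The docstring added above describes the existing behaviour; a short usage sketch (the signed value is arbitrary):

    from django.core.signing import TimestampSigner

    signer = TimestampSigner()
    token = signer.sign('hello')       # value, timestamp and signature joined by the separator
    signer.unsign(token)               # 'hello'
    signer.unsign(token, max_age=60)   # 'hello' while the token is younger than 60 seconds;
                                       # raises SignatureExpired once it is older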

@@ -87,7 +87,7 @@ class EmailValidator(object):
 r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"$)', # quoted-string
 re.IGNORECASE)
 domain_regex = re.compile(
-r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?$)' # domain
+r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}|[A-Z0-9-]{2,})\.?$' # domain
 # literal form, ipv4 address (SMTP 4.1.3)
 r'|^\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$',
 re.IGNORECASE)

@@ -1,20 +1,25 @@
 import warnings

 from django.core import signals
-from django.db.utils import (DEFAULT_DB_ALIAS,
-DataError, OperationalError, IntegrityError, InternalError,
-ProgrammingError, NotSupportedError, DatabaseError,
-InterfaceError, Error,
-load_backend, ConnectionHandler, ConnectionRouter)
+from django.db.utils import (DEFAULT_DB_ALIAS, DataError, OperationalError,
+IntegrityError, InternalError, ProgrammingError, NotSupportedError,
+DatabaseError, InterfaceError, Error, load_backend,
+ConnectionHandler, ConnectionRouter)
 from django.utils.functional import cached_property

-__all__ = ('backend', 'connection', 'connections', 'router', 'DatabaseError',
-'IntegrityError', 'DEFAULT_DB_ALIAS')
+__all__ = [
+'backend', 'connection', 'connections', 'router', 'DatabaseError',
+'IntegrityError', 'InternalError', 'ProgrammingError', 'DataError',
+'NotSupportedError', 'Error', 'InterfaceError', 'OperationalError',
+'DEFAULT_DB_ALIAS'
+]

 connections = ConnectionHandler()

 router = ConnectionRouter()


 # `connection`, `DatabaseError` and `IntegrityError` are convenient aliases
 # for backend bits.

@@ -39,8 +44,15 @@ class DefaultConnectionProxy(object):
 def __delattr__(self, name):
 return delattr(connections[DEFAULT_DB_ALIAS], name)

+def __eq__(self, other):
+return connections[DEFAULT_DB_ALIAS] == other

+def __ne__(self, other):
+return connections[DEFAULT_DB_ALIAS] != other

 connection = DefaultConnectionProxy()


 class DefaultBackendProxy(object):
 """
 Temporary proxy class used during deprecation period of the `backend` module
@@ -63,6 +75,7 @@ class DefaultBackendProxy(object):

 backend = DefaultBackendProxy()


 def close_connection(**kwargs):
 warnings.warn(
 "close_connection is superseded by close_old_connections.",
@@ -76,12 +89,14 @@ def close_connection(**kwargs):
 transaction.abort(conn)
 connections[conn].close()


 # Register an event to reset saved queries when a Django request is started.
 def reset_queries(**kwargs):
 for conn in connections.all():
 conn.queries = []
 signals.request_started.connect(reset_queries)


 # Register an event to reset transaction state and close connections past
 # their lifetime. NB: abort() doesn't do anything outside of a transaction.
 def close_old_connections(**kwargs):
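With the __eq__/__ne__ additions above, the connection proxy compares equal to the default connection object (and BaseDatabaseWrapper, in the next file, compares by alias). A tiny illustrative check:

    from django.db import DEFAULT_DB_ALIAS, connection, connections

    # DefaultConnectionProxy forwards equality to connections[DEFAULT_DB_ALIAS]
    assert connection == connections[DEFAULT_DB_ALIAS]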

@@ -70,7 +70,9 @@ class BaseDatabaseWrapper(object):
 self._thread_ident = thread.get_ident()

 def __eq__(self, other):
+if isinstance(other, BaseDatabaseWrapper):
 return self.alias == other.alias
+return NotImplemented

 def __ne__(self, other):
 return not self == other
@@ -1184,6 +1186,7 @@ FieldInfo = namedtuple('FieldInfo',
 'name type_code display_size internal_size precision scale null_ok'
 )


 class BaseDatabaseIntrospection(object):
 """
 This class encapsulates all backend-specific introspection utilities

@@ -253,12 +253,13 @@ class BaseDatabaseCreation(object):
 r_col = model._meta.get_field(f.rel.field_name).column
 r_name = '%s_refs_%s_%s' % (
 col, r_col, self._digest(table, r_table))
-output.append('%s %s %s %s;' % \
-(style.SQL_KEYWORD('ALTER TABLE'),
+output.append('%s %s %s %s;' % (
+style.SQL_KEYWORD('ALTER TABLE'),
 style.SQL_TABLE(qn(table)),
 style.SQL_KEYWORD(self.connection.ops.drop_foreignkey_sql()),
 style.SQL_FIELD(qn(truncate_name(
-r_name, self.connection.ops.max_name_length())))))
+r_name, self.connection.ops.max_name_length())))
+))
 del references_to_delete[model]
 return output

@@ -8,33 +8,43 @@ ImproperlyConfigured.
 """

 from django.core.exceptions import ImproperlyConfigured
-from django.db.backends import *
+from django.db.backends import (BaseDatabaseOperations, BaseDatabaseClient,
+BaseDatabaseIntrospection, BaseDatabaseWrapper, BaseDatabaseFeatures,
+BaseDatabaseValidation)
 from django.db.backends.creation import BaseDatabaseCreation


 def complain(*args, **kwargs):
 raise ImproperlyConfigured("settings.DATABASES is improperly configured. "
 "Please supply the ENGINE value. Check "
 "settings documentation for more details.")


 def ignore(*args, **kwargs):
 pass


 class DatabaseError(Exception):
 pass


 class IntegrityError(DatabaseError):
 pass


 class DatabaseOperations(BaseDatabaseOperations):
 quote_name = complain


 class DatabaseClient(BaseDatabaseClient):
 runshell = complain


 class DatabaseCreation(BaseDatabaseCreation):
 create_test_db = ignore
 destroy_test_db = ignore


 class DatabaseIntrospection(BaseDatabaseIntrospection):
 get_table_list = complain
 get_table_description = complain
@@ -42,6 +52,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 get_indexes = complain
 get_key_columns = complain


 class DatabaseWrapper(BaseDatabaseWrapper):
 operators = {}
 # Override the base class implementations with null

@@ -37,7 +37,8 @@ except ImportError:

 from django.conf import settings
 from django.db import utils
-from django.db.backends import *
+from django.db.backends import (util, BaseDatabaseFeatures,
+BaseDatabaseOperations, BaseDatabaseWrapper)
 from django.db.backends.mysql.client import DatabaseClient
 from django.db.backends.mysql.creation import DatabaseCreation
 from django.db.backends.mysql.introspection import DatabaseIntrospection
@@ -60,6 +61,7 @@ IntegrityError = Database.IntegrityError
 # It's impossible to import datetime_or_None directly from MySQLdb.times
 parse_datetime = conversions[FIELD_TYPE.DATETIME]


 def parse_datetime_with_timezone_support(value):
 dt = parse_datetime(value)
 # Confirm that dt is naive before overwriting its tzinfo.
@@ -67,6 +69,7 @@ def parse_datetime_with_timezone_support(value):
 dt = dt.replace(tzinfo=timezone.utc)
 return dt


 def adapt_datetime_with_timezone_support(value, conv):
 # Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
 if settings.USE_TZ:
@@ -101,6 +104,7 @@ django_conversions.update({
 # http://dev.mysql.com/doc/refman/5.0/en/news.html .
 server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')


 # MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
 # MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
 # point is to raise Warnings as exceptions, this can be done with the Python
@@ -151,6 +155,7 @@ class CursorWrapper(object):
 def __iter__(self):
 return iter(self.cursor)


 class DatabaseFeatures(BaseDatabaseFeatures):
 empty_fetchmany_value = ()
 update_can_self_select = False
@@ -208,6 +213,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
 cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
 return cursor.fetchone() is not None


 class DatabaseOperations(BaseDatabaseOperations):
 compiler_module = "django.db.backends.mysql.compiler"

@@ -323,7 +329,7 @@ class DatabaseOperations(BaseDatabaseOperations):
 # Truncate already resets the AUTO_INCREMENT field from
 # MySQL version 5.0.13 onwards. Refs #16961.
 if self.connection.mysql_version < (5, 0, 13):
-return ["%s %s %s %s %s;" % \
+return ["%s %s %s %s %s;" %
 (style.SQL_KEYWORD('ALTER'),
 style.SQL_KEYWORD('TABLE'),
 style.SQL_TABLE(self.quote_name(sequence['table'])),
@@ -377,6 +383,7 @@ class DatabaseOperations(BaseDatabaseOperations):
 items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
 return "VALUES " + ", ".join([items_sql] * num_values)


 class DatabaseWrapper(BaseDatabaseWrapper):
 vendor = 'mysql'
 operators = {

@@ -3,6 +3,7 @@ import sys

 from django.db.backends import BaseDatabaseClient


 class DatabaseClient(BaseDatabaseClient):
 executable_name = 'mysql'

@@ -37,4 +38,3 @@ class DatabaseClient(BaseDatabaseClient):
 sys.exit(os.system(" ".join(args)))
 else:
 os.execvp(self.executable_name, args)

@@ -22,20 +22,26 @@ class SQLCompiler(compiler.SQLCompiler):
 sql, params = self.as_sql()
 return '(%s) IN (%s)' % (', '.join(['%s.%s' % (qn(alias), qn2(column)) for column in columns]), sql), params


 class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
 pass


 class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
 pass


 class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
 pass


 class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
 pass


 class SQLDateCompiler(compiler.SQLDateCompiler, SQLCompiler):
 pass


 class SQLDateTimeCompiler(compiler.SQLDateTimeCompiler, SQLCompiler):
 pass

@@ -1,5 +1,6 @@
 from django.db.backends.creation import BaseDatabaseCreation


 class DatabaseCreation(BaseDatabaseCreation):
 # This dictionary maps Field objects to their associated MySQL column
 # types, as strings. Column-type strings can contain format strings; they'll

@@ -7,6 +7,7 @@ from django.utils.encoding import force_text

 foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")


 class DatabaseIntrospection(BaseDatabaseIntrospection):
 data_types_reverse = {
 FIELD_TYPE.BLOB: 'TextField',

@@ -1,5 +1,6 @@
 from django.db.backends import BaseDatabaseValidation


 class DatabaseValidation(BaseDatabaseValidation):
 def validate_field(self, errors, opts, f):
 """

@@ -7,11 +7,12 @@ from __future__ import unicode_literals

 import decimal
 import re
+import platform
 import sys
 import warnings


 def _setup_environment(environ):
-import platform
 # Cygwin requires some special voodoo to set the environment variables
 # properly so that Oracle will see them.
 if platform.system().upper().startswith('CYGWIN'):
@@ -91,6 +92,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
 supports_tablespaces = True
 supports_sequence_reset = False


 class DatabaseOperations(BaseDatabaseOperations):
 compiler_module = "django.db.backends.oracle.compiler"

@@ -268,7 +270,7 @@ WHEN (new.%(col_name)s IS NULL)
 # http://cx-oracle.sourceforge.net/html/cursor.html#Cursor.statement
 # The DB API definition does not define this attribute.
 statement = cursor.statement
-if not six.PY3 and not isinstance(statement, unicode):
+if statement and not six.PY3 and not isinstance(statement, unicode):
 statement = statement.decode('utf-8')
 # Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's
 # `statement` doesn't contain the query parameters. refs #20010.

@@ -3,6 +3,7 @@ import sys

 from django.db.backends import BaseDatabaseClient


 class DatabaseClient(BaseDatabaseClient):
 executable_name = 'sqlplus'

@@ -13,4 +14,3 @@ class DatabaseClient(BaseDatabaseClient):
 sys.exit(os.system(" ".join(args)))
 else:
 os.execvp(self.executable_name, args)

@@ -60,17 +60,22 @@ class SQLCompiler(compiler.SQLCompiler):
 class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
 pass


 class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
 pass


 class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
 pass


 class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
 pass


 class SQLDateCompiler(compiler.SQLDateCompiler, SQLCompiler):
 pass


 class SQLDateTimeCompiler(compiler.SQLDateTimeCompiler, SQLCompiler):
 pass

@@ -5,9 +5,11 @@ from django.conf import settings
 from django.db.backends.creation import BaseDatabaseCreation
 from django.utils.six.moves import input


 TEST_DATABASE_PREFIX = 'test_'
 PASSWORD = 'Im_a_lumberjack'


 class DatabaseCreation(BaseDatabaseCreation):
 # This dictionary maps Field objects to their associated Oracle column
 # types, as strings. Column-type strings can contain format strings; they'll

@@ -1,10 +1,13 @@
-from django.db.backends import BaseDatabaseIntrospection, FieldInfo
-from django.utils.encoding import force_text
-import cx_Oracle
 import re

+import cx_Oracle

+from django.db.backends import BaseDatabaseIntrospection, FieldInfo
+from django.utils.encoding import force_text

 foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")


 class DatabaseIntrospection(BaseDatabaseIntrospection):
 # Maps type objects to Django Field types.
 data_types_reverse = {

@@ -6,7 +6,9 @@ Requires psycopg 2: http://initd.org/projects/psycopg2
 import logging
 import sys

-from django.db.backends import *
+from django.conf import settings
+from django.db.backends import (BaseDatabaseFeatures, BaseDatabaseWrapper,
+BaseDatabaseValidation)
 from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
 from django.db.backends.postgresql_psycopg2.client import DatabaseClient
 from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
@@ -34,11 +36,13 @@ psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)

 logger = logging.getLogger('django.db.backends')


 def utc_tzinfo_factory(offset):
 if offset != 0:
 raise AssertionError("database connection isn't set to UTC")
 return utc


 class DatabaseFeatures(BaseDatabaseFeatures):
 needs_datetime_string_cast = False
 can_return_id_from_insert = True
@@ -55,6 +59,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
 can_rollback_ddl = True
 supports_combined_alters = True


 class DatabaseWrapper(BaseDatabaseWrapper):
 vendor = 'postgresql'
 operators = {
@@ -135,7 +140,8 @@ class DatabaseWrapper(BaseDatabaseWrapper):
 # Set the time zone in autocommit mode (see #17062)
 self.set_autocommit(True)
 self.connection.cursor().execute(
-self.ops.set_time_zone_sql(), [tz])
+self.ops.set_time_zone_sql(), [tz]
+)
 self.connection.set_isolation_level(self.isolation_level)

 def create_cursor(self):

@@ -3,6 +3,7 @@ import sys

 from django.db.backends import BaseDatabaseClient


 class DatabaseClient(BaseDatabaseClient):
 executable_name = 'psql'

@@ -20,4 +21,3 @@ class DatabaseClient(BaseDatabaseClient):
 sys.exit(os.system(" ".join(args)))
 else:
 os.execvp(self.executable_name, args)

@@ -135,7 +135,7 @@ class DatabaseOperations(BaseDatabaseOperations):
 # This will be the case if it's an m2m using an autogenerated
 # intermediate table (see BaseDatabaseIntrospection.sequence_list)
 column_name = 'id'
-sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % \
+sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" %
 (style.SQL_KEYWORD('SELECT'),
 style.SQL_TABLE(self.quote_name(table_name)),
 style.SQL_FIELD(column_name))
@@ -161,7 +161,7 @@ class DatabaseOperations(BaseDatabaseOperations):

 for f in model._meta.local_fields:
 if isinstance(f, models.AutoField):
-output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
+output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" %
 (style.SQL_KEYWORD('SELECT'),
 style.SQL_TABLE(qn(model._meta.db_table)),
 style.SQL_FIELD(f.column),
@@ -173,7 +173,7 @@ class DatabaseOperations(BaseDatabaseOperations):
 break # Only one AutoField is allowed per model, so don't bother continuing.
 for f in model._meta.many_to_many:
 if not f.rel.through:
-output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
+output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" %
 (style.SQL_KEYWORD('SELECT'),
 style.SQL_TABLE(qn(f.m2m_db_table())),
 style.SQL_FIELD('id'),

@@ -21,6 +21,7 @@ def _parse_version(text):
 except (ValueError, TypeError):
 return int(major) * 10000 + int(major2) * 100


 def get_version(connection):
 """
 Returns an integer representing the major, minor and revision number of the

@@ -11,8 +11,10 @@ import decimal
 import warnings
 import re

+from django.conf import settings
 from django.db import utils
-from django.db.backends import *
+from django.db.backends import (util, BaseDatabaseFeatures,
+BaseDatabaseOperations, BaseDatabaseWrapper, BaseDatabaseValidation)
 from django.db.backends.sqlite3.client import DatabaseClient
 from django.db.backends.sqlite3.creation import DatabaseCreation
 from django.db.backends.sqlite3.introspection import DatabaseIntrospection
@@ -43,6 +45,7 @@ except ImportError:
 DatabaseError = Database.DatabaseError
 IntegrityError = Database.IntegrityError


 def parse_datetime_with_timezone_support(value):
 dt = parse_datetime(value)
 # Confirm that dt is naive before overwriting its tzinfo.
@@ -50,6 +53,7 @@ def parse_datetime_with_timezone_support(value):
 dt = dt.replace(tzinfo=timezone.utc)
 return dt


 def adapt_datetime_with_timezone_support(value):
 # Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
 if settings.USE_TZ:
@@ -62,6 +66,7 @@ def adapt_datetime_with_timezone_support(value):
 value = value.astimezone(timezone.utc).replace(tzinfo=None)
 return value.isoformat(str(" "))


 def decoder(conv_func):
 """ The Python sqlite3 interface returns always byte strings.
 This function converts the received value to a regular string before
@@ -82,6 +87,7 @@ Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal)
 Database.register_adapter(str, lambda s: s.decode('utf-8'))
 Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8'))


 class DatabaseFeatures(BaseDatabaseFeatures):
 # SQLite cannot handle us only partially reading from a cursor's result set
 # and then writing the same rows to the database in another cursor. This
@@ -127,6 +133,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
 def has_zoneinfo_database(self):
 return pytz is not None


 class DatabaseOperations(BaseDatabaseOperations):
 def bulk_batch_size(self, fields, objs):
 """
@@ -275,6 +282,7 @@ class DatabaseOperations(BaseDatabaseOperations):
 res.extend(["UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1))
 return " ".join(res)


 class DatabaseWrapper(BaseDatabaseWrapper):
 vendor = 'sqlite'
 # SQLite requires LIKE statements to include an ESCAPE clause if the value
@@ -433,6 +441,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):

 FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')


 class SQLiteCursorWrapper(Database.Cursor):
 """
 Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
@@ -452,6 +461,7 @@ class SQLiteCursorWrapper(Database.Cursor):
 def convert_query(self, query):
 return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')


 def _sqlite_date_extract(lookup_type, dt):
 if dt is None:
 return None
@@ -464,6 +474,7 @@ def _sqlite_date_extract(lookup_type, dt):
 else:
 return getattr(dt, lookup_type)


 def _sqlite_date_trunc(lookup_type, dt):
 try:
 dt = util.typecast_timestamp(dt)
@@ -476,6 +487,7 @@ def _sqlite_date_trunc(lookup_type, dt):
 elif lookup_type == 'day':
 return "%i-%02i-%02i" % (dt.year, dt.month, dt.day)


 def _sqlite_datetime_extract(lookup_type, dt, tzname):
 if dt is None:
 return None
@@ -490,6 +502,7 @@ def _sqlite_datetime_extract(lookup_type, dt, tzname):
 else:
 return getattr(dt, lookup_type)


 def _sqlite_datetime_trunc(lookup_type, dt, tzname):
 try:
 dt = util.typecast_timestamp(dt)
@@ -510,6 +523,7 @@ def _sqlite_datetime_trunc(lookup_type, dt, tzname):
 elif lookup_type == 'second':
 return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)


 def _sqlite_format_dtdelta(dt, conn, days, secs, usecs):
 try:
 dt = util.typecast_timestamp(dt)
@@ -524,5 +538,6 @@ def _sqlite_format_dtdelta(dt, conn, days, secs, usecs):
 # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
 return str(dt)


 def _sqlite_regexp(re_pattern, re_string):
 return bool(re.search(re_pattern, force_text(re_string))) if re_string is not None else False

@@ -3,6 +3,7 @@ import sys

 from django.db.backends import BaseDatabaseClient


 class DatabaseClient(BaseDatabaseClient):
 executable_name = 'sqlite3'

@@ -13,4 +14,3 @@ class DatabaseClient(BaseDatabaseClient):
 sys.exit(os.system(" ".join(args)))
 else:
 os.execvp(self.executable_name, args)

@@ -1,8 +1,10 @@
 import os
 import sys

 from django.db.backends.creation import BaseDatabaseCreation
 from django.utils.six.moves import input


 class DatabaseCreation(BaseDatabaseCreation):
 # SQLite doesn't actually support most of these types, but it "does the right
 # thing" given more verbose field definitions, so leave them as is so that
@@ -80,7 +82,6 @@ class DatabaseCreation(BaseDatabaseCreation):
 SQLite since the databases will be distinct despite having the same
 TEST_NAME. See http://www.sqlite.org/inmemorydb.html
 """
-settings_dict = self.connection.settings_dict
 test_dbname = self._get_test_db_name()
 sig = [self.connection.settings_dict['NAME']]
 if test_dbname == ':memory:':

@@ -1,8 +1,11 @@
 import re

 from django.db.backends import BaseDatabaseIntrospection, FieldInfo


 field_size_re = re.compile(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$')


 def get_field_size(name):
 """ Extract the size number from a "varchar(11)" type name """
 m = field_size_re.search(name)
@@ -46,6 +49,7 @@ class FlexibleFieldLookupDict(object):
 return ('CharField', {'max_length': size})
 raise KeyError


 class DatabaseIntrospection(BaseDatabaseIntrospection):
 data_types_reverse = FlexibleFieldLookupDict()

@@ -98,7 +102,6 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
 li, ri = other_table_results.index('('), other_table_results.rindex(')')
 other_table_results = other_table_results[li + 1:ri]


 for other_index, other_desc in enumerate(other_table_results.split(',')):
 other_desc = other_desc.strip()
 if other_desc.startswith('UNIQUE'):

@@ -85,8 +85,10 @@ class CursorDebugWrapper(CursorWrapper):
 def typecast_date(s):
     return datetime.date(*map(int, s.split('-'))) if s else None  # returns None if s is null


 def typecast_time(s):  # does NOT store time zone information
-    if not s: return None
+    if not s:
+        return None
     hour, minutes, seconds = s.split(':')
     if '.' in seconds:  # check whether seconds have a fractional part
         seconds, microseconds = seconds.split('.')
@@ -94,11 +96,14 @@ def typecast_time(s):  # does NOT store time zone information
         microseconds = '0'
     return datetime.time(int(hour), int(minutes), int(seconds), int(float('.' + microseconds) * 1000000))


 def typecast_timestamp(s):  # does NOT store time zone information
     # "2005-07-29 15:48:00.590358-05"
     # "2005-07-29 09:56:00-05"
-    if not s: return None
-    if not ' ' in s: return typecast_date(s)
+    if not s:
+        return None
+    if not ' ' in s:
+        return typecast_date(s)
     d, t = s.split()
     # Extract timezone information, if it exists. Currently we just throw
     # it away, but in the future we may make use of it.
@@ -122,11 +127,13 @@ def typecast_timestamp(s):  # does NOT store time zone information
         int(times[0]), int(times[1]), int(seconds),
         int((microseconds + '000000')[:6]), tzinfo)


 def typecast_decimal(s):
     if s is None or s == '':
         return None
     return decimal.Decimal(s)


 ###############################################
 # Converters from Python to database (string) #
 ###############################################
@@ -136,6 +143,7 @@ def rev_typecast_decimal(d):
         return None
     return str(d)


 def truncate_name(name, length=None, hash_len=4):
     """Shortens a string to a repeatable mangled version with the given length.
     """
@@ -145,6 +153,7 @@ def truncate_name(name, length=None, hash_len=4):
     hsh = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
     return '%s%s' % (name[:length - hash_len], hsh)


 def format_number(value, max_digits, decimal_places):
     """
     Formats a number into a string with the requisite number of digits and
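For illustration only (not part of this commit): a minimal, self-contained sketch of how the typecast helpers above behave once the one-line conditionals are split; the sample values are made up.

# Illustrative only -- exercising the typecast helpers shown in the hunk above.
import datetime

def typecast_date(s):
    return datetime.date(*map(int, s.split('-'))) if s else None

def typecast_time(s):
    if not s:
        return None
    hour, minutes, seconds = s.split(':')
    if '.' in seconds:
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    return datetime.time(int(hour), int(minutes), int(seconds),
                         int(float('.' + microseconds) * 1000000))

print(typecast_date('2005-07-29'))        # datetime.date(2005, 7, 29)
print(typecast_time('15:48:00.590358'))   # datetime.time(15, 48, 0, 590358)
print(typecast_time(''))                  # None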
@@ -26,6 +26,7 @@ def permalink(func):
         (viewname, viewargs, viewkwargs)
     """
     from django.core.urlresolvers import reverse

     @wraps(func)
     def inner(*args, **kwargs):
         bits = func(*args, **kwargs)
@@ -3,6 +3,7 @@ Classes to represent the definitions of aggregate functions.
 """
 from django.db.models.constants import LOOKUP_SEP


 def refs_aggregate(lookup_parts, aggregates):
     """
     A little helper method to check if the lookup_parts contains references
@@ -15,6 +16,7 @@ def refs_aggregate(lookup_parts, aggregates):
             return True
     return False


 class Aggregate(object):
     """
     Default Aggregate definition.
@@ -58,23 +60,30 @@ class Aggregate(object):
         aggregate = klass(col, source=source, is_summary=is_summary, **self.extra)
         query.aggregates[alias] = aggregate


 class Avg(Aggregate):
     name = 'Avg'


 class Count(Aggregate):
     name = 'Count'


 class Max(Aggregate):
     name = 'Max'


 class Min(Aggregate):
     name = 'Min'


 class StdDev(Aggregate):
     name = 'StdDev'


 class Sum(Aggregate):
     name = 'Sum'


 class Variance(Aggregate):
     name = 'Variance'
@@ -19,6 +19,7 @@ from django.db.models.query_utils import DeferredAttribute, deferred_class_facto
 from django.db.models.deletion import Collector
 from django.db.models.options import Options
 from django.db.models import signals
+from django.db.models.loading import register_models, get_model, MODELS_MODULE_NAME
 from django.utils.translation import ugettext_lazy as _
 from django.utils.functional import curry
 from django.utils.encoding import force_str, force_text
@@ -85,10 +86,22 @@ class ModelBase(type):
         base_meta = getattr(new_class, '_meta', None)

         if getattr(meta, 'app_label', None) is None:
-            # Figure out the app_label by looking one level up.
+            # Figure out the app_label by looking one level up from the package
+            # or module named 'models'. If no such package or module exists,
+            # fall back to looking one level up from the module this model is
+            # defined in.
+
             # For 'django.contrib.sites.models', this would be 'sites'.
+            # For 'geo.models.places' this would be 'geo'.
+
             model_module = sys.modules[new_class.__module__]
-            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
+            package_components = model_module.__name__.split('.')
+            package_components.reverse()  # find the last occurrence of 'models'
+            try:
+                app_label_index = package_components.index(MODELS_MODULE_NAME) + 1
+            except ValueError:
+                app_label_index = 1
+            kwargs = {"app_label": package_components[app_label_index]}
         else:
             kwargs = {}
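For illustration only (not part of this commit): a standalone sketch of the app_label lookup the new code performs, scanning the module path from the right for the 'models' component and taking the component before it, with a fallback to the immediate parent.

# Illustrative sketch of the new app_label detection (not the actual Django code).
MODELS_MODULE_NAME = 'models'

def guess_app_label(module_name):
    components = module_name.split('.')
    components.reverse()                      # find the last occurrence of 'models'
    try:
        index = components.index(MODELS_MODULE_NAME) + 1
    except ValueError:
        index = 1                             # fall back to the module's parent
    return components[index]

print(guess_app_label('django.contrib.sites.models'))   # 'sites'
print(guess_app_label('geo.models.places'))              # 'geo'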
@@ -225,9 +238,9 @@ class ModelBase(type):
         # class
         for field in base._meta.virtual_fields:
             if base._meta.abstract and field.name in field_names:
-                raise FieldError('Local field %r in class %r clashes '\
-                                 'with field of similar name from '\
-                                 'abstract base class %r' % \
+                raise FieldError('Local field %r in class %r clashes '
+                                 'with field of similar name from '
+                                 'abstract base class %r' %
                                  (field.name, name, base.__name__))
             new_class.add_to_class(field.name, copy.deepcopy(field))
@@ -1007,8 +1020,6 @@ def get_absolute_url(opts, func, self, *args, **kwargs):
 # MISC #
 ########

-class Empty(object):
-    pass

 def simple_class_factory(model, attrs):
     """
@@ -1016,6 +1027,7 @@ def simple_class_factory(model, attrs):
     """
     return model


 def model_unpickle(model_id, attrs, factory):
     """
     Used to unpickle Model subclasses with deferred fields.
@@ -4,6 +4,7 @@ from django.db.models.aggregates import refs_aggregate
 from django.db.models.constants import LOOKUP_SEP
 from django.utils import tree


 class ExpressionNode(tree.Node):
     """
     Base class for all query expressions.
@@ -128,6 +129,7 @@ class ExpressionNode(tree.Node):
             "Use .bitand() and .bitor() for bitwise logical operations."
         )


 class F(ExpressionNode):
     """
     An expression representing the value of the given field.
@@ -147,6 +149,7 @@ class F(ExpressionNode):
     def evaluate(self, evaluator, qn, connection):
         return evaluator.evaluate_leaf(self, qn, connection)


 class DateModifierNode(ExpressionNode):
     """
     Node that implements the following syntax:
@@ -1,5 +1,6 @@
 from __future__ import unicode_literals

+import collections
 import copy
 import datetime
 import decimal
@@ -17,7 +18,6 @@ from django.core import exceptions, validators
 from django.utils.datastructures import DictWrapper
 from django.utils.dateparse import parse_date, parse_datetime, parse_time
 from django.utils.functional import curry, total_ordering
-from django.utils.itercompat import is_iterator
 from django.utils.text import capfirst
 from django.utils import timezone
 from django.utils.translation import ugettext_lazy as _
@@ -25,9 +25,11 @@ from django.utils.encoding import smart_text, force_text, force_bytes
 from django.utils.ipv6 import clean_ipv6_address
 from django.utils import six


 class Empty(object):
     pass


 class NOT_PROVIDED:
     pass
@@ -35,12 +37,15 @@ class NOT_PROVIDED:
 # of most "choices" lists.
 BLANK_CHOICE_DASH = [("", "---------")]


 def _load_field(app_label, model_name, field_name):
     return get_model(app_label, model_name)._meta.get_field_by_name(field_name)[0]


 class FieldDoesNotExist(Exception):
     pass


 # A guide to Field parameters:
 #
 #   * name: The name of the field specifed in the model.
@@ -61,6 +66,7 @@ def _empty(of_cls):
     new.__class__ = of_cls
     return new


 @total_ordering
 class Field(object):
     """Base class for all field types"""
||||||
if hasattr(value, '_prepare'):
|
if hasattr(value, '_prepare'):
|
||||||
return value._prepare()
|
return value._prepare()
|
||||||
|
|
||||||
if lookup_type in (
|
if lookup_type in {
|
||||||
'iexact', 'contains', 'icontains',
|
'iexact', 'contains', 'icontains',
|
||||||
'startswith', 'istartswith', 'endswith', 'iendswith',
|
'startswith', 'istartswith', 'endswith', 'iendswith',
|
||||||
'month', 'day', 'week_day', 'hour', 'minute', 'second',
|
'month', 'day', 'week_day', 'hour', 'minute', 'second',
|
||||||
'isnull', 'search', 'regex', 'iregex',
|
'isnull', 'search', 'regex', 'iregex',
|
||||||
):
|
}:
|
||||||
return value
|
return value
|
||||||
elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
|
elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
|
||||||
return self.get_prep_value(value)
|
return self.get_prep_value(value)
|
||||||
|
@ -599,7 +605,7 @@ class Field(object):
|
||||||
return bound_field_class(self, fieldmapping, original)
|
return bound_field_class(self, fieldmapping, original)
|
||||||
|
|
||||||
def _get_choices(self):
|
def _get_choices(self):
|
||||||
if is_iterator(self._choices):
|
if isinstance(self._choices, collections.Iterator):
|
||||||
choices, self._choices = tee(self._choices)
|
choices, self._choices = tee(self._choices)
|
||||||
return choices
|
return choices
|
||||||
else:
|
else:
|
||||||
|
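For illustration only (not part of this commit): a small runnable sketch of the two idioms the hunks above switch to, the isinstance check against the Iterator ABC (replacing the removed is_iterator() helper) and set-literal membership for the fixed lookup names.

# Illustrative only: the idioms the hunks above switch to.
try:
    from collections.abc import Iterator   # modern Python
except ImportError:
    from collections import Iterator       # Python 2 / early Python 3, as in this patch

choices = iter([(1, 'one'), (2, 'two')])
print(isinstance(choices, Iterator))        # True -- replaces the is_iterator() helper
print(isinstance([(1, 'one')], Iterator))   # False -- a list is iterable but not an iterator

# Membership tests against the fixed lookup names now use a set literal.
lookup_type = 'iexact'
print(lookup_type in {'iexact', 'contains', 'icontains'})   # True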
@@ -732,6 +738,7 @@ class AutoField(Field):
     def formfield(self, **kwargs):
         return None


 class BooleanField(Field):
     empty_strings_allowed = False
     default_error_messages = {
@@ -786,13 +793,13 @@ class BooleanField(Field):
         if self.choices:
             include_blank = (self.null or
                              not (self.has_default() or 'initial' in kwargs))
-            defaults = {'choices': self.get_choices(
-                include_blank=include_blank)}
+            defaults = {'choices': self.get_choices(include_blank=include_blank)}
         else:
             defaults = {'form_class': forms.BooleanField}
         defaults.update(kwargs)
         return super(BooleanField, self).formfield(**defaults)


 class CharField(Field):
     description = _("String (up to %(max_length)s)")

@@ -819,6 +826,7 @@ class CharField(Field):
         defaults.update(kwargs)
         return super(CharField, self).formfield(**defaults)


 # TODO: Maybe move this into contrib, because it's specialized.
 class CommaSeparatedIntegerField(CharField):
     default_validators = [validators.validate_comma_separated_integer_list]
@@ -833,6 +841,7 @@ class CommaSeparatedIntegerField(CharField):
         defaults.update(kwargs)
         return super(CommaSeparatedIntegerField, self).formfield(**defaults)


 class DateField(Field):
     empty_strings_allowed = False
     default_error_messages = {
@@ -939,6 +948,7 @@ class DateField(Field):
         defaults.update(kwargs)
         return super(DateField, self).formfield(**defaults)


 class DateTimeField(DateField):
     empty_strings_allowed = False
     default_error_messages = {
@@ -1045,6 +1055,7 @@ class DateTimeField(DateField):
         defaults.update(kwargs)
         return super(DateTimeField, self).formfield(**defaults)


 class DecimalField(Field):
     empty_strings_allowed = False
     default_error_messages = {
@@ -1116,6 +1127,7 @@ class DecimalField(Field):
         defaults.update(kwargs)
         return super(DecimalField, self).formfield(**defaults)


 class EmailField(CharField):
     default_validators = [validators.validate_email]
     description = _("Email address")
@@ -1142,6 +1154,7 @@ class EmailField(CharField):
         defaults.update(kwargs)
         return super(EmailField, self).formfield(**defaults)


 class FilePathField(Field):
     description = _("File path")

@@ -1183,6 +1196,7 @@ class FilePathField(Field):
     def get_internal_type(self):
         return "FilePathField"


 class FloatField(Field):
     empty_strings_allowed = False
     default_error_messages = {
@@ -1215,6 +1229,7 @@ class FloatField(Field):
         defaults.update(kwargs)
         return super(FloatField, self).formfield(**defaults)


 class IntegerField(Field):
     empty_strings_allowed = False
     default_error_messages = {
@@ -1253,6 +1268,7 @@ class IntegerField(Field):
         defaults.update(kwargs)
         return super(IntegerField, self).formfield(**defaults)


 class BigIntegerField(IntegerField):
     empty_strings_allowed = False
     description = _("Big (8 byte) integer")
@@ -1267,6 +1283,7 @@ class BigIntegerField(IntegerField):
         defaults.update(kwargs)
         return super(BigIntegerField, self).formfield(**defaults)


 class IPAddressField(Field):
     empty_strings_allowed = False
     description = _("IPv4 address")
@@ -1288,6 +1305,7 @@ class IPAddressField(Field):
         defaults.update(kwargs)
         return super(IPAddressField, self).formfield(**defaults)


 class GenericIPAddressField(Field):
     empty_strings_allowed = True
     description = _("IP address")
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def formfield(self, **kwargs):
|
def formfield(self, **kwargs):
|
||||||
defaults = {'form_class': forms.GenericIPAddressField}
|
defaults = {
|
||||||
|
'protocol': self.protocol,
|
||||||
|
'form_class': forms.GenericIPAddressField,
|
||||||
|
}
|
||||||
defaults.update(kwargs)
|
defaults.update(kwargs)
|
||||||
return super(GenericIPAddressField, self).formfield(**defaults)
|
return super(GenericIPAddressField, self).formfield(**defaults)
|
||||||
|
|
||||||
|
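For illustration only (not part of this commit): a minimal sketch, assuming a configured Django project, of what the hunk above changes for callers; the model field's protocol now reaches the generated form field instead of being dropped.

# Illustrative sketch: formfield() now forwards the model field's protocol.
from django import forms
from django.db import models

field = models.GenericIPAddressField(protocol='IPv4')
form_field = field.formfield()
assert isinstance(form_field, forms.GenericIPAddressField)
# Before this hunk the form field always accepted both protocols; now it
# validates against protocol='IPv4', so IPv6 input fails validation.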
@@ -1403,6 +1424,7 @@ class NullBooleanField(Field):
         defaults.update(kwargs)
         return super(NullBooleanField, self).formfield(**defaults)


 class PositiveIntegerField(IntegerField):
     description = _("Positive integer")

@@ -1414,6 +1436,7 @@ class PositiveIntegerField(IntegerField):
         defaults.update(kwargs)
         return super(PositiveIntegerField, self).formfield(**defaults)


 class PositiveSmallIntegerField(IntegerField):
     description = _("Positive small integer")

@@ -1425,6 +1448,7 @@ class PositiveSmallIntegerField(IntegerField):
         defaults.update(kwargs)
         return super(PositiveSmallIntegerField, self).formfield(**defaults)


 class SlugField(CharField):
     default_validators = [validators.validate_slug]
     description = _("Slug (up to %(max_length)s)")
@@ -1454,12 +1478,14 @@ class SlugField(CharField):
         defaults.update(kwargs)
         return super(SlugField, self).formfield(**defaults)


 class SmallIntegerField(IntegerField):
     description = _("Small integer")

     def get_internal_type(self):
         return "SmallIntegerField"


 class TextField(Field):
     description = _("Text")

@@ -1476,6 +1502,7 @@ class TextField(Field):
         defaults.update(kwargs)
         return super(TextField, self).formfield(**defaults)


 class TimeField(Field):
     empty_strings_allowed = False
     default_error_messages = {
@@ -1559,6 +1586,7 @@ class TimeField(Field):
         defaults.update(kwargs)
         return super(TimeField, self).formfield(**defaults)


 class URLField(CharField):
     default_validators = [validators.URLValidator()]
     description = _("URL")
@@ -1582,6 +1610,7 @@ class URLField(CharField):
         defaults.update(kwargs)
         return super(URLField, self).formfield(**defaults)


 class BinaryField(Field):
     description = _("Raw binary data")
     empty_values = [None, b'']
@@ -11,6 +11,7 @@ from django.utils.encoding import force_str, force_text
 from django.utils import six
 from django.utils.translation import ugettext_lazy as _


 class FieldFile(File):
     def __init__(self, instance, field, name):
         super(FieldFile, self).__init__(None, name)
@@ -135,6 +136,7 @@ class FieldFile(File):
         # be restored later, by FileDescriptor below.
         return {'name': self.name, 'closed': False, '_committed': True, '_file': None}


 class FileDescriptor(object):
     """
     The descriptor for the file attribute on the model instance. Returns a
@@ -205,6 +207,7 @@ class FileDescriptor(object):
     def __set__(self, instance, value):
         instance.__dict__[self.field.name] = value


 class FileField(Field):

     # The class to wrap instance attributes in. Accessing the file object off
@@ -300,6 +303,7 @@ class FileField(Field):
         defaults.update(kwargs)
         return super(FileField, self).formfield(**defaults)


 class ImageFileDescriptor(FileDescriptor):
     """
     Just like the FileDescriptor, but for ImageFields. The only difference is
@@ -321,14 +325,15 @@ class ImageFileDescriptor(FileDescriptor):
         if previous_file is not None:
             self.field.update_dimension_fields(instance, force=True)

-class ImageFieldFile(ImageFile, FieldFile):
+
+class ImageFieldFile(ImageFile, FieldFile):
     def delete(self, save=True):
         # Clear the image dimensions cache
         if hasattr(self, '_dimensions_cache'):
             del self._dimensions_cache
         super(ImageFieldFile, self).delete(save)


 class ImageField(FileField):
     attr_class = ImageFieldFile
     descriptor_class = ImageFileDescriptor
@@ -5,6 +5,7 @@ have the same attributes as fields sometimes (avoids a lot of special casing).

 from django.db.models import fields


 class OrderWrt(fields.IntegerField):
     """
     A proxy for the _order database field that is used when
@@ -211,7 +211,7 @@ class SingleRelatedObjectDescriptor(six.with_metaclass(RenameRelatedObjectDescri

         # If null=True, we can assign null here, but otherwise the value needs
         # to be an instance of the related class.
-        if value is None and self.related.field.null == False:
+        if value is None and self.related.field.null is False:
             raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                              (instance._meta.object_name, self.related.get_accessor_name()))
         elif value is not None and not isinstance(value, self.related.model):
@@ -310,7 +310,7 @@ class ReverseSingleRelatedObjectDescriptor(six.with_metaclass(RenameRelatedObjec
     def __set__(self, instance, value):
         # If null=True, we can assign null here, but otherwise the value needs
         # to be an instance of the related class.
-        if value is None and self.field.null == False:
+        if value is None and self.field.null is False:
             raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                              (instance._meta.object_name, self.field.name))
         elif value is not None and not isinstance(value, self.field.rel.to):
@@ -510,7 +510,6 @@ def create_many_related_manager(superclass, rel):
                                  "a many-to-many relationship can be used." %
                                  instance.__class__.__name__)

-
         def _get_fk_val(self, obj, field_name):
             """
             Returns the correct value for this relationship's foreign key. This
@@ -821,6 +820,7 @@ class ReverseManyRelatedObjectsDescriptor(object):
         manager.clear()
         manager.add(*value)


 class ForeignObjectRel(object):
     def __init__(self, field, to, related_name=None, limit_choices_to=None,
                  parent_link=False, on_delete=None, related_query_name=None):
@@ -858,6 +858,7 @@ class ForeignObjectRel(object):
         # example custom multicolumn joins currently have no remote field).
         self.field_name = None


 class ManyToOneRel(ForeignObjectRel):
     def __init__(self, field, to, field_name, related_name=None, limit_choices_to=None,
                  parent_link=False, on_delete=None, related_query_name=None):
@@ -1125,7 +1126,7 @@ class ForeignKey(ForeignObject):
     def __init__(self, to, to_field=None, rel_class=ManyToOneRel,
                  db_constraint=True, **kwargs):
         try:
-            to_name = to._meta.object_name.lower()
+            to._meta.object_name.lower()
         except AttributeError:  # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
             assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
         else:
@@ -1162,7 +1163,6 @@ class ForeignKey(ForeignObject):
         if self.rel.on_delete is not CASCADE:
             kwargs['on_delete'] = self.rel.on_delete
         # Rel needs more work.
-        rel = self.rel
         if self.rel.field_name:
             kwargs['to_field'] = self.rel.field_name
         if isinstance(self.rel.to, six.string_types):
@@ -1222,7 +1222,7 @@ class ForeignKey(ForeignObject):
         return field_default

     def get_db_prep_save(self, value, connection):
-        if value == '' or value == None:
+        if value == '' or value is None:
             return None
         else:
             return self.related_field.get_db_prep_save(value,
@@ -1393,7 +1393,6 @@ class ManyToManyField(RelatedField):
         if "help_text" in kwargs:
             del kwargs['help_text']
         # Rel needs more work.
-        rel = self.rel
         if isinstance(self.rel.to, six.string_types):
             kwargs['to'] = self.rel.to
         else:
@@ -7,6 +7,7 @@ to_python() and the other necessary methods and everything will work
 seamlessly.
 """


 class SubfieldBase(type):
     """
     A metaclass for custom Field subclasses. This ensures the model's attribute
@@ -19,6 +20,7 @@ class SubfieldBase(type):
         )
         return new_class


 class Creator(object):
     """
     A placeholder class that provides a way to set the attribute on the model.
@@ -34,6 +36,7 @@ class Creator(object):
     def __set__(self, obj, value):
         obj.__dict__[self.field.name] = self.field.to_python(value)


 def make_contrib(superclass, func=None):
     """
     Returns a suitable contribute_to_class() method for the Field subclass.
@@ -15,9 +15,13 @@ import os
 __all__ = ('get_apps', 'get_app', 'get_models', 'get_model', 'register_models',
         'load_app', 'app_cache_ready')

+MODELS_MODULE_NAME = 'models'
+

 class UnavailableApp(Exception):
     pass


 def _initialize():
     """
     Returns a dictionary to be used as the initial value of the
@@ -118,12 +122,12 @@ class BaseAppCache(object):
         self.nesting_level += 1
         app_module = import_module(app_name)
         try:
-            models = import_module('.models', app_name)
+            models = import_module('.' + MODELS_MODULE_NAME, app_name)
         except ImportError:
             self.nesting_level -= 1
             # If the app doesn't have a models module, we can just ignore the
             # ImportError and return no models for it.
-            if not module_has_submodule(app_module, 'models'):
+            if not module_has_submodule(app_module, MODELS_MODULE_NAME):
                 return None
             # But if the app does have a models module, we need to figure out
             # whether to suppress or propagate the error. If can_postpone is
@@ -6,6 +6,7 @@ from django.db.models.fields import FieldDoesNotExist
 from django.utils import six
 from django.utils.deprecation import RenameMethodsBase


 def ensure_default_manager(sender, **kwargs):
     """
     Ensures that a Model subclass contains a default manager and sets the
@@ -153,6 +154,9 @@ class Manager(six.with_metaclass(RenameManagerMethods)):
     def get_or_create(self, **kwargs):
         return self.get_queryset().get_or_create(**kwargs)

+    def update_or_create(self, **kwargs):
+        return self.get_queryset().update_or_create(**kwargs)
+
     def create(self, **kwargs):
         return self.get_queryset().create(**kwargs)

@@ -245,7 +249,7 @@ class ManagerDescriptor(object):
         self.manager = manager

     def __get__(self, instance, type=None):
-        if instance != None:
+        if instance is not None:
             raise AttributeError("Manager isn't accessible via %s instances" % type.__name__)
         return self.manager
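For illustration only (not part of this commit): the manager method added above simply forwards to the queryset, so the behaviour is that of QuerySet.update_or_create. A pure-Python sketch of those semantics, with no ORM involved and made-up field names:

# Pure-Python sketch of update_or_create semantics (illustrative, not Django code).
def update_or_create(rows, defaults=None, **lookup):
    defaults = defaults or {}
    for row in rows:
        if all(row.get(k) == v for k, v in lookup.items()):
            row.update(defaults)          # found: update in place, created is False
            return row, False
    row = dict(lookup, **defaults)        # not found: create a new row, created is True
    rows.append(row)
    return row, True

people = [{'first_name': 'John', 'last_name': 'Lennon', 'alive': True}]
obj, created = update_or_create(people, defaults={'alive': False},
                                first_name='John', last_name='Lennon')
print(obj, created)   # updated in place, created == False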
@@ -19,6 +19,10 @@ from django.utils.functional import partition
 from django.utils import six
 from django.utils import timezone

+# The maximum number (one less than the max to be precise) of results to fetch
+# in a get() query
+MAX_GET_RESULTS = 20
+
 # The maximum number of items to display in a QuerySet.__repr__
 REPR_OUTPUT_SIZE = 20
@@ -297,6 +301,7 @@ class QuerySet(object):
         clone = self.filter(*args, **kwargs)
         if self.query.can_filter():
             clone = clone.order_by()
+        clone = clone[:MAX_GET_RESULTS + 1]
         num = len(clone)
         if num == 1:
             return clone._result_cache[0]
@@ -305,8 +310,11 @@ class QuerySet(object):
                 "%s matching query does not exist." %
                 self.model._meta.object_name)
         raise self.model.MultipleObjectsReturned(
-            "get() returned more than one %s -- it returned %s!" %
-            (self.model._meta.object_name, num))
+            "get() returned more than one %s -- it returned %s!" % (
+                self.model._meta.object_name,
+                num if num <= MAX_GET_RESULTS else 'more than %s' % MAX_GET_RESULTS
+            )
+        )

     def create(self, **kwargs):
         """
|
||||||
|
|
||||||
return objs
|
return objs
|
||||||
|
|
||||||
def get_or_create(self, **kwargs):
|
def get_or_create(self, defaults=None, **kwargs):
|
||||||
"""
|
"""
|
||||||
Looks up an object with the given kwargs, creating one if necessary.
|
Looks up an object with the given kwargs, creating one if necessary.
|
||||||
Returns a tuple of (object, created), where created is a boolean
|
Returns a tuple of (object, created), where created is a boolean
|
||||||
specifying whether an object was created.
|
specifying whether an object was created.
|
||||||
"""
|
"""
|
||||||
defaults = kwargs.pop('defaults', {})
|
lookup, params, _ = self._extract_model_params(defaults, **kwargs)
|
||||||
lookup = kwargs.copy()
|
|
||||||
for f in self.model._meta.fields:
|
|
||||||
if f.attname in lookup:
|
|
||||||
lookup[f.name] = lookup.pop(f.attname)
|
|
||||||
try:
|
try:
|
||||||
self._for_write = True
|
self._for_write = True
|
||||||
return self.get(**lookup), False
|
return self.get(**lookup), False
|
||||||
except self.model.DoesNotExist:
|
except self.model.DoesNotExist:
|
||||||
|
return self._create_object_from_params(lookup, params)
|
||||||
|
|
||||||
|
def update_or_create(self, defaults=None, **kwargs):
|
||||||
|
"""
|
||||||
|
Looks up an object with the given kwargs, updating one with defaults
|
||||||
|
if it exists, otherwise creates a new one.
|
||||||
|
Returns a tuple (object, created), where created is a boolean
|
||||||
|
specifying whether an object was created.
|
||||||
|
"""
|
||||||
|
lookup, params, filtered_defaults = self._extract_model_params(defaults, **kwargs)
|
||||||
|
try:
|
||||||
|
self._for_write = True
|
||||||
|
obj = self.get(**lookup)
|
||||||
|
except self.model.DoesNotExist:
|
||||||
|
obj, created = self._create_object_from_params(lookup, params)
|
||||||
|
if created:
|
||||||
|
return obj, created
|
||||||
|
for k, v in six.iteritems(filtered_defaults):
|
||||||
|
setattr(obj, k, v)
|
||||||
|
try:
|
||||||
|
sid = transaction.savepoint(using=self.db)
|
||||||
|
obj.save(update_fields=filtered_defaults.keys(), using=self.db)
|
||||||
|
transaction.savepoint_commit(sid, using=self.db)
|
||||||
|
return obj, False
|
||||||
|
except DatabaseError:
|
||||||
|
transaction.savepoint_rollback(sid, using=self.db)
|
||||||
|
six.reraise(sys.exc_info())
|
||||||
|
|
||||||
|
def _create_object_from_params(self, lookup, params):
|
||||||
|
"""
|
||||||
|
Tries to create an object using passed params.
|
||||||
|
Used by get_or_create and update_or_create
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k)
|
|
||||||
params.update(defaults)
|
|
||||||
obj = self.model(**params)
|
obj = self.model(**params)
|
||||||
sid = transaction.savepoint(using=self.db)
|
sid = transaction.savepoint(using=self.db)
|
||||||
obj.save(force_insert=True, using=self.db)
|
obj.save(force_insert=True, using=self.db)
|
||||||
|
@ -388,6 +423,26 @@ class QuerySet(object):
|
||||||
# Re-raise the DatabaseError with its original traceback.
|
# Re-raise the DatabaseError with its original traceback.
|
||||||
six.reraise(*exc_info)
|
six.reraise(*exc_info)
|
||||||
|
|
||||||
|
def _extract_model_params(self, defaults, **kwargs):
|
||||||
|
"""
|
||||||
|
Prepares `lookup` (kwargs that are valid model attributes), `params`
|
||||||
|
(for creating a model instance) and `filtered_defaults` (defaults
|
||||||
|
that are valid model attributes) based on given kwargs; for use by
|
||||||
|
get_or_create and update_or_create.
|
||||||
|
"""
|
||||||
|
defaults = defaults or {}
|
||||||
|
filtered_defaults = {}
|
||||||
|
lookup = kwargs.copy()
|
||||||
|
for f in self.model._meta.fields:
|
||||||
|
# Filter out fields that don't belongs to the model.
|
||||||
|
if f.attname in lookup:
|
||||||
|
lookup[f.name] = lookup.pop(f.attname)
|
||||||
|
if f.attname in defaults:
|
||||||
|
filtered_defaults[f.name] = defaults.pop(f.attname)
|
||||||
|
params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k)
|
||||||
|
params.update(filtered_defaults)
|
||||||
|
return lookup, params, filtered_defaults
|
||||||
|
|
||||||
def _earliest_or_latest(self, field_name=None, direction="-"):
|
def _earliest_or_latest(self, field_name=None, direction="-"):
|
||||||
"""
|
"""
|
||||||
Returns the latest object, according to the model's
|
Returns the latest object, according to the model's
|
||||||
|
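For illustration only (not part of this commit): a standalone sketch of the lookup/params/filtered_defaults split that _extract_model_params() performs, using a made-up attname-to-name mapping instead of model._meta.fields.

# Pure-Python sketch of _extract_model_params() (field names are made up;
# LOOKUP_SEP is '__' as in Django).
LOOKUP_SEP = '__'
model_fields = {'name': 'name', 'owner_id': 'owner'}   # attname -> field name

def extract_model_params(defaults=None, **kwargs):
    defaults = defaults or {}
    filtered_defaults = {}
    lookup = kwargs.copy()
    for attname, name in model_fields.items():
        if attname in lookup:                 # normalise attnames to field names
            lookup[name] = lookup.pop(attname)
        if attname in defaults:
            filtered_defaults[name] = defaults.pop(attname)
    params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
    params.update(filtered_defaults)
    return lookup, params, filtered_defaults

print(extract_model_params(defaults={'owner_id': 7}, name='x', owner_id__isnull=True))
# lookup keeps the filters, params drops '__' lookups, filtered_defaults maps owner -> 7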
@@ -638,7 +693,7 @@ class QuerySet(object):
         obj.query.select_for_update_nowait = nowait
         return obj

-    def select_related(self, *fields, **kwargs):
+    def select_related(self, *fields):
         """
         Returns a new QuerySet instance that will select related objects.

@@ -647,9 +702,6 @@ class QuerySet(object):

         If select_related(None) is called, the list is cleared.
         """
-        if kwargs:
-            raise TypeError('Unexpected keyword arguments to select_related: %s'
-                            % (list(kwargs),))
         obj = self._clone()
         if fields == (None,):
             obj.query.select_related = False
@@ -902,10 +954,12 @@ class QuerySet(object):
         # empty" result.
         value_annotation = True


 class InstanceCheckMeta(type):
     def __instancecheck__(self, instance):
         return instance.query.is_empty()


 class EmptyQuerySet(six.with_metaclass(InstanceCheckMeta)):
     """
     Marker class usable for checking if a queryset is empty by .none():
@@ -915,6 +969,7 @@ class EmptyQuerySet(six.with_metaclass(InstanceCheckMeta)):
     def __init__(self, *args, **kwargs):
         raise TypeError("EmptyQuerySet can't be instantiated")


 class ValuesQuerySet(QuerySet):
     def __init__(self, *args, **kwargs):
         super(ValuesQuerySet, self).__init__(*args, **kwargs)
@@ -1279,11 +1334,10 @@ def get_cached_row(row, index_start, using, klass_info, offset=0,
         return None
     klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx = klass_info

-
     fields = row[index_start:index_start + field_count]
     # If the pk column is None (or the Oracle equivalent ''), then the related
     # object must be non-existent - set the relation to None.
-    if fields[pk_idx] == None or fields[pk_idx] == '':
+    if fields[pk_idx] is None or fields[pk_idx] == '':
         obj = None
     elif field_names:
         fields = list(fields)
@@ -1513,8 +1567,6 @@ def prefetch_related_objects(result_cache, related_lookups):
     if len(result_cache) == 0:
         return  # nothing to do

-    model = result_cache[0].__class__
-
     # We need to be able to dynamically add to the list of prefetch_related
     # lookups that we look up (see below). So we need some book keeping to
     # ensure we don't do duplicate work.
@@ -1541,7 +1593,7 @@ def prefetch_related_objects(result_cache, related_lookups):
         if len(obj_list) == 0:
             break

-        current_lookup = LOOKUP_SEP.join(attrs[0:level+1])
+        current_lookup = LOOKUP_SEP.join(attrs[:level + 1])
         if current_lookup in done_queries:
             # Skip any prefetching, and any object preparation
             obj_list = done_queries[current_lookup]
@@ -30,6 +30,7 @@ class QueryWrapper(object):
     def as_sql(self, qn=None, connection=None):
         return self.data


 class Q(tree.Node):
     """
     Encapsulates filters as objects that can then be combined logically (using
@@ -74,6 +75,7 @@ class Q(tree.Node):
             clone.children.append(child)
         return clone


 class DeferredAttribute(object):
     """
     A wrapper for a deferred-loading field. When the value is read from this
@@ -99,8 +101,7 @@ class DeferredAttribute(object):
         try:
             f = opts.get_field_by_name(self.field_name)[0]
         except FieldDoesNotExist:
-            f = [f for f in opts.fields
-                 if f.attname == self.field_name][0]
+            f = [f for f in opts.fields if f.attname == self.field_name][0]
         name = f.name
         # Let's see if the field is part of the parent chain. If so we
         # might be able to reuse the already loaded value. Refs #18343.
@@ -174,6 +175,7 @@ def select_related_descend(field, restricted, requested, load_fields, reverse=Fa
         return False
     return True


 # This function is needed because data descriptors must be defined on a class
 # object, not an instance, to have any effect.
@@ -10,6 +10,7 @@ PathInfo = namedtuple('PathInfo',
                       'from_opts to_opts target_fields join_field '
                       'm2m direct')


 class RelatedObject(object):
     def __init__(self, parent_model, model, field):
         self.parent_model = parent_model
@@ -9,6 +9,7 @@ from django.db.models.fields import IntegerField, FloatField
 ordinal_aggregate_field = IntegerField()
 computed_aggregate_field = FloatField()


 class Aggregate(object):
     """
     Default SQL Aggregate.
@@ -93,6 +94,7 @@ class Avg(Aggregate):
     is_computed = True
     sql_function = 'AVG'


 class Count(Aggregate):
     is_ordinal = True
     sql_function = 'COUNT'
@@ -101,12 +103,15 @@ class Count(Aggregate):
     def __init__(self, col, distinct=False, **extra):
         super(Count, self).__init__(col, distinct='DISTINCT ' if distinct else '', **extra)


 class Max(Aggregate):
     sql_function = 'MAX'


 class Min(Aggregate):
     sql_function = 'MIN'


 class StdDev(Aggregate):
     is_computed = True

@@ -114,9 +119,11 @@ class StdDev(Aggregate):
         super(StdDev, self).__init__(col, **extra)
         self.sql_function = 'STDDEV_SAMP' if sample else 'STDDEV_POP'


 class Sum(Aggregate):
     sql_function = 'SUM'


 class Variance(Aggregate):
     is_computed = True
@@ -750,6 +750,16 @@ class SQLCompiler(object):

                 yield row

+    def has_results(self):
+        """
+        Backends (e.g. NoSQL) can override this in order to use optimized
+        versions of "query has any results."
+        """
+        # This is always executed on a query clone, so we can modify self.query
+        self.query.add_extra({'a': 1}, None, None, None, None, None)
+        self.query.set_extra_mask(['a'])
+        return bool(self.execute_sql(SINGLE))
+
     def execute_sql(self, result_type=MULTI):
         """
         Run the query against the database and returns the result(s). The
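For illustration only (not part of this commit): has_results() above selects a constant extra column, masks everything else, and checks whether a single row comes back. A small standalone sketch of that "does any row exist?" pattern, using sqlite3 directly with a made-up table:

# Minimal sketch of the existence check has_results() implements (illustrative only).
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE item (id INTEGER PRIMARY KEY, flag INTEGER)')
conn.execute('INSERT INTO item (flag) VALUES (1)')

# Select a constant instead of real columns and stop at the first row.
row = conn.execute('SELECT (1) AS a FROM item WHERE flag = 1 LIMIT 1').fetchone()
print(bool(row))   # True -- analogous to bool(self.execute_sql(SINGLE))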
@@ -915,6 +925,7 @@ class SQLDeleteCompiler(SQLCompiler):
             result.append('WHERE %s' % where)
         return ' '.join(result), tuple(params)


 class SQLUpdateCompiler(SQLCompiler):
     def as_sql(self):
         """
@@ -1029,6 +1040,7 @@ class SQLUpdateCompiler(SQLCompiler):
         for alias in self.query.tables[1:]:
             self.query.alias_refcount[alias] = 0


 class SQLAggregateCompiler(SQLCompiler):
     def as_sql(self, qn=None):
         """
@@ -1050,6 +1062,7 @@ class SQLAggregateCompiler(SQLCompiler):
         params = params + self.query.sub_params
         return sql, params


 class SQLDateCompiler(SQLCompiler):
     def results_iter(self):
         """
@@ -1075,6 +1088,7 @@ class SQLDateCompiler(SQLCompiler):
             date = date.date()
             yield date


 class SQLDateTimeCompiler(SQLCompiler):
     def results_iter(self):
         """
@@ -1107,6 +1121,7 @@ class SQLDateTimeCompiler(SQLCompiler):
                 datetime = timezone.make_aware(datetime, self.query.tzinfo)
             yield datetime


 def order_modified_iter(cursor, trim, sentinel):
     """
     Yields blocks of rows from a cursor. We use this iterator in the special
@@ -3,9 +3,11 @@ Useful auxilliary data structures for query construction. Not useful outside
 the SQL domain.
 """
 
+
 class EmptyResultSet(Exception):
     pass
 
+
 class MultiJoin(Exception):
     """
     Used by join construction code to indicate the point at which a
@@ -17,12 +19,10 @@ class MultiJoin(Exception):
         # The path travelled, this includes the path to the multijoin.
         self.names_with_path = path_with_names
 
+
 class Empty(object):
     pass
 
-class RawValue(object):
-    def __init__(self, value):
-        self.value = value
 
 class Date(object):
     """
@@ -42,6 +42,7 @@ class Date(object):
             col = self.col
         return connection.ops.date_trunc_sql(self.lookup_type, col), []
 
+
 class DateTime(object):
     """
     Add a datetime selection column.
@@ -1,7 +1,9 @@
+import copy
+
 from django.core.exceptions import FieldError
 from django.db.models.constants import LOOKUP_SEP
 from django.db.models.fields import FieldDoesNotExist
-import copy
+
 
 class SQLEvaluator(object):
     def __init__(self, expression, query, allow_joins=True, reuse=None):
@@ -401,12 +401,10 @@ class Query(object):
     def has_results(self, using):
         q = self.clone()
         q.clear_select_clause()
-        q.add_extra({'a': 1}, None, None, None, None, None)
-        q.set_extra_mask(['a'])
         q.clear_ordering(True)
         q.set_limits(high=1)
         compiler = q.get_compiler(using=using)
-        return bool(compiler.execute_sql(SINGLE))
+        return compiler.has_results()
 
     def combine(self, rhs, connector):
         """
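Note: together with the new SQLCompiler.has_results() above, this is the code path QuerySet.exists() ends up on; the "SELECT (1) AS a ... LIMIT 1" trick is now applied by the compiler, where backends can override it, rather than by Query.has_results() itself. A minimal sketch of how the optimization is exercised, assuming the stock django.contrib.auth User model and a configured database (not part of the patch):

    from django.contrib.auth.models import User

    def any_staff_users():
        # exists() clones the query, clears the select clause and ordering,
        # sets LIMIT 1, and asks the compiler whether any row comes back.
        return User.objects.filter(is_staff=True).exists()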
@@ -615,7 +613,6 @@ class Query(object):
         for model, values in six.iteritems(seen):
             callback(target, model, values)
 
-
     def deferred_to_columns_cb(self, target, model, fields):
         """
         Callback used by deferred_to_columns(). The "target" parameter should
@@ -627,7 +624,6 @@ class Query(object):
         for field in fields:
             target[table].add(field.column)
 
-
     def table_alias(self, table_name, create=False):
         """
         Returns a table alias for the given table_name and whether this is a
@@ -955,7 +951,6 @@ class Query(object):
             self.unref_alias(alias)
         self.included_inherited_models = {}
 
-
     def add_aggregate(self, aggregate, model, alias, is_summary):
         """
         Adds a single aggregate expression to the Query
@@ -1876,6 +1871,7 @@ class Query(object):
         else:
             return field.null
 
+
 def get_order_dir(field, default='ASC'):
     """
     Returns the field name and direction for an order specification. For
@@ -1900,6 +1896,7 @@ def add_to_dict(data, key, value):
     else:
         data[key] = set([value])
 
+
 def is_reverse_o2o(field):
     """
     A little helper to check if the given field is reverse-o2o. The field is
@@ -1907,6 +1904,7 @@ def is_reverse_o2o(field):
     """
     return not hasattr(field, 'rel') and field.field.unique
 
+
 def alias_diff(refcounts_before, refcounts_after):
     """
     Given the before and after copies of refcounts works out which aliases
@@ -7,7 +7,7 @@ from django.core.exceptions import FieldError
 from django.db import connections
 from django.db.models.constants import LOOKUP_SEP
 from django.db.models.fields import DateField, DateTimeField, FieldDoesNotExist
-from django.db.models.sql.constants import *
+from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE, SelectInfo
 from django.db.models.sql.datastructures import Date, DateTime
 from django.db.models.sql.query import Query
 from django.db.models.sql.where import AND, Constraint
@@ -20,6 +20,7 @@ from django.utils import timezone
 __all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'DateQuery',
         'DateTimeQuery', 'AggregateQuery']
 
+
 class DeleteQuery(Query):
     """
     Delete queries are done through this class, since they are more constrained
@@ -77,7 +78,9 @@ class DeleteQuery(Query):
                 return
             else:
                 innerq.clear_select_clause()
-                innerq.select = [SelectInfo((self.get_initial_alias(), pk.column), None)]
+                innerq.select = [
+                    SelectInfo((self.get_initial_alias(), pk.column), None)
+                ]
                 values = innerq
             where = self.where_class()
             where.add((Constraint(None, pk.column, pk), 'in', values), AND)
@@ -178,6 +181,7 @@ class UpdateQuery(Query):
             result.append(query)
         return result
 
+
 class InsertQuery(Query):
     compiler = 'SQLInsertCompiler'
 
@@ -215,6 +219,7 @@ class InsertQuery(Query):
         self.objs = objs
         self.raw = raw
 
+
 class DateQuery(Query):
     """
     A DateQuery is a normal query, except that it specifically selects a single
@@ -260,6 +265,7 @@ class DateQuery(Query):
     def _get_select(self, col, lookup_type):
         return Date(col, lookup_type)
 
+
 class DateTimeQuery(DateQuery):
     """
     A DateTimeQuery is like a DateQuery but for a datetime field. If time zone
@@ -280,6 +286,7 @@ class DateTimeQuery(DateQuery):
             tzname = timezone._get_timezone_name(self.tzinfo)
         return DateTime(col, lookup_type, tzname)
 
+
 class AggregateQuery(Query):
     """
     An AggregateQuery takes another query as a parameter to the FROM
@@ -4,6 +4,7 @@ Code to manage the creation and SQL rendering of 'where' constraints.
 
 from __future__ import absolute_import
 
+import collections
 import datetime
 from itertools import repeat
 
@@ -11,15 +12,16 @@ from django.conf import settings
 from django.db.models.fields import DateTimeField, Field
 from django.db.models.sql.datastructures import EmptyResultSet, Empty
 from django.db.models.sql.aggregates import Aggregate
-from django.utils.itercompat import is_iterator
 from django.utils.six.moves import xrange
 from django.utils import timezone
 from django.utils import tree
 
+
 # Connection types
 AND = 'AND'
 OR = 'OR'
 
+
 class EmptyShortCircuit(Exception):
     """
     Internal exception used to indicate that a "matches nothing" node should be
@@ -27,6 +29,7 @@ class EmptyShortCircuit(Exception):
     """
     pass
 
+
 class WhereNode(tree.Node):
     """
     Used to represent the SQL where-clause.
@@ -58,7 +61,7 @@ class WhereNode(tree.Node):
         if not isinstance(data, (list, tuple)):
             return data
         obj, lookup_type, value = data
-        if is_iterator(value):
+        if isinstance(value, collections.Iterator):
             # Consume any generators immediately, so that we can determine
             # emptiness and transform any non-empty values correctly.
             value = list(value)
@@ -304,14 +307,15 @@ class WhereNode(tree.Node):
             clone.children.append(child)
         return clone
 
-class EmptyWhere(WhereNode):
+
+class EmptyWhere(WhereNode):
     def add(self, data, connector):
         return
 
     def as_sql(self, qn=None, connection=None):
         raise EmptyResultSet
 
 
 class EverythingNode(object):
     """
     A node that matches everything.
@@ -385,6 +389,7 @@ class Constraint(object):
             new.alias, new.col, new.field = change_map[self.alias], self.col, self.field
         return new
 
+
 class SubqueryConstraint(object):
     def __init__(self, alias, columns, targets, query_object):
         self.alias = alias
@@ -27,6 +27,7 @@ class TransactionManagementError(Exception):
     """
     pass
 
+
 ################
 # Private APIs #
 ################
@@ -40,6 +41,7 @@ def get_connection(using=None):
         using = DEFAULT_DB_ALIAS
     return connections[using]
 
+
 ###########################
 # Deprecated private APIs #
 ###########################
@@ -56,6 +58,7 @@ def abort(using=None):
     """
     get_connection(using).abort()
 
+
 def enter_transaction_management(managed=True, using=None, forced=False):
     """
     Enters transaction management for a running thread. It must be balanced with
@@ -68,6 +71,7 @@ def enter_transaction_management(managed=True, using=None, forced=False):
     """
     get_connection(using).enter_transaction_management(managed, forced)
 
+
 def leave_transaction_management(using=None):
     """
     Leaves transaction management for a running thread. A dirty flag is carried
@@ -76,6 +80,7 @@ def leave_transaction_management(using=None):
     """
     get_connection(using).leave_transaction_management()
 
+
 def is_dirty(using=None):
     """
     Returns True if the current transaction requires a commit for changes to
@@ -83,6 +88,7 @@ def is_dirty(using=None):
     """
     return get_connection(using).is_dirty()
 
+
 def set_dirty(using=None):
     """
     Sets a dirty flag for the current thread and code streak. This can be used
@@ -91,6 +97,7 @@ def set_dirty(using=None):
     """
     get_connection(using).set_dirty()
 
+
 def set_clean(using=None):
     """
     Resets a dirty flag for the current thread and code streak. This can be used
@@ -99,22 +106,27 @@ def set_clean(using=None):
     """
     get_connection(using).set_clean()
 
+
 def is_managed(using=None):
     warnings.warn("'is_managed' is deprecated.",
         DeprecationWarning, stacklevel=2)
 
+
 def managed(flag=True, using=None):
     warnings.warn("'managed' no longer serves a purpose.",
        DeprecationWarning, stacklevel=2)
 
+
 def commit_unless_managed(using=None):
     warnings.warn("'commit_unless_managed' is now a no-op.",
        DeprecationWarning, stacklevel=2)
 
+
 def rollback_unless_managed(using=None):
     warnings.warn("'rollback_unless_managed' is now a no-op.",
        DeprecationWarning, stacklevel=2)
 
+
 ###############
 # Public APIs #
 ###############
@@ -125,24 +137,28 @@ def get_autocommit(using=None):
     """
     return get_connection(using).get_autocommit()
 
+
 def set_autocommit(autocommit, using=None):
     """
     Set the autocommit status of the connection.
     """
     return get_connection(using).set_autocommit(autocommit)
 
+
 def commit(using=None):
     """
     Commits a transaction and resets the dirty flag.
     """
     get_connection(using).commit()
 
+
 def rollback(using=None):
     """
     Rolls back a transaction and resets the dirty flag.
     """
     get_connection(using).rollback()
 
+
 def savepoint(using=None):
     """
     Creates a savepoint (if supported and required by the backend) inside the
@@ -151,6 +167,7 @@ def savepoint(using=None):
     """
     return get_connection(using).savepoint()
 
+
 def savepoint_rollback(sid, using=None):
     """
     Rolls back the most recent savepoint (if one exists). Does nothing if
@@ -158,6 +175,7 @@ def savepoint_rollback(sid, using=None):
     """
     get_connection(using).savepoint_rollback(sid)
 
+
 def savepoint_commit(sid, using=None):
     """
     Commits the most recent savepoint (if one exists). Does nothing if
@@ -165,18 +183,21 @@ def savepoint_commit(sid, using=None):
     """
     get_connection(using).savepoint_commit(sid)
 
+
 def clean_savepoints(using=None):
     """
     Resets the counter used to generate unique savepoint ids in this thread.
     """
     get_connection(using).clean_savepoints()
 
+
 def get_rollback(using=None):
     """
     Gets the "needs rollback" flag -- for *advanced use* only.
     """
     return get_connection(using).get_rollback()
 
+
 def set_rollback(rollback, using=None):
     """
     Sets or unsets the "needs rollback" flag -- for *advanced use* only.
@@ -191,6 +212,7 @@ def set_rollback(rollback, using=None):
     """
     return get_connection(using).set_rollback(rollback)
 
+
 #################################
 # Decorators / context managers #
 #################################
@@ -398,6 +420,7 @@ class Transaction(object):
                 return func(*args, **kwargs)
         return inner
 
+
 def _transaction_func(entering, exiting, using):
     """
     Takes 3 things, an entering function (what to do to start this block of
@@ -436,6 +459,7 @@ def autocommit(using=None):
 
     return _transaction_func(entering, exiting, using)
 
+
 def commit_on_success(using=None):
     """
     This decorator activates commit on response. This way, if the view function
@@ -466,6 +490,7 @@ def commit_on_success(using=None):
 
     return _transaction_func(entering, exiting, using)
 
+
 def commit_manually(using=None):
     """
     Decorator that activates manual transaction control. It just disables
@@ -484,6 +509,7 @@ def commit_manually(using=None):
 
     return _transaction_func(entering, exiting, using)
 
+
 def commit_on_success_unless_managed(using=None, savepoint=False):
     """
     Transitory API to preserve backwards-compatibility while refactoring.
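Note: the helpers touched above make up the low-level autocommit API that replaces the now-deprecated managed()/commit_unless_managed() style of transaction handling. A minimal usage sketch using only functions visible in these hunks, assuming a configured default database connection (not part of the patch):

    from django.db import transaction

    transaction.set_autocommit(False)
    try:
        # ... run some ORM queries ...
        transaction.commit()
    except Exception:
        transaction.rollback()
        raise
    finally:
        transaction.set_autocommit(True)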
@@ -370,8 +370,14 @@ class DecimalField(IntegerField):
 
     def widget_attrs(self, widget):
         attrs = super(DecimalField, self).widget_attrs(widget)
-        if isinstance(widget, NumberInput) and self.decimal_places:
-            attrs['step'] = '0.%s1' % ('0' * (self.decimal_places - 1))
+        if isinstance(widget, NumberInput):
+            if self.decimal_places is not None:
+                # Use exponential notation for small values since they might
+                # be parsed as 0 otherwise. ref #20765
+                step = str(Decimal('1') / 10 ** self.decimal_places).lower()
+            else:
+                step = 'any'
+            attrs.setdefault('step', step)
         return attrs
 
 
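Note: the rewritten widget_attrs() computes the step value with Decimal arithmetic so that fields with many decimal places get exponential notation rather than a literal the browser may parse as 0 (ref #20765), and it falls back to step="any" when decimal_places is not set. A small sketch of the values produced, using only the standard library:

    from decimal import Decimal

    for places in (2, 7, None):
        if places is not None:
            step = str(Decimal('1') / 10 ** places).lower()
        else:
            step = 'any'
        print(places, step)  # 2 -> 0.01, 7 -> 1e-7, None -> any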
@@ -1019,7 +1025,7 @@ class MultiValueField(Field):
         if not isinstance(initial, list):
             initial = self.widget.decompress(initial)
         for field, initial, data in zip(self.fields, initial, data):
-            if field._has_changed(initial, data):
+            if field._has_changed(field.to_python(initial), data):
                 return True
         return False
 
@@ -77,7 +77,7 @@ class BaseForm(object):
     # information. Any improvements to the form API should be made to *this*
     # class, not to the Form class.
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
-                 initial=None, error_class=ErrorList, label_suffix=':',
+                 initial=None, error_class=ErrorList, label_suffix=None,
                  empty_permitted=False):
        self.is_bound = data is not None or files is not None
        self.data = data or {}
@@ -86,7 +86,8 @@ class BaseForm(object):
        self.prefix = prefix
        self.initial = initial or {}
        self.error_class = error_class
-        self.label_suffix = label_suffix
+        # Translators: This is the default suffix added to form field labels
+        self.label_suffix = label_suffix if label_suffix is not None else _(':')
        self.empty_permitted = empty_permitted
        self._errors = None # Stores the errors after clean() has been called.
        self._changed_data = None
@@ -518,7 +519,9 @@ class BoundField(object):
         """
         contents = contents or self.label
         # Only add the suffix if the label does not end in punctuation.
-        if self.form.label_suffix and contents and contents[-1] not in ':?.!':
+        # Translators: If found as last label character, these punctuation
+        # characters will prevent the default label_suffix to be appended to the label
+        if self.form.label_suffix and contents and contents[-1] not in _(':?.!'):
             contents = format_html('{0}{1}', contents, self.form.label_suffix)
         widget = self.field.widget
         id_ = widget.attrs.get('id') or self.auto_id
@@ -292,7 +292,7 @@ class ModelFormMetaclass(type):
 
 class BaseModelForm(BaseForm):
     def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
-                 initial=None, error_class=ErrorList, label_suffix=':',
+                 initial=None, error_class=ErrorList, label_suffix=None,
                  empty_permitted=False, instance=None):
         opts = self._meta
         if opts.model is None:
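Note: with label_suffix defaulting to None in BaseForm and BaseModelForm, the ':' fallback becomes translatable while explicit per-form overrides keep working. A minimal sketch, assuming a configured Django settings module; the form class is made up for illustration:

    from django import forms

    class ContactForm(forms.Form):
        name = forms.CharField()

    form = ContactForm(label_suffix=' *')
    print(form['name'].label_tag())  # label rendered as "Name *" instead of "Name:"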
@@ -1,5 +1,7 @@
 from __future__ import unicode_literals
 
+import warnings
+
 from django.conf import settings
 from django.utils.html import format_html, format_html_join
 from django.utils.encoding import force_text, python_2_unicode_compatible
@@ -21,6 +23,11 @@ def flatatt(attrs):
 
     The result is passed through 'mark_safe'.
     """
+    if [v for v in attrs.values() if v is True or v is False]:
+        warnings.warn(
+            'The meaning of boolean values for widget attributes will change in Django 1.8',
+            DeprecationWarning
+        )
     return format_html_join('', ' {0}="{1}"', sorted(attrs.items()))
 
 @python_2_unicode_compatible
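Note: flatatt() now warns when it is handed boolean attribute values, since their rendering is scheduled to change in Django 1.8; for now they are still stringified. A small sketch of the current behaviour, assuming flatatt is still importable from django.forms.util on this branch:

    from django.forms.util import flatatt

    print(flatatt({'id': 'id_name', 'required': True}))
    # ' id="id_name" required="True"' (and a DeprecationWarning is emitted)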
|
@ -177,11 +177,9 @@ class MultiPartParser(object):
|
||||||
file_name = force_text(file_name, encoding, errors='replace')
|
file_name = force_text(file_name, encoding, errors='replace')
|
||||||
file_name = self.IE_sanitize(unescape_entities(file_name))
|
file_name = self.IE_sanitize(unescape_entities(file_name))
|
||||||
|
|
||||||
content_type = meta_data.get('content-type', ('',))[0].strip()
|
content_type, content_type_extra = meta_data.get('content-type', ('', {}))
|
||||||
try:
|
content_type = content_type.strip()
|
||||||
charset = meta_data.get('content-type', (0, {}))[1].get('charset', None)
|
charset = content_type_extra.get('charset')
|
||||||
except:
|
|
||||||
charset = None
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
content_length = int(meta_data.get('content-length')[0])
|
content_length = int(meta_data.get('content-length')[0])
|
||||||
|
@ -194,7 +192,7 @@ class MultiPartParser(object):
|
||||||
try:
|
try:
|
||||||
handler.new_file(field_name, file_name,
|
handler.new_file(field_name, file_name,
|
||||||
content_type, content_length,
|
content_type, content_length,
|
||||||
charset)
|
charset, content_type_extra)
|
||||||
except StopFutureHandlers:
|
except StopFutureHandlers:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|
|
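Note: the parser now passes the extra content-type parameters (content_type_extra) through to upload handlers via new_file(). A sketch of a custom handler that accepts the extra argument; the handler name and the 'blob-key' parameter are made up for illustration:

    from django.core.files.uploadhandler import FileUploadHandler

    class BlobKeyUploadHandler(FileUploadHandler):
        def new_file(self, field_name, file_name, content_type, content_length,
                     charset=None, content_type_extra=None):
            super(BlobKeyUploadHandler, self).new_file(
                field_name, file_name, content_type, content_length, charset)
            self.blob_key = (content_type_extra or {}).get('blob-key')

        def receive_data_chunk(self, raw_data, start):
            return raw_data  # pass the chunk on unchanged

        def file_complete(self, file_size):
            return None  # this handler does not produce a file itself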
@@ -12,6 +12,21 @@ class ContextPopException(Exception):
     "pop() has been called more times than push()"
     pass
 
+
+class ContextDict(dict):
+    def __init__(self, context, *args, **kwargs):
+        super(ContextDict, self).__init__(*args, **kwargs)
+
+        context.dicts.append(self)
+        self.context = context
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args, **kwargs):
+        self.context.pop()
+
+
 class BaseContext(object):
     def __init__(self, dict_=None):
         self._reset_dicts(dict_)
@@ -34,10 +49,8 @@ class BaseContext(object):
         for d in reversed(self.dicts):
             yield d
 
-    def push(self):
-        d = {}
-        self.dicts.append(d)
-        return d
+    def push(self, *args, **kwargs):
+        return ContextDict(self, *args, **kwargs)
 
     def pop(self):
         if len(self.dicts) == 1:
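Note: push() now returns a ContextDict, so callers can scope variables with a with block and have them popped automatically on exit. A minimal sketch, assuming a configured Django settings module:

    from django.template import Context

    c = Context({'greeting': 'hello'})
    with c.push(name='world'):
        print(c['name'])  # 'world' while the pushed dict is on the stack
    print('name' in c)  # False again: __exit__() popped the dict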
@@ -83,6 +96,7 @@ class BaseContext(object):
         new_context._reset_dicts(values)
         return new_context
 
+
 class Context(BaseContext):
     "A stack container for variable context"
     def __init__(self, dict_=None, autoescape=True, current_app=None,
@@ -106,6 +120,7 @@ class Context(BaseContext):
         self.dicts.append(other_dict)
         return other_dict
 
+
 class RenderContext(BaseContext):
     """
     A stack container for storing Template state.
Some files were not shown because too many files have changed in this diff.