Fixed a suite of errors in the ORM -- a) fixed values_list().values_list() calls that change whether the results are flat, b) fixed an issue with fields on the left-hand side of what becomes the HAVING clause not being included in the GROUP BY clause, and c) fixed a bug with fields from values() calls not being included in the GROUP BY clause. This fixes the recent test failures under PostgreSQL.
git-svn-id: http://code.djangoproject.com/svn/django/trunk@14715 bcc190cf-cafb-0310-a4f2-bffc1f526a37
commit 678f626c24 (parent 3cbaf3c2b6)
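For context, a minimal sketch of the three query patterns this commit addresses, assuming the Book and Number models used by the regression tests further down (this is an editorial illustration, not an excerpt from the test suite):

    from django.db.models import Count, F

    # (a) re-calling values_list() may change flatness; the later call must win.
    qs = Number.objects.values_list("num").values_list("num", flat=True)

    # (b) a field on the left-hand side of a HAVING condition (pages) also has
    # to be added to the GROUP BY clause.
    qs = Book.objects.values_list("name").annotate(
        n_authors=Count("authors")
    ).filter(pages__gt=F("n_authors"))

    # (c) fields selected via values() must appear in GROUP BY when aggregates
    # are involved; strict backends such as PostgreSQL reject the query otherwise.
    qs = Book.objects.values("name").annotate(n_authors=Count("authors"))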
@@ -965,8 +965,7 @@ class ValuesListQuerySet(ValuesQuerySet):
             # If a field list has been specified, use it. Otherwise, use the
             # full list of fields, including extras and aggregates.
             if self._fields:
-                fields = list(self._fields) + filter(lambda f: f not in self._fields,
-                                aggregate_names)
+                fields = list(self._fields) + filter(lambda f: f not in self._fields, aggregate_names)
             else:
                 fields = names

@@ -976,7 +975,9 @@ class ValuesListQuerySet(ValuesQuerySet):

     def _clone(self, *args, **kwargs):
         clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
-        clone.flat = self.flat
+        if not hasattr(clone, "flat"):
+            # Only assign flat if the clone didn't already get it from kwargs
+            clone.flat = self.flat
         return clone

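The old _clone() unconditionally copied self.flat onto the clone, clobbering any flat value that values_list(flat=...) had just passed in through the _clone() kwargs. A rough standalone illustration of the pattern (simplified, not Django code):

    class FakeValuesList(object):
        # "flat" is not set in __init__; it only arrives via _clone() kwargs
        # or is copied over from the queryset being cloned.
        def _clone(self, **kwargs):
            clone = FakeValuesList()
            clone.__dict__.update(kwargs)   # e.g. flat=True from values_list()
            if not hasattr(clone, "flat"):
                # only fall back to the current value if kwargs didn't set it
                clone.flat = getattr(self, "flat", False)
            return clone

    qs = FakeValuesList()
    qs.flat = False
    assert qs._clone(flat=True).flat is True   # the requested flatness wins
    assert qs._clone().flat is False           # otherwise flatness is preserved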
@@ -469,9 +469,11 @@ class SQLCompiler(object):
         qn = self.quote_name_unless_alias
         result, params = [], []
         if self.query.group_by is not None:
-            if len(self.query.model._meta.fields) == len(self.query.select) and \
-                self.connection.features.allows_group_by_pk:
-                self.query.group_by = [(self.query.model._meta.db_table, self.query.model._meta.pk.column)]
+            if (len(self.query.model._meta.fields) == len(self.query.select) and
+                    self.connection.features.allows_group_by_pk):
+                self.query.group_by = [
+                    (self.query.model._meta.db_table, self.query.model._meta.pk.column)
+                ]

             group_by = self.query.group_by or []

@@ -479,11 +481,13 @@ class SQLCompiler(object):
             for extra_select, extra_params in self.query.extra_select.itervalues():
                 extra_selects.append(extra_select)
                 params.extend(extra_params)
-            for col in group_by + self.query.related_select_cols + extra_selects:
+            cols = (group_by + self.query.select +
+                self.query.related_select_cols + extra_selects)
+            for col in cols:
                 if isinstance(col, (list, tuple)):
                     result.append('%s.%s' % (qn(col[0]), qn(col[1])))
                 elif hasattr(col, 'as_sql'):
-                    result.append(col.as_sql(qn))
+                    result.append(col.as_sql(qn, self.connection))
                 else:
                     result.append('(%s)' % str(col))
         return result, params

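With query.select folded into the grouping columns, a values() + annotate() query now groups by the selected fields rather than only by related and extra columns. Roughly, for the model used in the tests below (the SQL shape is approximate and backend-dependent):

    from django.db.models import Count

    Book.objects.values("name").annotate(n_authors=Count("authors"))
    # ... GROUP BY <book_table>."name"
    # Previously the values() column could be missing from GROUP BY, which
    # strict backends such as PostgreSQL reject.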
@@ -195,8 +195,9 @@ class Query(object):
         Unpickling support.
         """
         # Rebuild list of field instances
+        opts = obj_dict['model']._meta
         obj_dict['select_fields'] = [
-            name is not None and obj_dict['model']._meta.get_field(name) or None
+            name is not None and opts.get_field(name) or None
             for name in obj_dict['select_fields']
         ]

@@ -707,13 +708,20 @@ class Query(object):
         # "group by", "where" and "having".
         self.where.relabel_aliases(change_map)
         self.having.relabel_aliases(change_map)
-        for columns in (self.select, self.aggregates.values(), self.group_by or []):
+        for columns in [self.select, self.group_by or []]:
             for pos, col in enumerate(columns):
                 if isinstance(col, (list, tuple)):
                     old_alias = col[0]
                     columns[pos] = (change_map.get(old_alias, old_alias), col[1])
                 else:
                     col.relabel_aliases(change_map)
+        for mapping in [self.aggregates]:
+            for key, col in mapping.items():
+                if isinstance(col, (list, tuple)):
+                    old_alias = col[0]
+                    mapping[key] = (change_map.get(old_alias, old_alias), col[1])
+                else:
+                    col.relabel_aliases(change_map)

         # 2. Rename the alias in the internal table/alias datastructures.
         for old_alias, new_alias in change_map.iteritems():

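self.aggregates is a mapping keyed by alias, so relabeling it by position through the old loop over self.aggregates.values() rewrote entries in a temporary list and the mapping itself never saw the renamed (alias, column) pairs; the new code writes back by key. A toy demonstration of the difference (plain Python, not Django internals):

    change_map = {"T1": "T4"}
    aggregates = {"n_authors": ("T1", "id")}

    # Old style: assigning into a list built from .values() is lost.
    cols = list(aggregates.values())
    cols[0] = (change_map.get(cols[0][0], cols[0][0]), cols[0][1])
    assert aggregates["n_authors"] == ("T1", "id")   # unchanged

    # New style: write back through the mapping itself.
    for key, col in aggregates.items():
        aggregates[key] = (change_map.get(col[0], col[0]), col[1])
    assert aggregates["n_authors"] == ("T4", "id")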
@@ -1075,6 +1083,8 @@ class Query(object):

         if having_clause:
+            if (alias, col) not in self.group_by:
+                self.group_by.append((alias, col))
             self.having.add((Constraint(alias, col, field), lookup_type, value),
                 connector)
         else:

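This is the core of fix (b): when a filter condition ends up in HAVING because it references an aggregate, the plain column on its left-hand side is now appended to GROUP BY as well. The test_having_group_by regression test added further down exercises exactly this; in ORM terms (a sketch of the same query, with approximate SQL in the comments):

    from django.db.models import Count, F

    Book.objects.values_list("name").annotate(
        n_authors=Count("authors")
    ).filter(pages__gt=F("n_authors"))
    # roughly: ... GROUP BY <book>."name", <book>."pages"
    #          HAVING <book>."pages" > COUNT(<m2m>."author_id")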
@@ -202,7 +202,7 @@ class DateQuery(Query):
         alias = result[3][-1]
         select = Date((alias, field.column), lookup_type)
         self.select = [select]
-        self.select_fields = [None]
+        self.select_fields = []
         self.select_related = False # See #7097.
         self.set_extra_mask([])
         self.distinct = True

@@ -654,6 +654,25 @@ class AggregationTests(TestCase):
             attrgetter("name")
         )

+    def test_values_annotate_values(self):
+        qs = Book.objects.values("name").annotate(
+            n_authors=Count("authors")
+        ).values_list("pk", flat=True)
+        self.assertEqual(list(qs), list(Book.objects.values_list("pk", flat=True)))
+
+    def test_having_group_by(self):
+        # Test that when a field occurs on the LHS of a HAVING clause that it
+        # appears correctly in the GROUP BY clause
+        qs = Book.objects.values_list("name").annotate(
+            n_authors=Count("authors")
+        ).filter(
+            pages__gt=F("n_authors")
+        ).values_list("name", flat=True)
+        # Results should be the same, all Books have more pages than authors
+        self.assertEqual(
+            list(qs), list(Book.objects.values_list("name", flat=True))
+        )
+
     @skipUnlessDBFeature('supports_stddev')
     def test_stddev(self):
         self.assertEqual(

@@ -1093,11 +1093,12 @@ class Queries5Tests(TestCase):
         # Extra tables used to crash SQL construction on the second use.
         qs = Ranking.objects.extra(tables=['django_site'])
         qs.query.get_compiler(qs.db).as_sql()
-        qs.query.get_compiler(qs.db).as_sql() # test passes if this doesn't raise an exception.
+        # test passes if this doesn't raise an exception.
+        qs.query.get_compiler(qs.db).as_sql()

     def test_ticket9848(self):
-        # Make sure that updates which only filter on sub-tables don't inadvertently
-        # update the wrong records (bug #9848).
+        # Make sure that updates which only filter on sub-tables don't
+        # inadvertently update the wrong records (bug #9848).

         # Make sure that the IDs from different tables don't happen to match.
         self.assertQuerysetEqual(

@@ -1283,15 +1284,15 @@ class Queries6Tests(TestCase):
         )

         # The annotation->tag link is single values and tag->children links is
-        # multi-valued. So we have to split the exclude filter in the middle and then
-        # optimise the inner query without losing results.
+        # multi-valued. So we have to split the exclude filter in the middle
+        # and then optimise the inner query without losing results.
         self.assertQuerysetEqual(
             Annotation.objects.exclude(tag__children__name="t2"),
             ['<Annotation: a2>']
         )

-        # Nested queries are possible (although should be used with care, since they have
-        # performance problems on backends like MySQL.
+        # Nested queries are possible (although should be used with care, since
+        # they have performance problems on backends like MySQL.
         self.assertQuerysetEqual(
             Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),

@@ -1301,7 +1302,7 @@ class Queries6Tests(TestCase):
     def test_ticket3739(self):
         # The all() method on querysets returns a copy of the queryset.
         q1 = Tag.objects.order_by('name')
-        self.assertNotEqual(id(q1), id(q1.all()))
+        self.assertIsNot(q1, q1.all())


 class GeneratorExpressionTests(TestCase):

@@ -1452,6 +1453,16 @@ class EmptyQuerySetTests(TestCase):
         )


+class ValuesQuerysetTests(BaseQuerysetTest):
+    def test_flat_values_lits(self):
+        Number.objects.create(num=72)
+        qs = Number.objects.values_list("num")
+        qs = qs.values_list("num", flat=True)
+        self.assertValueQuerysetEqual(
+            qs, [72]
+        )
+
+
 class WeirdQuerysetSlicingTests(BaseQuerysetTest):
     def setUp(self):
         Number.objects.create(num=1)

@@ -1481,8 +1492,8 @@ class WeirdQuerysetSlicingTests(BaseQuerysetTest):
 class EscapingTests(TestCase):
     def test_ticket_7302(self):
         # Reserved names are appropriately escaped
-        _ = ReservedName.objects.create(name='a',order=42)
-        ReservedName.objects.create(name='b',order=37)
+        _ = ReservedName.objects.create(name='a', order=42)
+        ReservedName.objects.create(name='b', order=37)
         self.assertQuerysetEqual(
             ReservedName.objects.all().order_by('order'),
             ['<ReservedName: b>', '<ReservedName: a>']