"""
|
|
|
|
Various data structures used in query construction.
|
|
|
|
|
2009-03-19 17:06:04 +08:00
|
|
|
Factored out from django.db.models.query to avoid making the main module very
|
|
|
|
large and/or so that they can be used by other modules without getting into
|
|
|
|
circular import difficulties.
|
Merged the queryset-refactor branch into trunk.
This is a big internal change, but mostly backwards compatible with existing
code. Also adds a couple of new features.
Fixed #245, #1050, #1656, #1801, #2076, #2091, #2150, #2253, #2306, #2400, #2430, #2482, #2496, #2676, #2737, #2874, #2902, #2939, #3037, #3141, #3288, #3440, #3592, #3739, #4088, #4260, #4289, #4306, #4358, #4464, #4510, #4858, #5012, #5020, #5261, #5295, #5321, #5324, #5325, #5555, #5707, #5796, #5817, #5987, #6018, #6074, #6088, #6154, #6177, #6180, #6203, #6658
git-svn-id: http://code.djangoproject.com/svn/django/trunk@7477 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2008-04-27 10:50:16 +08:00
|
|
|
"""
|
2012-06-12 04:05:13 +08:00
|
|
|
from __future__ import unicode_literals
|
Merged the queryset-refactor branch into trunk.
This is a big internal change, but mostly backwards compatible with existing
code. Also adds a couple of new features.
Fixed #245, #1050, #1656, #1801, #2076, #2091, #2150, #2253, #2306, #2400, #2430, #2482, #2496, #2676, #2737, #2874, #2902, #2939, #3037, #3141, #3288, #3440, #3592, #3739, #4088, #4260, #4289, #4306, #4358, #4464, #4510, #4858, #5012, #5020, #5261, #5295, #5321, #5324, #5325, #5555, #5707, #5796, #5817, #5987, #6018, #6074, #6088, #6154, #6177, #6180, #6203, #6658
git-svn-id: http://code.djangoproject.com/svn/django/trunk@7477 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2008-04-27 10:50:16 +08:00
|
|
|
|
2013-11-09 20:25:15 +08:00
|
|
|
from collections import namedtuple
|
|
|
|
|
2014-01-05 16:32:22 +08:00
|
|
|
from django.apps import apps
|
2015-01-02 23:14:23 +08:00
|
|
|
from django.core.exceptions import FieldDoesNotExist
|
2013-09-17 00:52:05 +08:00
|
|
|
from django.db.backends import utils
|
2013-12-25 21:13:18 +08:00
|
|
|
from django.db.models.constants import LOOKUP_SEP
|
2015-04-17 22:25:11 +08:00
|
|
|
from django.utils import tree
|
2009-03-19 17:06:04 +08:00
|
|
|
|
2013-11-09 20:25:15 +08:00
|
|
|
# PathInfo is used when converting lookups (fk__somecol). The contents
|
|
|
|
# describe the relation in Model terms (model Options and Fields for both
|
|
|
|
# sides of the relation. The join_field is the field backing the relation.
|
|
|
|
PathInfo = namedtuple('PathInfo', 'from_opts to_opts target_fields join_field m2m direct')
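
# For example (hypothetical models, shown only to illustrate the fields above):
# a lookup such as Book.objects.filter(author__name='X') traverses one relation,
# described roughly by something like:
#
#   PathInfo(from_opts=Book._meta, to_opts=Author._meta,
#            target_fields=(Author._meta.pk,),
#            join_field=Book._meta.get_field('author'),
#            m2m=False, direct=True)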


class InvalidQuery(Exception):
    """
    The query passed to raw isn't a safe query to use with raw.
    """
    pass


class QueryWrapper(object):
    """
    A type that indicates the contents are an SQL fragment and the associated
    parameters. Can be used to pass opaque data to a where-clause, for example.
    """
    contains_aggregate = False

    def __init__(self, sql, params):
        self.data = sql, list(params)

    def as_sql(self, compiler=None, connection=None):
        return self.data
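
# A minimal usage sketch (not taken from Django's own code; the SQL fragment and
# parameters here are made up): QueryWrapper lets already-rendered SQL travel
# through query construction untouched.
#
#   condition = QueryWrapper('"price" > %s', [100])
#   condition.as_sql()      # ('"price" > %s', [100])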


class Q(tree.Node):
    """
    Encapsulates filters as objects that can then be combined logically (using
    & and |).
    """
    # Connection types
    AND = 'AND'
    OR = 'OR'
    default = AND

    def __init__(self, *args, **kwargs):
        super(Q, self).__init__(children=list(args) + list(kwargs.items()))

    def _combine(self, other, conn):
        if not isinstance(other, Q):
            raise TypeError(other)
        obj = type(self)()
        obj.connector = conn
        obj.add(self, conn)
        obj.add(other, conn)
        return obj

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)

    def __invert__(self):
        obj = type(self)()
        obj.add(self, self.AND)
        obj.negate()
        return obj

    def clone(self):
        clone = self.__class__._new_instance(
            children=[], connector=self.connector, negated=self.negated)
        for child in self.children:
            if hasattr(child, 'clone'):
                clone.children.append(child.clone())
            else:
                clone.children.append(child)
        return clone

    def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
        # We must promote any new joins to left outer joins so that when Q is
        # used as an expression, rows aren't filtered due to joins.
        clause, joins = query._add_q(self, reuse, allow_joins=allow_joins, split_subq=False)
        query.promote_joins(joins)
        return clause

    @classmethod
    def _refs_aggregate(cls, obj, existing_aggregates):
        if not isinstance(obj, tree.Node):
            aggregate, aggregate_lookups = refs_aggregate(obj[0].split(LOOKUP_SEP), existing_aggregates)
            if not aggregate and hasattr(obj[1], 'refs_aggregate'):
                return obj[1].refs_aggregate(existing_aggregates)
            return aggregate, aggregate_lookups
        for c in obj.children:
            aggregate, aggregate_lookups = cls._refs_aggregate(c, existing_aggregates)
            if aggregate:
                return aggregate, aggregate_lookups
        return False, ()

    def refs_aggregate(self, existing_aggregates):
        if not existing_aggregates:
            return False
        return self._refs_aggregate(self, existing_aggregates)
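
# Usage sketch (hypothetical models; not part of the module itself): Q nodes
# compose with &, | and ~ before being handed to filter()/exclude().
#
#   q = Q(author__name='Tolstoy') | ~Q(published_year__lt=1860)
#   Book.objects.filter(q)
#
# __or__ and __and__ build the combined node via _combine(), while __invert__
# wraps the current node and negates it.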


class DeferredAttribute(object):
    """
    A wrapper for a deferred-loading field. When the value is read from this
    object the first time, the query is executed.
    """
    def __init__(self, field_name, model):
        self.field_name = field_name

    def __get__(self, instance, owner):
        """
        Retrieves and caches the value from the datastore on the first lookup.
        Returns the cached value.
        """
        non_deferred_model = instance._meta.proxy_for_model
        opts = non_deferred_model._meta

        assert instance is not None
        data = instance.__dict__
        if data.get(self.field_name, self) is self:
            # self.field_name is the attname of the field, but only() takes the
            # actual name, so we need to translate it here.
            try:
                f = opts.get_field(self.field_name)
            except FieldDoesNotExist:
                f = [f for f in opts.fields if f.attname == self.field_name][0]
            name = f.name
            # Let's see if the field is part of the parent chain. If so we
            # might be able to reuse the already loaded value. Refs #18343.
            val = self._check_parent_chain(instance, name)
            if val is None:
                instance.refresh_from_db(fields=[self.field_name])
                val = getattr(instance, self.field_name)
            data[self.field_name] = val
        return data[self.field_name]

    def __set__(self, instance, value):
        """
        Deferred loading attributes can be set normally (which means there will
        never be a database lookup involved).
        """
        instance.__dict__[self.field_name] = value

    def _check_parent_chain(self, instance, name):
        """
        Check if the field value can be fetched from a parent field already
        loaded in the instance. This can be done if the to-be-fetched
        field is a primary key field.
        """
        opts = instance._meta
        f = opts.get_field(name)
        link_field = opts.get_ancestor_link(f.model)
        if f.primary_key and f != link_field:
            return getattr(instance, link_field.attname)
        return None
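
# Sketch of how this descriptor typically comes into play (hypothetical model
# and field names): deferred fields are replaced by DeferredAttribute on a
# dynamically built proxy class, so the first attribute access triggers a
# refresh_from_db() for just that field.
#
#   book = Book.objects.defer('summary').get(pk=1)   # 'summary' not selected
#   book.summary                                     # __get__ issues the query now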


def select_related_descend(field, restricted, requested, load_fields, reverse=False):
    """
    Returns True if this field should be used to descend deeper for
    select_related() purposes. Used by both the query construction code
    (sql.query.fill_related_selections()) and the model instance creation code
    (query.get_klass_info()).

    Arguments:
     * field - the field to be checked
     * restricted - a boolean, indicating if the field list has been manually
       restricted using a requested clause
     * requested - the select_related() dictionary
     * load_fields - the set of fields to be loaded on this model
     * reverse - boolean, True if we are checking a reverse select related
    """
    if not field.remote_field:
        return False
    if field.remote_field.parent_link and not reverse:
        return False
    if restricted:
        if reverse and field.related_query_name() not in requested:
            return False
        if not reverse and field.name not in requested:
            return False
    if not restricted and field.null:
        return False
    if load_fields:
        if field.attname not in load_fields:
            if restricted and field.name in requested:
                raise InvalidQuery("Field %s.%s cannot be both deferred"
                                   " and traversed using select_related"
                                   " at the same time." %
                                   (field.model._meta.object_name, field.name))
            return False
    return True
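
# Tracing the checks above with hypothetical models: for
# Book.objects.select_related('author').only('title'), the 'author' relation is
# named in requested but its attname is missing from load_fields, so the final
# branch raises InvalidQuery rather than silently descending; an unrestricted
# nullable relation, by contrast, simply returns False.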


# This function is needed because data descriptors must be defined on a class
# object, not an instance, to have any effect.
def deferred_class_factory(model, attrs):
    """
    Returns a class object that is a copy of "model" with the specified "attrs"
    being replaced with DeferredAttribute objects. The "pk_value" ties the
    deferred attributes to a particular instance of the model.
    """
    if not attrs:
        return model
    # Never create deferred models based on deferred model
    if model._deferred:
        # Deferred models are proxies for the non-deferred model. We never
        # create chains of defers => proxy_for_model is the non-deferred
        # model.
        model = model._meta.proxy_for_model
    # The app registry wants a unique name for each model, otherwise the new
    # class won't be created (we get an exception). Therefore, we generate
    # the name using the passed in attrs. It's OK to reuse an existing class
    # object if the attrs are identical.
    name = "%s_Deferred_%s" % (model.__name__, '_'.join(sorted(attrs)))
    name = utils.truncate_name(name, 80, 32)

    try:
        return apps.get_model(model._meta.app_label, name)

    except LookupError:

        class Meta:
            proxy = True
            app_label = model._meta.app_label

        overrides = {attr: DeferredAttribute(attr, model) for attr in attrs}
        overrides["Meta"] = Meta
        overrides["__module__"] = model.__module__
        overrides["_deferred"] = True
        return type(str(name), (model,), overrides)


# The above function is also used to unpickle model instances with deferred
# fields.
deferred_class_factory.__safe_for_unpickling__ = True
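
# Rough usage sketch (hypothetical Book model, shown for illustration only):
#
#   DeferredBook = deferred_class_factory(Book, {'summary'})
#   DeferredBook.__name__                         # 'Book_Deferred_summary' (possibly truncated)
#   DeferredBook._deferred                        # True
#   DeferredBook._meta.proxy_for_model is Book    # proxies the real model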


def refs_aggregate(lookup_parts, aggregates):
    """
    A helper function to check if the lookup_parts contains references
    to the given aggregates set. Because the LOOKUP_SEP is contained in the
    default annotation names we must check each prefix of the lookup_parts
    for a match.
    """
    for n in range(len(lookup_parts) + 1):
        level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n])
        if level_n_lookup in aggregates and aggregates[level_n_lookup].contains_aggregate:
            return aggregates[level_n_lookup], lookup_parts[n:]
    return False, ()
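
# Worked example (hypothetical annotation name): with
# aggregates = {'total__sum': Sum('total')} and lookup_parts = ['total', 'sum', 'gte'],
# the prefix scan matches 'total__sum' at n=2 and returns
# (aggregates['total__sum'], ['gte']).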


def refs_expression(lookup_parts, annotations):
    """
    A helper function to check if the lookup_parts contains references
    to the given annotations set. Because the LOOKUP_SEP is contained in the
    default annotation names we must check each prefix of the lookup_parts
    for a match.
    """
    for n in range(len(lookup_parts) + 1):
        level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n])
        if level_n_lookup in annotations and annotations[level_n_lookup]:
            return annotations[level_n_lookup], lookup_parts[n:]
    return False, ()
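
# Behaves like refs_aggregate() above, but matches any truthy annotation rather
# than only those marked contains_aggregate; e.g. (hypothetical) an annotation
# registered as 'page__count' would be matched for the lookup parts
# ['page', 'count', 'gt'], returning (annotations['page__count'], ['gt']).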


def check_rel_lookup_compatibility(model, target_opts, field):
    """
    Check that model is compatible with target_opts. Compatibility
    is OK if:
      1) model and opts match (where proxy inheritance is removed)
      2) model is parent of opts' model or the other way around
    """
    def check(opts):
        return (
            model._meta.concrete_model == opts.concrete_model or
            opts.concrete_model in model._meta.get_parent_list() or
            model in opts.get_parent_list()
        )
    # If the field is a primary key, then doing a query against the field's
    # model is ok, too. Consider the case:
    #     class Restaurant(models.Model):
    #         place = OneToOneField(Place, primary_key=True)
    #     Restaurant.objects.filter(pk__in=Restaurant.objects.all())
    # If we didn't have the primary key check, then pk__in (== place__in) would
    # give Place's opts as the target opts, but Restaurant isn't compatible
    # with that. This logic applies only to primary keys, as when doing __in=qs,
    # we are going to turn this into __in=qs.values('pk') later on.
    return (
        check(target_opts) or
        (getattr(field, 'primary_key', False) and check(field.model._meta))
    )
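
# Continuing the Restaurant/Place case above (a restatement of the comment, not
# additional behaviour): check(target_opts) fails because target_opts belongs to
# Place, but since the field is Restaurant's primary key, the second branch
# re-runs check() against Restaurant's own options and the lookup is accepted.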