2015-06-05 19:31:44 +08:00
|
|
|
import warnings
|
2015-01-28 20:35:27 +08:00
|
|
|
from copy import copy
|
2014-01-18 17:09:43 +08:00
|
|
|
|
2015-08-03 10:30:06 +08:00
|
|
|
from django.conf import settings
|
|
|
|
from django.db.models.expressions import Func, Value
|
|
|
|
from django.db.models.fields import (
|
|
|
|
DateField, DateTimeField, Field, IntegerField, TimeField,
|
|
|
|
)
|
|
|
|
from django.db.models.query_utils import RegisterLookupMixin
|
|
|
|
from django.utils import timezone
|
2015-06-05 19:31:44 +08:00
|
|
|
from django.utils.deprecation import RemovedInDjango20Warning
|
2014-01-18 17:09:43 +08:00
|
|
|
from django.utils.functional import cached_property
|
2014-12-13 21:04:36 +08:00
|
|
|
from django.utils.six.moves import range
|
2014-01-18 17:09:43 +08:00
|
|
|
|
2014-09-14 18:34:41 +08:00
|
|
|
|
2015-08-03 10:30:06 +08:00
|
|
|
class Lookup(object):
    """
    Base class for query lookups (``exact``, ``gt``, ``in``, ...).

    A lookup pairs a left-hand side (typically a column or transform with an
    ``output_field``) with a right-hand side value or expression, and knows
    how to render itself as SQL via ``as_sql()``.
    """
    # Subclasses set this to the name used in queryset kwargs (e.g. 'exact').
    lookup_name = None

    def __init__(self, lhs, rhs):
        self.lhs, self.rhs = lhs, rhs
        # Let the lhs field pre-process the raw rhs value before anything else.
        self.rhs = self.get_prep_lookup()
        if hasattr(self.lhs, 'get_bilateral_transforms'):
            bilateral_transforms = self.lhs.get_bilateral_transforms()
        else:
            bilateral_transforms = []
        if bilateral_transforms:
            # Warn the user as soon as possible if they are trying to apply
            # a bilateral transformation on a nested QuerySet: that won't work.
            # We need to import QuerySet here so as to avoid circular
            # imports.
            from django.db.models.query import QuerySet
            if isinstance(rhs, QuerySet):
                raise NotImplementedError("Bilateral transformations on nested querysets are not supported.")
        self.bilateral_transforms = bilateral_transforms

    def apply_bilateral_transforms(self, value):
        # Wrap `value` in every registered bilateral transform, in order.
        for transform in self.bilateral_transforms:
            value = transform(value)
        return value

    def batch_process_rhs(self, compiler, connection, rhs=None):
        """
        Prepare an iterable rhs (used by ``in``/``range``), returning a list
        of SQL placeholders and the matching flat parameter list.
        """
        if rhs is None:
            rhs = self.rhs
        if self.bilateral_transforms:
            sqls, sqls_params = [], []
            for p in rhs:
                # Each element must be wrapped, transformed, and compiled
                # individually so the transforms apply per-value.
                value = Value(p, output_field=self.lhs.output_field)
                value = self.apply_bilateral_transforms(value)
                value = value.resolve_expression(compiler.query)
                sql, sql_params = compiler.compile(value)
                sqls.append(sql)
                sqls_params.extend(sql_params)
        else:
            params = self.lhs.output_field.get_db_prep_lookup(
                self.lookup_name, rhs, connection, prepared=True)
            sqls, sqls_params = ['%s'] * len(params), params
        return sqls, sqls_params

    def get_prep_lookup(self):
        # Delegate value preparation to the lhs field's output_field.
        return self.lhs.output_field.get_prep_lookup(self.lookup_name, self.rhs)

    def get_db_prep_lookup(self, value, connection):
        # Return (placeholder, params) for a plain Python rhs value.
        return (
            '%s', self.lhs.output_field.get_db_prep_lookup(
                self.lookup_name, value, connection, prepared=True))

    def process_lhs(self, compiler, connection, lhs=None):
        # Compile the lhs expression (or an explicit override) into
        # (sql, params).
        lhs = lhs or self.lhs
        return compiler.compile(lhs)

    def process_rhs(self, compiler, connection):
        """Compile the rhs into (sql, params), whatever form it takes."""
        value = self.rhs
        if self.bilateral_transforms:
            if self.rhs_is_direct_value():
                # Do not call get_db_prep_lookup here as the value will be
                # transformed before being used for lookup
                value = Value(value, output_field=self.lhs.output_field)
            value = self.apply_bilateral_transforms(value)
            value = value.resolve_expression(compiler.query)
        # Due to historical reasons there are a couple of different
        # ways to produce sql here. get_compiler is likely a Query
        # instance, _as_sql QuerySet and as_sql just something with
        # as_sql. Finally the value can of course be just plain
        # Python value.
        if hasattr(value, 'get_compiler'):
            value = value.get_compiler(connection=connection)
        if hasattr(value, 'as_sql'):
            sql, params = compiler.compile(value)
            return '(' + sql + ')', params
        if hasattr(value, '_as_sql'):
            sql, params = value._as_sql(connection=connection)
            return '(' + sql + ')', params
        else:
            return self.get_db_prep_lookup(value, connection)

    def rhs_is_direct_value(self):
        # True when rhs is a plain Python value rather than something that
        # can produce SQL itself.
        return not(
            hasattr(self.rhs, 'as_sql') or
            hasattr(self.rhs, '_as_sql') or
            hasattr(self.rhs, 'get_compiler'))

    def relabeled_clone(self, relabels):
        # Shallow-copy this lookup with both sides relabeled to new aliases.
        new = copy(self)
        new.lhs = new.lhs.relabeled_clone(relabels)
        if hasattr(new.rhs, 'relabeled_clone'):
            new.rhs = new.rhs.relabeled_clone(relabels)
        return new

    def get_group_by_cols(self):
        # Columns from both sides participate in GROUP BY.
        cols = self.lhs.get_group_by_cols()
        if hasattr(self.rhs, 'get_group_by_cols'):
            cols.extend(self.rhs.get_group_by_cols())
        return cols

    def as_sql(self, compiler, connection):
        # Subclasses must render themselves as (sql, params).
        raise NotImplementedError

    @cached_property
    def contains_aggregate(self):
        return self.lhs.contains_aggregate or getattr(self.rhs, 'contains_aggregate', False)
|
|
|
|
|
2014-01-18 17:09:43 +08:00
|
|
|
|
2015-08-03 10:30:06 +08:00
|
|
|
class Transform(RegisterLookupMixin, Func):
    """
    RegisterLookupMixin() is first so that get_lookup() and get_transform()
    first examine self and then check output_field.
    """
    # Set to True on subclasses whose transform must also be applied to the
    # rhs value of any lookup chained after it.
    bilateral = False
    arity = 1

    @property
    def lhs(self):
        # The single source expression this transform wraps.
        return self.get_source_expressions()[0]

    def get_bilateral_transforms(self):
        # Start from whatever bilateral transforms the wrapped expression
        # already carries, then append this transform's own class when it
        # is marked bilateral.
        getter = getattr(self.lhs, 'get_bilateral_transforms', None)
        transforms = getter() if getter is not None else []
        if self.bilateral:
            transforms.append(self.__class__)
        return transforms
|
|
|
|
|
|
|
|
|
2014-01-18 17:09:43 +08:00
|
|
|
class BuiltinLookup(Lookup):
    """
    Lookup whose SQL operator is taken from ``connection.operators``
    keyed by ``lookup_name``.
    """

    def process_lhs(self, compiler, connection, lhs=None):
        # Compile the lhs, then wrap it in the backend-specific casts for
        # this field type and lookup.
        sql, params = super(BuiltinLookup, self).process_lhs(
            compiler, connection, lhs)
        output_field = self.lhs.output_field
        internal_type = output_field.get_internal_type()
        sql = connection.ops.field_cast_sql(
            output_field.db_type(connection=connection), internal_type) % sql
        sql = connection.ops.lookup_cast(self.lookup_name, internal_type) % sql
        # Normalize params to a mutable list so callers may extend() it.
        return sql, list(params)

    def as_sql(self, compiler, connection):
        # "<lhs sql> <operator-with-rhs sql>", with lhs params first.
        lhs_sql, params = self.process_lhs(compiler, connection)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(rhs_params)
        return '%s %s' % (lhs_sql, self.get_rhs_op(connection, rhs_sql)), params

    def get_rhs_op(self, connection, rhs):
        # Interpolate the rhs placeholder into the backend operator template.
        return connection.operators[self.lookup_name] % rhs
|
|
|
|
|
|
|
|
|
|
|
|
class Exact(BuiltinLookup):
    """Case-sensitive exact match (``exact``)."""
    lookup_name = 'exact'


Field.register_lookup(Exact)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class IExact(BuiltinLookup):
    """Case-insensitive exact match (``iexact``)."""
    lookup_name = 'iexact'

    def process_rhs(self, qn, connection):
        # Let the backend normalize the first parameter for a
        # case-insensitive comparison.
        sql, params = super(IExact, self).process_rhs(qn, connection)
        if params:
            params[0] = connection.ops.prep_for_iexact_query(params[0])
        return sql, params


Field.register_lookup(IExact)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class GreaterThan(BuiltinLookup):
    """The ``gt`` lookup."""
    lookup_name = 'gt'


Field.register_lookup(GreaterThan)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class GreaterThanOrEqual(BuiltinLookup):
    """The ``gte`` lookup."""
    lookup_name = 'gte'


Field.register_lookup(GreaterThanOrEqual)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class LessThan(BuiltinLookup):
    """The ``lt`` lookup."""
    lookup_name = 'lt'


Field.register_lookup(LessThan)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class LessThanOrEqual(BuiltinLookup):
    """The ``lte`` lookup."""
    lookup_name = 'lte'


Field.register_lookup(LessThanOrEqual)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class In(BuiltinLookup):
    """
    The ``in`` lookup: membership test against an iterable of values or a
    subquery, rendered as SQL ``IN (...)`` (split into OR'd chunks on
    backends that cap the IN-list size).
    """
    lookup_name = 'in'

    def process_rhs(self, compiler, connection):
        # Refuse a queryset rhs bound to a different database alias; such a
        # subquery cannot be executed by this connection.
        db_rhs = getattr(self.rhs, '_db', None)
        if db_rhs is not None and db_rhs != connection.alias:
            raise ValueError(
                "Subqueries aren't allowed across different databases. Force "
                "the inner query to be evaluated using `list(inner_query)`."
            )

        if self.rhs_is_direct_value():
            # De-duplicate the values when they are hashable.
            try:
                rhs = set(self.rhs)
            except TypeError:  # Unhashable items in self.rhs
                rhs = self.rhs

            if not rhs:
                # `x IN ()` can never match anything.
                from django.db.models.sql.datastructures import EmptyResultSet
                raise EmptyResultSet

            # rhs should be an iterable; use batch_process_rhs() to
            # prepare/transform those values.
            sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs)
            placeholder = '(' + ', '.join(sqls) + ')'
            return (placeholder, sqls_params)
        else:
            # Subquery/expression rhs: compile through the generic path.
            return super(In, self).process_rhs(compiler, connection)

    def get_rhs_op(self, connection, rhs):
        return 'IN %s' % rhs

    def as_sql(self, compiler, connection):
        # Backends such as Oracle limit IN-list length; fall back to the
        # chunked form when the value list exceeds that limit.
        max_in_list_size = connection.ops.max_in_list_size()
        if self.rhs_is_direct_value() and max_in_list_size and len(self.rhs) > max_in_list_size:
            return self.split_parameter_list_as_sql(compiler, connection)
        return super(In, self).as_sql(compiler, connection)

    def split_parameter_list_as_sql(self, compiler, connection):
        # This is a special case for databases which limit the number of
        # elements which can appear in an 'IN' clause.
        max_in_list_size = connection.ops.max_in_list_size()
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.batch_process_rhs(compiler, connection)
        in_clause_elements = ['(']
        params = []
        # Emit "(lhs IN (...) OR lhs IN (...) ...)", one chunk per slice of
        # at most max_in_list_size placeholders.
        for offset in range(0, len(rhs_params), max_in_list_size):
            if offset > 0:
                in_clause_elements.append(' OR ')
            in_clause_elements.append('%s IN (' % lhs)
            # lhs params repeat once per chunk, matching the repeated lhs SQL.
            params.extend(lhs_params)
            sqls = rhs[offset: offset + max_in_list_size]
            sqls_params = rhs_params[offset: offset + max_in_list_size]
            param_group = ', '.join(sqls)
            in_clause_elements.append(param_group)
            in_clause_elements.append(')')
            params.extend(sqls_params)
        in_clause_elements.append(')')
        return ''.join(in_clause_elements), params


Field.register_lookup(In)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class PatternLookup(BuiltinLookup):
    """
    Base class for LIKE-style lookups (startswith, contains, ...).
    """

    def get_rhs_op(self, connection, rhs):
        # Assume we are in startswith. We need to produce SQL like:
        # col LIKE %s, ['thevalue%']
        # For python values we can (and should) do that directly in Python,
        # but if the value is for example reference to other column, then
        # we need to add the % pattern match to the lookup by something like
        # col LIKE othercol || '%%'
        # So, for Python values we don't need any special pattern, but for
        # SQL reference values or SQL transformations we need the correct
        # pattern added.
        is_direct = not (
            hasattr(self.rhs, 'get_compiler') or
            hasattr(self.rhs, 'as_sql') or
            hasattr(self.rhs, '_as_sql') or
            self.bilateral_transforms
        )
        if is_direct:
            return super(PatternLookup, self).get_rhs_op(connection, rhs)
        pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc)
        return pattern.format(rhs)
|
|
|
|
|
|
|
|
|
2014-09-27 18:41:54 +08:00
|
|
|
class Contains(PatternLookup):
    """Case-sensitive containment (``contains``), rendered as LIKE %value%."""
    lookup_name = 'contains'

    def process_rhs(self, qn, connection):
        sql, params = super(Contains, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE metacharacters, then wrap the value in wildcards.
            params[0] = "%%%s%%" % connection.ops.prep_for_like_query(params[0])
        return sql, params


Field.register_lookup(Contains)
|
2014-09-27 18:41:54 +08:00
|
|
|
|
|
|
|
|
2015-01-11 00:11:15 +08:00
|
|
|
class IContains(Contains):
    """Case-insensitive containment (``icontains``)."""
    lookup_name = 'icontains'


Field.register_lookup(IContains)
|
2014-09-27 18:41:54 +08:00
|
|
|
|
|
|
|
|
2014-01-18 17:09:43 +08:00
|
|
|
class StartsWith(PatternLookup):
    """Case-sensitive prefix match (``startswith``), rendered as LIKE value%."""
    lookup_name = 'startswith'

    def process_rhs(self, qn, connection):
        sql, params = super(StartsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE metacharacters, then append the trailing wildcard.
            params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
        return sql, params


Field.register_lookup(StartsWith)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class IStartsWith(PatternLookup):
    """Case-insensitive prefix match (``istartswith``)."""
    lookup_name = 'istartswith'

    def process_rhs(self, qn, connection):
        sql, params = super(IStartsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE metacharacters, then append the trailing wildcard.
            params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
        return sql, params


Field.register_lookup(IStartsWith)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
2014-09-27 18:41:54 +08:00
|
|
|
class EndsWith(PatternLookup):
    """Case-sensitive suffix match (``endswith``), rendered as LIKE %value."""
    lookup_name = 'endswith'

    def process_rhs(self, qn, connection):
        sql, params = super(EndsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE metacharacters, then prepend the leading wildcard.
            params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
        return sql, params


Field.register_lookup(EndsWith)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
2014-09-27 18:41:54 +08:00
|
|
|
class IEndsWith(PatternLookup):
    """Case-insensitive suffix match (``iendswith``)."""
    lookup_name = 'iendswith'

    def process_rhs(self, qn, connection):
        sql, params = super(IEndsWith, self).process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE metacharacters, then prepend the leading wildcard.
            params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
        return sql, params


Field.register_lookup(IEndsWith)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
2014-09-14 18:34:41 +08:00
|
|
|
class Range(BuiltinLookup):
    """The ``range`` lookup, rendered as SQL ``BETWEEN a AND b``."""
    lookup_name = 'range'

    def get_rhs_op(self, connection, rhs):
        return "BETWEEN %s AND %s" % (rhs[0], rhs[1])

    def process_rhs(self, compiler, connection):
        if not self.rhs_is_direct_value():
            # Expression/subquery rhs: use the generic machinery.
            return super(Range, self).process_rhs(compiler, connection)
        # rhs should be an iterable of 2 values; batch-prepare both bounds.
        return self.batch_process_rhs(compiler, connection)


Field.register_lookup(Range)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class IsNull(BuiltinLookup):
    """
    The ``isnull`` lookup: IS NULL when the rhs is truthy, IS NOT NULL
    otherwise.
    """
    lookup_name = 'isnull'

    def as_sql(self, compiler, connection):
        sql, params = compiler.compile(self.lhs)
        template = "%s IS NULL" if self.rhs else "%s IS NOT NULL"
        return template % sql, params


Field.register_lookup(IsNull)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class Search(BuiltinLookup):
    """Deprecated full-text ``search`` lookup (MySQL only)."""
    lookup_name = 'search'

    def as_sql(self, compiler, connection):
        warnings.warn(
            'The `__search` lookup is deprecated. See the 1.10 release notes '
            'for how to replace it.', RemovedInDjango20Warning, stacklevel=2
        )
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        # The backend template embeds the column name itself; only the
        # parameters are interpolated by the driver.
        sql_template = connection.ops.fulltext_search_sql(field_name=lhs)
        return sql_template, lhs_params + rhs_params


Field.register_lookup(Search)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
|
|
|
class Regex(BuiltinLookup):
    """Case-sensitive regular-expression match (``regex``)."""
    lookup_name = 'regex'

    def as_sql(self, compiler, connection):
        # Backends with a native regex operator use the standard path;
        # others supply a full SQL template via regex_lookup().
        if self.lookup_name in connection.operators:
            return super(Regex, self).as_sql(compiler, connection)
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.process_rhs(compiler, connection)
        sql_template = connection.ops.regex_lookup(self.lookup_name)
        return sql_template % (lhs, rhs), lhs_params + rhs_params


Field.register_lookup(Regex)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
2014-01-20 10:38:46 +08:00
|
|
|
class IRegex(Regex):
    """Case-insensitive regular-expression match (``iregex``)."""
    lookup_name = 'iregex'


Field.register_lookup(IRegex)
|
|
|
|
|
|
|
|
|
|
|
|
class DateTimeDateTransform(Transform):
    """Extracts the date portion of a datetime column (``__date``)."""
    lookup_name = 'date'

    @cached_property
    def output_field(self):
        return DateField()

    def as_sql(self, compiler, connection):
        lhs, lhs_params = compiler.compile(self.lhs)
        # Cast in the active timezone when USE_TZ is enabled.
        tzname = timezone.get_current_timezone_name() if settings.USE_TZ else None
        sql, tz_params = connection.ops.datetime_cast_date_sql(lhs, tzname)
        lhs_params.extend(tz_params)
        return sql, lhs_params
|
|
|
|
|
|
|
|
|
|
|
|
class DateTransform(Transform):
    """
    Base class for transforms that extract one date/time component
    (year, month, hour, ...); the component is named by ``lookup_name``.
    """

    def as_sql(self, compiler, connection):
        sql, params = compiler.compile(self.lhs)
        lhs_output_field = self.lhs.output_field
        # NOTE(review): keep the DateTimeField branch first — if these field
        # classes subclass one another, reordering the isinstance() chain
        # would take the wrong branch. Confirm against the field hierarchy
        # before rearranging.
        if isinstance(lhs_output_field, DateTimeField):
            # Datetime extraction is timezone-aware when USE_TZ is enabled.
            tzname = timezone.get_current_timezone_name() if settings.USE_TZ else None
            sql, tz_params = connection.ops.datetime_extract_sql(self.lookup_name, sql, tzname)
            params.extend(tz_params)
        elif isinstance(lhs_output_field, DateField):
            sql = connection.ops.date_extract_sql(self.lookup_name, sql)
        elif isinstance(lhs_output_field, TimeField):
            sql = connection.ops.time_extract_sql(self.lookup_name, sql)
        else:
            raise ValueError('DateTransform only valid on Date/Time/DateTimeFields')
        return sql, params

    @cached_property
    def output_field(self):
        # Extracted components (year, month, hour, ...) are integers.
        return IntegerField()
|
|
|
|
|
|
|
|
|
|
|
|
class YearTransform(DateTransform):
    """Extracts the year component (``__year``)."""
    lookup_name = 'year'
|
|
|
|
|
|
|
|
|
|
|
|
class YearLookup(Lookup):
    """Base for lookups chained after the ``year`` transform."""

    def year_lookup_bounds(self, connection, year):
        # Ask the backend for the inclusive [start, finish] values that
        # cover `year`, using the datetime variant for datetime columns.
        output_field = self.lhs.lhs.output_field
        if isinstance(output_field, DateTimeField):
            return connection.ops.year_lookup_bounds_for_datetime_field(year)
        return connection.ops.year_lookup_bounds_for_date_field(year)
|
|
|
|
|
|
|
|
|
|
|
|
@YearTransform.register_lookup
class YearExact(YearLookup):
    """``year__exact``: rewritten as BETWEEN over the year's bounds."""
    lookup_name = 'exact'

    def as_sql(self, compiler, connection):
        # Skip the EXTRACT() and instead compare the originating column,
        # that is self.lhs.lhs, against the year's bounds.
        lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(self.year_lookup_bounds(connection, rhs_params[0]))
        return '%s BETWEEN %%s AND %%s' % lhs_sql, params
|
|
|
|
|
|
|
|
|
|
|
|
class YearComparisonLookup(YearLookup):
    """
    Base for gt/gte/lt/lte on the ``year`` transform. Subclasses choose
    which bound of the year (start or finish) the comparison targets by
    overriding get_bound().
    """

    def as_sql(self, compiler, connection):
        # We will need to skip the extract part and instead go
        # directly with the originating field, that is self.lhs.lhs.
        lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        rhs_sql = self.get_rhs_op(connection, rhs_sql)
        start, finish = self.year_lookup_bounds(connection, rhs_params[0])
        params.append(self.get_bound(start, finish))
        return '%s %s' % (lhs_sql, rhs_sql), params

    def get_rhs_op(self, connection, rhs):
        return connection.operators[self.lookup_name] % rhs

    def get_bound(self, start, finish):
        # Fix: the abstract signature must match the call in as_sql()
        # (self.get_bound(start, finish)); the old zero-argument form made a
        # missing override raise TypeError instead of NotImplementedError.
        raise NotImplementedError(
            'subclasses of YearComparisonLookup must provide a get_bound() method'
        )
|
|
|
|
|
|
|
|
|
|
|
|
@YearTransform.register_lookup
class YearGt(YearComparisonLookup):
    """``year__gt``: compare against the end of the year."""
    lookup_name = 'gt'

    def get_bound(self, start, finish):
        return finish
|
|
|
|
|
|
|
|
|
|
|
|
@YearTransform.register_lookup
class YearGte(YearComparisonLookup):
    """``year__gte``: compare against the start of the year."""
    lookup_name = 'gte'

    def get_bound(self, start, finish):
        return start
|
|
|
|
|
|
|
|
|
|
|
|
@YearTransform.register_lookup
class YearLt(YearComparisonLookup):
    """``year__lt``: compare against the start of the year."""
    lookup_name = 'lt'

    def get_bound(self, start, finish):
        return start
|
|
|
|
|
|
|
|
|
|
|
|
@YearTransform.register_lookup
class YearLte(YearComparisonLookup):
    """``year__lte``: compare against the end of the year."""
    lookup_name = 'lte'

    def get_bound(self, start, finish):
        return finish
|
|
|
|
|
|
|
|
|
|
|
|
class MonthTransform(DateTransform):
    """Extracts the month component (``__month``)."""
    lookup_name = 'month'
|
|
|
|
|
|
|
|
|
|
|
|
class DayTransform(DateTransform):
    """Extracts the day-of-month component (``__day``)."""
    lookup_name = 'day'
|
|
|
|
|
|
|
|
|
|
|
|
class WeekDayTransform(DateTransform):
    """Extracts the day-of-week component (``__week_day``)."""
    lookup_name = 'week_day'
|
|
|
|
|
|
|
|
|
|
|
|
class HourTransform(DateTransform):
    """Extracts the hour component (``__hour``)."""
    lookup_name = 'hour'
|
|
|
|
|
|
|
|
|
|
|
|
class MinuteTransform(DateTransform):
    """Extracts the minute component (``__minute``)."""
    lookup_name = 'minute'
|
|
|
|
|
|
|
|
|
|
|
|
class SecondTransform(DateTransform):
    """Extracts the second component (``__second``)."""
    lookup_name = 'second'
|
|
|
|
|
|
|
|
|
|
|
|
# Register the date/time component transforms on each field type.
# Registration order matches the original one-call-per-line form.
for _transform in (YearTransform, MonthTransform, DayTransform, WeekDayTransform):
    DateField.register_lookup(_transform)

for _transform in (HourTransform, MinuteTransform, SecondTransform):
    TimeField.register_lookup(_transform)

for _transform in (
    DateTimeDateTransform, YearTransform, MonthTransform, DayTransform,
    WeekDayTransform, HourTransform, MinuteTransform, SecondTransform,
):
    DateTimeField.register_lookup(_transform)
|