2016-06-03 02:05:25 +08:00
|
|
|
import itertools
|
2016-04-12 22:14:41 +08:00
|
|
|
import math
|
2015-01-28 20:35:27 +08:00
|
|
|
from copy import copy
|
2014-01-18 17:09:43 +08:00
|
|
|
|
2016-05-26 03:00:20 +08:00
|
|
|
from django.core.exceptions import EmptyResultSet
|
2015-08-03 10:30:06 +08:00
|
|
|
from django.db.models.expressions import Func, Value
|
2017-08-11 21:32:32 +08:00
|
|
|
from django.db.models.fields import DateTimeField, Field, IntegerField
|
2015-08-03 10:30:06 +08:00
|
|
|
from django.db.models.query_utils import RegisterLookupMixin
|
2014-01-18 17:09:43 +08:00
|
|
|
from django.utils.functional import cached_property
|
|
|
|
|
2014-09-14 18:34:41 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class Lookup:
    """
    Base class for a query lookup: the condition produced by a
    ``field__<lookup_name>=value`` filter, holding a left-hand side
    (column/expression) and a right-hand side (value or expression).
    """
    # Name under which the lookup is registered (e.g. 'exact'); set by subclasses.
    lookup_name = None
    # When True, rhs is passed through lhs.output_field.get_prep_value().
    prepare_rhs = True

    def __init__(self, lhs, rhs):
        self.lhs, self.rhs = lhs, rhs
        # Normalize rhs (field prep / subquery _prepare) before compilation.
        self.rhs = self.get_prep_lookup()
        if hasattr(self.lhs, 'get_bilateral_transforms'):
            bilateral_transforms = self.lhs.get_bilateral_transforms()
        else:
            bilateral_transforms = []
        if bilateral_transforms:
            # Warn the user as soon as possible if they are trying to apply
            # a bilateral transformation on a nested QuerySet: that won't work.
            from django.db.models.sql.query import Query  # avoid circular import
            if isinstance(rhs, Query):
                raise NotImplementedError("Bilateral transformations on nested querysets are not implemented.")
        self.bilateral_transforms = bilateral_transforms

    def apply_bilateral_transforms(self, value):
        """Wrap `value` in each registered bilateral transform, innermost first."""
        for transform in self.bilateral_transforms:
            value = transform(value)
        return value

    def batch_process_rhs(self, compiler, connection, rhs=None):
        """
        Compile an iterable rhs into parallel lists of SQL placeholders and
        parameters (used by e.g. `in` and `range` lookups).
        """
        if rhs is None:
            rhs = self.rhs
        if self.bilateral_transforms:
            sqls, sqls_params = [], []
            for p in rhs:
                # Each raw value must be wrapped and transformed individually.
                value = Value(p, output_field=self.lhs.output_field)
                value = self.apply_bilateral_transforms(value)
                value = value.resolve_expression(compiler.query)
                sql, sql_params = compiler.compile(value)
                sqls.append(sql)
                sqls_params.extend(sql_params)
        else:
            _, params = self.get_db_prep_lookup(rhs, connection)
            sqls, sqls_params = ['%s'] * len(params), params
        return sqls, sqls_params

    def get_source_expressions(self):
        # A direct-value rhs is not an expression, so only lhs is exposed.
        if self.rhs_is_direct_value():
            return [self.lhs]
        return [self.lhs, self.rhs]

    def set_source_expressions(self, new_exprs):
        # Mirror of get_source_expressions(): one expression means rhs was a
        # direct value and stays untouched.
        if len(new_exprs) == 1:
            self.lhs = new_exprs[0]
        else:
            self.lhs, self.rhs = new_exprs

    def get_prep_lookup(self):
        """Return rhs prepared for the query (subquery or field-level prep)."""
        if hasattr(self.rhs, '_prepare'):
            return self.rhs._prepare(self.lhs.output_field)
        if self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'):
            return self.lhs.output_field.get_prep_value(self.rhs)
        return self.rhs

    def get_db_prep_lookup(self, value, connection):
        """Return (placeholder, params) for a direct rhs value."""
        return ('%s', [value])

    def process_lhs(self, compiler, connection, lhs=None):
        """Compile the left-hand side to (sql, params)."""
        lhs = lhs or self.lhs
        if hasattr(lhs, 'resolve_expression'):
            lhs = lhs.resolve_expression(compiler.query)
        return compiler.compile(lhs)

    def process_rhs(self, compiler, connection):
        """Compile the right-hand side to (sql, params)."""
        value = self.rhs
        if self.bilateral_transforms:
            if self.rhs_is_direct_value():
                # Do not call get_db_prep_lookup here as the value will be
                # transformed before being used for lookup
                value = Value(value, output_field=self.lhs.output_field)
            value = self.apply_bilateral_transforms(value)
            value = value.resolve_expression(compiler.query)
        if hasattr(value, 'as_sql'):
            sql, params = compiler.compile(value)
            # Parenthesize so subquery/expression SQL composes safely.
            return '(' + sql + ')', params
        else:
            return self.get_db_prep_lookup(value, connection)

    def rhs_is_direct_value(self):
        # Anything without as_sql() is a plain Python value.
        return not hasattr(self.rhs, 'as_sql')

    def relabeled_clone(self, relabels):
        """Return a shallow copy with table aliases remapped per `relabels`."""
        new = copy(self)
        new.lhs = new.lhs.relabeled_clone(relabels)
        if hasattr(new.rhs, 'relabeled_clone'):
            new.rhs = new.rhs.relabeled_clone(relabels)
        return new

    def get_group_by_cols(self):
        cols = self.lhs.get_group_by_cols()
        if hasattr(self.rhs, 'get_group_by_cols'):
            cols.extend(self.rhs.get_group_by_cols())
        return cols

    def as_sql(self, compiler, connection):
        # Subclasses must produce the final (sql, params) pair.
        raise NotImplementedError

    @cached_property
    def contains_aggregate(self):
        return self.lhs.contains_aggregate or getattr(self.rhs, 'contains_aggregate', False)

    @cached_property
    def contains_over_clause(self):
        return self.lhs.contains_over_clause or getattr(self.rhs, 'contains_over_clause', False)

    @property
    def is_summary(self):
        return self.lhs.is_summary or getattr(self.rhs, 'is_summary', False)
|
|
|
|
|
2014-01-18 17:09:43 +08:00
|
|
|
|
2015-08-03 10:30:06 +08:00
|
|
|
class Transform(RegisterLookupMixin, Func):
    """
    RegisterLookupMixin() is first so that get_lookup() and get_transform()
    first examine self and then check output_field.
    """
    # A bilateral transform is applied to both sides of a lookup.
    bilateral = False
    arity = 1

    @property
    def lhs(self):
        # The single wrapped expression is this transform's left-hand side.
        return self.get_source_expressions()[0]

    def get_bilateral_transforms(self):
        """Collect bilateral transforms from the lhs chain, then self."""
        source = self.lhs
        if hasattr(source, 'get_bilateral_transforms'):
            transforms = source.get_bilateral_transforms()
        else:
            transforms = []
        if self.bilateral:
            transforms.append(self.__class__)
        return transforms
|
|
|
|
|
|
|
|
|
2014-01-18 17:09:43 +08:00
|
|
|
class BuiltinLookup(Lookup):
    """Lookup compiled with the backend's operator table (`lhs OP rhs`)."""

    def process_lhs(self, compiler, connection, lhs=None):
        sql, params = super().process_lhs(compiler, connection, lhs)
        output_field = self.lhs.output_field
        internal_type = output_field.get_internal_type()
        db_type = output_field.db_type(connection=connection)
        # Apply backend-specific casts for the field type and the lookup.
        sql = connection.ops.field_cast_sql(db_type, internal_type) % sql
        sql = connection.ops.lookup_cast(self.lookup_name, internal_type) % sql
        return sql, list(params)

    def as_sql(self, compiler, connection):
        lhs_sql, params = self.process_lhs(compiler, connection)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        params.extend(rhs_params)
        return '%s %s' % (lhs_sql, self.get_rhs_op(connection, rhs_sql)), params

    def get_rhs_op(self, connection, rhs):
        # Backends map lookup_name to an operator template (e.g. '= %s').
        return connection.operators[self.lookup_name] % rhs
|
|
|
|
|
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class FieldGetDbPrepValueMixin:
    """
    Some lookups require Field.get_db_prep_value() to be called on their
    inputs.
    """
    # When True, `value` in get_db_prep_lookup() is treated as an iterable.
    get_db_prep_lookup_value_is_iterable = False

    def get_db_prep_lookup(self, value, connection):
        # For relational fields, use the output_field of the 'field' attribute.
        related = getattr(self.lhs.output_field, 'field', None)
        prep = getattr(related, 'get_db_prep_value', None)
        if prep is None:
            prep = self.lhs.output_field.get_db_prep_value
        if self.get_db_prep_lookup_value_is_iterable:
            prepared = [prep(v, connection, prepared=True) for v in value]
        else:
            prepared = [prep(value, connection, prepared=True)]
        return ('%s', prepared)
|
|
|
|
|
|
|
|
|
|
|
|
class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin):
    """
    Some lookups require Field.get_db_prep_value() to be called on each value
    in an iterable.
    """
    get_db_prep_lookup_value_is_iterable = True

    def get_prep_lookup(self):
        """Prepare each rhs item, leaving embedded expressions untouched."""
        prepared_values = []
        if hasattr(self.rhs, '_prepare'):
            # A subquery is like an iterable but its items shouldn't be
            # prepared independently.
            return self.rhs._prepare(self.lhs.output_field)
        for rhs_value in self.rhs:
            if hasattr(rhs_value, 'resolve_expression'):
                # An expression will be handled by the database but can coexist
                # alongside real values.
                pass
            elif self.prepare_rhs and hasattr(self.lhs.output_field, 'get_prep_value'):
                rhs_value = self.lhs.output_field.get_prep_value(rhs_value)
            prepared_values.append(rhs_value)
        return prepared_values

    def process_rhs(self, compiler, connection):
        if self.rhs_is_direct_value():
            # rhs should be an iterable of values. Use batch_process_rhs()
            # to prepare/transform those values.
            return self.batch_process_rhs(compiler, connection)
        else:
            return super().process_rhs(compiler, connection)

    def resolve_expression_parameter(self, compiler, connection, sql, param):
        """Compile `param` if it is an expression; otherwise pass it through."""
        params = [param]
        if hasattr(param, 'resolve_expression'):
            param = param.resolve_expression(compiler.query)
        if hasattr(param, 'as_sql'):
            sql, params = param.as_sql(compiler, connection)
        return sql, params

    def batch_process_rhs(self, compiler, connection, rhs=None):
        pre_processed = super().batch_process_rhs(compiler, connection, rhs)
        # The params list may contain expressions which compile to a
        # sql/param pair. Zip them to get sql and param pairs that refer to the
        # same argument and attempt to replace them with the result of
        # compiling the param step.
        sql, params = zip(*(
            self.resolve_expression_parameter(compiler, connection, sql, param)
            for sql, param in zip(*pre_processed)
        ))
        # Each resolved parameter is itself a params list; flatten them.
        params = itertools.chain.from_iterable(params)
        return sql, tuple(params)
|
|
|
|
|
2016-04-23 01:43:05 +08:00
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class Exact(FieldGetDbPrepValueMixin, BuiltinLookup):
    """SQL equality comparison (``exact``)."""
    lookup_name = 'exact'

    def process_rhs(self, compiler, connection):
        from django.db.models.sql.query import Query
        if isinstance(self.rhs, Query):
            # NOTE: mutates the rhs query in place before delegating.
            if self.rhs.has_limit_one():
                # The subquery must select only the pk.
                self.rhs.clear_select_clause()
                self.rhs.add_fields(['pk'])
            else:
                raise ValueError(
                    'The QuerySet value for an exact lookup must be limited to '
                    'one result using slicing.'
                )
        return super().process_rhs(compiler, connection)
|
|
|
|
|
2016-11-13 01:11:23 +08:00
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class IExact(BuiltinLookup):
    """Case-insensitive exact comparison (``iexact``)."""
    lookup_name = 'iexact'
    prepare_rhs = False

    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        if params:
            # Let the backend normalize the literal for case-insensitive match.
            params[0] = connection.ops.prep_for_iexact_query(params[0])
        return sql, params
|
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class GreaterThan(FieldGetDbPrepValueMixin, BuiltinLookup):
    """SQL ``>`` comparison (``gt``)."""
    lookup_name = 'gt'
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class GreaterThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup):
    """SQL ``>=`` comparison (``gte``)."""
    lookup_name = 'gte'
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class LessThan(FieldGetDbPrepValueMixin, BuiltinLookup):
    """SQL ``<`` comparison (``lt``)."""
    lookup_name = 'lt'
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class LessThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup):
    """SQL ``<=`` comparison (``lte``)."""
    lookup_name = 'lte'
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class IntegerFieldFloatRounding:
    """
    Allow floats to work as query values for IntegerField. Without this, the
    decimal portion of the float would always be discarded.
    """
    def get_prep_lookup(self):
        rhs = self.rhs
        if isinstance(rhs, float):
            # Round up before the integer comparison is applied.
            self.rhs = math.ceil(rhs)
        return super().get_prep_lookup()
|
2016-04-12 22:14:41 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@IntegerField.register_lookup
class IntegerGreaterThanOrEqual(IntegerFieldFloatRounding, GreaterThanOrEqual):
    """``gte`` for IntegerField with float rhs rounded up via the mixin."""
    pass
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@IntegerField.register_lookup
class IntegerLessThan(IntegerFieldFloatRounding, LessThan):
    """``lt`` for IntegerField with float rhs rounded up via the mixin."""
    pass
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
    """SQL ``IN`` membership test (``in``)."""
    lookup_name = 'in'

    def process_rhs(self, compiler, connection):
        db_rhs = getattr(self.rhs, '_db', None)
        if db_rhs is not None and db_rhs != connection.alias:
            raise ValueError(
                "Subqueries aren't allowed across different databases. Force "
                "the inner query to be evaluated using `list(inner_query)`."
            )

        if self.rhs_is_direct_value():
            # Deduplicate values when possible; falls back to the raw
            # iterable if items aren't hashable.
            try:
                rhs = set(self.rhs)
            except TypeError:  # Unhashable items in self.rhs
                rhs = self.rhs

            # An empty IN clause can never match anything.
            if not rhs:
                raise EmptyResultSet

            # rhs should be an iterable; use batch_process_rhs() to
            # prepare/transform those values.
            sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs)
            placeholder = '(' + ', '.join(sqls) + ')'
            return (placeholder, sqls_params)
        else:
            # A subquery rhs with no explicit select is restricted to the pk.
            if not getattr(self.rhs, 'has_select_fields', True):
                self.rhs.clear_select_clause()
                self.rhs.add_fields(['pk'])
            return super().process_rhs(compiler, connection)

    def get_rhs_op(self, connection, rhs):
        return 'IN %s' % rhs

    def as_sql(self, compiler, connection):
        max_in_list_size = connection.ops.max_in_list_size()
        if self.rhs_is_direct_value() and max_in_list_size and len(self.rhs) > max_in_list_size:
            return self.split_parameter_list_as_sql(compiler, connection)
        return super().as_sql(compiler, connection)

    def split_parameter_list_as_sql(self, compiler, connection):
        # This is a special case for databases which limit the number of
        # elements which can appear in an 'IN' clause.
        max_in_list_size = connection.ops.max_in_list_size()
        lhs, lhs_params = self.process_lhs(compiler, connection)
        rhs, rhs_params = self.batch_process_rhs(compiler, connection)
        in_clause_elements = ['(']
        params = []
        # Emit OR-joined chunks: (lhs IN (...) OR lhs IN (...) ...)
        for offset in range(0, len(rhs_params), max_in_list_size):
            if offset > 0:
                in_clause_elements.append(' OR ')
            in_clause_elements.append('%s IN (' % lhs)
            params.extend(lhs_params)
            sqls = rhs[offset: offset + max_in_list_size]
            sqls_params = rhs_params[offset: offset + max_in_list_size]
            param_group = ', '.join(sqls)
            in_clause_elements.append(param_group)
            in_clause_elements.append(')')
            params.extend(sqls_params)
        in_clause_elements.append(')')
        return ''.join(in_clause_elements), params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2014-01-18 17:09:43 +08:00
|
|
|
class PatternLookup(BuiltinLookup):
    """Base class for LIKE-style lookups (contains/startswith/endswith)."""

    def get_rhs_op(self, connection, rhs):
        # Assume we are in startswith. We need to produce SQL like:
        # col LIKE %s, ['thevalue%']
        # For python values we can (and should) do that directly in Python,
        # but if the value is for example reference to other column, then
        # we need to add the % pattern match to the lookup by something like
        # col LIKE othercol || '%%'
        # So, for Python values we don't need any special pattern, but for
        # SQL reference values or SQL transformations we need the correct
        # pattern added.
        if hasattr(self.rhs, 'as_sql') or self.bilateral_transforms:
            pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc)
            return pattern.format(rhs)
        else:
            return super().get_rhs_op(connection, rhs)
|
2014-01-18 17:09:43 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class Contains(PatternLookup):
    """Substring match (``contains``), compiled to a LIKE pattern."""
    lookup_name = 'contains'
    prepare_rhs = False

    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE wildcards in the value and wrap it with %...%.
            params[0] = "%%%s%%" % connection.ops.prep_for_like_query(params[0])
        return sql, params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class IContains(Contains):
    """Case-insensitive variant of Contains (``icontains``)."""
    lookup_name = 'icontains'
    prepare_rhs = False
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class StartsWith(PatternLookup):
    """Prefix match (``startswith``), compiled to a LIKE pattern."""
    lookup_name = 'startswith'
    prepare_rhs = False

    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE wildcards in the value and append a trailing %.
            params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
        return sql, params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class IStartsWith(PatternLookup):
    """Case-insensitive prefix match (``istartswith``)."""
    lookup_name = 'istartswith'
    prepare_rhs = False

    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE wildcards in the value and append a trailing %.
            params[0] = "%s%%" % connection.ops.prep_for_like_query(params[0])
        return sql, params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class EndsWith(PatternLookup):
    """Suffix match (``endswith``), compiled to a LIKE pattern."""
    lookup_name = 'endswith'
    prepare_rhs = False

    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE wildcards in the value and prepend a leading %.
            params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
        return sql, params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class IEndsWith(PatternLookup):
    """Case-insensitive suffix match (``iendswith``)."""
    lookup_name = 'iendswith'
    prepare_rhs = False

    def process_rhs(self, qn, connection):
        sql, params = super().process_rhs(qn, connection)
        if params and not self.bilateral_transforms:
            # Escape LIKE wildcards in the value and prepend a leading %.
            params[0] = "%%%s" % connection.ops.prep_for_like_query(params[0])
        return sql, params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class Range(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
    """Inclusive bounds test (``range``), compiled to BETWEEN."""
    lookup_name = 'range'

    def get_rhs_op(self, connection, rhs):
        lower, upper = rhs[0], rhs[1]
        return "BETWEEN %s AND %s" % (lower, upper)
|
|
|
|
|
2016-11-13 01:11:23 +08:00
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class IsNull(BuiltinLookup):
    """NULL test (``isnull``); truthy rhs means IS NULL, falsy IS NOT NULL."""
    lookup_name = 'isnull'
    prepare_rhs = False

    def as_sql(self, compiler, connection):
        sql, params = compiler.compile(self.lhs)
        template = "%s IS NULL" if self.rhs else "%s IS NOT NULL"
        return template % sql, params
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class Regex(BuiltinLookup):
    """Regular-expression match (``regex``)."""
    lookup_name = 'regex'
    prepare_rhs = False

    def as_sql(self, compiler, connection):
        # Backends without a native operator supply a template via
        # ops.regex_lookup() instead.
        if self.lookup_name not in connection.operators:
            lhs, lhs_params = self.process_lhs(compiler, connection)
            rhs, rhs_params = self.process_rhs(compiler, connection)
            sql_template = connection.ops.regex_lookup(self.lookup_name)
            return sql_template % (lhs, rhs), lhs_params + rhs_params
        return super().as_sql(compiler, connection)
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2017-01-16 22:22:26 +08:00
|
|
|
@Field.register_lookup
class IRegex(Regex):
    """Case-insensitive variant of Regex (``iregex``)."""
    lookup_name = 'iregex'
|
2016-11-13 01:11:23 +08:00
|
|
|
|
|
|
|
|
2015-08-03 10:30:06 +08:00
|
|
|
class YearLookup(Lookup):
    """Base for lookups applied to an extracted year (lhs is the extract)."""

    def year_lookup_bounds(self, connection, year):
        # The originating field is self.lhs.lhs (the extract's source).
        output_field = self.lhs.lhs.output_field
        if isinstance(output_field, DateTimeField):
            return connection.ops.year_lookup_bounds_for_datetime_field(year)
        return connection.ops.year_lookup_bounds_for_date_field(year)
|
|
|
|
|
|
|
|
|
|
|
|
class YearComparisonLookup(YearLookup):
    """Optimize year comparisons by comparing the raw column to a bound."""

    def as_sql(self, compiler, connection):
        # We will need to skip the extract part and instead go
        # directly with the originating field, that is self.lhs.lhs.
        lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        rhs_sql = self.get_rhs_op(connection, rhs_sql)
        # rhs_params[0] is the year value; replace it with the chosen bound.
        start, finish = self.year_lookup_bounds(connection, rhs_params[0])
        params.append(self.get_bound(start, finish))
        return '%s %s' % (lhs_sql, rhs_sql), params

    def get_rhs_op(self, connection, rhs):
        return connection.operators[self.lookup_name] % rhs

    def get_bound(self, start, finish):
        """Return which end of the year's range the comparison uses."""
        raise NotImplementedError(
            'subclasses of YearComparisonLookup must provide a get_bound() method'
        )
|
|
|
|
|
|
|
|
|
2016-03-05 20:05:47 +08:00
|
|
|
class YearExact(YearLookup, Exact):
    """Optimize ``year exact`` into a BETWEEN over the year's date range."""
    lookup_name = 'exact'

    def as_sql(self, compiler, connection):
        # We will need to skip the extract part and instead go
        # directly with the originating field, that is self.lhs.lhs.
        lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs)
        rhs_sql, rhs_params = self.process_rhs(compiler, connection)
        try:
            # Check that rhs_params[0] exists (IndexError),
            # it isn't None (TypeError), and is a number (ValueError)
            int(rhs_params[0])
        except (IndexError, TypeError, ValueError):
            # Can't determine the bounds before executing the query, so skip
            # optimizations by falling back to a standard exact comparison.
            return super().as_sql(compiler, connection)
        bounds = self.year_lookup_bounds(connection, rhs_params[0])
        params.extend(bounds)
        return '%s BETWEEN %%s AND %%s' % lhs_sql, params
|
|
|
|
|
|
|
|
|
2015-08-03 10:30:06 +08:00
|
|
|
class YearGt(YearComparisonLookup):
    """``year > N`` compares the column against the end of year N."""
    lookup_name = 'gt'

    def get_bound(self, start, finish):
        return finish
|
|
|
|
|
|
|
|
|
|
|
|
class YearGte(YearComparisonLookup):
    """``year >= N`` compares the column against the start of year N."""
    lookup_name = 'gte'

    def get_bound(self, start, finish):
        return start
|
|
|
|
|
|
|
|
|
|
|
|
class YearLt(YearComparisonLookup):
    """``year < N`` compares the column against the start of year N."""
    lookup_name = 'lt'

    def get_bound(self, start, finish):
        return start
|
|
|
|
|
|
|
|
|
|
|
|
class YearLte(YearComparisonLookup):
    """``year <= N`` compares the column against the end of year N."""
    lookup_name = 'lte'

    def get_bound(self, start, finish):
        return finish
|