import datetime

from django.conf import settings
from django.db.backends.utils import truncate_name, typecast_date, typecast_timestamp
from django.db.models.sql import compiler
from django.db.models.sql.constants import MULTI
from django.utils import six
from django.utils.six.moves import zip, zip_longest
from django.utils import timezone

SQLCompiler = compiler.SQLCompiler


class GeoSQLCompiler(compiler.SQLCompiler):

    def get_columns(self, with_aliases=False):
        """
        Return the list of columns to use in the select statement. If no
        columns have been specified, returns all columns relating to fields in
        the model.

        If 'with_aliases' is true, any column names that are duplicated
        (without the table names) are given unique aliases. This is needed in
        some cases to avoid ambiguity with nested queries.

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        result = ['(%s) AS %s' % (self.get_extra_select_format(alias) % col[0], qn2(alias))
                  for alias, col in six.iteritems(self.query.extra_select)]
        params = []
        aliases = set(self.query.extra_select.keys())
        if with_aliases:
            col_aliases = aliases.copy()
        else:
            col_aliases = set()
        if self.query.select:
            only_load = self.deferred_to_columns()
            # This loop customized for GeoQuery.
            for col, field in self.query.select:
                if isinstance(col, (list, tuple)):
                    alias, column = col
                    table = self.query.alias_map[alias].table_name
                    if table in only_load and column not in only_load[table]:
                        continue
                    r = self.get_field_select(field, alias, column)
                    if with_aliases:
                        if col[1] in col_aliases:
                            c_alias = 'Col%d' % len(col_aliases)
                            result.append('%s AS %s' % (r, c_alias))
                            aliases.add(c_alias)
                            col_aliases.add(c_alias)
                        else:
                            result.append('%s AS %s' % (r, qn2(col[1])))
                            aliases.add(r)
                            col_aliases.add(col[1])
                    else:
                        result.append(r)
                        aliases.add(r)
                        col_aliases.add(col[1])
                else:
                    col_sql, col_params = col.as_sql(qn, self.connection)
                    result.append(col_sql)
                    params.extend(col_params)

                    if hasattr(col, 'alias'):
                        aliases.add(col.alias)
                        col_aliases.add(col.alias)

        elif self.query.default_cols:
            cols, new_aliases = self.get_default_columns(with_aliases, col_aliases)
            result.extend(cols)
            aliases.update(new_aliases)

        max_name_length = self.connection.ops.max_name_length()
        for alias, aggregate in self.query.aggregate_select.items():
            agg_sql, agg_params = aggregate.as_sql(qn, self.connection)
            if alias is None:
                result.append(agg_sql)
            else:
                result.append('%s AS %s' % (agg_sql, qn(truncate_name(alias, max_name_length))))
            params.extend(agg_params)

        # This loop customized for GeoQuery.
        for (table, col), field in self.query.related_select_cols:
            r = self.get_field_select(field, table, col)
            if with_aliases and col in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (r, c_alias))
                aliases.add(c_alias)
                col_aliases.add(c_alias)
            else:
                result.append(r)
                aliases.add(r)
                col_aliases.add(col)

        self._select_aliases = aliases
        return result, params

    def get_default_columns(self, with_aliases=False, col_aliases=None,
                            start_alias=None, opts=None, as_pairs=False, from_parent=None):
        """
        Computes the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.

        Returns a list of strings, quoted appropriately for use in SQL
        directly, as well as a set of aliases used in the select statement (if
        'as_pairs' is True, returns a list of (alias, col_name) pairs instead
        of strings as the first component and None as the second component).

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        result = []
        if opts is None:
            opts = self.query.get_meta()
        aliases = set()
        only_load = self.deferred_to_columns()
        seen = self.query.included_inherited_models.copy()
        if start_alias:
            seen[None] = start_alias
        for field, model in opts.get_fields_with_model():
            if from_parent and model is not None and issubclass(from_parent, model):
                # Avoid loading data for already loaded parents.
                continue
            alias = self.query.join_parent_model(opts, model, start_alias, seen)
            table = self.query.alias_map[alias].table_name
            if table in only_load and field.column not in only_load[table]:
                continue
            if as_pairs:
                result.append((alias, field.column))
                aliases.add(alias)
                continue
            # This part of the function is customized for GeoQuery. We
            # see if there was any custom selection specified in the
            # dictionary, and set up the selection format appropriately.
            field_sel = self.get_field_select(field, alias)
            if with_aliases and field.column in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (field_sel, c_alias))
                col_aliases.add(c_alias)
                aliases.add(c_alias)
            else:
                r = field_sel
                result.append(r)
                aliases.add(r)
                if with_aliases:
                    col_aliases.add(field.column)
        return result, aliases

    def resolve_columns(self, row, fields=()):
        """
        This routine is necessary so that distances and geometries returned
        from extra selection SQL get resolved appropriately into Python
        objects.
        """
        values = []
        aliases = list(self.query.extra_select)

        # Have to set a starting row number offset that is used for
        # determining the correct starting row index -- needed for
        # doing pagination with Oracle.
        rn_offset = 0
        if self.connection.ops.oracle:
            if self.query.high_mark is not None or self.query.low_mark:
                rn_offset = 1
        index_start = rn_offset + len(aliases)

        # Converting any extra selection values (e.g., geometries and
        # distance objects added by GeoQuerySet methods).
        values = [self.query.convert_values(v,
                                            self.query.extra_select_fields.get(a, None),
                                            self.connection)
                  for v, a in zip(row[rn_offset:index_start], aliases)]
        if self.connection.ops.oracle or getattr(self.query, 'geo_values', False):
            # We resolve the rest of the columns if we're on Oracle or if
            # the `geo_values` attribute is defined.
            for value, field in zip_longest(row[index_start:], fields):
                values.append(self.query.convert_values(value, field, self.connection))
        else:
            values.extend(row[index_start:])
        return tuple(values)

    #### Routines unique to GeoQuery ####
    def get_extra_select_format(self, alias):
        sel_fmt = '%s'
        if hasattr(self.query, 'custom_select') and alias in self.query.custom_select:
            sel_fmt = sel_fmt % self.query.custom_select[alias]
        return sel_fmt

    def get_field_select(self, field, alias=None, column=None):
        """
        Returns the SELECT SQL string for the given field. Figures out
        if any custom selection SQL is needed for the column. The `alias`
        keyword may be used to manually specify the database table where
        the column exists, if not in the model associated with this
        `GeoQuery`. Similarly, `column` may be used to specify the exact
        column name, rather than using the `column` attribute on `field`.
        """
        sel_fmt = self.get_select_format(field)
        if field in self.query.custom_select:
            field_sel = sel_fmt % self.query.custom_select[field]
        else:
            field_sel = sel_fmt % self._field_column(field, alias, column)
        return field_sel

    def get_select_format(self, fld):
        """
        Returns the selection format string, depending on the requirements
        of the spatial backend. For example, Oracle and MySQL require custom
        selection formats in order to retrieve geometries in OGC WKT. For all
        other fields a simple '%s' format string is returned.
        """
        if self.connection.ops.select and hasattr(fld, 'geom_type'):
            # This allows operations to be done on fields in the SELECT,
            # overriding their values -- used by the Oracle and MySQL
            # spatial backends to get database values as WKT, and by the
            # `transform` method.
            sel_fmt = self.connection.ops.select

            # Because WKT doesn't contain spatial reference information,
            # the SRID is prefixed to the returned WKT to ensure that the
            # transformed geometries have an SRID different than that of the
            # field -- this is only used by `transform` for Oracle and
            # SpatiaLite backends.
            if self.query.transformed_srid and (self.connection.ops.oracle or
                                                self.connection.ops.spatialite):
                sel_fmt = "'SRID=%d;'||%s" % (self.query.transformed_srid, sel_fmt)
        else:
            sel_fmt = '%s'
        return sel_fmt

    # Private API utilities, subject to change.
    def _field_column(self, field, table_alias=None, column=None):
        """
        Helper function that returns the database column for the given field.
        The table and column are returned (quoted) in the proper format, e.g.,
        `"geoapp_city"."point"`. If `table_alias` is not specified, the
        database table associated with the model of this `GeoQuery` will be
        used. If `column` is specified, it will be used instead of the value
        in `field.column`.
        """
        if table_alias is None:
            table_alias = self.query.get_meta().db_table
        return "%s.%s" % (self.quote_name_unless_alias(table_alias),
                          self.connection.ops.quote_name(column or field.column))


class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler):
    pass


class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler):
    pass


class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler):
    pass


class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler):
    pass


class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler):
    """
    This is overridden for GeoDjango to properly cast date columns, since
    `GeoQuery.resolve_columns` is used for spatial values.
    See #14648, #16757.
    """
    def results_iter(self):
        if self.connection.ops.oracle:
            from django.db.models.fields import DateTimeField
            fields = [DateTimeField()]
        else:
            needs_string_cast = self.connection.features.needs_datetime_string_cast

        offset = len(self.query.extra_select)
        for rows in self.execute_sql(MULTI):
            for row in rows:
                date = row[offset]
                if self.connection.ops.oracle:
                    date = self.resolve_columns(row, fields)[offset]
                elif needs_string_cast:
                    date = typecast_date(str(date))
                if isinstance(date, datetime.datetime):
                    date = date.date()
                yield date


class SQLDateTimeCompiler(compiler.SQLDateTimeCompiler, GeoSQLCompiler):
    """
    This is overridden for GeoDjango to properly cast date columns, since
    `GeoQuery.resolve_columns` is used for spatial values.
    See #14648, #16757.
    """
    def results_iter(self):
        if self.connection.ops.oracle:
            from django.db.models.fields import DateTimeField
            fields = [DateTimeField()]
        else:
            needs_string_cast = self.connection.features.needs_datetime_string_cast

        offset = len(self.query.extra_select)
        for rows in self.execute_sql(MULTI):
            for row in rows:
                datetime = row[offset]
                if self.connection.ops.oracle:
                    datetime = self.resolve_columns(row, fields)[offset]
                elif needs_string_cast:
                    datetime = typecast_timestamp(str(datetime))
                # Datetimes are artificially returned in UTC on databases that
                # don't support time zone. Restore the zone used in the query.
                if settings.USE_TZ:
                    datetime = datetime.replace(tzinfo=None)
                    datetime = timezone.make_aware(datetime, self.query.tzinfo)
                yield datetime