from itertools import izip

from django.db.models.query import sql
from django.db.models.fields.related import ForeignKey

from django.contrib.gis.db.backend import SpatialBackend
from django.contrib.gis.db.models.fields import GeometryField
from django.contrib.gis.db.models.sql import aggregates as gis_aggregates_module
from django.contrib.gis.db.models.sql.conversion import AreaField, DistanceField, GeomField
from django.contrib.gis.db.models.sql.where import GeoWhereNode
from django.contrib.gis.measure import Area, Distance

# Valid GIS query types.
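# The spatial lookups are backend-specific; depending on the backend in use,
# `SpatialBackend.gis_terms` may add terms such as 'contains', 'intersects',
# or 'dwithin' to the standard Django query terms.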
ALL_TERMS = sql.constants.QUERY_TERMS.copy()
ALL_TERMS.update(SpatialBackend.gis_terms)

# Pulling out other needed constants/routines to avoid attribute lookups.
TABLE_NAME = sql.constants.TABLE_NAME
get_proxied_model = sql.query.get_proxied_model

class GeoQuery(sql.Query):
    """
    A single spatial SQL query.
    """
    # Overriding the valid query terms.
    query_terms = ALL_TERMS
    aggregates_module = gis_aggregates_module

    #### Methods overridden from the base Query class ####
    def __init__(self, model, conn):
        super(GeoQuery, self).__init__(model, conn, where=GeoWhereNode)
        # The following attributes are customized for the GeoQuerySet.
        # The GeoWhereNode and SpatialBackend classes contain backend-specific
        # routines and functions.
        self.custom_select = {}
        self.transformed_srid = None
        self.extra_select_fields = {}
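        # Within this module, `custom_select` maps fields and extra-select
        # aliases to customized selection SQL (see `get_field_select` and
        # `get_extra_select_format`), `transformed_srid` holds the target SRID
        # when a geometry transformation has been requested (see
        # `get_select_format`), and `extra_select_fields` maps extra-select
        # aliases to conversion fields such as AreaField or DistanceField
        # (see `resolve_columns` and `convert_values`).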

    if SpatialBackend.oracle:
        # Have to override this so that GeoQuery, instead of OracleQuery,
        # is returned when unpickling.
        def __reduce__(self):
            callable, args, data = super(GeoQuery, self).__reduce__()
            return (unpickle_geoquery, (), data)

    def clone(self, *args, **kwargs):
        obj = super(GeoQuery, self).clone(*args, **kwargs)
        # The customized selection dictionary and the transformed SRID flag
        # also have to be copied over to obj.
        obj.custom_select = self.custom_select.copy()
        obj.transformed_srid = self.transformed_srid
        obj.extra_select_fields = self.extra_select_fields.copy()
        return obj

    def get_columns(self, with_aliases=False):
        """
        Return the list of columns to use in the select statement. If no
        columns have been specified, returns all columns relating to fields in
        the model.

        If 'with_aliases' is true, any column names that are duplicated
        (without the table names) are given unique aliases. This is needed in
        some cases to avoid ambiguity with nested queries.

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        result = ['(%s) AS %s' % (self.get_extra_select_format(alias) % col[0], qn2(alias))
                  for alias, col in self.extra_select.iteritems()]
        aliases = set(self.extra_select.keys())
        if with_aliases:
            col_aliases = aliases.copy()
        else:
            col_aliases = set()
        if self.select:
            only_load = self.deferred_to_columns()
            # This loop is customized for GeoQuery.
            for col, field in izip(self.select, self.select_fields):
                if isinstance(col, (list, tuple)):
                    alias, column = col
                    table = self.alias_map[alias][TABLE_NAME]
                    if table in only_load and col not in only_load[table]:
                        continue
                    r = self.get_field_select(field, alias, column)
                    if with_aliases:
                        if col[1] in col_aliases:
                            c_alias = 'Col%d' % len(col_aliases)
                            result.append('%s AS %s' % (r, c_alias))
                            aliases.add(c_alias)
                            col_aliases.add(c_alias)
                        else:
                            result.append('%s AS %s' % (r, qn2(col[1])))
                            aliases.add(r)
                            col_aliases.add(col[1])
                    else:
                        result.append(r)
                        aliases.add(r)
                        col_aliases.add(col[1])
                else:
                    result.append(col.as_sql(quote_func=qn))
                    if hasattr(col, 'alias'):
                        aliases.add(col.alias)
                        col_aliases.add(col.alias)
        elif self.default_cols:
            cols, new_aliases = self.get_default_columns(with_aliases,
                                                         col_aliases)
            result.extend(cols)
            aliases.update(new_aliases)

        result.extend([
            '%s%s' % (
                self.get_extra_select_format(alias) % aggregate.as_sql(quote_func=qn),
                alias is not None and ' AS %s' % alias or ''
            )
            for alias, aggregate in self.aggregate_select.items()
        ])

        # This loop is customized for GeoQuery.
        for (table, col), field in izip(self.related_select_cols, self.related_select_fields):
            r = self.get_field_select(field, table, col)
            if with_aliases and col in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (r, c_alias))
                aliases.add(c_alias)
                col_aliases.add(c_alias)
            else:
                result.append(r)
                aliases.add(r)
                col_aliases.add(col)

        self._select_aliases = aliases
        return result

    def get_default_columns(self, with_aliases=False, col_aliases=None,
                            start_alias=None, opts=None, as_pairs=False):
        """
        Computes the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.

        Returns a list of strings, quoted appropriately for use in SQL
        directly, as well as a set of aliases used in the select statement (if
        'as_pairs' is True, returns a list of (alias, col_name) pairs instead
        of strings as the first component and None as the second component).

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        result = []
        if opts is None:
            opts = self.model._meta
        aliases = set()
        only_load = self.deferred_to_columns()
        # Skip all proxies to the root proxied model.
        proxied_model = get_proxied_model(opts)

        if start_alias:
            seen = {None: start_alias}
        for field, model in opts.get_fields_with_model():
            if start_alias:
                try:
                    alias = seen[model]
                except KeyError:
                    if model is proxied_model:
                        alias = start_alias
                    else:
                        link_field = opts.get_ancestor_link(model)
                        alias = self.join((start_alias, model._meta.db_table,
                                           link_field.column, model._meta.pk.column))
                    seen[model] = alias
            else:
                # If we're starting from the base model of the queryset, the
                # aliases will have already been set up in pre_sql_setup(), so
                # we can save time here.
                alias = self.included_inherited_models[model]
            table = self.alias_map[alias][TABLE_NAME]
            if table in only_load and field.column not in only_load[table]:
                continue
            if as_pairs:
                result.append((alias, field.column))
                aliases.add(alias)
                continue
            # This part of the function is customized for GeoQuery. We
            # see if there was any custom selection specified in the
            # dictionary, and set up the selection format appropriately.
            field_sel = self.get_field_select(field, alias)
            if with_aliases and field.column in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (field_sel, c_alias))
                col_aliases.add(c_alias)
                aliases.add(c_alias)
            else:
                r = field_sel
                result.append(r)
                aliases.add(r)
                if with_aliases:
                    col_aliases.add(field.column)
        return result, aliases

    def resolve_columns(self, row, fields=()):
        """
        This routine is necessary so that distances and geometries returned
        from extra selection SQL get resolved appropriately into Python
        objects.
        """
        values = []
        aliases = self.extra_select.keys()
        if self.aggregates:
            # If we have an aggregate annotation, we must extend the aliases
            # so their corresponding row values are included.
            aliases.extend([None for i in xrange(len(self.aggregates))])

        # Have to set a starting row number offset that is used for
        # determining the correct starting row index -- needed for
        # doing pagination with Oracle.
        rn_offset = 0
        if SpatialBackend.oracle:
            if self.high_mark is not None or self.low_mark: rn_offset = 1
        index_start = rn_offset + len(aliases)
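        # At this point the row is laid out as: an optional leading row-number
        # column (Oracle slicing only), followed by the extra-select and
        # aggregate values, followed by the model field values -- the slices
        # below rely on that ordering.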

        # Converting any extra selection values (e.g., geometries and
        # distance objects added by GeoQuerySet methods).
        values = [self.convert_values(v, self.extra_select_fields.get(a, None))
                  for v, a in izip(row[rn_offset:index_start], aliases)]
        if SpatialBackend.oracle or getattr(self, 'geo_values', False):
            # We resolve the rest of the columns if we're on Oracle or if
            # the `geo_values` attribute is defined.
            for value, field in izip(row[index_start:], fields):
                values.append(self.convert_values(value, field))
        else:
            values.extend(row[index_start:])
        return tuple(values)

    def convert_values(self, value, field):
        """
        Using the same routines that Oracle does, we can convert our
        extra selection objects into Geometry and Distance objects.
        TODO: Make converted objects 'lazy' for less overhead.
        """
        if SpatialBackend.oracle:
            # Running through Oracle's conversion routines first.
            value = super(GeoQuery, self).convert_values(value, field or GeomField())

        if isinstance(field, DistanceField):
            # Using the field's distance attribute, we can instantiate
            # `Distance` with the right context.
            value = Distance(**{field.distance_att: value})
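            # For example, if `distance_att` is 'm', this builds
            # Distance(m=value), giving the caller a measure object in the
            # requested units rather than a bare float.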
        elif isinstance(field, AreaField):
            value = Area(**{field.area_att: value})
        elif isinstance(field, (GeomField, GeometryField)) and value:
            value = SpatialBackend.Geometry(value)
        return value

    def resolve_aggregate(self, value, aggregate):
        """
        Overridden from the base Query class to handle the conversion of
        GeoAggregate objects.
        """
        if isinstance(aggregate, self.aggregates_module.GeoAggregate):
            if aggregate.is_extent:
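                # The backend's convert_extent() is expected to normalize the
                # raw extent value into a 4-tuple of the form
                # (xmin, ymin, xmax, ymax).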
                return self.aggregates_module.convert_extent(value)
            else:
                return self.aggregates_module.convert_geom(value, aggregate.source)
        else:
            return super(GeoQuery, self).resolve_aggregate(value, aggregate)

    #### Routines unique to GeoQuery ####
    def get_extra_select_format(self, alias):
        sel_fmt = '%s'
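        # For extra-select aliases, `custom_select` stores a wrapping format
        # string (for example, something like 'AsText(%s)' on a backend that
        # returns geometries as WKT); the caller then interpolates the
        # extra-select SQL into whatever format is returned here.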
        if alias in self.custom_select:
            sel_fmt = sel_fmt % self.custom_select[alias]
        return sel_fmt

    def get_field_select(self, field, alias=None, column=None):
        """
        Returns the SELECT SQL string for the given field. Figures out
        if any custom selection SQL is needed for the column. The `alias`
        keyword may be used to manually specify the database table where
        the column exists, if not in the model associated with this
        `GeoQuery`. Similarly, `column` may be used to specify the exact
        column name, rather than using the `column` attribute on `field`.
        """
        sel_fmt = self.get_select_format(field)
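        # For model fields, `custom_select` stores a complete SQL expression
        # that replaces the plain column reference -- e.g., a GeoQuerySet
        # method might register something like
        # 'Transform("geoapp_city"."point", 4326)' here (hypothetical column
        # and SRID) so the geometry is selected in a different spatial
        # reference system.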
        if field in self.custom_select:
            field_sel = sel_fmt % self.custom_select[field]
        else:
            field_sel = sel_fmt % self._field_column(field, alias, column)
        return field_sel

    def get_select_format(self, fld):
        """
        Returns the selection format string, depending on the requirements
        of the spatial backend. For example, Oracle and MySQL require custom
        selection formats in order to retrieve geometries in OGC WKT. For all
        other fields a simple '%s' format string is returned.
        """
        if SpatialBackend.select and hasattr(fld, 'geom_type'):
            # This allows operations to be done on fields in the SELECT,
            # overriding their values -- used by the Oracle and MySQL
            # spatial backends to get database values as WKT, and by the
            # `transform` method.
            sel_fmt = SpatialBackend.select

            # Because WKT doesn't contain spatial reference information,
            # the SRID is prefixed to the returned WKT to ensure that the
            # transformed geometries have an SRID different from that of the
            # field -- this is only used by `transform` for the Oracle and
            # SpatiaLite backends.
            if self.transformed_srid and (SpatialBackend.oracle or
                                          SpatialBackend.spatialite):
                sel_fmt = "'SRID=%d;'||%s" % (self.transformed_srid, sel_fmt)
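                # For instance, with a transformed SRID of 4326 and a backend
                # whose select format is 'AsText(%s)', the resulting format
                # would be "'SRID=4326;'||AsText(%s)", yielding EWKT-style
                # output such as 'SRID=4326;POINT (...)'.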
        else:
            sel_fmt = '%s'
        return sel_fmt

    # Private API utilities, subject to change.
    def _field_column(self, field, table_alias=None, column=None):
        """
        Helper function that returns the database column for the given field.
        The table and column are returned (quoted) in the proper format, e.g.,
        `"geoapp_city"."point"`. If `table_alias` is not specified, the
        database table associated with the model of this `GeoQuery` will be
        used. If `column` is specified, it will be used instead of the value
        in `field.column`.
        """
        if table_alias is None: table_alias = self.model._meta.db_table
        return "%s.%s" % (self.quote_name_unless_alias(table_alias),
                          self.connection.ops.quote_name(column or field.column))

    def _geo_field(self, field_name=None):
        """
        Returns the first Geometry field encountered, or the one specified
        via the `field_name` keyword. The `field_name` may be a string
        specifying the geometry field on this GeoQuery's model, or a lookup
        string to a geometry field via a ForeignKey relation.
        """
        if field_name is None:
            # Iterate until the first geographic field is found.
            for fld in self.model._meta.fields:
                if isinstance(fld, GeometryField): return fld
            return False
        else:
            # Otherwise, check by the given field name -- which may be
            # a lookup to a _related_ geographic field.
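            # For example, a hypothetical lookup string such as 'city__point'
            # would traverse a ForeignKey named 'city' to the related model's
            # 'point' geometry field.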
            return GeoWhereNode._check_geo_field(self.model._meta, field_name)

if SpatialBackend.oracle:
    def unpickle_geoquery():
        """
        Utility function, called by Python's unpickling machinery, that handles
        unpickling of GeoQuery subclasses of OracleQuery.
        """
        return GeoQuery.__new__(GeoQuery)
    unpickle_geoquery.__safe_for_unpickling__ = True
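
# Usage sketch (an assumption about the calling code, which lives elsewhere):
# GeoQuerySet is the usual consumer of this class and would construct it
# roughly as ``GeoQuery(model, connection)``; its methods then populate
# ``custom_select``, ``extra_select_fields``, and ``transformed_srid`` to
# customize the SELECT clause that gets generated.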