Fixed #22603 -- Reorganized classes in django.db.backends.
This commit is contained in:
parent
737d24923a
commit
28308078f3
|
@ -0,0 +1,82 @@
|
|||
from functools import partial
|
||||
|
||||
|
||||
class BaseSpatialFeatures(object):
|
||||
gis_enabled = True
|
||||
|
||||
# Does the database contain a SpatialRefSys model to store SRID information?
|
||||
has_spatialrefsys_table = True
|
||||
|
||||
# Does the backend support the django.contrib.gis.utils.add_srs_entry() utility?
|
||||
supports_add_srs_entry = True
|
||||
# Does the backend introspect GeometryField to its subtypes?
|
||||
supports_geometry_field_introspection = True
|
||||
|
||||
# Reference implementation of 3D functions is:
|
||||
# http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
|
||||
supports_3d_functions = False
|
||||
# Does the database support SRID transform operations?
|
||||
supports_transform = True
|
||||
# Do geometric relationship operations operate on real shapes (or only on bounding boxes)?
|
||||
supports_real_shape_operations = True
|
||||
# Can geometry fields be null?
|
||||
supports_null_geometries = True
|
||||
# Can the `distance` GeoQuerySet method be applied on geodetic coordinate systems?
|
||||
supports_distance_geodetic = True
|
||||
# Is the database able to count vertices on polygons (with `num_points`)?
|
||||
supports_num_points_poly = True
|
||||
|
||||
# The following properties indicate if the database backend support
|
||||
# certain lookups (dwithin, left and right, relate, ...)
|
||||
supports_distances_lookups = True
|
||||
supports_left_right_lookups = False
|
||||
|
||||
@property
|
||||
def supports_bbcontains_lookup(self):
|
||||
return 'bbcontains' in self.connection.ops.gis_operators
|
||||
|
||||
@property
|
||||
def supports_contained_lookup(self):
|
||||
return 'contained' in self.connection.ops.gis_operators
|
||||
|
||||
@property
|
||||
def supports_dwithin_lookup(self):
|
||||
return 'dwithin' in self.connection.ops.gis_operators
|
||||
|
||||
@property
|
||||
def supports_relate_lookup(self):
|
||||
return 'relate' in self.connection.ops.gis_operators
|
||||
|
||||
# For each of those methods, the class will have a property named
|
||||
# `has_<name>_method` (defined in __init__) which accesses connection.ops
|
||||
# to determine GIS method availability.
|
||||
geoqueryset_methods = (
|
||||
'area', 'centroid', 'difference', 'distance', 'distance_spheroid',
|
||||
'envelope', 'force_rhr', 'geohash', 'gml', 'intersection', 'kml',
|
||||
'length', 'num_geom', 'perimeter', 'point_on_surface', 'reverse',
|
||||
'scale', 'snap_to_grid', 'svg', 'sym_difference', 'transform',
|
||||
'translate', 'union', 'unionagg',
|
||||
)
|
||||
|
||||
# Specifies whether the Collect and Extent aggregates are supported by the database
|
||||
@property
|
||||
def supports_collect_aggr(self):
|
||||
return 'Collect' in self.connection.ops.valid_aggregates
|
||||
|
||||
@property
|
||||
def supports_extent_aggr(self):
|
||||
return 'Extent' in self.connection.ops.valid_aggregates
|
||||
|
||||
@property
|
||||
def supports_make_line_aggr(self):
|
||||
return 'MakeLine' in self.connection.ops.valid_aggregates
|
||||
|
||||
def __init__(self, *args):
|
||||
super(BaseSpatialFeatures, self).__init__(*args)
|
||||
for method in self.geoqueryset_methods:
|
||||
# Add dynamically properties for each GQS method, e.g. has_force_rhr_method, etc.
|
||||
setattr(self.__class__, 'has_%s_method' % method,
|
||||
property(partial(BaseSpatialFeatures.has_ops_method, method=method)))
|
||||
|
||||
def has_ops_method(self, method):
|
||||
return getattr(self.connection.ops, method, False)
|
|
@ -1,8 +1,3 @@
|
|||
"""
|
||||
Base/mixin classes for the spatial backend database operations and the
|
||||
`<Backend>SpatialRefSys` model.
|
||||
"""
|
||||
from functools import partial
|
||||
import re
|
||||
|
||||
from django.contrib.gis import gdal
|
||||
|
@ -10,203 +5,6 @@ from django.utils import six
|
|||
from django.utils.encoding import python_2_unicode_compatible
|
||||
|
||||
|
||||
class BaseSpatialFeatures(object):
|
||||
gis_enabled = True
|
||||
|
||||
# Does the database contain a SpatialRefSys model to store SRID information?
|
||||
has_spatialrefsys_table = True
|
||||
|
||||
# Does the backend support the django.contrib.gis.utils.add_srs_entry() utility?
|
||||
supports_add_srs_entry = True
|
||||
# Does the backend introspect GeometryField to its subtypes?
|
||||
supports_geometry_field_introspection = True
|
||||
|
||||
# Reference implementation of 3D functions is:
|
||||
# http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
|
||||
supports_3d_functions = False
|
||||
# Does the database support SRID transform operations?
|
||||
supports_transform = True
|
||||
# Do geometric relationship operations operate on real shapes (or only on bounding boxes)?
|
||||
supports_real_shape_operations = True
|
||||
# Can geometry fields be null?
|
||||
supports_null_geometries = True
|
||||
# Can the `distance` GeoQuerySet method be applied on geodetic coordinate systems?
|
||||
supports_distance_geodetic = True
|
||||
# Is the database able to count vertices on polygons (with `num_points`)?
|
||||
supports_num_points_poly = True
|
||||
|
||||
# The following properties indicate if the database backend support
|
||||
# certain lookups (dwithin, left and right, relate, ...)
|
||||
supports_distances_lookups = True
|
||||
supports_left_right_lookups = False
|
||||
|
||||
@property
|
||||
def supports_bbcontains_lookup(self):
|
||||
return 'bbcontains' in self.connection.ops.gis_operators
|
||||
|
||||
@property
|
||||
def supports_contained_lookup(self):
|
||||
return 'contained' in self.connection.ops.gis_operators
|
||||
|
||||
@property
|
||||
def supports_dwithin_lookup(self):
|
||||
return 'dwithin' in self.connection.ops.gis_operators
|
||||
|
||||
@property
|
||||
def supports_relate_lookup(self):
|
||||
return 'relate' in self.connection.ops.gis_operators
|
||||
|
||||
# For each of those methods, the class will have a property named
|
||||
# `has_<name>_method` (defined in __init__) which accesses connection.ops
|
||||
# to determine GIS method availability.
|
||||
geoqueryset_methods = (
|
||||
'area', 'centroid', 'difference', 'distance', 'distance_spheroid',
|
||||
'envelope', 'force_rhr', 'geohash', 'gml', 'intersection', 'kml',
|
||||
'length', 'num_geom', 'perimeter', 'point_on_surface', 'reverse',
|
||||
'scale', 'snap_to_grid', 'svg', 'sym_difference', 'transform',
|
||||
'translate', 'union', 'unionagg',
|
||||
)
|
||||
|
||||
# Specifies whether the Collect and Extent aggregates are supported by the database
|
||||
@property
|
||||
def supports_collect_aggr(self):
|
||||
return 'Collect' in self.connection.ops.valid_aggregates
|
||||
|
||||
@property
|
||||
def supports_extent_aggr(self):
|
||||
return 'Extent' in self.connection.ops.valid_aggregates
|
||||
|
||||
@property
|
||||
def supports_make_line_aggr(self):
|
||||
return 'MakeLine' in self.connection.ops.valid_aggregates
|
||||
|
||||
def __init__(self, *args):
|
||||
super(BaseSpatialFeatures, self).__init__(*args)
|
||||
for method in self.geoqueryset_methods:
|
||||
# Add dynamically properties for each GQS method, e.g. has_force_rhr_method, etc.
|
||||
setattr(self.__class__, 'has_%s_method' % method,
|
||||
property(partial(BaseSpatialFeatures.has_ops_method, method=method)))
|
||||
|
||||
def has_ops_method(self, method):
|
||||
return getattr(self.connection.ops, method, False)
|
||||
|
||||
|
||||
class BaseSpatialOperations(object):
|
||||
"""
|
||||
This module holds the base `BaseSpatialBackend` object, which is
|
||||
instantiated by each spatial database backend with the features
|
||||
it has.
|
||||
"""
|
||||
truncate_params = {}
|
||||
|
||||
# Quick booleans for the type of this spatial backend, and
|
||||
# an attribute for the spatial database version tuple (if applicable)
|
||||
postgis = False
|
||||
spatialite = False
|
||||
mysql = False
|
||||
oracle = False
|
||||
spatial_version = None
|
||||
|
||||
# How the geometry column should be selected.
|
||||
select = None
|
||||
|
||||
# Does the spatial database have a geometry or geography type?
|
||||
geography = False
|
||||
geometry = False
|
||||
|
||||
area = False
|
||||
centroid = False
|
||||
difference = False
|
||||
distance = False
|
||||
distance_sphere = False
|
||||
distance_spheroid = False
|
||||
envelope = False
|
||||
force_rhr = False
|
||||
mem_size = False
|
||||
bounding_circle = False
|
||||
num_geom = False
|
||||
num_points = False
|
||||
perimeter = False
|
||||
perimeter3d = False
|
||||
point_on_surface = False
|
||||
polygonize = False
|
||||
reverse = False
|
||||
scale = False
|
||||
snap_to_grid = False
|
||||
sym_difference = False
|
||||
transform = False
|
||||
translate = False
|
||||
union = False
|
||||
|
||||
# Aggregates
|
||||
collect = False
|
||||
extent = False
|
||||
extent3d = False
|
||||
make_line = False
|
||||
unionagg = False
|
||||
|
||||
# Serialization
|
||||
geohash = False
|
||||
geojson = False
|
||||
gml = False
|
||||
kml = False
|
||||
svg = False
|
||||
|
||||
# Constructors
|
||||
from_text = False
|
||||
from_wkb = False
|
||||
|
||||
# Default conversion functions for aggregates; will be overridden if implemented
|
||||
# for the spatial backend.
|
||||
def convert_extent(self, box, srid):
|
||||
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
|
||||
|
||||
def convert_extent3d(self, box, srid):
|
||||
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
|
||||
|
||||
def convert_geom(self, geom_val, geom_field):
|
||||
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
|
||||
|
||||
# For quoting column values, rather than columns.
|
||||
def geo_quote_name(self, name):
|
||||
return "'%s'" % name
|
||||
|
||||
# GeometryField operations
|
||||
def geo_db_type(self, f):
|
||||
"""
|
||||
Returns the database column type for the geometry field on
|
||||
the spatial backend.
|
||||
"""
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
|
||||
|
||||
def get_distance(self, f, value, lookup_type):
|
||||
"""
|
||||
Returns the distance parameters for the given geometry field,
|
||||
lookup value, and lookup type.
|
||||
"""
|
||||
raise NotImplementedError('Distance operations not available on this spatial backend.')
|
||||
|
||||
def get_geom_placeholder(self, f, value, compiler):
|
||||
"""
|
||||
Returns the placeholder for the given geometry field with the given
|
||||
value. Depending on the spatial backend, the placeholder may contain a
|
||||
stored procedure call to the transformation function of the spatial
|
||||
backend.
|
||||
"""
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method')
|
||||
|
||||
# Spatial SQL Construction
|
||||
def spatial_aggregate_sql(self, agg):
|
||||
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
|
||||
|
||||
# Routines for getting the OGC-compliant models.
|
||||
def geometry_columns(self):
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide geometry_columns() method')
|
||||
|
||||
def spatial_ref_sys(self):
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
|
||||
|
||||
|
||||
@python_2_unicode_compatible
|
||||
class SpatialRefSysMixin(object):
|
||||
"""
|
|
@ -0,0 +1,114 @@
|
|||
class BaseSpatialOperations(object):
|
||||
"""
|
||||
This module holds the base `BaseSpatialBackend` object, which is
|
||||
instantiated by each spatial database backend with the features
|
||||
it has.
|
||||
"""
|
||||
truncate_params = {}
|
||||
|
||||
# Quick booleans for the type of this spatial backend, and
|
||||
# an attribute for the spatial database version tuple (if applicable)
|
||||
postgis = False
|
||||
spatialite = False
|
||||
mysql = False
|
||||
oracle = False
|
||||
spatial_version = None
|
||||
|
||||
# How the geometry column should be selected.
|
||||
select = None
|
||||
|
||||
# Does the spatial database have a geometry or geography type?
|
||||
geography = False
|
||||
geometry = False
|
||||
|
||||
area = False
|
||||
centroid = False
|
||||
difference = False
|
||||
distance = False
|
||||
distance_sphere = False
|
||||
distance_spheroid = False
|
||||
envelope = False
|
||||
force_rhr = False
|
||||
mem_size = False
|
||||
bounding_circle = False
|
||||
num_geom = False
|
||||
num_points = False
|
||||
perimeter = False
|
||||
perimeter3d = False
|
||||
point_on_surface = False
|
||||
polygonize = False
|
||||
reverse = False
|
||||
scale = False
|
||||
snap_to_grid = False
|
||||
sym_difference = False
|
||||
transform = False
|
||||
translate = False
|
||||
union = False
|
||||
|
||||
# Aggregates
|
||||
collect = False
|
||||
extent = False
|
||||
extent3d = False
|
||||
make_line = False
|
||||
unionagg = False
|
||||
|
||||
# Serialization
|
||||
geohash = False
|
||||
geojson = False
|
||||
gml = False
|
||||
kml = False
|
||||
svg = False
|
||||
|
||||
# Constructors
|
||||
from_text = False
|
||||
from_wkb = False
|
||||
|
||||
# Default conversion functions for aggregates; will be overridden if implemented
|
||||
# for the spatial backend.
|
||||
def convert_extent(self, box, srid):
|
||||
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
|
||||
|
||||
def convert_extent3d(self, box, srid):
|
||||
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
|
||||
|
||||
def convert_geom(self, geom_val, geom_field):
|
||||
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
|
||||
|
||||
# For quoting column values, rather than columns.
|
||||
def geo_quote_name(self, name):
|
||||
return "'%s'" % name
|
||||
|
||||
# GeometryField operations
|
||||
def geo_db_type(self, f):
|
||||
"""
|
||||
Returns the database column type for the geometry field on
|
||||
the spatial backend.
|
||||
"""
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
|
||||
|
||||
def get_distance(self, f, value, lookup_type):
|
||||
"""
|
||||
Returns the distance parameters for the given geometry field,
|
||||
lookup value, and lookup type.
|
||||
"""
|
||||
raise NotImplementedError('Distance operations not available on this spatial backend.')
|
||||
|
||||
def get_geom_placeholder(self, f, value, compiler):
|
||||
"""
|
||||
Returns the placeholder for the given geometry field with the given
|
||||
value. Depending on the spatial backend, the placeholder may contain a
|
||||
stored procedure call to the transformation function of the spatial
|
||||
backend.
|
||||
"""
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method')
|
||||
|
||||
# Spatial SQL Construction
|
||||
def spatial_aggregate_sql(self, agg):
|
||||
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
|
||||
|
||||
# Routines for getting the OGC-compliant models.
|
||||
def geometry_columns(self):
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide geometry_columns() method')
|
||||
|
||||
def spatial_ref_sys(self):
|
||||
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
|
|
@ -1,22 +1,10 @@
|
|||
from django.db.backends.mysql.base import (
|
||||
DatabaseWrapper as MySQLDatabaseWrapper,
|
||||
DatabaseFeatures as MySQLDatabaseFeatures,
|
||||
)
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialFeatures
|
||||
from django.contrib.gis.db.backends.mysql.creation import MySQLCreation
|
||||
from django.contrib.gis.db.backends.mysql.introspection import MySQLIntrospection
|
||||
from django.contrib.gis.db.backends.mysql.operations import MySQLOperations
|
||||
from django.contrib.gis.db.backends.mysql.schema import MySQLGISSchemaEditor
|
||||
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
|
||||
has_spatialrefsys_table = False
|
||||
supports_add_srs_entry = False
|
||||
supports_distances_lookups = False
|
||||
supports_transform = False
|
||||
supports_real_shape_operations = False
|
||||
supports_null_geometries = False
|
||||
supports_num_points_poly = False
|
||||
from .creation import MySQLCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import MySQLIntrospection
|
||||
from .operations import MySQLOperations
|
||||
from .schema import MySQLGISSchemaEditor
|
||||
|
||||
|
||||
class DatabaseWrapper(MySQLDatabaseWrapper):
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
|
||||
from django.db.backends.mysql.features import DatabaseFeatures as MySQLDatabaseFeatures
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
|
||||
has_spatialrefsys_table = False
|
||||
supports_add_srs_entry = False
|
||||
supports_distances_lookups = False
|
||||
supports_transform = False
|
||||
supports_real_shape_operations = False
|
||||
supports_null_geometries = False
|
||||
supports_num_points_poly = False
|
|
@ -1,8 +1,7 @@
|
|||
from django.db.backends.mysql.base import DatabaseOperations
|
||||
|
||||
from django.contrib.gis.db.backends.adapter import WKTAdapter
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
|
||||
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.utils import SpatialOperator
|
||||
from django.db.backends.mysql.operations import DatabaseOperations
|
||||
|
||||
|
||||
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from cx_Oracle import CLOB
|
||||
from django.contrib.gis.db.backends.adapter import WKTAdapter
|
||||
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
|
||||
|
||||
|
||||
class OracleSpatialAdapter(WKTAdapter):
|
||||
|
|
|
@ -1,17 +1,10 @@
|
|||
from django.db.backends.oracle.base import (
|
||||
DatabaseWrapper as OracleDatabaseWrapper,
|
||||
DatabaseFeatures as OracleDatabaseFeatures,
|
||||
)
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialFeatures
|
||||
from django.contrib.gis.db.backends.oracle.creation import OracleCreation
|
||||
from django.contrib.gis.db.backends.oracle.introspection import OracleIntrospection
|
||||
from django.contrib.gis.db.backends.oracle.operations import OracleOperations
|
||||
from django.contrib.gis.db.backends.oracle.schema import OracleGISSchemaEditor
|
||||
from django.db.backends.oracle.base import DatabaseWrapper as OracleDatabaseWrapper
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, OracleDatabaseFeatures):
|
||||
supports_add_srs_entry = False
|
||||
supports_geometry_field_introspection = False
|
||||
from .creation import OracleCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import OracleIntrospection
|
||||
from .operations import OracleOperations
|
||||
from .schema import OracleGISSchemaEditor
|
||||
|
||||
|
||||
class DatabaseWrapper(OracleDatabaseWrapper):
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
|
||||
from django.db.backends.oracle.features import DatabaseFeatures as OracleDatabaseFeatures
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, OracleDatabaseFeatures):
|
||||
supports_add_srs_entry = False
|
||||
supports_geometry_field_introspection = False
|
|
@ -8,7 +8,7 @@
|
|||
model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model.
|
||||
"""
|
||||
from django.contrib.gis.db import models
|
||||
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
|
||||
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
|
||||
from django.utils.encoding import python_2_unicode_compatible
|
||||
|
||||
|
||||
|
|
|
@ -9,12 +9,13 @@
|
|||
"""
|
||||
import re
|
||||
|
||||
from django.db.backends.oracle.base import DatabaseOperations, Database
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter
|
||||
from django.contrib.gis.db.backends.utils import SpatialOperator
|
||||
from django.contrib.gis.geometry.backend import Geometry
|
||||
from django.contrib.gis.measure import Distance
|
||||
from django.db.backends.oracle.base import Database
|
||||
from django.db.backends.oracle.operations import DatabaseOperations
|
||||
from django.utils import six
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,15 @@
|
|||
from django.conf import settings
|
||||
from django.db.backends import NO_DB_ALIAS
|
||||
from django.db.backends.base.base import NO_DB_ALIAS
|
||||
from django.db.backends.postgresql_psycopg2.base import (
|
||||
DatabaseWrapper as Psycopg2DatabaseWrapper,
|
||||
DatabaseFeatures as Psycopg2DatabaseFeatures
|
||||
)
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialFeatures
|
||||
from django.contrib.gis.db.backends.postgis.creation import PostGISCreation
|
||||
from django.contrib.gis.db.backends.postgis.introspection import PostGISIntrospection
|
||||
from django.contrib.gis.db.backends.postgis.operations import PostGISOperations
|
||||
from django.contrib.gis.db.backends.postgis.schema import PostGISSchemaEditor
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, Psycopg2DatabaseFeatures):
|
||||
supports_3d_functions = True
|
||||
supports_left_right_lookups = True
|
||||
from .creation import PostGISCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import PostGISIntrospection
|
||||
from .operations import PostGISOperations
|
||||
from .schema import PostGISSchemaEditor
|
||||
|
||||
|
||||
class DatabaseWrapper(Psycopg2DatabaseWrapper):
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
|
||||
from django.db.backends.postgresql_psycopg2.features import (
|
||||
DatabaseFeatures as Psycopg2DatabaseFeatures,
|
||||
)
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, Psycopg2DatabaseFeatures):
|
||||
supports_3d_functions = True
|
||||
supports_left_right_lookups = True
|
|
@ -2,7 +2,7 @@
|
|||
The GeometryColumns and SpatialRefSys models for the PostGIS backend.
|
||||
"""
|
||||
from django.db import models
|
||||
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
|
||||
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
|
||||
from django.utils.encoding import python_2_unicode_compatible
|
||||
|
||||
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import re
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.postgis.adapter import PostGISAdapter
|
||||
from django.contrib.gis.db.backends.utils import SpatialOperator
|
||||
from django.contrib.gis.geometry.backend import Geometry
|
||||
from django.contrib.gis.measure import Distance
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db.backends.postgresql_psycopg2.base import DatabaseOperations
|
||||
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
|
||||
from django.db.utils import ProgrammingError
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from django.db.backends.sqlite3.base import Database
|
||||
from django.contrib.gis.db.backends.adapter import WKTAdapter
|
||||
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
|
||||
|
||||
|
||||
class SpatiaLiteAdapter(WKTAdapter):
|
||||
|
|
|
@ -1,32 +1,19 @@
|
|||
import sys
|
||||
from ctypes.util import find_library
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db.backends.sqlite3.base import (Database,
|
||||
DatabaseWrapper as SQLiteDatabaseWrapper,
|
||||
DatabaseFeatures as SQLiteDatabaseFeatures, SQLiteCursorWrapper)
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialFeatures
|
||||
from django.contrib.gis.db.backends.spatialite.client import SpatiaLiteClient
|
||||
from django.contrib.gis.db.backends.spatialite.creation import SpatiaLiteCreation
|
||||
from django.contrib.gis.db.backends.spatialite.introspection import SpatiaLiteIntrospection
|
||||
from django.contrib.gis.db.backends.spatialite.operations import SpatiaLiteOperations
|
||||
from django.contrib.gis.db.backends.spatialite.schema import SpatialiteSchemaEditor
|
||||
from django.db.backends.sqlite3.base import (
|
||||
Database, DatabaseWrapper as SQLiteDatabaseWrapper, SQLiteCursorWrapper,
|
||||
)
|
||||
from django.utils import six
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, SQLiteDatabaseFeatures):
|
||||
supports_distance_geodetic = False
|
||||
# SpatiaLite can only count vertices in LineStrings
|
||||
supports_num_points_poly = False
|
||||
|
||||
@cached_property
|
||||
def supports_initspatialmetadata_in_one_transaction(self):
|
||||
# SpatiaLite 4.1+ support initializing all metadata in one transaction
|
||||
# which can result in a significant performance improvement when
|
||||
# creating the database.
|
||||
return self.connection.ops.spatial_version >= (4, 1, 0)
|
||||
from .client import SpatiaLiteClient
|
||||
from .creation import SpatiaLiteCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import SpatiaLiteIntrospection
|
||||
from .operations import SpatiaLiteOperations
|
||||
from .schema import SpatialiteSchemaEditor
|
||||
|
||||
|
||||
class DatabaseWrapper(SQLiteDatabaseWrapper):
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
|
||||
from django.db.backends.sqlite3.features import DatabaseFeatures as SQLiteDatabaseFeatures
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseSpatialFeatures, SQLiteDatabaseFeatures):
|
||||
supports_distance_geodetic = False
|
||||
# SpatiaLite can only count vertices in LineStrings
|
||||
supports_num_points_poly = False
|
||||
|
||||
@cached_property
|
||||
def supports_initspatialmetadata_in_one_transaction(self):
|
||||
# SpatiaLite 4.1+ support initializing all metadata in one transaction
|
||||
# which can result in a significant performance improvement when
|
||||
# creating the database.
|
||||
return self.connection.ops.spatial_version >= (4, 1, 0)
|
|
@ -3,7 +3,7 @@
|
|||
"""
|
||||
from django.db import connection, models
|
||||
from django.db.backends.signals import connection_created
|
||||
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
|
||||
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
|
||||
from django.contrib.gis.db.backends.spatialite.base import DatabaseWrapper
|
||||
from django.utils.encoding import python_2_unicode_compatible
|
||||
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import re
|
||||
import sys
|
||||
|
||||
from django.contrib.gis.db.backends.base import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
|
||||
from django.contrib.gis.db.backends.utils import SpatialOperator
|
||||
from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter
|
||||
from django.contrib.gis.geometry.backend import Geometry
|
||||
from django.contrib.gis.measure import Distance
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db.backends.sqlite3.base import DatabaseOperations
|
||||
from django.db.backends.sqlite3.operations import DatabaseOperations
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils import six
|
||||
from django.utils.functional import cached_property
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,507 @@
|
|||
from collections import deque
|
||||
from contextlib import contextmanager
|
||||
import time
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import DEFAULT_DB_ALIAS
|
||||
from django.db.backends.signals import connection_created
|
||||
from django.db.backends import utils
|
||||
from django.db.transaction import TransactionManagementError
|
||||
from django.db.utils import DatabaseError, DatabaseErrorWrapper
|
||||
from django.utils.functional import cached_property
|
||||
try:
|
||||
from django.utils.six.moves import _thread as thread
|
||||
except ImportError:
|
||||
from django.utils.six.moves import _dummy_thread as thread
|
||||
|
||||
|
||||
NO_DB_ALIAS = '__no_db__'
|
||||
|
||||
|
||||
class BaseDatabaseWrapper(object):
|
||||
"""
|
||||
Represents a database connection.
|
||||
"""
|
||||
# Mapping of Field objects to their column types.
|
||||
data_types = {}
|
||||
# Mapping of Field objects to their SQL suffix such as AUTOINCREMENT.
|
||||
data_types_suffix = {}
|
||||
# Mapping of Field objects to their SQL for CHECK constraints.
|
||||
data_type_check_constraints = {}
|
||||
ops = None
|
||||
vendor = 'unknown'
|
||||
SchemaEditorClass = None
|
||||
|
||||
queries_limit = 9000
|
||||
|
||||
def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS,
|
||||
allow_thread_sharing=False):
|
||||
# Connection related attributes.
|
||||
# The underlying database connection.
|
||||
self.connection = None
|
||||
# `settings_dict` should be a dictionary containing keys such as
|
||||
# NAME, USER, etc. It's called `settings_dict` instead of `settings`
|
||||
# to disambiguate it from Django settings modules.
|
||||
self.settings_dict = settings_dict
|
||||
self.alias = alias
|
||||
# Query logging in debug mode or when explicitly enabled.
|
||||
self.queries_log = deque(maxlen=self.queries_limit)
|
||||
self.force_debug_cursor = False
|
||||
|
||||
# Transaction related attributes.
|
||||
# Tracks if the connection is in autocommit mode. Per PEP 249, by
|
||||
# default, it isn't.
|
||||
self.autocommit = False
|
||||
# Tracks if the connection is in a transaction managed by 'atomic'.
|
||||
self.in_atomic_block = False
|
||||
# Increment to generate unique savepoint ids.
|
||||
self.savepoint_state = 0
|
||||
# List of savepoints created by 'atomic'.
|
||||
self.savepoint_ids = []
|
||||
# Tracks if the outermost 'atomic' block should commit on exit,
|
||||
# ie. if autocommit was active on entry.
|
||||
self.commit_on_exit = True
|
||||
# Tracks if the transaction should be rolled back to the next
|
||||
# available savepoint because of an exception in an inner block.
|
||||
self.needs_rollback = False
|
||||
|
||||
# Connection termination related attributes.
|
||||
self.close_at = None
|
||||
self.closed_in_transaction = False
|
||||
self.errors_occurred = False
|
||||
|
||||
# Thread-safety related attributes.
|
||||
self.allow_thread_sharing = allow_thread_sharing
|
||||
self._thread_ident = thread.get_ident()
|
||||
|
||||
@property
|
||||
def queries_logged(self):
|
||||
return self.force_debug_cursor or settings.DEBUG
|
||||
|
||||
@property
def queries(self):
    """
    Return the recorded queries as a list, warning when the bounded log
    has overflowed and older entries were silently discarded.
    """
    log = self.queries_log
    if len(log) == log.maxlen:
        warnings.warn(
            "Limit for query logging exceeded, only the last {} queries "
            "will be returned.".format(log.maxlen))
    return list(log)
|
||||
|
||||
##### Backend-specific methods for creating connections and cursors #####
|
||||
|
||||
def get_connection_params(self):
    """
    Returns a dict of parameters suitable for get_new_connection.

    Must be provided by concrete backends.
    """
    raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_connection_params() method')
|
||||
|
||||
def get_new_connection(self, conn_params):
    """
    Opens a connection to the database.

    Must be provided by concrete backends.
    """
    raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_new_connection() method')
|
||||
|
||||
def init_connection_state(self):
    """
    Initializes the database connection settings.

    Called by connect() after the raw connection is established.
    """
    raise NotImplementedError('subclasses of BaseDatabaseWrapper may require an init_connection_state() method')
|
||||
|
||||
def create_cursor(self):
    """
    Creates a cursor. Assumes that a connection is established.

    Must be provided by concrete backends.
    """
    raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a create_cursor() method')
|
||||
|
||||
##### Backend-specific methods for creating connections #####
|
||||
|
||||
def connect(self):
    """Connects to the database. Assumes that the connection is closed."""
    # In case the previous connection was closed while in an atomic block
    self.in_atomic_block = False
    self.savepoint_ids = []
    self.needs_rollback = False
    # Reset parameters defining when to close the connection
    max_age = self.settings_dict['CONN_MAX_AGE']
    self.close_at = None if max_age is None else time.time() + max_age
    self.closed_in_transaction = False
    self.errors_occurred = False
    # Establish the connection
    conn_params = self.get_connection_params()
    self.connection = self.get_new_connection(conn_params)
    # Configure autocommit before backend-specific init so the connection
    # starts in the mode declared in settings.
    self.set_autocommit(self.settings_dict['AUTOCOMMIT'])
    self.init_connection_state()
    connection_created.send(sender=self.__class__, connection=self)
|
||||
|
||||
def ensure_connection(self):
    """
    Guarantees that a connection to the database is established.
    """
    if self.connection is None:
        # Translate driver-level errors into Django's common exception
        # wrappers while opening the connection.
        with self.wrap_database_errors:
            self.connect()
|
||||
|
||||
##### Backend-specific wrappers for PEP-249 connection methods #####
|
||||
|
||||
def _cursor(self):
    # Opens the connection if needed, then delegates cursor creation to
    # the backend with database-error translation applied.
    self.ensure_connection()
    with self.wrap_database_errors:
        return self.create_cursor()
|
||||
|
||||
def _commit(self):
    # PEP 249 commit on the raw connection; no-op when not connected.
    if self.connection is not None:
        with self.wrap_database_errors:
            return self.connection.commit()
|
||||
|
||||
def _rollback(self):
    # PEP 249 rollback on the raw connection; no-op when not connected.
    if self.connection is not None:
        with self.wrap_database_errors:
            return self.connection.rollback()
|
||||
|
||||
def _close(self):
    # PEP 249 close on the raw connection; no-op when not connected.
    if self.connection is not None:
        with self.wrap_database_errors:
            return self.connection.close()
|
||||
|
||||
##### Generic wrappers for PEP-249 connection methods #####
|
||||
|
||||
def cursor(self):
    """
    Creates a cursor, opening a connection if necessary.
    """
    self.validate_thread_sharing()
    # Pick the wrapper up front: debug cursors record queries, plain
    # cursors only translate exceptions.
    wrap = self.make_debug_cursor if self.queries_logged else self.make_cursor
    return wrap(self._cursor())
|
||||
|
||||
def commit(self):
    """
    Commits a transaction and resets the dirty flag.

    Forbidden inside an 'atomic' block.
    """
    self.validate_thread_sharing()
    self.validate_no_atomic_block()
    self._commit()
    # A successful commit means that the database connection works.
    self.errors_occurred = False
|
||||
|
||||
def rollback(self):
    """
    Rolls back a transaction and resets the dirty flag.

    Forbidden inside an 'atomic' block.
    """
    self.validate_thread_sharing()
    self.validate_no_atomic_block()
    self._rollback()
    # A successful rollback means that the database connection works.
    self.errors_occurred = False
|
||||
|
||||
def close(self):
    """
    Closes the connection to the database.
    """
    self.validate_thread_sharing()
    # Don't call validate_no_atomic_block() to avoid making it difficult
    # to get rid of a connection in an invalid state. The next connect()
    # will reset the transaction state anyway.
    if self.closed_in_transaction or self.connection is None:
        return
    try:
        self._close()
    finally:
        # When closed inside 'atomic', remember the close and force a
        # rollback so the block cannot commit over a dead connection.
        if self.in_atomic_block:
            self.closed_in_transaction = True
            self.needs_rollback = True
        else:
            self.connection = None
|
||||
|
||||
##### Backend-specific savepoint management methods #####
|
||||
|
||||
def _savepoint(self, sid):
    # Issues the backend's SAVEPOINT creation statement for `sid`.
    with self.cursor() as cursor:
        cursor.execute(self.ops.savepoint_create_sql(sid))
|
||||
|
||||
def _savepoint_rollback(self, sid):
    # Issues the backend's ROLLBACK TO SAVEPOINT statement for `sid`.
    with self.cursor() as cursor:
        cursor.execute(self.ops.savepoint_rollback_sql(sid))
|
||||
|
||||
def _savepoint_commit(self, sid):
    # Issues the backend's RELEASE SAVEPOINT statement for `sid`.
    with self.cursor() as cursor:
        cursor.execute(self.ops.savepoint_commit_sql(sid))
|
||||
|
||||
def _savepoint_allowed(self):
|
||||
# Savepoints cannot be created outside a transaction
|
||||
return self.features.uses_savepoints and not self.get_autocommit()
|
||||
|
||||
##### Generic savepoint management methods #####
|
||||
|
||||
def savepoint(self):
    """
    Creates a savepoint inside the current transaction. Returns an
    identifier for the savepoint that will be used for the subsequent
    rollback or commit. Does nothing if savepoints are not supported.
    """
    if not self._savepoint_allowed():
        return

    thread_ident = thread.get_ident()
    # Strip any '-' so the ident embeds safely in the savepoint name.
    tid = str(thread_ident).replace('-', '')

    # Per-connection counter keeps generated ids unique.
    self.savepoint_state += 1
    sid = "s%s_x%d" % (tid, self.savepoint_state)

    self.validate_thread_sharing()
    self._savepoint(sid)

    return sid
|
||||
|
||||
def savepoint_rollback(self, sid):
    """
    Rolls back to a savepoint. Does nothing if savepoints are not supported.
    """
    if self._savepoint_allowed():
        self.validate_thread_sharing()
        self._savepoint_rollback(sid)
|
||||
|
||||
def savepoint_commit(self, sid):
    """
    Releases a savepoint. Does nothing if savepoints are not supported.
    """
    if not self._savepoint_allowed():
        return

    self.validate_thread_sharing()
    self._savepoint_commit(sid)
|
||||
|
||||
def clean_savepoints(self):
    """
    Resets the counter used to generate unique savepoint ids in this thread.
    """
    self.savepoint_state = 0
|
||||
|
||||
##### Backend-specific transaction management methods #####
|
||||
|
||||
def _set_autocommit(self, autocommit):
    """
    Backend-specific implementation to enable or disable autocommit.
    """
    raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a _set_autocommit() method')
|
||||
|
||||
##### Generic transaction management methods #####
|
||||
|
||||
def get_autocommit(self):
    """
    Check the autocommit state.
    """
    # Connect first so the cached flag reflects a live connection.
    self.ensure_connection()
    return self.autocommit
|
||||
|
||||
def set_autocommit(self, autocommit):
    """
    Enable or disable autocommit.

    Forbidden inside an 'atomic' block.
    """
    self.validate_no_atomic_block()
    self.ensure_connection()
    self._set_autocommit(autocommit)
    # Record the new state only after the backend call succeeded.
    self.autocommit = autocommit
|
||||
|
||||
def get_rollback(self):
    """
    Get the "needs rollback" flag -- for *advanced use* only.
    """
    if self.in_atomic_block:
        return self.needs_rollback
    raise TransactionManagementError(
        "The rollback flag doesn't work outside of an 'atomic' block.")
|
||||
|
||||
def set_rollback(self, rollback):
    """
    Set or unset the "needs rollback" flag -- for *advanced use* only.

    Only meaningful inside an 'atomic' block; raises otherwise.
    """
    if not self.in_atomic_block:
        raise TransactionManagementError(
            "The rollback flag doesn't work outside of an 'atomic' block.")
    self.needs_rollback = rollback
|
||||
|
||||
def validate_no_atomic_block(self):
    """
    Raise an error if an atomic block is active.
    """
    if self.in_atomic_block:
        raise TransactionManagementError(
            "This is forbidden when an 'atomic' block is active.")
|
||||
|
||||
def validate_no_broken_transaction(self):
    """
    Raise an error if the current transaction is pending rollback.
    """
    if self.needs_rollback:
        raise TransactionManagementError(
            "An error occurred in the current transaction. You can't "
            "execute queries until the end of the 'atomic' block.")
|
||||
|
||||
##### Foreign key constraints checks handling #####
|
||||
|
||||
@contextmanager
def constraint_checks_disabled(self):
    """
    Context manager that disables foreign key constraint checking.
    """
    disabled = self.disable_constraint_checking()
    try:
        yield
    finally:
        # Re-enable only if checking was actually disabled.
        if disabled:
            self.enable_constraint_checking()
|
||||
|
||||
def disable_constraint_checking(self):
    """
    Backends can implement as needed to temporarily disable foreign key
    constraint checking. Should return True if the constraints were
    disabled and will need to be reenabled.
    """
    # Base implementation: nothing to disable.
    return False
|
||||
|
||||
def enable_constraint_checking(self):
    """
    Backends can implement as needed to re-enable foreign key constraint
    checking.
    """
    pass
|
||||
|
||||
def check_constraints(self, table_names=None):
    """
    Backends can override this method if they can apply constraint
    checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE"). Should raise an
    IntegrityError if any invalid foreign key references are encountered.
    """
    pass
|
||||
|
||||
##### Connection termination handling #####
|
||||
|
||||
def is_usable(self):
    """
    Tests if the database connection is usable.

    This function may assume that self.connection is not None.

    Actual implementations should take care not to raise exceptions
    as that may prevent Django from recycling unusable connections.
    """
    raise NotImplementedError(
        "subclasses of BaseDatabaseWrapper may require an is_usable() method")
|
||||
|
||||
def close_if_unusable_or_obsolete(self):
    """
    Closes the current connection if unrecoverable errors have occurred,
    or if it outlived its maximum age.
    """
    if self.connection is not None:
        # If the application didn't restore the original autocommit setting,
        # don't take chances, drop the connection.
        if self.get_autocommit() != self.settings_dict['AUTOCOMMIT']:
            self.close()
            return

        # If an exception other than DataError or IntegrityError occurred
        # since the last commit / rollback, check if the connection works.
        if self.errors_occurred:
            if self.is_usable():
                self.errors_occurred = False
            else:
                self.close()
                return

        # Age check last: close_at is set from CONN_MAX_AGE in connect().
        if self.close_at is not None and time.time() >= self.close_at:
            self.close()
            return
|
||||
|
||||
##### Thread safety handling #####
|
||||
|
||||
def validate_thread_sharing(self):
    """
    Validates that the connection isn't accessed by another thread than the
    one which originally created it, unless the connection was explicitly
    authorized to be shared between threads (via the `allow_thread_sharing`
    property). Raises an exception if the validation fails.
    """
    if not (self.allow_thread_sharing
            or self._thread_ident == thread.get_ident()):
        raise DatabaseError("DatabaseWrapper objects created in a "
            "thread can only be used in that same thread. The object "
            "with alias '%s' was created in thread id %s and this is "
            "thread id %s."
            % (self.alias, self._thread_ident, thread.get_ident()))
|
||||
|
||||
##### Miscellaneous #####
|
||||
|
||||
def prepare_database(self):
    """
    Hook to do any database check or preparation, generally called before
    migrating a project or an app.
    """
    pass
|
||||
|
||||
@cached_property
def wrap_database_errors(self):
    """
    Context manager and decorator that re-throws backend-specific database
    exceptions using Django's common wrappers.
    """
    # Cached: one wrapper instance is reused for the connection's lifetime.
    return DatabaseErrorWrapper(self)
|
||||
|
||||
def make_debug_cursor(self, cursor):
    """
    Creates a cursor that logs all queries in self.queries_log.
    """
    return utils.CursorDebugWrapper(cursor, self)
|
||||
|
||||
def make_cursor(self, cursor):
    """
    Creates a cursor without debug logging.
    """
    return utils.CursorWrapper(cursor, self)
|
||||
|
||||
@contextmanager
def temporary_connection(self):
    """
    Context manager that ensures that a connection is established, and
    if it opened one, closes it to avoid leaving a dangling connection.
    This is useful for operations outside of the request-response cycle.

    Provides a cursor: with self.temporary_connection() as cursor: ...
    """
    # Only close on exit if *we* opened the connection here.
    must_close = self.connection is None
    cursor = self.cursor()
    try:
        yield cursor
    finally:
        cursor.close()
        if must_close:
            self.close()
|
||||
|
||||
@cached_property
def _nodb_connection(self):
    """
    Alternative connection to be used when there is no need to access
    the main database, specifically for test db creation/deletion.
    This also prevents the production database from being exposed to
    potential child threads while (or after) the test database is destroyed.
    Refs #10868, #17786, #16969.
    """
    # Same backend, but with no database name selected.
    settings_dict = self.settings_dict.copy()
    settings_dict['NAME'] = None
    nodb_connection = self.__class__(
        settings_dict,
        alias=NO_DB_ALIAS,
        allow_thread_sharing=False)
    return nodb_connection
|
||||
|
||||
def _start_transaction_under_autocommit(self):
    """
    Only required when autocommits_when_autocommit_is_off = True.
    """
    raise NotImplementedError(
        'subclasses of BaseDatabaseWrapper may require a '
        '_start_transaction_under_autocommit() method'
    )
|
||||
|
||||
def schema_editor(self, *args, **kwargs):
    """
    Returns a new instance of this backend's SchemaEditor.
    """
    editor_class = self.SchemaEditorClass
    if editor_class is None:
        raise NotImplementedError(
            'The SchemaEditorClass attribute of this database wrapper is still None')
    return editor_class(self, *args, **kwargs)
|
|
@ -0,0 +1,15 @@
|
|||
class BaseDatabaseClient(object):
    """
    Encapsulates the backend-specific logic for launching a command-line
    client shell against the database.
    """
    # Name of the shell executable (e.g. "psql"). Subclasses must
    # override this.
    executable_name = None

    def __init__(self, connection):
        # `connection` is a BaseDatabaseWrapper instance.
        self.connection = connection

    def runshell(self):
        raise NotImplementedError('subclasses of BaseDatabaseClient must provide a runshell() method')
|
|
@ -3,16 +3,16 @@ import sys
|
|||
import time
|
||||
import warnings
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core import serializers
|
||||
from django.db import router
|
||||
from django.db.backends.utils import truncate_name
|
||||
from django.utils.deprecation import RemovedInDjango20Warning
|
||||
from django.utils.encoding import force_bytes
|
||||
from django.utils.six.moves import input
|
||||
from django.utils.six import StringIO
|
||||
from django.db import router
|
||||
from django.apps import apps
|
||||
from django.core import serializers
|
||||
from django.utils.six.moves import input
|
||||
|
||||
from .utils import truncate_name
|
||||
|
||||
# The prefix to put on the default database name when creating
|
||||
# the test database.
|
|
@ -0,0 +1,250 @@
|
|||
from django.db.utils import ProgrammingError
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
class BaseDatabaseFeatures(object):
    """
    Declarative description of a database backend's capabilities.

    Concrete backends override the class attributes below; a few
    capabilities are probed against a live connection via cached
    properties.
    """
    # Overridden to True by spatial (GIS) feature classes.
    gis_enabled = False
    allows_group_by_pk = False
    # True if django.db.backends.utils.typecast_timestamp is used on values
    # returned from dates() calls.
    needs_datetime_string_cast = True
    empty_fetchmany_value = []
    update_can_self_select = True

    # Does the backend distinguish between '' and None?
    interprets_empty_strings_as_nulls = False

    # Does the backend allow inserting duplicate NULL rows in a nullable
    # unique field? All core backends implement this correctly, but other
    # databases such as SQL Server do not.
    supports_nullable_unique_constraints = True

    # Does the backend allow inserting duplicate rows when a unique_together
    # constraint exists and some fields are nullable but not all of them?
    supports_partially_nullable_unique_constraints = True

    can_use_chunked_reads = True
    can_return_id_from_insert = False
    has_bulk_insert = False
    uses_savepoints = False
    can_release_savepoints = False
    can_combine_inserts_with_and_without_auto_increment_pk = False

    # If True, don't use integer foreign keys referring to, e.g., positive
    # integer primary keys.
    related_fields_match_type = False
    allow_sliced_subqueries = True
    has_select_for_update = False
    has_select_for_update_nowait = False

    supports_select_related = True

    # Does the default test database allow multiple connections?
    # Usually an indication that the test database is in-memory
    test_db_allows_multiple_connections = True

    # Can an object be saved without an explicit primary key?
    supports_unspecified_pk = False

    # Can a fixture contain forward references? i.e., are
    # FK constraints checked at the end of transaction, or
    # at the end of each save operation?
    supports_forward_references = True

    # Does the backend truncate names properly when they are too long?
    truncates_names = False

    # Is there a REAL datatype in addition to floats/doubles?
    has_real_datatype = False
    supports_subqueries_in_group_by = True
    supports_bitwise_or = True

    # Is there a true datatype for timedeltas?
    has_native_duration_field = False

    # Does the database driver support timedeltas as arguments?
    # This is only relevant when there is a native duration field.
    # Specifically, there is a bug with cx_Oracle:
    # https://bitbucket.org/anthony_tuininga/cx_oracle/issue/7/
    driver_supports_timedelta_args = False

    # Do time/datetime fields have microsecond precision?
    supports_microsecond_precision = True

    # Does the __regex lookup support backreferencing and grouping?
    supports_regex_backreferencing = True

    # Can date/datetime lookups be performed using a string?
    supports_date_lookup_using_string = True

    # Can datetimes with timezones be used?
    supports_timezones = True

    # Does the database have a copy of the zoneinfo database?
    has_zoneinfo_database = True

    # When performing a GROUP BY, is an ORDER BY NULL required
    # to remove any ordering?
    requires_explicit_null_ordering_when_grouping = False

    # Does the backend order NULL values as largest or smallest?
    nulls_order_largest = False

    # Is there a 1000 item limit on query parameters?
    supports_1000_query_parameters = True

    # Can an object have an autoincrement primary key of 0? MySQL says No.
    allows_auto_pk_0 = True

    # Do we need to NULL a ForeignKey out, or can the constraint check be
    # deferred
    can_defer_constraint_checks = False

    # date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
    supports_mixed_date_datetime_comparisons = True

    # Does the backend support tablespaces? Default to False because it isn't
    # in the SQL standard.
    supports_tablespaces = False

    # Does the backend reset sequences between tests?
    supports_sequence_reset = True

    # Can the backend determine reliably the length of a CharField?
    can_introspect_max_length = True

    # Can the backend determine reliably if a field is nullable?
    # Note that this is separate from interprets_empty_strings_as_nulls,
    # although the latter feature, when true, interferes with correct
    # setting (and introspection) of CharFields' nullability.
    # This is True for all core backends.
    can_introspect_null = True

    # Confirm support for introspected foreign keys
    # Every database can do this reliably, except MySQL,
    # which can't do it for MyISAM tables
    can_introspect_foreign_keys = True

    # Can the backend introspect an AutoField, instead of an IntegerField?
    can_introspect_autofield = False

    # Can the backend introspect a BigIntegerField, instead of an IntegerField?
    can_introspect_big_integer_field = True

    # Can the backend introspect an BinaryField, instead of an TextField?
    can_introspect_binary_field = True

    # Can the backend introspect an DecimalField, instead of an FloatField?
    can_introspect_decimal_field = True

    # Can the backend introspect an IPAddressField, instead of an CharField?
    can_introspect_ip_address_field = False

    # Can the backend introspect a PositiveIntegerField, instead of an IntegerField?
    can_introspect_positive_integer_field = False

    # Can the backend introspect a SmallIntegerField, instead of an IntegerField?
    can_introspect_small_integer_field = False

    # Can the backend introspect a TimeField, instead of a DateTimeField?
    can_introspect_time_field = True

    # Support for the DISTINCT ON clause
    can_distinct_on_fields = False

    # Does the backend decide to commit before SAVEPOINT statements
    # when autocommit is disabled? http://bugs.python.org/issue8145#msg109965
    autocommits_when_autocommit_is_off = False

    # Does the backend prevent running SQL queries in broken transactions?
    atomic_transactions = True

    # Can we roll back DDL in a transaction?
    can_rollback_ddl = False

    # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
    supports_combined_alters = False

    # Does it support foreign keys?
    supports_foreign_keys = True

    # Does it support CHECK constraints?
    supports_column_check_constraints = True

    # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
    # parameter passing? Note this can be provided by the backend even if not
    # supported by the Python driver
    supports_paramstyle_pyformat = True

    # Does the backend require literal defaults, rather than parameterized ones?
    requires_literal_defaults = False

    # Does the backend require a connection reset after each material schema change?
    connection_persists_old_columns = False

    # What kind of error does the backend throw when accessing closed cursor?
    closed_cursor_error_class = ProgrammingError

    # Does 'a' LIKE 'A' match?
    has_case_insensitive_like = True

    # Does the backend require the sqlparse library for splitting multi-line
    # statements before executing them?
    requires_sqlparse_for_splitting = True

    # Suffix for backends that don't support "SELECT xxx;" queries.
    bare_select_suffix = ''

    # If NULL is implied on columns without needing to be explicitly specified
    implied_column_null = False

    uppercases_column_names = False

    # Does the backend support "select for update" queries with limit (and offset)?
    supports_select_for_update_with_limit = True

    def __init__(self, connection):
        # `connection` is the BaseDatabaseWrapper these features describe.
        self.connection = connection

    @cached_property
    def supports_transactions(self):
        """Confirm support for transactions."""
        # Probes the live database: creates a throwaway table, rolls back
        # an insert, and checks whether the row disappeared. Runs once per
        # connection thanks to cached_property.
        with self.connection.cursor() as cursor:
            cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
            self.connection.set_autocommit(False)
            cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
            self.connection.rollback()
            self.connection.set_autocommit(True)
            cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
            count, = cursor.fetchone()
            cursor.execute('DROP TABLE ROLLBACK_TEST')
        return count == 0

    @cached_property
    def supports_stddev(self):
        """Confirm support for STDDEV and related stats functions."""
        class StdDevPop(object):
            sql_function = 'STDDEV_POP'

        try:
            self.connection.ops.check_aggregate_support(StdDevPop())
            return True
        except NotImplementedError:
            return False

    def introspected_boolean_field_type(self, field=None, created_separately=False):
        """
        What is the type returned when the backend introspects a BooleanField?
        The optional arguments may be used to give further details of the field to be
        introspected; in particular, they are provided by Django's test suite:
        field -- the field definition
        created_separately -- True if the field was added via a SchemaEditor's AddField,
                              False if the field was created with the model

        Note that return value from this function is compared by tests against actual
        introspection results; it should provide expectations, not run an introspection
        itself.
        """
        if self.can_introspect_null and field and field.null:
            return 'NullBooleanField'
        return 'BooleanField'
|
|
@ -0,0 +1,178 @@
|
|||
from collections import namedtuple
|
||||
|
||||
from django.utils import six
|
||||
|
||||
|
||||
# Structure returned by DatabaseIntrospection.get_table_list()
TableInfo = namedtuple('TableInfo', ['name', 'type'])

# Structure returned by the DB-API cursor.description interface (PEP 249)
FieldInfo = namedtuple('FieldInfo', [
    'name', 'type_code', 'display_size', 'internal_size',
    'precision', 'scale', 'null_ok',
])
|
||||
|
||||
|
||||
class BaseDatabaseIntrospection(object):
|
||||
"""
|
||||
This class encapsulates all backend-specific introspection utilities
|
||||
"""
|
||||
data_types_reverse = {}
|
||||
|
||||
def __init__(self, connection):
|
||||
self.connection = connection
|
||||
|
||||
def get_field_type(self, data_type, description):
|
||||
"""Hook for a database backend to use the cursor description to
|
||||
match a Django field type to a database column.
|
||||
|
||||
For Oracle, the column data_type on its own is insufficient to
|
||||
distinguish between a FloatField and IntegerField, for example."""
|
||||
return self.data_types_reverse[data_type]
|
||||
|
||||
def table_name_converter(self, name):
|
||||
"""Apply a conversion to the name for the purposes of comparison.
|
||||
|
||||
The default table name converter is for case sensitive comparison.
|
||||
"""
|
||||
return name
|
||||
|
||||
def column_name_converter(self, name):
|
||||
"""
|
||||
Apply a conversion to the column name for the purposes of comparison.
|
||||
|
||||
Uses table_name_converter() by default.
|
||||
"""
|
||||
return self.table_name_converter(name)
|
||||
|
||||
def table_names(self, cursor=None, include_views=False):
|
||||
"""
|
||||
Returns a list of names of all tables that exist in the database.
|
||||
The returned table list is sorted by Python's default sorting. We
|
||||
do NOT use database's ORDER BY here to avoid subtle differences
|
||||
in sorting order between databases.
|
||||
"""
|
||||
def get_names(cursor):
|
||||
return sorted(ti.name for ti in self.get_table_list(cursor)
|
||||
if include_views or ti.type == 't')
|
||||
if cursor is None:
|
||||
with self.connection.cursor() as cursor:
|
||||
return get_names(cursor)
|
||||
return get_names(cursor)
|
||||
|
||||
def get_table_list(self, cursor):
|
||||
"""
|
||||
Returns an unsorted list of TableInfo named tuples of all tables and
|
||||
views that exist in the database.
|
||||
"""
|
||||
raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_table_list() method')
|
||||
|
||||
def django_table_names(self, only_existing=False, include_views=True):
|
||||
"""
|
||||
Returns a list of all table names that have associated Django models and
|
||||
are in INSTALLED_APPS.
|
||||
|
||||
If only_existing is True, the resulting list will only include the tables
|
||||
that actually exist in the database.
|
||||
"""
|
||||
from django.apps import apps
|
||||
from django.db import router
|
||||
tables = set()
|
||||
for app_config in apps.get_app_configs():
|
||||
for model in router.get_migratable_models(app_config, self.connection.alias):
|
||||
if not model._meta.managed:
|
||||
continue
|
||||
tables.add(model._meta.db_table)
|
||||
tables.update(f.m2m_db_table() for f in model._meta.local_many_to_many)
|
||||
tables = list(tables)
|
||||
if only_existing:
|
||||
existing_tables = self.table_names(include_views=include_views)
|
||||
tables = [
|
||||
t
|
||||
for t in tables
|
||||
if self.table_name_converter(t) in existing_tables
|
||||
]
|
||||
return tables
|
||||
|
||||
def installed_models(self, tables):
|
||||
"Returns a set of all models represented by the provided list of table names."
|
||||
from django.apps import apps
|
||||
from django.db import router
|
||||
all_models = []
|
||||
for app_config in apps.get_app_configs():
|
||||
all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
|
||||
tables = list(map(self.table_name_converter, tables))
|
||||
return {
|
||||
m for m in all_models
|
||||
if self.table_name_converter(m._meta.db_table) in tables
|
||||
}
|
||||
|
||||
def sequence_list(self):
|
||||
"Returns a list of information about all DB sequences for all models in all apps."
|
||||
from django.apps import apps
|
||||
from django.db import models, router
|
||||
|
||||
sequence_list = []
|
||||
|
||||
for app_config in apps.get_app_configs():
|
||||
for model in router.get_migratable_models(app_config, self.connection.alias):
|
||||
if not model._meta.managed:
|
||||
continue
|
||||
if model._meta.swapped:
|
||||
continue
|
||||
for f in model._meta.local_fields:
|
||||
if isinstance(f, models.AutoField):
|
||||
sequence_list.append({'table': model._meta.db_table, 'column': f.column})
|
||||
break # Only one AutoField is allowed per model, so don't bother continuing.
|
||||
|
||||
for f in model._meta.local_many_to_many:
|
||||
# If this is an m2m using an intermediate table,
|
||||
# we don't need to reset the sequence.
|
||||
if f.rel.through is None:
|
||||
sequence_list.append({'table': f.m2m_db_table(), 'column': None})
|
||||
|
||||
return sequence_list
|
||||
|
||||
def get_key_columns(self, cursor, table_name):
|
||||
"""
|
||||
Backends can override this to return a list of (column_name, referenced_table_name,
|
||||
referenced_column_name) for all key columns in given table.
|
||||
"""
|
||||
raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_key_columns() method')
|
||||
|
||||
def get_primary_key_column(self, cursor, table_name):
    """
    Returns the name of the primary key column for the given table, or
    None when no column of the table is flagged as the primary key.
    """
    indexes = self.get_indexes(cursor, table_name)
    for name, info in six.iteritems(indexes):
        if info['primary_key']:
            return name
    return None
|
||||
|
||||
def get_indexes(self, cursor, table_name):
    """
    Returns a dictionary mapping each indexed field name of the given
    table to an infodict of the form:
        {'primary_key': boolean representing whether it's the primary key,
         'unique': boolean representing whether it's a unique index}

    Only single-column indexes are introspected.
    """
    raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_indexes() method')
|
||||
|
||||
def get_constraints(self, cursor, table_name):
    """
    Retrieves any constraints or keys (unique, pk, fk, check, index)
    across one or more columns.

    Returns a dict mapping constraint names to their attributes, where
    attributes is a dict with keys:
     * columns: List of columns this covers
     * primary_key: True if primary key, False otherwise
     * unique: True if this is a unique constraint, False otherwise
     * foreign_key: (table, column) of target, or None
     * check: True if check constraint, False otherwise
     * index: True if index, False otherwise.

    Some backends may return special constraint names that don't exist
    if they don't name constraints of a certain type (e.g. SQLite)
    """
    raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_constraints() method')
|
|
@ -0,0 +1,555 @@
|
|||
import datetime
|
||||
import decimal
|
||||
from importlib import import_module
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.backends import utils
|
||||
from django.utils import six, timezone
|
||||
from django.utils.dateparse import parse_duration
|
||||
from django.utils.deprecation import RemovedInDjango19Warning
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
|
||||
class BaseDatabaseOperations(object):
    """
    This class encapsulates all backend-specific differences, such as the way
    a backend performs ordering or calculates the ID of a recently-inserted
    row.
    """
    compiler_module = "django.db.models.sql.compiler"

    # Integer field safe ranges by `internal_type` as documented
    # in docs/ref/models/fields.txt.
    integer_field_ranges = {
        'SmallIntegerField': (-32768, 32767),
        'IntegerField': (-2147483648, 2147483647),
        'BigIntegerField': (-9223372036854775808, 9223372036854775807),
        'PositiveSmallIntegerField': (0, 32767),
        'PositiveIntegerField': (0, 2147483647),
    }

    def __init__(self, connection):
        self.connection = connection
        # Lazily-populated cache of the imported compiler module; see compiler().
        self._cache = None

    def autoinc_sql(self, table, column):
        """
        Returns any SQL needed to support auto-incrementing primary keys, or
        None if no SQL is necessary.

        This SQL is executed when a table is created.
        """
        return None

    def bulk_batch_size(self, fields, objs):
        """
        Returns the maximum allowed batch size for the backend. The fields
        are the fields going to be inserted in the batch, the objs contains
        all the objects to be inserted.
        """
        return len(objs)

    def cache_key_culling_sql(self):
        """
        Returns an SQL query that retrieves the first cache key greater than the
        n smallest.

        This is used by the 'db' cache backend to determine where to start
        culling.
        """
        return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"

    def unification_cast_sql(self, output_field):
        """
        Given a field instance, returns the SQL necessary to cast the result of
        a union to that type. Note that the resulting string should contain a
        '%s' placeholder for the expression being cast.
        """
        return '%s'

    def date_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
        extracts a value from the given date field field_name.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')

    def date_interval_sql(self, sql, connector, timedelta):
        """
        Implements the date interval functionality for expressions
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')

    def date_trunc_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
        truncates the given date field field_name to a date object with only
        the given specificity.
        """
        # The message previously named a non-existent "datetrunc_sql()" method.
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method')

    def datetime_cast_sql(self):
        """
        Returns the SQL necessary to cast a datetime value so that it will be
        retrieved as a Python datetime object instead of a string.

        This SQL should include a '%s' in place of the field's name.
        """
        return "%s"

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
        'second', returns the SQL that extracts a value from the given
        datetime field field_name, and a tuple of parameters.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method')

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
        'second', returns the SQL that truncates the given datetime field
        field_name to a datetime object with only the given specificity, and
        a tuple of parameters.
        """
        # The message previously misspelled the method as "datetime_trunk_sql()".
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method')

    def deferrable_sql(self):
        """
        Returns the SQL necessary to make a constraint "initially deferred"
        during a CREATE TABLE statement.
        """
        return ''

    def distinct_sql(self, fields):
        """
        Returns an SQL DISTINCT clause which removes duplicate rows from the
        result set. If any fields are given, only the given fields are being
        checked for duplicates.
        """
        if fields:
            raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
        else:
            return 'DISTINCT'

    def drop_foreignkey_sql(self):
        """
        Returns the SQL command that drops a foreign key.
        """
        return "DROP CONSTRAINT"

    def drop_sequence_sql(self, table):
        """
        Returns any SQL necessary to drop the sequence for the given table.
        Returns None if no SQL is necessary.
        """
        return None

    def fetch_returned_insert_id(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table that has an auto-incrementing ID, returns the
        newly created ID.
        """
        return cursor.fetchone()[0]

    def field_cast_sql(self, db_type, internal_type):
        """
        Given a column type (e.g. 'BLOB', 'VARCHAR'), and an internal type
        (e.g. 'GenericIPAddressField'), returns the SQL necessary to cast it
        before using it in a WHERE statement. Note that the resulting string
        should contain a '%s' placeholder for the column being searched against.
        """
        return '%s'

    def force_no_ordering(self):
        """
        Returns a list used in the "ORDER BY" clause to force no ordering at
        all. Returning an empty list means that nothing will be included in the
        ordering.
        """
        return []

    def for_update_sql(self, nowait=False):
        """
        Returns the FOR UPDATE SQL clause to lock rows for an update operation.
        """
        if nowait:
            return 'FOR UPDATE NOWAIT'
        else:
            return 'FOR UPDATE'

    def fulltext_search_sql(self, field_name):
        """
        Returns the SQL WHERE clause to use in order to perform a full-text
        search of the given field_name. Note that the resulting string should
        contain a '%s' placeholder for the value being searched against.
        """
        raise NotImplementedError('Full-text search is not implemented for this database backend')

    def last_executed_query(self, cursor, sql, params):
        """
        Returns a string of the query last executed by the given cursor, with
        placeholders replaced with actual values.

        `sql` is the raw query containing placeholders, and `params` is the
        sequence of parameters. These are used by default, but this method
        exists for database backends to provide a better implementation
        according to their own quoting schemes.
        """
        # Convert params to contain Unicode values.
        def to_unicode(s):
            return force_text(s, strings_only=True, errors='replace')
        if isinstance(params, (list, tuple)):
            u_params = tuple(to_unicode(val) for val in params)
        elif params is None:
            u_params = ()
        else:
            u_params = {to_unicode(k): to_unicode(v) for k, v in params.items()}

        return six.text_type("QUERY = %r - PARAMS = %r") % (sql, u_params)

    def last_insert_id(self, cursor, table_name, pk_name):
        """
        Given a cursor object that has just performed an INSERT statement into
        a table that has an auto-incrementing ID, returns the newly created ID.

        This method also receives the table name and the name of the primary-key
        column.
        """
        return cursor.lastrowid

    def lookup_cast(self, lookup_type):
        """
        Returns the string to use in a query when performing lookups
        ("contains", "like", etc). The resulting string should contain a '%s'
        placeholder for the column being searched against.
        """
        return "%s"

    def max_in_list_size(self):
        """
        Returns the maximum number of items that can be passed in a single 'IN'
        list condition, or None if the backend does not impose a limit.
        """
        return None

    def max_name_length(self):
        """
        Returns the maximum length of table and column names, or None if there
        is no limit.
        """
        return None

    def no_limit_value(self):
        """
        Returns the value to use for the LIMIT when we are wanting "LIMIT
        infinity". Returns None if the limit clause can be omitted in this case.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')

    def pk_default_value(self):
        """
        Returns the value to use during an INSERT statement to specify that
        the field should use its default value.
        """
        return 'DEFAULT'

    def prepare_sql_script(self, sql, _allow_fallback=False):
        """
        Takes a SQL script that may contain multiple lines and returns a list
        of statements to feed to successive cursor.execute() calls.

        Since few databases are able to process raw SQL scripts in a single
        cursor.execute() call and PEP 249 doesn't talk about this use case,
        the default implementation is conservative.
        """
        # Remove _allow_fallback and keep only 'return ...' in Django 1.9.
        try:
            # This import must stay inside the method because it's optional.
            import sqlparse
        except ImportError:
            if _allow_fallback:
                # Without sqlparse, fall back to the legacy (and buggy) logic.
                warnings.warn(
                    "Providing initial SQL data on a %s database will require "
                    "sqlparse in Django 1.9." % self.connection.vendor,
                    RemovedInDjango19Warning)
                from django.core.management.sql import _split_statements
                return _split_statements(sql)
            else:
                raise
        else:
            return [sqlparse.format(statement, strip_comments=True)
                    for statement in sqlparse.split(sql) if statement]

    def process_clob(self, value):
        """
        Returns the value of a CLOB column, for backends that return a locator
        object that requires additional processing.
        """
        return value

    def return_insert_id(self):
        """
        For backends that support returning the last insert ID as part
        of an insert query, this method returns the SQL and params to
        append to the INSERT query. The returned fragment should
        contain a format string to hold the appropriate column.
        """
        pass

    def compiler(self, compiler_name):
        """
        Returns the SQLCompiler class corresponding to the given name,
        in the namespace corresponding to the `compiler_module` attribute
        on this backend.
        """
        if self._cache is None:
            self._cache = import_module(self.compiler_module)
        return getattr(self._cache, compiler_name)

    def quote_name(self, name):
        """
        Returns a quoted version of the given table, index or column name. Does
        not quote the given name if it's already been quoted.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')

    def random_function_sql(self):
        """
        Returns an SQL expression that returns a random value.
        """
        return 'RANDOM()'

    def regex_lookup(self, lookup_type):
        """
        Returns the string to use in a query when performing regular expression
        lookups (using "regex" or "iregex"). The resulting string should
        contain a '%s' placeholder for the column being searched against.

        If the feature is not supported (or part of it is not supported), a
        NotImplementedError exception can be raised.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')

    def savepoint_create_sql(self, sid):
        """
        Returns the SQL for starting a new savepoint. Only required if the
        "uses_savepoints" feature is True. The "sid" parameter is a string
        for the savepoint id.
        """
        return "SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_commit_sql(self, sid):
        """
        Returns the SQL for committing the given savepoint.
        """
        return "RELEASE SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_rollback_sql(self, sid):
        """
        Returns the SQL for rolling back the given savepoint.
        """
        return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)

    def set_time_zone_sql(self):
        """
        Returns the SQL that will set the connection's time zone.

        Returns '' if the backend doesn't support time zones.
        """
        return ''

    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        """
        Returns a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves).

        The returned value also includes SQL statements required to reset DB
        sequences passed in :param sequences:.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.

        The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing the tables being truncated.
        PostgreSQL requires a cascade even if these tables are empty.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations must provide a sql_flush() method')

    def sequence_reset_by_name_sql(self, style, sequences):
        """
        Returns a list of the SQL statements required to reset sequences
        passed in :param sequences:.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []

    def sequence_reset_sql(self, style, model_list):
        """
        Returns a list of the SQL statements required to reset sequences for
        the given models.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []  # No sequence reset required by default.

    def start_transaction_sql(self):
        """
        Returns the SQL statement required to start a transaction.
        """
        return "BEGIN;"

    def end_transaction_sql(self, success=True):
        """
        Returns the SQL statement required to end a transaction.
        """
        if not success:
            return "ROLLBACK;"
        return "COMMIT;"

    def tablespace_sql(self, tablespace, inline=False):
        """
        Returns the SQL that will be used in a query to define the tablespace.

        Returns '' if the backend doesn't support tablespaces.

        If inline is True, the SQL is appended to a row; otherwise it's appended
        to the entire CREATE TABLE or CREATE INDEX statement.
        """
        return ''

    def prep_for_like_query(self, x):
        """Prepares a value for use in a LIKE query."""
        # Use explicit "\\" escapes; "\%" and "\_" are invalid escape
        # sequences (deprecated in Python 3.6+) with the same runtime value.
        return force_text(x).replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")

    # Same as prep_for_like_query(), but called for "iexact" matches, which
    # need not necessarily be implemented using "LIKE" in the backend.
    prep_for_iexact_query = prep_for_like_query

    def validate_autopk_value(self, value):
        """
        Certain backends do not accept some values for "serial" fields
        (for example zero in MySQL). This method will raise a ValueError
        if the value is invalid, otherwise returns validated value.
        """
        return value

    def value_to_db_date(self, value):
        """
        Transform a date value to an object compatible with what is expected
        by the backend driver for date columns.
        """
        if value is None:
            return None
        return six.text_type(value)

    def value_to_db_datetime(self, value):
        """
        Transform a datetime value to an object compatible with what is expected
        by the backend driver for datetime columns.
        """
        if value is None:
            return None
        return six.text_type(value)

    def value_to_db_time(self, value):
        """
        Transform a time value to an object compatible with what is expected
        by the backend driver for time columns.
        """
        if value is None:
            return None
        if timezone.is_aware(value):
            raise ValueError("Django does not support timezone-aware times.")
        return six.text_type(value)

    def value_to_db_decimal(self, value, max_digits, decimal_places):
        """
        Transform a decimal.Decimal value to an object compatible with what is
        expected by the backend driver for decimal (numeric) columns.
        """
        return utils.format_number(value, max_digits, decimal_places)

    def year_lookup_bounds_for_date_field(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        """
        first = datetime.date(value, 1, 1)
        second = datetime.date(value, 12, 31)
        return [first, second]

    def year_lookup_bounds_for_datetime_field(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateTimeField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        """
        first = datetime.datetime(value, 1, 1)
        second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
        if settings.USE_TZ:
            tz = timezone.get_current_timezone()
            first = timezone.make_aware(first, tz)
            second = timezone.make_aware(second, tz)
        return [first, second]

    def get_db_converters(self, expression):
        """Get a list of functions needed to convert field data.

        Some field types on some backends do not provide data in the correct
        format, this is the hook for converter functions.
        """
        return []

    def convert_durationfield_value(self, value, expression, context):
        # Stored microseconds are scaled down to seconds before parsing.
        if value is not None:
            value = str(decimal.Decimal(value) / decimal.Decimal(1000000))
            value = parse_duration(value)
        return value

    def check_aggregate_support(self, aggregate_func):
        """Check that the backend supports the provided aggregate

        This is used on specific backends to rule out known aggregates
        that are known to have faulty implementations. If the named
        aggregate function has a known problem, the backend should
        raise NotImplementedError.
        """
        pass

    def combine_expression(self, connector, sub_expressions):
        """Combine a list of subexpressions into a single expression, using
        the provided connecting operator. This is required because operators
        can vary between backends (e.g., Oracle with %% and &) and between
        subexpression types (e.g., date expressions)
        """
        conn = ' %s ' % connector
        return conn.join(sub_expressions)

    def combine_duration_expression(self, connector, sub_expressions):
        return self.combine_expression(connector, sub_expressions)

    def modify_insert_params(self, placeholders, params):
        """Allow modification of insert parameters. Needed for Oracle Spatial
        backend due to #10888.
        """
        return params

    def integer_field_range(self, internal_type):
        """
        Given an integer field internal type (e.g. 'PositiveIntegerField'),
        returns a tuple of the (min_value, max_value) form representing the
        range of the column type bound to the field.
        """
        return self.integer_field_ranges[internal_type]
|
|
@ -3,9 +3,9 @@ import hashlib
|
|||
from django.db.backends.utils import truncate_name
|
||||
from django.db.models.fields.related import ManyToManyField
|
||||
from django.db.transaction import atomic
|
||||
from django.utils import six
|
||||
from django.utils.encoding import force_bytes
|
||||
from django.utils.log import getLogger
|
||||
from django.utils import six
|
||||
|
||||
logger = getLogger('django.db.backends.schema')
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
from django.core import checks
|
||||
|
||||
|
||||
class BaseDatabaseValidation(object):
    """
    This class encapsulates all backend-specific model validation.
    """
    def __init__(self, connection):
        self.connection = connection

    def validate_field(self, errors, opts, f):
        """
        By default, there is no backend-specific validation.

        This method has been deprecated by the new checks framework. New
        backends should implement check_field instead.
        """
        # This is deliberately commented out. It exists as a marker to
        # remind us to remove this method, and the check_field() shim,
        # when the time comes.
        # warnings.warn('"validate_field" has been deprecated", RemovedInDjango19Warning)
        pass

    def check_field(self, field, **kwargs):
        """
        Shim running the legacy validate_field() hook and converting any
        messages it collects into a list of checks.Error instances.
        """
        class ErrorList(list):
            """A dummy list class that emulates API used by the older
            validate_field() method. When validate_field() is fully
            deprecated, this dummy can be removed too.
            """
            def add(self, opts, error_message):
                self.append(checks.Error(error_message, hint=None, obj=field))

        errors = ErrorList()
        # Some tests create fields in isolation -- the fields are not attached
        # to any model, so they have no `model` attribute.
        opts = field.model._meta if hasattr(field, 'model') else None
        # BUG FIX: arguments were previously passed as (errors, field, opts),
        # swapping `opts` and the field relative to validate_field()'s
        # (errors, opts, f) signature.
        self.validate_field(errors, opts, field)
        return list(errors)
|
|
@ -8,10 +8,13 @@ ImproperlyConfigured.
|
|||
"""
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db.backends import (BaseDatabaseOperations, BaseDatabaseClient,
|
||||
BaseDatabaseIntrospection, BaseDatabaseWrapper, BaseDatabaseFeatures,
|
||||
BaseDatabaseValidation)
|
||||
from django.db.backends.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.db.backends.base.client import BaseDatabaseClient
|
||||
from django.db.backends.base.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.features import BaseDatabaseFeatures
|
||||
from django.db.backends.base.operations import BaseDatabaseOperations
|
||||
from django.db.backends.base.introspection import BaseDatabaseIntrospection
|
||||
from django.db.backends.base.validation import BaseDatabaseValidation
|
||||
|
||||
|
||||
def complain(*args, **kwargs):
|
||||
|
|
|
@ -9,15 +9,35 @@ from __future__ import unicode_literals
|
|||
import datetime
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import utils
|
||||
from django.db.backends import utils as backend_utils
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.utils.encoding import force_str
|
||||
from django.db.backends.mysql.schema import DatabaseSchemaEditor
|
||||
from django.utils import six, timezone
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.safestring import SafeBytes, SafeText
|
||||
|
||||
try:
|
||||
import MySQLdb as Database
|
||||
except ImportError as e:
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
|
||||
|
||||
from MySQLdb.converters import conversions, Thing2Literal
|
||||
from MySQLdb.constants import FIELD_TYPE, CLIENT
|
||||
|
||||
# Some of these import MySQLdb, so import them after checking if it's installed.
|
||||
from .client import DatabaseClient
|
||||
from .creation import DatabaseCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import DatabaseIntrospection
|
||||
from .operations import DatabaseOperations
|
||||
from .validation import DatabaseValidation
|
||||
|
||||
# We want version (1, 2, 1, 'final', 2) or later. We can't just use
|
||||
# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
|
||||
# inadvertently passes the version test.
|
||||
|
@ -27,28 +47,6 @@ if (version < (1, 2, 1) or (version[:3] == (1, 2, 1) and
|
|||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
|
||||
|
||||
from MySQLdb.converters import conversions, Thing2Literal
|
||||
from MySQLdb.constants import FIELD_TYPE, CLIENT
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import utils
|
||||
from django.db.backends import (utils as backend_utils, BaseDatabaseFeatures,
|
||||
BaseDatabaseOperations, BaseDatabaseWrapper)
|
||||
from django.db.backends.mysql.client import DatabaseClient
|
||||
from django.db.backends.mysql.creation import DatabaseCreation
|
||||
from django.db.backends.mysql.introspection import DatabaseIntrospection
|
||||
from django.db.backends.mysql.validation import DatabaseValidation
|
||||
from django.utils.encoding import force_str, force_text
|
||||
from django.db.backends.mysql.schema import DatabaseSchemaEditor
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.safestring import SafeBytes, SafeText
|
||||
from django.utils import six
|
||||
from django.utils import timezone
|
||||
|
||||
DatabaseError = Database.DatabaseError
|
||||
IntegrityError = Database.IntegrityError
|
||||
|
@ -159,259 +157,6 @@ class CursorWrapper(object):
|
|||
self.close()
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
    # Capability flags this backend overrides from BaseDatabaseFeatures.
    # Sentinel fetchmany() yields for an exhausted result set on this driver.
    empty_fetchmany_value = ()
    update_can_self_select = False
    allows_group_by_pk = True
    related_fields_match_type = True
    allow_sliced_subqueries = False
    has_bulk_insert = True
    has_select_for_update = True
    has_select_for_update_nowait = False
    supports_forward_references = False
    supports_regex_backreferencing = False
    supports_date_lookup_using_string = False
    can_introspect_autofield = True
    can_introspect_binary_field = False
    can_introspect_small_integer_field = True
    supports_timezones = False
    requires_explicit_null_ordering_when_grouping = True
    allows_auto_pk_0 = False
    uses_savepoints = True
    can_release_savepoints = True
    atomic_transactions = False
    supports_column_check_constraints = False

    @cached_property
    def _mysql_storage_engine(self):
        "Internal method used in Django tests. Don't rely on this from your code"
        # Queries the server for its default storage engine name; cached for
        # the lifetime of this features instance.
        with self.connection.cursor() as cursor:
            cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'")
            result = cursor.fetchone()
        return result[0]

    @cached_property
    def can_introspect_foreign_keys(self):
        "Confirm support for introspected foreign keys"
        # MyISAM is the only engine for which introspection is disabled here.
        return self._mysql_storage_engine != 'MyISAM'

    @cached_property
    def supports_microsecond_precision(self):
        # See https://github.com/farcepest/MySQLdb1/issues/24 for the reason
        # about requiring MySQLdb 1.2.5
        return self.connection.mysql_version >= (5, 6, 4) and Database.version_info >= (1, 2, 5)

    @cached_property
    def has_zoneinfo_database(self):
        # MySQL accepts full time zones names (eg. Africa/Nairobi) but rejects
        # abbreviations (eg. EAT). When pytz isn't installed and the current
        # time zone is LocalTimezone (the only sensible value in this
        # context), the current time zone name will be an abbreviation. As a
        # consequence, MySQL cannot perform time zone conversions reliably.
        if pytz is None:
            return False

        # Test if the time zone definitions are installed.
        with self.connection.cursor() as cursor:
            cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
            return cursor.fetchone() is not None

    def introspected_boolean_field_type(self, *args, **kwargs):
        # NOTE(review): boolean columns are reported as plain integer fields
        # by this backend's introspection.
        return 'IntegerField'
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
|
||||
compiler_module = "django.db.backends.mysql.compiler"
|
||||
|
||||
# MySQL stores positive fields as UNSIGNED ints.
|
||||
integer_field_ranges = dict(BaseDatabaseOperations.integer_field_ranges,
|
||||
PositiveSmallIntegerField=(0, 4294967295),
|
||||
PositiveIntegerField=(0, 18446744073709551615),
|
||||
)
|
||||
|
||||
def date_extract_sql(self, lookup_type, field_name):
|
||||
# http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
|
||||
if lookup_type == 'week_day':
|
||||
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
|
||||
# Note: WEEKDAY() returns 0-6, Monday=0.
|
||||
return "DAYOFWEEK(%s)" % field_name
|
||||
else:
|
||||
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
|
||||
def date_trunc_sql(self, lookup_type, field_name):
|
||||
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
|
||||
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
|
||||
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
|
||||
try:
|
||||
i = fields.index(lookup_type) + 1
|
||||
except ValueError:
|
||||
sql = field_name
|
||||
else:
|
||||
format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
|
||||
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
|
||||
return sql
|
||||
|
||||
def datetime_extract_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
|
||||
params = [tzname]
|
||||
else:
|
||||
params = []
|
||||
# http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
|
||||
if lookup_type == 'week_day':
|
||||
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
|
||||
# Note: WEEKDAY() returns 0-6, Monday=0.
|
||||
sql = "DAYOFWEEK(%s)" % field_name
|
||||
else:
|
||||
sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
return sql, params
|
||||
|
||||
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
|
||||
params = [tzname]
|
||||
else:
|
||||
params = []
|
||||
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
|
||||
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
|
||||
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
|
||||
try:
|
||||
i = fields.index(lookup_type) + 1
|
||||
except ValueError:
|
||||
sql = field_name
|
||||
else:
|
||||
format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
|
||||
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
|
||||
return sql, params
|
||||
|
||||
def date_interval_sql(self, timedelta):
|
||||
return "INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND" % (
|
||||
timedelta.days, timedelta.seconds, timedelta.microseconds), []
|
||||
|
||||
def format_for_duration_arithmetic(self, sql):
|
||||
return 'INTERVAL %s MICROSECOND' % sql
|
||||
|
||||
def drop_foreignkey_sql(self):
|
||||
return "DROP FOREIGN KEY"
|
||||
|
||||
def force_no_ordering(self):
|
||||
"""
|
||||
"ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
|
||||
columns. If no ordering would otherwise be applied, we don't want any
|
||||
implicit sorting going on.
|
||||
"""
|
||||
return [(None, ("NULL", [], False))]
|
||||
|
||||
def fulltext_search_sql(self, field_name):
|
||||
return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
|
||||
|
||||
def last_executed_query(self, cursor, sql, params):
|
||||
# With MySQLdb, cursor objects have an (undocumented) "_last_executed"
|
||||
# attribute where the exact query sent to the database is saved.
|
||||
# See MySQLdb/cursors.py in the source distribution.
|
||||
return force_text(getattr(cursor, '_last_executed', None), errors='replace')
|
||||
|
||||
def no_limit_value(self):
|
||||
# 2**64 - 1, as recommended by the MySQL documentation
|
||||
return 18446744073709551615
|
||||
|
||||
def quote_name(self, name):
|
||||
if name.startswith("`") and name.endswith("`"):
|
||||
return name # Quoting once is enough.
|
||||
return "`%s`" % name
|
||||
|
||||
def random_function_sql(self):
|
||||
return 'RAND()'
|
||||
|
||||
def sql_flush(self, style, tables, sequences, allow_cascade=False):
|
||||
# NB: The generated SQL below is specific to MySQL
|
||||
# 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
|
||||
# to clear all tables of all data
|
||||
if tables:
|
||||
sql = ['SET FOREIGN_KEY_CHECKS = 0;']
|
||||
for table in tables:
|
||||
sql.append('%s %s;' % (
|
||||
style.SQL_KEYWORD('TRUNCATE'),
|
||||
style.SQL_FIELD(self.quote_name(table)),
|
||||
))
|
||||
sql.append('SET FOREIGN_KEY_CHECKS = 1;')
|
||||
sql.extend(self.sequence_reset_by_name_sql(style, sequences))
|
||||
return sql
|
||||
else:
|
||||
return []
|
||||
|
||||
def validate_autopk_value(self, value):
|
||||
# MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
|
||||
if value == 0:
|
||||
raise ValueError('The database backend does not accept 0 as a '
|
||||
'value for AutoField.')
|
||||
return value
|
||||
|
||||
def value_to_db_datetime(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# MySQL doesn't support tz-aware datetimes
|
||||
if timezone.is_aware(value):
|
||||
if settings.USE_TZ:
|
||||
value = value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
else:
|
||||
raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
|
||||
|
||||
return six.text_type(value)
|
||||
|
||||
def value_to_db_time(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# MySQL doesn't support tz-aware times
|
||||
if timezone.is_aware(value):
|
||||
raise ValueError("MySQL backend does not support timezone-aware times.")
|
||||
|
||||
return six.text_type(value)
|
||||
|
||||
def max_name_length(self):
|
||||
return 64
|
||||
|
||||
def bulk_insert_sql(self, fields, num_values):
|
||||
items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
|
||||
return "VALUES " + ", ".join([items_sql] * num_values)
|
||||
|
||||
def combine_expression(self, connector, sub_expressions):
|
||||
"""
|
||||
MySQL requires special cases for ^ operators in query expressions
|
||||
"""
|
||||
if connector == '^':
|
||||
return 'POW(%s)' % ','.join(sub_expressions)
|
||||
return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
|
||||
|
||||
def get_db_converters(self, expression):
|
||||
converters = super(DatabaseOperations, self).get_db_converters(expression)
|
||||
internal_type = expression.output_field.get_internal_type()
|
||||
if internal_type in ['BooleanField', 'NullBooleanField']:
|
||||
converters.append(self.convert_booleanfield_value)
|
||||
if internal_type == 'UUIDField':
|
||||
converters.append(self.convert_uuidfield_value)
|
||||
if internal_type == 'TextField':
|
||||
converters.append(self.convert_textfield_value)
|
||||
return converters
|
||||
|
||||
def convert_booleanfield_value(self, value, expression, context):
|
||||
if value in (0, 1):
|
||||
value = bool(value)
|
||||
return value
|
||||
|
||||
def convert_uuidfield_value(self, value, expression, context):
|
||||
if value is not None:
|
||||
value = uuid.UUID(value)
|
||||
return value
|
||||
|
||||
def convert_textfield_value(self, value, expression, context):
|
||||
if value is not None:
|
||||
value = force_text(value)
|
||||
return value
|
||||
|
||||
|
||||
class DatabaseWrapper(BaseDatabaseWrapper):
|
||||
vendor = 'mysql'
|
||||
# This dictionary maps Field objects to their associated MySQL column
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import subprocess
|
||||
|
||||
from django.db.backends import BaseDatabaseClient
|
||||
from django.db.backends.base.client import BaseDatabaseClient
|
||||
|
||||
|
||||
class DatabaseClient(BaseDatabaseClient):
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from django.db.backends.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.creation import BaseDatabaseCreation
|
||||
|
||||
|
||||
class DatabaseCreation(BaseDatabaseCreation):
|
||||
|
|
|
@ -0,0 +1,70 @@
|
|||
from django.db.backends.base.features import BaseDatabaseFeatures
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
from .base import Database
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
empty_fetchmany_value = ()
|
||||
update_can_self_select = False
|
||||
allows_group_by_pk = True
|
||||
related_fields_match_type = True
|
||||
allow_sliced_subqueries = False
|
||||
has_bulk_insert = True
|
||||
has_select_for_update = True
|
||||
has_select_for_update_nowait = False
|
||||
supports_forward_references = False
|
||||
supports_regex_backreferencing = False
|
||||
supports_date_lookup_using_string = False
|
||||
can_introspect_autofield = True
|
||||
can_introspect_binary_field = False
|
||||
can_introspect_small_integer_field = True
|
||||
supports_timezones = False
|
||||
requires_explicit_null_ordering_when_grouping = True
|
||||
allows_auto_pk_0 = False
|
||||
uses_savepoints = True
|
||||
can_release_savepoints = True
|
||||
atomic_transactions = False
|
||||
supports_column_check_constraints = False
|
||||
|
||||
@cached_property
|
||||
def _mysql_storage_engine(self):
|
||||
"Internal method used in Django tests. Don't rely on this from your code"
|
||||
with self.connection.cursor() as cursor:
|
||||
cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'")
|
||||
result = cursor.fetchone()
|
||||
return result[0]
|
||||
|
||||
@cached_property
|
||||
def can_introspect_foreign_keys(self):
|
||||
"Confirm support for introspected foreign keys"
|
||||
return self._mysql_storage_engine != 'MyISAM'
|
||||
|
||||
@cached_property
|
||||
def supports_microsecond_precision(self):
|
||||
# See https://github.com/farcepest/MySQLdb1/issues/24 for the reason
|
||||
# about requiring MySQLdb 1.2.5
|
||||
return self.connection.mysql_version >= (5, 6, 4) and Database.version_info >= (1, 2, 5)
|
||||
|
||||
@cached_property
|
||||
def has_zoneinfo_database(self):
|
||||
# MySQL accepts full time zones names (eg. Africa/Nairobi) but rejects
|
||||
# abbreviations (eg. EAT). When pytz isn't installed and the current
|
||||
# time zone is LocalTimezone (the only sensible value in this
|
||||
# context), the current time zone name will be an abbreviation. As a
|
||||
# consequence, MySQL cannot perform time zone conversions reliably.
|
||||
if pytz is None:
|
||||
return False
|
||||
|
||||
# Test if the time zone definitions are installed.
|
||||
with self.connection.cursor() as cursor:
|
||||
cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
|
||||
return cursor.fetchone() is not None
|
||||
|
||||
def introspected_boolean_field_type(self, *args, **kwargs):
|
||||
return 'IntegerField'
|
|
@ -1,10 +1,14 @@
|
|||
from collections import namedtuple
|
||||
import re
|
||||
from .base import FIELD_TYPE
|
||||
|
||||
from django.utils.datastructures import OrderedSet
|
||||
from django.db.backends import BaseDatabaseIntrospection, FieldInfo, TableInfo
|
||||
from django.db.backends.base.introspection import (
|
||||
BaseDatabaseIntrospection, FieldInfo, TableInfo,
|
||||
)
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
from MySQLdb.constants import FIELD_TYPE
|
||||
|
||||
FieldInfo = namedtuple('FieldInfo', FieldInfo._fields + ('extra',))
|
||||
|
||||
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
|
||||
|
|
|
@ -0,0 +1,200 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import uuid
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.backends.base.operations import BaseDatabaseOperations
|
||||
from django.utils import six, timezone
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
|
||||
compiler_module = "django.db.backends.mysql.compiler"
|
||||
|
||||
# MySQL stores positive fields as UNSIGNED ints.
|
||||
integer_field_ranges = dict(BaseDatabaseOperations.integer_field_ranges,
|
||||
PositiveSmallIntegerField=(0, 4294967295),
|
||||
PositiveIntegerField=(0, 18446744073709551615),
|
||||
)
|
||||
|
||||
def date_extract_sql(self, lookup_type, field_name):
|
||||
# http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
|
||||
if lookup_type == 'week_day':
|
||||
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
|
||||
# Note: WEEKDAY() returns 0-6, Monday=0.
|
||||
return "DAYOFWEEK(%s)" % field_name
|
||||
else:
|
||||
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
|
||||
def date_trunc_sql(self, lookup_type, field_name):
|
||||
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
|
||||
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
|
||||
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
|
||||
try:
|
||||
i = fields.index(lookup_type) + 1
|
||||
except ValueError:
|
||||
sql = field_name
|
||||
else:
|
||||
format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
|
||||
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
|
||||
return sql
|
||||
|
||||
def datetime_extract_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
|
||||
params = [tzname]
|
||||
else:
|
||||
params = []
|
||||
# http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
|
||||
if lookup_type == 'week_day':
|
||||
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
|
||||
# Note: WEEKDAY() returns 0-6, Monday=0.
|
||||
sql = "DAYOFWEEK(%s)" % field_name
|
||||
else:
|
||||
sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
return sql, params
|
||||
|
||||
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
|
||||
params = [tzname]
|
||||
else:
|
||||
params = []
|
||||
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
|
||||
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
|
||||
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
|
||||
try:
|
||||
i = fields.index(lookup_type) + 1
|
||||
except ValueError:
|
||||
sql = field_name
|
||||
else:
|
||||
format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
|
||||
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
|
||||
return sql, params
|
||||
|
||||
def date_interval_sql(self, timedelta):
|
||||
return "INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND" % (
|
||||
timedelta.days, timedelta.seconds, timedelta.microseconds), []
|
||||
|
||||
def format_for_duration_arithmetic(self, sql):
|
||||
return 'INTERVAL %s MICROSECOND' % sql
|
||||
|
||||
def drop_foreignkey_sql(self):
|
||||
return "DROP FOREIGN KEY"
|
||||
|
||||
def force_no_ordering(self):
|
||||
"""
|
||||
"ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
|
||||
columns. If no ordering would otherwise be applied, we don't want any
|
||||
implicit sorting going on.
|
||||
"""
|
||||
return [(None, ("NULL", [], False))]
|
||||
|
||||
def fulltext_search_sql(self, field_name):
|
||||
return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
|
||||
|
||||
def last_executed_query(self, cursor, sql, params):
|
||||
# With MySQLdb, cursor objects have an (undocumented) "_last_executed"
|
||||
# attribute where the exact query sent to the database is saved.
|
||||
# See MySQLdb/cursors.py in the source distribution.
|
||||
return force_text(getattr(cursor, '_last_executed', None), errors='replace')
|
||||
|
||||
def no_limit_value(self):
|
||||
# 2**64 - 1, as recommended by the MySQL documentation
|
||||
return 18446744073709551615
|
||||
|
||||
def quote_name(self, name):
|
||||
if name.startswith("`") and name.endswith("`"):
|
||||
return name # Quoting once is enough.
|
||||
return "`%s`" % name
|
||||
|
||||
def random_function_sql(self):
|
||||
return 'RAND()'
|
||||
|
||||
def sql_flush(self, style, tables, sequences, allow_cascade=False):
|
||||
# NB: The generated SQL below is specific to MySQL
|
||||
# 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
|
||||
# to clear all tables of all data
|
||||
if tables:
|
||||
sql = ['SET FOREIGN_KEY_CHECKS = 0;']
|
||||
for table in tables:
|
||||
sql.append('%s %s;' % (
|
||||
style.SQL_KEYWORD('TRUNCATE'),
|
||||
style.SQL_FIELD(self.quote_name(table)),
|
||||
))
|
||||
sql.append('SET FOREIGN_KEY_CHECKS = 1;')
|
||||
sql.extend(self.sequence_reset_by_name_sql(style, sequences))
|
||||
return sql
|
||||
else:
|
||||
return []
|
||||
|
||||
def validate_autopk_value(self, value):
|
||||
# MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
|
||||
if value == 0:
|
||||
raise ValueError('The database backend does not accept 0 as a '
|
||||
'value for AutoField.')
|
||||
return value
|
||||
|
||||
def value_to_db_datetime(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# MySQL doesn't support tz-aware datetimes
|
||||
if timezone.is_aware(value):
|
||||
if settings.USE_TZ:
|
||||
value = value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
else:
|
||||
raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
|
||||
|
||||
return six.text_type(value)
|
||||
|
||||
def value_to_db_time(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# MySQL doesn't support tz-aware times
|
||||
if timezone.is_aware(value):
|
||||
raise ValueError("MySQL backend does not support timezone-aware times.")
|
||||
|
||||
return six.text_type(value)
|
||||
|
||||
def max_name_length(self):
|
||||
return 64
|
||||
|
||||
def bulk_insert_sql(self, fields, num_values):
|
||||
items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
|
||||
return "VALUES " + ", ".join([items_sql] * num_values)
|
||||
|
||||
def combine_expression(self, connector, sub_expressions):
|
||||
"""
|
||||
MySQL requires special cases for ^ operators in query expressions
|
||||
"""
|
||||
if connector == '^':
|
||||
return 'POW(%s)' % ','.join(sub_expressions)
|
||||
return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
|
||||
|
||||
def get_db_converters(self, expression):
|
||||
converters = super(DatabaseOperations, self).get_db_converters(expression)
|
||||
internal_type = expression.output_field.get_internal_type()
|
||||
if internal_type in ['BooleanField', 'NullBooleanField']:
|
||||
converters.append(self.convert_booleanfield_value)
|
||||
if internal_type == 'UUIDField':
|
||||
converters.append(self.convert_uuidfield_value)
|
||||
if internal_type == 'TextField':
|
||||
converters.append(self.convert_textfield_value)
|
||||
return converters
|
||||
|
||||
def convert_booleanfield_value(self, value, expression, context):
|
||||
if value in (0, 1):
|
||||
value = bool(value)
|
||||
return value
|
||||
|
||||
def convert_uuidfield_value(self, value, expression, context):
|
||||
if value is not None:
|
||||
value = uuid.UUID(value)
|
||||
return value
|
||||
|
||||
def convert_textfield_value(self, value, expression, context):
|
||||
if value is not None:
|
||||
value = force_text(value)
|
||||
return value
|
|
@ -1,4 +1,4 @@
|
|||
from django.db.backends.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.models import NOT_PROVIDED
|
||||
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from django.core import checks
|
||||
from django.db.backends import BaseDatabaseValidation
|
||||
from django.db.backends.base.validation import BaseDatabaseValidation
|
||||
|
||||
|
||||
class DatabaseValidation(BaseDatabaseValidation):
|
||||
|
|
|
@ -7,12 +7,20 @@ from __future__ import unicode_literals
|
|||
|
||||
import datetime
|
||||
import decimal
|
||||
import re
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import uuid
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import utils
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.db.backends.base.validation import BaseDatabaseValidation
|
||||
from django.utils import six, timezone
|
||||
from django.utils.duration import duration_string
|
||||
from django.utils.encoding import force_bytes, force_text
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
def _setup_environment(environ):
|
||||
# Cygwin requires some special voodoo to set the environment variables
|
||||
|
@ -29,7 +37,6 @@ def _setup_environment(environ):
|
|||
for name, value in environ:
|
||||
kernel32.SetEnvironmentVariableA(name, value)
|
||||
else:
|
||||
import os
|
||||
os.environ.update(environ)
|
||||
|
||||
_setup_environment([
|
||||
|
@ -47,520 +54,18 @@ except ImportError as e:
|
|||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e)
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import utils
|
||||
from django.db.backends import (BaseDatabaseFeatures, BaseDatabaseOperations,
|
||||
BaseDatabaseWrapper, BaseDatabaseValidation, utils as backend_utils)
|
||||
from django.db.backends.oracle.client import DatabaseClient
|
||||
from django.db.backends.oracle.creation import DatabaseCreation
|
||||
from django.db.backends.oracle.introspection import DatabaseIntrospection
|
||||
from django.db.backends.oracle.schema import DatabaseSchemaEditor
|
||||
from django.db.utils import InterfaceError
|
||||
from django.utils import six, timezone
|
||||
from django.utils.duration import duration_string
|
||||
from django.utils.encoding import force_bytes, force_text
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
# Some of these import cx_Oracle, so import them after checking if it's installed.
|
||||
from .client import DatabaseClient
|
||||
from .creation import DatabaseCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import DatabaseIntrospection
|
||||
from .operations import DatabaseOperations
|
||||
from .schema import DatabaseSchemaEditor
|
||||
from .utils import convert_unicode, Oracle_datetime
|
||||
|
||||
DatabaseError = Database.DatabaseError
|
||||
IntegrityError = Database.IntegrityError
|
||||
|
||||
# Check whether cx_Oracle was compiled with the WITH_UNICODE option if cx_Oracle is pre-5.1. This will
|
||||
# also be True for cx_Oracle 5.1 and in Python 3.0. See #19606
|
||||
if int(Database.version.split('.', 1)[0]) >= 5 and \
|
||||
(int(Database.version.split('.', 2)[1]) >= 1 or
|
||||
not hasattr(Database, 'UNICODE')):
|
||||
convert_unicode = force_text
|
||||
else:
|
||||
convert_unicode = force_bytes
|
||||
|
||||
|
||||
class Oracle_datetime(datetime.datetime):
|
||||
"""
|
||||
A datetime object, with an additional class attribute
|
||||
to tell cx_Oracle to save the microseconds too.
|
||||
"""
|
||||
input_size = Database.TIMESTAMP
|
||||
|
||||
@classmethod
|
||||
def from_datetime(cls, dt):
|
||||
return Oracle_datetime(dt.year, dt.month, dt.day,
|
||||
dt.hour, dt.minute, dt.second, dt.microsecond)
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
empty_fetchmany_value = ()
|
||||
needs_datetime_string_cast = False
|
||||
interprets_empty_strings_as_nulls = True
|
||||
uses_savepoints = True
|
||||
has_select_for_update = True
|
||||
has_select_for_update_nowait = True
|
||||
can_return_id_from_insert = True
|
||||
allow_sliced_subqueries = False
|
||||
supports_subqueries_in_group_by = False
|
||||
supports_transactions = True
|
||||
supports_timezones = False
|
||||
has_zoneinfo_database = pytz is not None
|
||||
supports_bitwise_or = False
|
||||
has_native_duration_field = True
|
||||
can_defer_constraint_checks = True
|
||||
supports_partially_nullable_unique_constraints = False
|
||||
truncates_names = True
|
||||
has_bulk_insert = True
|
||||
supports_tablespaces = True
|
||||
supports_sequence_reset = False
|
||||
can_introspect_max_length = False
|
||||
can_introspect_time_field = False
|
||||
atomic_transactions = False
|
||||
supports_combined_alters = False
|
||||
nulls_order_largest = True
|
||||
requires_literal_defaults = True
|
||||
connection_persists_old_columns = True
|
||||
closed_cursor_error_class = InterfaceError
|
||||
bare_select_suffix = " FROM DUAL"
|
||||
uppercases_column_names = True
|
||||
# select for update with limit can be achieved on Oracle, but not with the current backend.
|
||||
supports_select_for_update_with_limit = False
|
||||
|
||||
def introspected_boolean_field_type(self, field=None, created_separately=False):
|
||||
"""
|
||||
Some versions of Oracle -- we've seen this on 11.2.0.1 and suspect
|
||||
it goes back -- have a weird bug where, when an integer column is
|
||||
added to an existing table with a default, its precision is later
|
||||
reported on introspection as 0, regardless of the real precision.
|
||||
For Django introspection, this means that such columns are reported
|
||||
as IntegerField even if they are really BigIntegerField or BooleanField.
|
||||
|
||||
The bug is solved in Oracle 11.2.0.2 and up.
|
||||
"""
|
||||
if self.connection.oracle_full_version < '11.2.0.2' and field and field.has_default() and created_separately:
|
||||
return 'IntegerField'
|
||||
return super(DatabaseFeatures, self).introspected_boolean_field_type(field, created_separately)
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
|
||||
compiler_module = "django.db.backends.oracle.compiler"
|
||||
|
||||
# Oracle uses NUMBER(11) and NUMBER(19) for integer fields.
|
||||
integer_field_ranges = {
|
||||
'SmallIntegerField': (-99999999999, 99999999999),
|
||||
'IntegerField': (-99999999999, 99999999999),
|
||||
'BigIntegerField': (-9999999999999999999, 9999999999999999999),
|
||||
'PositiveSmallIntegerField': (0, 99999999999),
|
||||
'PositiveIntegerField': (0, 99999999999),
|
||||
}
|
||||
|
||||
def autoinc_sql(self, table, column):
|
||||
# To simulate auto-incrementing primary keys in Oracle, we have to
|
||||
# create a sequence and a trigger.
|
||||
sq_name = self._get_sequence_name(table)
|
||||
tr_name = self._get_trigger_name(table)
|
||||
tbl_name = self.quote_name(table)
|
||||
col_name = self.quote_name(column)
|
||||
sequence_sql = """
|
||||
DECLARE
|
||||
i INTEGER;
|
||||
BEGIN
|
||||
SELECT COUNT(*) INTO i FROM USER_CATALOG
|
||||
WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
|
||||
IF i = 0 THEN
|
||||
EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
|
||||
END IF;
|
||||
END;
|
||||
/""" % locals()
|
||||
trigger_sql = """
|
||||
CREATE OR REPLACE TRIGGER "%(tr_name)s"
|
||||
BEFORE INSERT ON %(tbl_name)s
|
||||
FOR EACH ROW
|
||||
WHEN (new.%(col_name)s IS NULL)
|
||||
BEGIN
|
||||
SELECT "%(sq_name)s".nextval
|
||||
INTO :new.%(col_name)s FROM dual;
|
||||
END;
|
||||
/""" % locals()
|
||||
return sequence_sql, trigger_sql
|
||||
|
||||
def cache_key_culling_sql(self):
|
||||
return """
|
||||
SELECT cache_key
|
||||
FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s)
|
||||
WHERE rank = %%s + 1
|
||||
"""
|
||||
|
||||
def date_extract_sql(self, lookup_type, field_name):
|
||||
if lookup_type == 'week_day':
|
||||
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
|
||||
return "TO_CHAR(%s, 'D')" % field_name
|
||||
else:
|
||||
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
|
||||
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
|
||||
def date_interval_sql(self, timedelta):
|
||||
"""
|
||||
Implements the interval functionality for expressions
|
||||
format for Oracle:
|
||||
INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6)
|
||||
"""
|
||||
minutes, seconds = divmod(timedelta.seconds, 60)
|
||||
hours, minutes = divmod(minutes, 60)
|
||||
days = str(timedelta.days)
|
||||
day_precision = len(days)
|
||||
fmt = "INTERVAL '%s %02d:%02d:%02d.%06d' DAY(%d) TO SECOND(6)"
|
||||
return fmt % (days, hours, minutes, seconds, timedelta.microseconds,
|
||||
day_precision), []
|
||||
|
||||
def date_trunc_sql(self, lookup_type, field_name):
|
||||
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
|
||||
if lookup_type in ('year', 'month'):
|
||||
return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
|
||||
else:
|
||||
return "TRUNC(%s)" % field_name
|
||||
|
||||
# Oracle crashes with "ORA-03113: end-of-file on communication channel"
|
||||
# if the time zone name is passed in parameter. Use interpolation instead.
|
||||
# https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ
|
||||
# This regexp matches all time zone names from the zoneinfo database.
|
||||
_tzname_re = re.compile(r'^[\w/:+-]+$')
|
||||
|
||||
def _convert_field_to_tz(self, field_name, tzname):
|
||||
if not self._tzname_re.match(tzname):
|
||||
raise ValueError("Invalid time zone name: %s" % tzname)
|
||||
# Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE.
|
||||
result = "(FROM_TZ(%s, '0:00') AT TIME ZONE '%s')" % (field_name, tzname)
|
||||
# Extracting from a TIMESTAMP WITH TIME ZONE ignore the time zone.
|
||||
# Convert to a DATETIME, which is called DATE by Oracle. There's no
|
||||
# built-in function to do that; the easiest is to go through a string.
|
||||
result = "TO_CHAR(%s, 'YYYY-MM-DD HH24:MI:SS')" % result
|
||||
result = "TO_DATE(%s, 'YYYY-MM-DD HH24:MI:SS')" % result
|
||||
# Re-convert to a TIMESTAMP because EXTRACT only handles the date part
|
||||
# on DATE values, even though they actually store the time part.
|
||||
return "CAST(%s AS TIMESTAMP)" % result
|
||||
|
||||
def datetime_extract_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = self._convert_field_to_tz(field_name, tzname)
|
||||
if lookup_type == 'week_day':
|
||||
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
|
||||
sql = "TO_CHAR(%s, 'D')" % field_name
|
||||
else:
|
||||
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
|
||||
sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
return sql, []
|
||||
|
||||
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = self._convert_field_to_tz(field_name, tzname)
|
||||
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
|
||||
if lookup_type in ('year', 'month'):
|
||||
sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
|
||||
elif lookup_type == 'day':
|
||||
sql = "TRUNC(%s)" % field_name
|
||||
elif lookup_type == 'hour':
|
||||
sql = "TRUNC(%s, 'HH24')" % field_name
|
||||
elif lookup_type == 'minute':
|
||||
sql = "TRUNC(%s, 'MI')" % field_name
|
||||
else:
|
||||
sql = field_name # Cast to DATE removes sub-second precision.
|
||||
return sql, []
|
||||
|
||||
def get_db_converters(self, expression):
|
||||
converters = super(DatabaseOperations, self).get_db_converters(expression)
|
||||
internal_type = expression.output_field.get_internal_type()
|
||||
if internal_type == 'TextField':
|
||||
converters.append(self.convert_textfield_value)
|
||||
elif internal_type == 'BinaryField':
|
||||
converters.append(self.convert_binaryfield_value)
|
||||
elif internal_type in ['BooleanField', 'NullBooleanField']:
|
||||
converters.append(self.convert_booleanfield_value)
|
||||
elif internal_type == 'DateField':
|
||||
converters.append(self.convert_datefield_value)
|
||||
elif internal_type == 'TimeField':
|
||||
converters.append(self.convert_timefield_value)
|
||||
elif internal_type == 'UUIDField':
|
||||
converters.append(self.convert_uuidfield_value)
|
||||
converters.append(self.convert_empty_values)
|
||||
return converters
|
||||
|
||||
def convert_empty_values(self, value, expression, context):
    """
    Map NULL back to the empty string (or b'' for BinaryField).

    Oracle stores empty strings as NULL; Django's convention is to use
    '' instead of NULL, but only for fields that accept the empty string.
    """
    field = expression.output_field
    if value is None and field.empty_strings_allowed:
        value = b'' if field.get_internal_type() == 'BinaryField' else ''
    return value

def convert_textfield_value(self, value, expression, context):
    """Read CLOB handles returned by cx_Oracle into text."""
    if isinstance(value, Database.LOB):
        value = force_text(value.read())
    return value

def convert_binaryfield_value(self, value, expression, context):
    """Read BLOB handles returned by cx_Oracle into bytes."""
    if isinstance(value, Database.LOB):
        value = force_bytes(value.read())
    return value

def convert_booleanfield_value(self, value, expression, context):
    """Coerce Oracle's 0/1 integers to real booleans."""
    if value in (1, 0):
        value = bool(value)
    return value

# cx_Oracle always returns datetime.datetime objects for
# DATE and TIMESTAMP columns, but Django wants to see a
# python datetime.date, .time, or .datetime.
def convert_datefield_value(self, value, expression, context):
    if isinstance(value, Database.Timestamp):
        value = value.date()
    # BUGFIX: previously non-Timestamp values (e.g. None or an already
    # converted date) were silently dropped, returning None; pass them
    # through like convert_timefield_value does.
    return value

def convert_timefield_value(self, value, expression, context):
    if isinstance(value, Database.Timestamp):
        value = value.time()
    return value

def convert_uuidfield_value(self, value, expression, context):
    """Parse the CHAR(32) representation back into a uuid.UUID."""
    if value is not None:
        value = uuid.UUID(value)
    return value
|
||||
|
||||
def deferrable_sql(self):
    """Suffix enabling deferred foreign-key constraint checking."""
    return " DEFERRABLE INITIALLY DEFERRED"

def drop_sequence_sql(self, table):
    """SQL dropping the auto-increment sequence backing `table`."""
    sequence = self._get_sequence_name(table)
    return "DROP SEQUENCE %s;" % self.quote_name(sequence)

def fetch_returned_insert_id(self, cursor):
    # The bind variable was attached by InsertIdVar.bind_parameter().
    return int(cursor._insert_id_var.getvalue())

def field_cast_sql(self, db_type, internal_type):
    """LOB columns can't be compared directly; go through DBMS_LOB.SUBSTR."""
    is_lob = bool(db_type) and db_type.endswith('LOB')
    return "DBMS_LOB.SUBSTR(%s)" if is_lob else "%s"

def last_executed_query(self, cursor, sql, params):
    # http://cx-oracle.sourceforge.net/html/cursor.html#Cursor.statement
    # The DB API definition does not define this attribute.
    statement = cursor.statement
    if statement and six.PY2 and not isinstance(statement, unicode):
        statement = statement.decode('utf-8')
    # Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's
    # `statement` doesn't contain the query parameters. refs #20010.
    return super(DatabaseOperations, self).last_executed_query(cursor, statement, params)

def last_insert_id(self, cursor, table_name, pk_name):
    """Read the current value of the table's backing sequence."""
    sequence = self._get_sequence_name(table_name)
    cursor.execute('SELECT "%s".currval FROM dual' % sequence)
    return cursor.fetchone()[0]

def lookup_cast(self, lookup_type):
    # Case-insensitive lookups compare the UPPER()-cased column.
    case_insensitive = ('iexact', 'icontains', 'istartswith', 'iendswith')
    return "UPPER(%s)" if lookup_type in case_insensitive else "%s"

def max_in_list_size(self):
    # Oracle caps IN (...) lists at 1000 items.
    return 1000

def max_name_length(self):
    # Oracle identifiers are limited to 30 characters.
    return 30

def prep_for_iexact_query(self, x):
    # No transformation needed: lookup_cast() already UPPER()s the column.
    return x

def process_clob(self, value):
    """Read a CLOB handle into text; NULL becomes the empty string."""
    return '' if value is None else force_text(value.read())
|
||||
|
||||
def quote_name(self, name):
    """
    Quote an identifier for Oracle, uppercasing it first.

    SQL92 requires delimited (quoted) names to be case-sensitive; unquoted
    Oracle identifiers are case-insensitive but default to uppercase, so we
    simplify things by always uppercasing.
    """
    if not (name.startswith('"') or name.endswith('"')):
        name = '"%s"' % backend_utils.truncate_name(name.upper(),
                                                    self.max_name_length())
    # Oracle puts the query text into a (query % args) construct, so % signs
    # in names must be doubled; the '%%' collapses back to '%' at that stage,
    # so the name is not really lengthened.
    return name.replace('%', '%%').upper()

def random_function_sql(self):
    return "DBMS_RANDOM.RANDOM"

def regex_lookup(self, lookup_type):
    """REGEXP_LIKE with case-sensitive ('c') or -insensitive ('i') matching."""
    match_option = "'c'" if lookup_type == 'regex' else "'i'"
    return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option

def return_insert_id(self):
    # The InsertIdVar late-binds a NUMBER variable to receive the new id.
    return "RETURNING %s INTO %%s", (InsertIdVar(),)

def savepoint_create_sql(self, sid):
    return convert_unicode("SAVEPOINT %s" % self.quote_name(sid))

def savepoint_rollback_sql(self, sid):
    return convert_unicode("ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid))
|
||||
|
||||
def sql_flush(self, style, tables, sequences, allow_cascade=False):
    """
    Return 'DELETE FROM x;'-style statements emptying `tables`, followed by
    statements resetting `sequences`.
    """
    if not tables:
        return []
    # Oracle does support TRUNCATE, but it seems to get us into FK
    # referential trouble, whereas DELETE FROM table works.
    statements = [
        '%s %s %s;' % (
            style.SQL_KEYWORD('DELETE'),
            style.SQL_KEYWORD('FROM'),
            style.SQL_FIELD(self.quote_name(table)),
        )
        for table in tables
    ]
    # All rows are gone, so resetting the sequences restarts ids from 1.
    statements.extend(self.sequence_reset_by_name_sql(style, sequences))
    return statements

def sequence_reset_by_name_sql(self, style, sequences):
    """PL/SQL blocks re-synchronizing each named sequence with its table."""
    statements = []
    for sequence_info in sequences:
        statements.append(_get_sequence_reset_sql() % {
            'sequence': self._get_sequence_name(sequence_info['table']),
            'table': self.quote_name(sequence_info['table']),
            'column': self.quote_name(sequence_info['column'] or 'id'),
        })
    return statements

def sequence_reset_sql(self, style, model_list):
    """PL/SQL blocks re-synchronizing the sequences behind `model_list`."""
    from django.db import models
    statements = []
    query = _get_sequence_reset_sql()
    for model in model_list:
        for field in model._meta.local_fields:
            if isinstance(field, models.AutoField):
                statements.append(query % {
                    'sequence': self._get_sequence_name(model._meta.db_table),
                    'table': self.quote_name(model._meta.db_table),
                    'column': self.quote_name(field.column),
                })
                # Only one AutoField is allowed per model.
                break
        for field in model._meta.many_to_many:
            if not field.rel.through:
                # Auto-created M2M tables always have an 'id' column.
                statements.append(query % {
                    'sequence': self._get_sequence_name(field.m2m_db_table()),
                    'table': self.quote_name(field.m2m_db_table()),
                    'column': self.quote_name('id'),
                })
    return statements
|
||||
|
||||
def start_transaction_sql(self):
    # Oracle starts transactions implicitly; nothing to emit.
    return ''

def tablespace_sql(self, tablespace, inline=False):
    """TABLESPACE clause; `inline` selects the index-creation variant."""
    template = "USING INDEX TABLESPACE %s" if inline else "TABLESPACE %s"
    return template % self.quote_name(tablespace)

def value_to_db_date(self, value):
    """
    Transform a date value to an object compatible with what is expected
    by the backend driver for date columns.
    The default implementation transforms the date to text, but that is not
    necessary for Oracle.
    """
    return value

def value_to_db_datetime(self, value):
    """
    Transform a datetime value to an object compatible with what is expected
    by the backend driver for datetime columns.

    If naive datetime is passed assumes that is in UTC. Normally Django
    models.DateTimeField makes sure that if USE_TZ is True passed datetime
    is timezone aware.
    """
    if value is None:
        return None
    if timezone.is_aware(value):
        # cx_Oracle doesn't support tz-aware datetimes
        if not settings.USE_TZ:
            raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.")
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return Oracle_datetime.from_datetime(value)

def value_to_db_time(self, value):
    """Transform a time value into the driver-compatible representation."""
    if value is None:
        return None
    if isinstance(value, six.string_types):
        return datetime.datetime.strptime(value, '%H:%M:%S')
    # Oracle doesn't support tz-aware times
    if timezone.is_aware(value):
        raise ValueError("Oracle backend does not support timezone-aware times.")
    # Times are stored on a dummy 1900-01-01 date.
    return Oracle_datetime(1900, 1, 1, value.hour, value.minute,
                           value.second, value.microsecond)

def year_lookup_bounds_for_date_field(self, value):
    # Real date bounds: Jan 1 through Dec 31 of the given year.
    return [datetime.date(value, 1, 1), datetime.date(value, 12, 31)]

def year_lookup_bounds_for_datetime_field(self, value):
    bounds = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value)
    if settings.USE_TZ:
        bounds = [b.astimezone(timezone.utc) for b in bounds]
    # cx_Oracle doesn't support tz-aware datetimes
    return [Oracle_datetime.from_datetime(b) for b in bounds]
|
||||
|
||||
def combine_expression(self, connector, sub_expressions):
    """Oracle needs function calls instead of the %%, &, | and ^ operators."""
    args = ','.join(sub_expressions)
    if connector == '%%':
        return 'MOD(%s)' % args
    if connector == '&':
        return 'BITAND(%s)' % args
    if connector == '|':
        raise NotImplementedError("Bit-wise or is not supported in Oracle.")
    if connector == '^':
        return 'POWER(%s)' % args
    return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)

def _get_sequence_name(self, table):
    # Leave room for the '_SQ' suffix within the 30-char identifier limit.
    limit = self.max_name_length() - 3
    return '%s_SQ' % backend_utils.truncate_name(table, limit).upper()

def _get_trigger_name(self, table):
    # Leave room for the '_TR' suffix within the 30-char identifier limit.
    limit = self.max_name_length() - 3
    return '%s_TR' % backend_utils.truncate_name(table, limit).upper()

def bulk_insert_sql(self, fields, num_values):
    """Build a multi-row INSERT source via UNION ALL of SELECT ... FROM DUAL."""
    row_sql = "SELECT %s FROM DUAL" % ", ".join(["%s"] * len(fields))
    return " UNION ALL ".join([row_sql] * num_values)
|
||||
|
||||
|
||||
class _UninitializedOperatorsDescriptor(object):
|
||||
|
||||
|
@ -897,19 +402,6 @@ class VariableWrapper(object):
|
|||
setattr(self.var, key, value)
|
||||
|
||||
|
||||
class InsertIdVar(object):
    """
    A late-binding cursor variable usable as a Cursor.execute parameter,
    meant to receive the id of the row created by an insert statement.
    """

    def bind_parameter(self, cursor):
        # Allocate a NUMBER bind variable and stash it on the cursor so
        # fetch_returned_insert_id() can read it back after execution.
        bound = cursor.cursor.var(Database.NUMBER)
        cursor._insert_id_var = bound
        return bound
|
||||
|
||||
|
||||
class FormatStylePlaceholderCursor(object):
|
||||
"""
|
||||
Django uses "format" (e.g. '%s') style placeholders, but Oracle uses ":var"
|
||||
|
@ -1117,20 +609,3 @@ def to_unicode(s):
|
|||
if isinstance(s, six.string_types):
|
||||
return force_text(s)
|
||||
return s
|
||||
|
||||
|
||||
def _get_sequence_reset_sql():
|
||||
# TODO: colorize this SQL code with style.SQL_KEYWORD(), etc.
|
||||
return """
|
||||
DECLARE
|
||||
table_value integer;
|
||||
seq_value integer;
|
||||
BEGIN
|
||||
SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s;
|
||||
SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences
|
||||
WHERE sequence_name = '%(sequence)s';
|
||||
WHILE table_value > seq_value LOOP
|
||||
SELECT "%(sequence)s".nextval INTO seq_value FROM dual;
|
||||
END LOOP;
|
||||
END;
|
||||
/"""
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import subprocess
|
||||
|
||||
from django.db.backends import BaseDatabaseClient
|
||||
from django.db.backends.base.client import BaseDatabaseClient
|
||||
|
||||
|
||||
class DatabaseClient(BaseDatabaseClient):
|
||||
|
|
|
@ -2,7 +2,7 @@ import sys
|
|||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.backends.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.creation import BaseDatabaseCreation
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils.six.moves import input
|
||||
|
||||
|
|
|
@ -0,0 +1,57 @@
|
|||
from django.db.backends.base.features import BaseDatabaseFeatures
|
||||
from django.db.utils import InterfaceError
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags describing what the Oracle backend supports."""

    # Cursor / result behavior.
    empty_fetchmany_value = ()
    needs_datetime_string_cast = False
    interprets_empty_strings_as_nulls = True
    # Transactions and locking.
    uses_savepoints = True
    has_select_for_update = True
    has_select_for_update_nowait = True
    can_return_id_from_insert = True
    allow_sliced_subqueries = False
    supports_subqueries_in_group_by = False
    supports_transactions = True
    supports_timezones = False
    has_zoneinfo_database = pytz is not None
    supports_bitwise_or = False
    has_native_duration_field = True
    can_defer_constraint_checks = True
    supports_partially_nullable_unique_constraints = False
    # Naming and DDL.
    truncates_names = True
    has_bulk_insert = True
    supports_tablespaces = True
    supports_sequence_reset = False
    can_introspect_max_length = False
    can_introspect_time_field = False
    atomic_transactions = False
    supports_combined_alters = False
    nulls_order_largest = True
    requires_literal_defaults = True
    connection_persists_old_columns = True
    closed_cursor_error_class = InterfaceError
    bare_select_suffix = " FROM DUAL"
    uppercases_column_names = True
    # select for update with limit can be achieved on Oracle, but not with the current backend.
    supports_select_for_update_with_limit = False

    def introspected_boolean_field_type(self, field=None, created_separately=False):
        """
        Some versions of Oracle -- we've seen this on 11.2.0.1 and suspect
        it goes back -- have a weird bug where, when an integer column is
        added to an existing table with a default, its precision is later
        reported on introspection as 0, regardless of the real precision.
        For Django introspection, this means that such columns are reported
        as IntegerField even if they are really BigIntegerField or BooleanField.

        The bug is solved in Oracle 11.2.0.2 and up.
        """
        if (self.connection.oracle_full_version < '11.2.0.2'
                and field and field.has_default() and created_separately):
            return 'IntegerField'
        return super(DatabaseFeatures, self).introspected_boolean_field_type(field, created_separately)
|
|
@ -2,7 +2,9 @@ import re
|
|||
|
||||
import cx_Oracle
|
||||
|
||||
from django.db.backends import BaseDatabaseIntrospection, FieldInfo, TableInfo
|
||||
from django.db.backends.base.introspection import (
|
||||
BaseDatabaseIntrospection, FieldInfo, TableInfo,
|
||||
)
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
foreign_key_re = re.compile(r"\sCONSTRAINT `[^`]*` FOREIGN KEY \(`([^`]*)`\) REFERENCES `([^`]*)` \(`([^`]*)`\)")
|
||||
|
|
|
@ -0,0 +1,447 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
import re
|
||||
import uuid
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.backends.base.operations import BaseDatabaseOperations
|
||||
from django.db.backends.utils import truncate_name
|
||||
from django.utils import six, timezone
|
||||
from django.utils.encoding import force_bytes, force_text
|
||||
|
||||
from .base import Database
|
||||
from .utils import convert_unicode, InsertIdVar, Oracle_datetime
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
    # SQL generation specifics for the Oracle backend.
    compiler_module = "django.db.backends.oracle.compiler"

    # Oracle uses NUMBER(11) and NUMBER(19) for integer fields, so the
    # usable ranges differ from the generic 32/64-bit ones.
    integer_field_ranges = {
        'SmallIntegerField': (-99999999999, 99999999999),
        'IntegerField': (-99999999999, 99999999999),
        'BigIntegerField': (-9999999999999999999, 9999999999999999999),
        'PositiveSmallIntegerField': (0, 99999999999),
        'PositiveIntegerField': (0, 99999999999),
    }

    # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc.
    # PL/SQL block that advances a sequence until it catches up with
    # MAX(column) in its table; placeholders are filled by callers.
    _sequence_reset_sql = """
DECLARE
    table_value integer;
    seq_value integer;
BEGIN
    SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s;
    SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences
           WHERE sequence_name = '%(sequence)s';
    WHILE table_value > seq_value LOOP
        SELECT "%(sequence)s".nextval INTO seq_value FROM dual;
    END LOOP;
END;
/"""
|
||||
|
||||
def autoinc_sql(self, table, column):
    """
    Return the (sequence_sql, trigger_sql) pair that simulates an
    auto-incrementing primary key, since Oracle has no native equivalent.
    """
    args = {
        'sq_name': self._get_sequence_name(table),
        'tr_name': self._get_trigger_name(table),
        'tbl_name': self.quote_name(table),
        'col_name': self.quote_name(column),
    }
    # Create the sequence only when it doesn't already exist.
    sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 0 THEN
        EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % args
    # Fill the PK column from the sequence when an INSERT leaves it NULL.
    trigger_sql = """
CREATE OR REPLACE TRIGGER "%(tr_name)s"
BEFORE INSERT ON %(tbl_name)s
FOR EACH ROW
WHEN (new.%(col_name)s IS NULL)
    BEGIN
        SELECT "%(sq_name)s".nextval
        INTO :new.%(col_name)s FROM dual;
    END;
/""" % args
    return sequence_sql, trigger_sql
|
||||
|
||||
def cache_key_culling_sql(self):
    # Oracle has no LIMIT/OFFSET; rank the keys and pick the (n+1)-th one.
    return """
    SELECT cache_key
      FROM (SELECT cache_key, rank() OVER (ORDER BY cache_key) AS rank FROM %s)
     WHERE rank = %%s + 1
    """

def date_extract_sql(self, lookup_type, field_name):
    """EXTRACT-based date component lookup; week_day is special-cased."""
    if lookup_type == 'week_day':
        # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
        return "TO_CHAR(%s, 'D')" % field_name
    # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
    return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)

def date_interval_sql(self, timedelta):
    """
    Format a timedelta as an Oracle interval literal, e.g.
    INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6)
    """
    minutes, seconds = divmod(timedelta.seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days = str(timedelta.days)
    # DAY precision grows with the number of digits in the day count.
    interval = "INTERVAL '%s %02d:%02d:%02d.%06d' DAY(%d) TO SECOND(6)" % (
        days, hours, minutes, seconds, timedelta.microseconds, len(days))
    return interval, []

def date_trunc_sql(self, lookup_type, field_name):
    # http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
    if lookup_type in ('year', 'month'):
        return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
    return "TRUNC(%s)" % field_name
|
||||
|
||||
# Oracle crashes with "ORA-03113: end-of-file on communication channel"
|
||||
# if the time zone name is passed in parameter. Use interpolation instead.
|
||||
# https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ
|
||||
# This regexp matches all time zone names from the zoneinfo database.
|
||||
_tzname_re = re.compile(r'^[\w/:+-]+$')
|
||||
|
||||
def _convert_field_to_tz(self, field_name, tzname):
|
||||
if not self._tzname_re.match(tzname):
|
||||
raise ValueError("Invalid time zone name: %s" % tzname)
|
||||
# Convert from UTC to local time, returning TIMESTAMP WITH TIME ZONE.
|
||||
result = "(FROM_TZ(%s, '0:00') AT TIME ZONE '%s')" % (field_name, tzname)
|
||||
# Extracting from a TIMESTAMP WITH TIME ZONE ignore the time zone.
|
||||
# Convert to a DATETIME, which is called DATE by Oracle. There's no
|
||||
# built-in function to do that; the easiest is to go through a string.
|
||||
result = "TO_CHAR(%s, 'YYYY-MM-DD HH24:MI:SS')" % result
|
||||
result = "TO_DATE(%s, 'YYYY-MM-DD HH24:MI:SS')" % result
|
||||
# Re-convert to a TIMESTAMP because EXTRACT only handles the date part
|
||||
# on DATE values, even though they actually store the time part.
|
||||
return "CAST(%s AS TIMESTAMP)" % result
|
||||
|
||||
def datetime_extract_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = self._convert_field_to_tz(field_name, tzname)
|
||||
if lookup_type == 'week_day':
|
||||
# TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday.
|
||||
sql = "TO_CHAR(%s, 'D')" % field_name
|
||||
else:
|
||||
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions050.htm
|
||||
sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
|
||||
return sql, []
|
||||
|
||||
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
|
||||
if settings.USE_TZ:
|
||||
field_name = self._convert_field_to_tz(field_name, tzname)
|
||||
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions230.htm#i1002084
|
||||
if lookup_type in ('year', 'month'):
|
||||
sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper())
|
||||
elif lookup_type == 'day':
|
||||
sql = "TRUNC(%s)" % field_name
|
||||
elif lookup_type == 'hour':
|
||||
sql = "TRUNC(%s, 'HH24')" % field_name
|
||||
elif lookup_type == 'minute':
|
||||
sql = "TRUNC(%s, 'MI')" % field_name
|
||||
else:
|
||||
sql = field_name # Cast to DATE removes sub-second precision.
|
||||
return sql, []
|
||||
|
||||
def get_db_converters(self, expression):
    """Return the value converters to apply for `expression`'s field type."""
    converters = super(DatabaseOperations, self).get_db_converters(expression)
    internal_type = expression.output_field.get_internal_type()
    by_internal_type = {
        'TextField': self.convert_textfield_value,
        'BinaryField': self.convert_binaryfield_value,
        'BooleanField': self.convert_booleanfield_value,
        'NullBooleanField': self.convert_booleanfield_value,
        'DateField': self.convert_datefield_value,
        'TimeField': self.convert_timefield_value,
        'UUIDField': self.convert_uuidfield_value,
    }
    if internal_type in by_internal_type:
        converters.append(by_internal_type[internal_type])
    # Always applied last: Oracle stores '' as NULL (see convert_empty_values).
    converters.append(self.convert_empty_values)
    return converters
|
||||
|
||||
def convert_empty_values(self, value, expression, context):
    """
    Map NULL back to the empty string (or b'' for BinaryField).

    Oracle stores empty strings as NULL; Django's convention is to use
    '' instead of NULL, but only for fields that accept the empty string.
    """
    field = expression.output_field
    if value is None and field.empty_strings_allowed:
        value = b'' if field.get_internal_type() == 'BinaryField' else ''
    return value

def convert_textfield_value(self, value, expression, context):
    """Read CLOB handles returned by cx_Oracle into text."""
    if isinstance(value, Database.LOB):
        value = force_text(value.read())
    return value

def convert_binaryfield_value(self, value, expression, context):
    """Read BLOB handles returned by cx_Oracle into bytes."""
    if isinstance(value, Database.LOB):
        value = force_bytes(value.read())
    return value

def convert_booleanfield_value(self, value, expression, context):
    """Coerce Oracle's 0/1 integers to real booleans."""
    if value in (1, 0):
        value = bool(value)
    return value

# cx_Oracle always returns datetime.datetime objects for
# DATE and TIMESTAMP columns, but Django wants to see a
# python datetime.date, .time, or .datetime.
def convert_datefield_value(self, value, expression, context):
    if isinstance(value, Database.Timestamp):
        value = value.date()
    # BUGFIX: previously non-Timestamp values (e.g. None or an already
    # converted date) were silently dropped, returning None; pass them
    # through like convert_timefield_value does.
    return value

def convert_timefield_value(self, value, expression, context):
    if isinstance(value, Database.Timestamp):
        value = value.time()
    return value

def convert_uuidfield_value(self, value, expression, context):
    """Parse the CHAR(32) representation back into a uuid.UUID."""
    if value is not None:
        value = uuid.UUID(value)
    return value
|
||||
|
||||
def deferrable_sql(self):
    """Suffix enabling deferred foreign-key constraint checking."""
    return " DEFERRABLE INITIALLY DEFERRED"

def drop_sequence_sql(self, table):
    """SQL dropping the auto-increment sequence backing `table`."""
    sequence = self._get_sequence_name(table)
    return "DROP SEQUENCE %s;" % self.quote_name(sequence)

def fetch_returned_insert_id(self, cursor):
    # The bind variable was attached by InsertIdVar.bind_parameter().
    return int(cursor._insert_id_var.getvalue())

def field_cast_sql(self, db_type, internal_type):
    """LOB columns can't be compared directly; go through DBMS_LOB.SUBSTR."""
    is_lob = bool(db_type) and db_type.endswith('LOB')
    return "DBMS_LOB.SUBSTR(%s)" if is_lob else "%s"

def last_executed_query(self, cursor, sql, params):
    # http://cx-oracle.sourceforge.net/html/cursor.html#Cursor.statement
    # The DB API definition does not define this attribute.
    statement = cursor.statement
    if statement and six.PY2 and not isinstance(statement, unicode):
        statement = statement.decode('utf-8')
    # Unlike Psycopg's `query` and MySQLdb`'s `_last_executed`, CxOracle's
    # `statement` doesn't contain the query parameters. refs #20010.
    return super(DatabaseOperations, self).last_executed_query(cursor, statement, params)

def last_insert_id(self, cursor, table_name, pk_name):
    """Read the current value of the table's backing sequence."""
    sequence = self._get_sequence_name(table_name)
    cursor.execute('SELECT "%s".currval FROM dual' % sequence)
    return cursor.fetchone()[0]

def lookup_cast(self, lookup_type):
    # Case-insensitive lookups compare the UPPER()-cased column.
    case_insensitive = ('iexact', 'icontains', 'istartswith', 'iendswith')
    return "UPPER(%s)" if lookup_type in case_insensitive else "%s"

def max_in_list_size(self):
    # Oracle caps IN (...) lists at 1000 items.
    return 1000

def max_name_length(self):
    # Oracle identifiers are limited to 30 characters.
    return 30

def prep_for_iexact_query(self, x):
    # No transformation needed: lookup_cast() already UPPER()s the column.
    return x

def process_clob(self, value):
    """Read a CLOB handle into text; NULL becomes the empty string."""
    return '' if value is None else force_text(value.read())
|
||||
|
||||
def quote_name(self, name):
    """
    Quote an identifier for Oracle, uppercasing it first.

    SQL92 requires delimited (quoted) names to be case-sensitive; unquoted
    Oracle identifiers are case-insensitive but default to uppercase, so we
    simplify things by always uppercasing.
    """
    if not (name.startswith('"') or name.endswith('"')):
        name = '"%s"' % truncate_name(name.upper(), self.max_name_length())
    # Oracle puts the query text into a (query % args) construct, so % signs
    # in names must be doubled; the '%%' collapses back to '%' at that stage,
    # so the name is not really lengthened.
    return name.replace('%', '%%').upper()

def random_function_sql(self):
    return "DBMS_RANDOM.RANDOM"

def regex_lookup(self, lookup_type):
    """REGEXP_LIKE with case-sensitive ('c') or -insensitive ('i') matching."""
    match_option = "'c'" if lookup_type == 'regex' else "'i'"
    return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option

def return_insert_id(self):
    # The InsertIdVar late-binds a NUMBER variable to receive the new id.
    return "RETURNING %s INTO %%s", (InsertIdVar(),)

def savepoint_create_sql(self, sid):
    return convert_unicode("SAVEPOINT %s" % self.quote_name(sid))

def savepoint_rollback_sql(self, sid):
    return convert_unicode("ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid))
|
||||
|
||||
def sql_flush(self, style, tables, sequences, allow_cascade=False):
    """
    Return 'DELETE FROM x;'-style statements emptying `tables`, followed by
    statements resetting `sequences`.
    """
    if not tables:
        return []
    # Oracle does support TRUNCATE, but it seems to get us into FK
    # referential trouble, whereas DELETE FROM table works.
    statements = [
        '%s %s %s;' % (
            style.SQL_KEYWORD('DELETE'),
            style.SQL_KEYWORD('FROM'),
            style.SQL_FIELD(self.quote_name(table)),
        )
        for table in tables
    ]
    # All rows are gone, so resetting the sequences restarts ids from 1.
    statements.extend(self.sequence_reset_by_name_sql(style, sequences))
    return statements

def sequence_reset_by_name_sql(self, style, sequences):
    """PL/SQL blocks re-synchronizing each named sequence with its table."""
    statements = []
    for sequence_info in sequences:
        statements.append(self._sequence_reset_sql % {
            'sequence': self._get_sequence_name(sequence_info['table']),
            'table': self.quote_name(sequence_info['table']),
            'column': self.quote_name(sequence_info['column'] or 'id'),
        })
    return statements

def sequence_reset_sql(self, style, model_list):
    """PL/SQL blocks re-synchronizing the sequences behind `model_list`."""
    from django.db import models
    statements = []
    query = self._sequence_reset_sql
    for model in model_list:
        for field in model._meta.local_fields:
            if isinstance(field, models.AutoField):
                statements.append(query % {
                    'sequence': self._get_sequence_name(model._meta.db_table),
                    'table': self.quote_name(model._meta.db_table),
                    'column': self.quote_name(field.column),
                })
                # Only one AutoField is allowed per model.
                break
        for field in model._meta.many_to_many:
            if not field.rel.through:
                # Auto-created M2M tables always have an 'id' column.
                statements.append(query % {
                    'sequence': self._get_sequence_name(field.m2m_db_table()),
                    'table': self.quote_name(field.m2m_db_table()),
                    'column': self.quote_name('id'),
                })
    return statements
|
||||
|
||||
def start_transaction_sql(self):
    # Oracle starts transactions implicitly; nothing to emit.
    return ''

def tablespace_sql(self, tablespace, inline=False):
    """TABLESPACE clause; `inline` selects the index-creation variant."""
    template = "USING INDEX TABLESPACE %s" if inline else "TABLESPACE %s"
    return template % self.quote_name(tablespace)

def value_to_db_date(self, value):
    """
    Transform a date value to an object compatible with what is expected
    by the backend driver for date columns.
    The default implementation transforms the date to text, but that is not
    necessary for Oracle.
    """
    return value

def value_to_db_datetime(self, value):
    """
    Transform a datetime value to an object compatible with what is expected
    by the backend driver for datetime columns.

    If naive datetime is passed assumes that is in UTC. Normally Django
    models.DateTimeField makes sure that if USE_TZ is True passed datetime
    is timezone aware.
    """
    if value is None:
        return None
    if timezone.is_aware(value):
        # cx_Oracle doesn't support tz-aware datetimes
        if not settings.USE_TZ:
            raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.")
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return Oracle_datetime.from_datetime(value)

def value_to_db_time(self, value):
    """Transform a time value into the driver-compatible representation."""
    if value is None:
        return None
    if isinstance(value, six.string_types):
        return datetime.datetime.strptime(value, '%H:%M:%S')
    # Oracle doesn't support tz-aware times
    if timezone.is_aware(value):
        raise ValueError("Oracle backend does not support timezone-aware times.")
    # Times are stored on a dummy 1900-01-01 date.
    return Oracle_datetime(1900, 1, 1, value.hour, value.minute,
                           value.second, value.microsecond)

def year_lookup_bounds_for_date_field(self, value):
    # Real date bounds: Jan 1 through Dec 31 of the given year.
    return [datetime.date(value, 1, 1), datetime.date(value, 12, 31)]

def year_lookup_bounds_for_datetime_field(self, value):
    bounds = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value)
    if settings.USE_TZ:
        bounds = [b.astimezone(timezone.utc) for b in bounds]
    # cx_Oracle doesn't support tz-aware datetimes
    return [Oracle_datetime.from_datetime(b) for b in bounds]
|
||||
|
||||
def combine_expression(self, connector, sub_expressions):
|
||||
"Oracle requires special cases for %% and & operators in query expressions"
|
||||
if connector == '%%':
|
||||
return 'MOD(%s)' % ','.join(sub_expressions)
|
||||
elif connector == '&':
|
||||
return 'BITAND(%s)' % ','.join(sub_expressions)
|
||||
elif connector == '|':
|
||||
raise NotImplementedError("Bit-wise or is not supported in Oracle.")
|
||||
elif connector == '^':
|
||||
return 'POWER(%s)' % ','.join(sub_expressions)
|
||||
return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
|
||||
|
||||
def _get_sequence_name(self, table):
|
||||
name_length = self.max_name_length() - 3
|
||||
return '%s_SQ' % truncate_name(table, name_length).upper()
|
||||
|
||||
def _get_trigger_name(self, table):
|
||||
name_length = self.max_name_length() - 3
|
||||
return '%s_TR' % truncate_name(table, name_length).upper()
|
||||
|
||||
def bulk_insert_sql(self, fields, num_values):
|
||||
items_sql = "SELECT %s FROM DUAL" % ", ".join(["%s"] * len(fields))
|
||||
return " UNION ALL ".join([items_sql] * num_values)
|
|
@ -4,7 +4,7 @@ import binascii
|
|||
|
||||
from django.utils import six
|
||||
from django.utils.text import force_text
|
||||
from django.db.backends.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.utils import DatabaseError
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
import datetime
|
||||
|
||||
from django.utils.encoding import force_bytes, force_text
|
||||
|
||||
from .base import Database
|
||||
|
||||
# Check whether cx_Oracle was compiled with the WITH_UNICODE option if cx_Oracle is pre-5.1. This will
|
||||
# also be True for cx_Oracle 5.1 and in Python 3.0. See #19606
|
||||
if int(Database.version.split('.', 1)[0]) >= 5 and \
|
||||
(int(Database.version.split('.', 2)[1]) >= 1 or
|
||||
not hasattr(Database, 'UNICODE')):
|
||||
convert_unicode = force_text
|
||||
else:
|
||||
convert_unicode = force_bytes
|
||||
|
||||
|
||||
class InsertIdVar(object):
|
||||
"""
|
||||
A late-binding cursor variable that can be passed to Cursor.execute
|
||||
as a parameter, in order to receive the id of the row created by an
|
||||
insert statement.
|
||||
"""
|
||||
|
||||
def bind_parameter(self, cursor):
|
||||
param = cursor.cursor.var(Database.NUMBER)
|
||||
cursor._insert_id_var = param
|
||||
return param
|
||||
|
||||
|
||||
class Oracle_datetime(datetime.datetime):
|
||||
"""
|
||||
A datetime object, with an additional class attribute
|
||||
to tell cx_Oracle to save the microseconds too.
|
||||
"""
|
||||
input_size = Database.TIMESTAMP
|
||||
|
||||
@classmethod
|
||||
def from_datetime(cls, dt):
|
||||
return Oracle_datetime(
|
||||
dt.year, dt.month, dt.day,
|
||||
dt.hour, dt.minute, dt.second, dt.microsecond,
|
||||
)
|
|
@ -5,19 +5,11 @@ Requires psycopg 2: http://initd.org/projects/psycopg2
|
|||
"""
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.backends import (BaseDatabaseFeatures, BaseDatabaseWrapper,
|
||||
BaseDatabaseValidation)
|
||||
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
|
||||
from django.db.backends.postgresql_psycopg2.client import DatabaseClient
|
||||
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
|
||||
from django.db.backends.postgresql_psycopg2.version import get_version
|
||||
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.utils import InterfaceError
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.db.backends.base.validation import BaseDatabaseValidation
|
||||
from django.utils.encoding import force_str
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.safestring import SafeText, SafeBytes
|
||||
from django.utils.timezone import utc
|
||||
|
||||
try:
|
||||
import psycopg2 as Database
|
||||
|
@ -27,6 +19,16 @@ except ImportError as e:
|
|||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
|
||||
|
||||
# Some of these import psycopg2, so import them after checking if it's installed.
|
||||
from .client import DatabaseClient
|
||||
from .creation import DatabaseCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import DatabaseIntrospection
|
||||
from .operations import DatabaseOperations
|
||||
from .schema import DatabaseSchemaEditor
|
||||
from .utils import utc_tzinfo_factory
|
||||
from .version import get_version
|
||||
|
||||
DatabaseError = Database.DatabaseError
|
||||
IntegrityError = Database.IntegrityError
|
||||
|
||||
|
@ -37,38 +39,6 @@ psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)
|
|||
psycopg2.extras.register_uuid()
|
||||
|
||||
|
||||
def utc_tzinfo_factory(offset):
|
||||
if offset != 0:
|
||||
raise AssertionError("database connection isn't set to UTC")
|
||||
return utc
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
needs_datetime_string_cast = False
|
||||
can_return_id_from_insert = True
|
||||
has_real_datatype = True
|
||||
has_native_duration_field = True
|
||||
driver_supports_timedelta_args = True
|
||||
can_defer_constraint_checks = True
|
||||
has_select_for_update = True
|
||||
has_select_for_update_nowait = True
|
||||
has_bulk_insert = True
|
||||
uses_savepoints = True
|
||||
can_release_savepoints = True
|
||||
supports_tablespaces = True
|
||||
supports_transactions = True
|
||||
can_introspect_autofield = True
|
||||
can_introspect_ip_address_field = True
|
||||
can_introspect_small_integer_field = True
|
||||
can_distinct_on_fields = True
|
||||
can_rollback_ddl = True
|
||||
supports_combined_alters = True
|
||||
nulls_order_largest = True
|
||||
closed_cursor_error_class = InterfaceError
|
||||
has_case_insensitive_like = False
|
||||
requires_sqlparse_for_splitting = False
|
||||
|
||||
|
||||
class DatabaseWrapper(BaseDatabaseWrapper):
|
||||
vendor = 'postgresql'
|
||||
# This dictionary maps Field objects to their associated PostgreSQL column
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import subprocess
|
||||
|
||||
from django.db.backends import BaseDatabaseClient
|
||||
from django.db.backends.base.client import BaseDatabaseClient
|
||||
|
||||
|
||||
class DatabaseClient(BaseDatabaseClient):
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from django.db.backends.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.creation import BaseDatabaseCreation
|
||||
from django.db.backends.utils import truncate_name
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
from django.db.backends.base.features import BaseDatabaseFeatures
|
||||
from django.db.utils import InterfaceError
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
needs_datetime_string_cast = False
|
||||
can_return_id_from_insert = True
|
||||
has_real_datatype = True
|
||||
has_native_duration_field = True
|
||||
driver_supports_timedelta_args = True
|
||||
can_defer_constraint_checks = True
|
||||
has_select_for_update = True
|
||||
has_select_for_update_nowait = True
|
||||
has_bulk_insert = True
|
||||
uses_savepoints = True
|
||||
can_release_savepoints = True
|
||||
supports_tablespaces = True
|
||||
supports_transactions = True
|
||||
can_introspect_autofield = True
|
||||
can_introspect_ip_address_field = True
|
||||
can_introspect_small_integer_field = True
|
||||
can_distinct_on_fields = True
|
||||
can_rollback_ddl = True
|
||||
supports_combined_alters = True
|
||||
nulls_order_largest = True
|
||||
closed_cursor_error_class = InterfaceError
|
||||
has_case_insensitive_like = False
|
||||
requires_sqlparse_for_splitting = False
|
|
@ -1,7 +1,9 @@
|
|||
from __future__ import unicode_literals
|
||||
from collections import namedtuple
|
||||
|
||||
from django.db.backends import BaseDatabaseIntrospection, FieldInfo, TableInfo
|
||||
from django.db.backends.base.introspection import (
|
||||
BaseDatabaseIntrospection, FieldInfo, TableInfo,
|
||||
)
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.backends import BaseDatabaseOperations
|
||||
from django.db.backends.base.operations import BaseDatabaseOperations
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
from django.db.backends.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
|
||||
import psycopg2
|
||||
|
||||
|
||||
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
||||
|
@ -10,8 +12,6 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
|||
sql_create_text_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s text_pattern_ops)%(extra)s"
|
||||
|
||||
def quote_value(self, value):
|
||||
# Inner import so backend fails nicely if it's not present
|
||||
import psycopg2
|
||||
return psycopg2.extensions.adapt(value)
|
||||
|
||||
def _model_indexes_sql(self, model):
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
from django.utils.timezone import utc
|
||||
|
||||
|
||||
def utc_tzinfo_factory(offset):
|
||||
if offset != 0:
|
||||
raise AssertionError("database connection isn't set to UTC")
|
||||
return utc
|
|
@ -9,26 +9,22 @@ from __future__ import unicode_literals
|
|||
import datetime
|
||||
import decimal
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import utils
|
||||
from django.db.backends import (utils as backend_utils, BaseDatabaseFeatures,
|
||||
BaseDatabaseOperations, BaseDatabaseWrapper, BaseDatabaseValidation)
|
||||
from django.db.backends.sqlite3.client import DatabaseClient
|
||||
from django.db.backends.sqlite3.creation import DatabaseCreation
|
||||
from django.db.backends.sqlite3.introspection import DatabaseIntrospection
|
||||
from django.db.backends.sqlite3.schema import DatabaseSchemaEditor
|
||||
from django.db.models import fields, aggregates
|
||||
from django.utils.dateparse import parse_date, parse_datetime, parse_time, parse_duration
|
||||
from django.utils.duration import duration_string
|
||||
from django.db.backends import utils as backend_utils
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.db.backends.base.validation import BaseDatabaseValidation
|
||||
from django.utils import six, timezone
|
||||
from django.utils.dateparse import parse_date, parse_duration, parse_time
|
||||
from django.utils.encoding import force_text
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.safestring import SafeBytes
|
||||
from django.utils import six
|
||||
from django.utils import timezone
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
try:
|
||||
try:
|
||||
|
@ -39,23 +35,19 @@ except ImportError as exc:
|
|||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("Error loading either pysqlite2 or sqlite3 modules (tried in that order): %s" % exc)
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
# Some of these import sqlite3, so import them after checking if it's installed.
|
||||
from .client import DatabaseClient
|
||||
from .creation import DatabaseCreation
|
||||
from .features import DatabaseFeatures
|
||||
from .introspection import DatabaseIntrospection
|
||||
from .operations import DatabaseOperations
|
||||
from .schema import DatabaseSchemaEditor
|
||||
from .utils import parse_datetime_with_timezone_support
|
||||
|
||||
DatabaseError = Database.DatabaseError
|
||||
IntegrityError = Database.IntegrityError
|
||||
|
||||
|
||||
def parse_datetime_with_timezone_support(value):
|
||||
dt = parse_datetime(value)
|
||||
# Confirm that dt is naive before overwriting its tzinfo.
|
||||
if dt is not None and settings.USE_TZ and timezone.is_naive(dt):
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt
|
||||
|
||||
|
||||
def adapt_datetime_with_timezone_support(value):
|
||||
# Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
|
||||
if settings.USE_TZ:
|
||||
|
@ -91,250 +83,6 @@ if six.PY2:
|
|||
Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8'))
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
# SQLite cannot handle us only partially reading from a cursor's result set
|
||||
# and then writing the same rows to the database in another cursor. This
|
||||
# setting ensures we always read result sets fully into memory all in one
|
||||
# go.
|
||||
can_use_chunked_reads = False
|
||||
test_db_allows_multiple_connections = False
|
||||
supports_unspecified_pk = True
|
||||
supports_timezones = False
|
||||
supports_1000_query_parameters = False
|
||||
supports_mixed_date_datetime_comparisons = False
|
||||
has_bulk_insert = True
|
||||
can_combine_inserts_with_and_without_auto_increment_pk = False
|
||||
supports_foreign_keys = False
|
||||
supports_column_check_constraints = False
|
||||
autocommits_when_autocommit_is_off = True
|
||||
can_introspect_decimal_field = False
|
||||
can_introspect_positive_integer_field = True
|
||||
can_introspect_small_integer_field = True
|
||||
supports_transactions = True
|
||||
atomic_transactions = False
|
||||
can_rollback_ddl = True
|
||||
supports_paramstyle_pyformat = False
|
||||
supports_sequence_reset = False
|
||||
|
||||
@cached_property
|
||||
def uses_savepoints(self):
|
||||
return Database.sqlite_version_info >= (3, 6, 8)
|
||||
|
||||
@cached_property
|
||||
def can_release_savepoints(self):
|
||||
return self.uses_savepoints
|
||||
|
||||
@cached_property
|
||||
def can_share_in_memory_db(self):
|
||||
return (
|
||||
sys.version_info[:2] >= (3, 4) and
|
||||
Database.__name__ == 'sqlite3.dbapi2' and
|
||||
Database.sqlite_version_info >= (3, 7, 13)
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def supports_stddev(self):
|
||||
"""Confirm support for STDDEV and related stats functions
|
||||
|
||||
SQLite supports STDDEV as an extension package; so
|
||||
connection.ops.check_aggregate_support() can't unilaterally
|
||||
rule out support for STDDEV. We need to manually check
|
||||
whether the call works.
|
||||
"""
|
||||
with self.connection.cursor() as cursor:
|
||||
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
|
||||
try:
|
||||
cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
|
||||
has_support = True
|
||||
except utils.DatabaseError:
|
||||
has_support = False
|
||||
cursor.execute('DROP TABLE STDDEV_TEST')
|
||||
return has_support
|
||||
|
||||
@cached_property
|
||||
def has_zoneinfo_database(self):
|
||||
return pytz is not None
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
|
||||
def bulk_batch_size(self, fields, objs):
|
||||
"""
|
||||
SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of
|
||||
999 variables per query.
|
||||
|
||||
If there is just single field to insert, then we can hit another
|
||||
limit, SQLITE_MAX_COMPOUND_SELECT which defaults to 500.
|
||||
"""
|
||||
limit = 999 if len(fields) > 1 else 500
|
||||
return (limit // len(fields)) if len(fields) > 0 else len(objs)
|
||||
|
||||
def check_aggregate_support(self, aggregate):
|
||||
bad_fields = (fields.DateField, fields.DateTimeField, fields.TimeField)
|
||||
bad_aggregates = (aggregates.Sum, aggregates.Avg,
|
||||
aggregates.Variance, aggregates.StdDev)
|
||||
if aggregate.refs_field(bad_aggregates, bad_fields):
|
||||
raise NotImplementedError(
|
||||
'You cannot use Sum, Avg, StdDev and Variance aggregations '
|
||||
'on date/time fields in sqlite3 '
|
||||
'since date/time is saved as text.')
|
||||
|
||||
def date_extract_sql(self, lookup_type, field_name):
|
||||
# sqlite doesn't support extract, so we fake it with the user-defined
|
||||
# function django_date_extract that's registered in connect(). Note that
|
||||
# single quotes are used because this is a string (and could otherwise
|
||||
# cause a collision with a field name).
|
||||
return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name)
|
||||
|
||||
def date_interval_sql(self, timedelta):
|
||||
return "'%s'" % duration_string(timedelta), []
|
||||
|
||||
def format_for_duration_arithmetic(self, sql):
|
||||
"""Do nothing here, we will handle it in the custom function."""
|
||||
return sql
|
||||
|
||||
def date_trunc_sql(self, lookup_type, field_name):
|
||||
# sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
|
||||
# function django_date_trunc that's registered in connect(). Note that
|
||||
# single quotes are used because this is a string (and could otherwise
|
||||
# cause a collision with a field name).
|
||||
return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
|
||||
|
||||
def datetime_extract_sql(self, lookup_type, field_name, tzname):
|
||||
# Same comment as in date_extract_sql.
|
||||
if settings.USE_TZ:
|
||||
if pytz is None:
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("This query requires pytz, "
|
||||
"but it isn't installed.")
|
||||
return "django_datetime_extract('%s', %s, %%s)" % (
|
||||
lookup_type.lower(), field_name), [tzname]
|
||||
|
||||
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
|
||||
# Same comment as in date_trunc_sql.
|
||||
if settings.USE_TZ:
|
||||
if pytz is None:
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
raise ImproperlyConfigured("This query requires pytz, "
|
||||
"but it isn't installed.")
|
||||
return "django_datetime_trunc('%s', %s, %%s)" % (
|
||||
lookup_type.lower(), field_name), [tzname]
|
||||
|
||||
def drop_foreignkey_sql(self):
|
||||
return ""
|
||||
|
||||
def pk_default_value(self):
|
||||
return "NULL"
|
||||
|
||||
def quote_name(self, name):
|
||||
if name.startswith('"') and name.endswith('"'):
|
||||
return name # Quoting once is enough.
|
||||
return '"%s"' % name
|
||||
|
||||
def no_limit_value(self):
|
||||
return -1
|
||||
|
||||
def sql_flush(self, style, tables, sequences, allow_cascade=False):
|
||||
# NB: The generated SQL below is specific to SQLite
|
||||
# Note: The DELETE FROM... SQL generated below works for SQLite databases
|
||||
# because constraints don't exist
|
||||
sql = ['%s %s %s;' % (
|
||||
style.SQL_KEYWORD('DELETE'),
|
||||
style.SQL_KEYWORD('FROM'),
|
||||
style.SQL_FIELD(self.quote_name(table))
|
||||
) for table in tables]
|
||||
# Note: No requirement for reset of auto-incremented indices (cf. other
|
||||
# sql_flush() implementations). Just return SQL at this point
|
||||
return sql
|
||||
|
||||
def value_to_db_datetime(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# SQLite doesn't support tz-aware datetimes
|
||||
if timezone.is_aware(value):
|
||||
if settings.USE_TZ:
|
||||
value = value.astimezone(timezone.utc).replace(tzinfo=None)
|
||||
else:
|
||||
raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.")
|
||||
|
||||
return six.text_type(value)
|
||||
|
||||
def value_to_db_time(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# SQLite doesn't support tz-aware datetimes
|
||||
if timezone.is_aware(value):
|
||||
raise ValueError("SQLite backend does not support timezone-aware times.")
|
||||
|
||||
return six.text_type(value)
|
||||
|
||||
def get_db_converters(self, expression):
|
||||
converters = super(DatabaseOperations, self).get_db_converters(expression)
|
||||
internal_type = expression.output_field.get_internal_type()
|
||||
if internal_type == 'DateTimeField':
|
||||
converters.append(self.convert_datetimefield_value)
|
||||
elif internal_type == 'DateField':
|
||||
converters.append(self.convert_datefield_value)
|
||||
elif internal_type == 'TimeField':
|
||||
converters.append(self.convert_timefield_value)
|
||||
elif internal_type == 'DecimalField':
|
||||
converters.append(self.convert_decimalfield_value)
|
||||
elif internal_type == 'UUIDField':
|
||||
converters.append(self.convert_uuidfield_value)
|
||||
return converters
|
||||
|
||||
def convert_decimalfield_value(self, value, expression, context):
|
||||
return backend_utils.typecast_decimal(expression.output_field.format_number(value))
|
||||
|
||||
def convert_datefield_value(self, value, expression, context):
|
||||
if value is not None and not isinstance(value, datetime.date):
|
||||
value = parse_date(value)
|
||||
return value
|
||||
|
||||
def convert_datetimefield_value(self, value, expression, context):
|
||||
if value is not None and not isinstance(value, datetime.datetime):
|
||||
value = parse_datetime_with_timezone_support(value)
|
||||
return value
|
||||
|
||||
def convert_timefield_value(self, value, expression, context):
|
||||
if value is not None and not isinstance(value, datetime.time):
|
||||
value = parse_time(value)
|
||||
return value
|
||||
|
||||
def convert_uuidfield_value(self, value, expression, context):
|
||||
if value is not None:
|
||||
value = uuid.UUID(value)
|
||||
return value
|
||||
|
||||
def bulk_insert_sql(self, fields, num_values):
|
||||
res = []
|
||||
res.append("SELECT %s" % ", ".join(
|
||||
"%%s AS %s" % self.quote_name(f.column) for f in fields
|
||||
))
|
||||
res.extend(["UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1))
|
||||
return " ".join(res)
|
||||
|
||||
def combine_expression(self, connector, sub_expressions):
|
||||
# SQLite doesn't have a power function, so we fake it with a
|
||||
# user-defined function django_power that's registered in connect().
|
||||
if connector == '^':
|
||||
return 'django_power(%s)' % ','.join(sub_expressions)
|
||||
return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
|
||||
|
||||
def combine_duration_expression(self, connector, sub_expressions):
|
||||
if connector not in ['+', '-']:
|
||||
raise utils.DatabaseError('Invalid connector for timedelta: %s.' % connector)
|
||||
fn_params = ["'%s'" % connector] + sub_expressions
|
||||
if len(fn_params) > 3:
|
||||
raise ValueError('Too many params for timedelta operations.')
|
||||
return "django_format_dtdelta(%s)" % ', '.join(fn_params)
|
||||
|
||||
def integer_field_range(self, internal_type):
|
||||
# SQLite doesn't enforce any integer constraints
|
||||
return (None, None)
|
||||
|
||||
|
||||
class DatabaseWrapper(BaseDatabaseWrapper):
|
||||
vendor = 'sqlite'
|
||||
# SQLite doesn't actually support most of these types, but it "does the right
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import subprocess
|
||||
|
||||
from django.db.backends import BaseDatabaseClient
|
||||
from django.db.backends.base.client import BaseDatabaseClient
|
||||
|
||||
|
||||
class DatabaseClient(BaseDatabaseClient):
|
||||
|
|
|
@ -2,7 +2,7 @@ import os
|
|||
import sys
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db.backends.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.creation import BaseDatabaseCreation
|
||||
from django.utils.six.moves import input
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import sys
|
||||
|
||||
from django.db import utils
|
||||
from django.db.backends.base.features import BaseDatabaseFeatures
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
from .base import Database
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
# SQLite cannot handle us only partially reading from a cursor's result set
|
||||
# and then writing the same rows to the database in another cursor. This
|
||||
# setting ensures we always read result sets fully into memory all in one
|
||||
# go.
|
||||
can_use_chunked_reads = False
|
||||
test_db_allows_multiple_connections = False
|
||||
supports_unspecified_pk = True
|
||||
supports_timezones = False
|
||||
supports_1000_query_parameters = False
|
||||
supports_mixed_date_datetime_comparisons = False
|
||||
has_bulk_insert = True
|
||||
can_combine_inserts_with_and_without_auto_increment_pk = False
|
||||
supports_foreign_keys = False
|
||||
supports_column_check_constraints = False
|
||||
autocommits_when_autocommit_is_off = True
|
||||
can_introspect_decimal_field = False
|
||||
can_introspect_positive_integer_field = True
|
||||
can_introspect_small_integer_field = True
|
||||
supports_transactions = True
|
||||
atomic_transactions = False
|
||||
can_rollback_ddl = True
|
||||
supports_paramstyle_pyformat = False
|
||||
supports_sequence_reset = False
|
||||
|
||||
@cached_property
|
||||
def uses_savepoints(self):
|
||||
return Database.sqlite_version_info >= (3, 6, 8)
|
||||
|
||||
@cached_property
|
||||
def can_release_savepoints(self):
|
||||
return self.uses_savepoints
|
||||
|
||||
@cached_property
|
||||
def can_share_in_memory_db(self):
|
||||
return (
|
||||
sys.version_info[:2] >= (3, 4) and
|
||||
Database.__name__ == 'sqlite3.dbapi2' and
|
||||
Database.sqlite_version_info >= (3, 7, 13)
|
||||
)
|
||||
|
||||
@cached_property
|
||||
def supports_stddev(self):
|
||||
"""Confirm support for STDDEV and related stats functions
|
||||
|
||||
SQLite supports STDDEV as an extension package; so
|
||||
connection.ops.check_aggregate_support() can't unilaterally
|
||||
rule out support for STDDEV. We need to manually check
|
||||
whether the call works.
|
||||
"""
|
||||
with self.connection.cursor() as cursor:
|
||||
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
|
||||
try:
|
||||
cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
|
||||
has_support = True
|
||||
except utils.DatabaseError:
|
||||
has_support = False
|
||||
cursor.execute('DROP TABLE STDDEV_TEST')
|
||||
return has_support
|
||||
|
||||
@cached_property
|
||||
def has_zoneinfo_database(self):
|
||||
return pytz is not None
|
|
@ -1,6 +1,8 @@
|
|||
import re
|
||||
|
||||
from django.db.backends import BaseDatabaseIntrospection, FieldInfo, TableInfo
|
||||
from django.db.backends.base.introspection import (
|
||||
BaseDatabaseIntrospection, FieldInfo, TableInfo,
|
||||
)
|
||||
|
||||
|
||||
field_size_re = re.compile(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$')
|
||||
|
|
|
@ -0,0 +1,198 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.db import utils
|
||||
from django.db.backends import utils as backend_utils
|
||||
from django.db.backends.base.operations import BaseDatabaseOperations
|
||||
from django.db.models import fields, aggregates
|
||||
from django.utils.dateparse import parse_date, parse_time
|
||||
from django.utils.duration import duration_string
|
||||
from django.utils import six, timezone
|
||||
|
||||
from .utils import parse_datetime_with_timezone_support
|
||||
|
||||
try:
|
||||
import pytz
|
||||
except ImportError:
|
||||
pytz = None
|
||||
|
||||
|
||||
class DatabaseOperations(BaseDatabaseOperations):
|
||||
def bulk_batch_size(self, fields, objs):
|
||||
"""
|
||||
SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of
|
||||
999 variables per query.
|
||||
|
||||
If there is just single field to insert, then we can hit another
|
||||
limit, SQLITE_MAX_COMPOUND_SELECT which defaults to 500.
|
||||
"""
|
||||
limit = 999 if len(fields) > 1 else 500
|
||||
return (limit // len(fields)) if len(fields) > 0 else len(objs)
|
||||
|
||||
def check_aggregate_support(self, aggregate):
|
||||
bad_fields = (fields.DateField, fields.DateTimeField, fields.TimeField)
|
||||
bad_aggregates = (aggregates.Sum, aggregates.Avg,
|
||||
aggregates.Variance, aggregates.StdDev)
|
||||
if aggregate.refs_field(bad_aggregates, bad_fields):
|
||||
raise NotImplementedError(
|
||||
'You cannot use Sum, Avg, StdDev and Variance aggregations '
|
||||
'on date/time fields in sqlite3 '
|
||||
'since date/time is saved as text.')
|
||||
|
||||
def date_extract_sql(self, lookup_type, field_name):
|
||||
# sqlite doesn't support extract, so we fake it with the user-defined
|
||||
# function django_date_extract that's registered in connect(). Note that
|
||||
# single quotes are used because this is a string (and could otherwise
|
||||
# cause a collision with a field name).
|
||||
return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name)
|
||||
|
||||
def date_interval_sql(self, timedelta):
|
||||
return "'%s'" % duration_string(timedelta), []
|
||||
|
||||
def format_for_duration_arithmetic(self, sql):
|
||||
"""Do nothing here, we will handle it in the custom function."""
|
||||
return sql
|
||||
|
||||
def date_trunc_sql(self, lookup_type, field_name):
|
||||
# sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
|
||||
# function django_date_trunc that's registered in connect(). Note that
|
||||
# single quotes are used because this is a string (and could otherwise
|
||||
# cause a collision with a field name).
|
||||
return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
|
||||
|
||||
def datetime_extract_sql(self, lookup_type, field_name, tzname):
|
||||
# Same comment as in date_extract_sql.
|
||||
if settings.USE_TZ:
|
||||
if pytz is None:
|
||||
raise ImproperlyConfigured("This query requires pytz, "
|
||||
"but it isn't installed.")
|
||||
return "django_datetime_extract('%s', %s, %%s)" % (
|
||||
lookup_type.lower(), field_name), [tzname]
|
||||
|
||||
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
|
||||
# Same comment as in date_trunc_sql.
|
||||
if settings.USE_TZ:
|
||||
if pytz is None:
|
||||
raise ImproperlyConfigured("This query requires pytz, "
|
||||
"but it isn't installed.")
|
||||
return "django_datetime_trunc('%s', %s, %%s)" % (
|
||||
lookup_type.lower(), field_name), [tzname]
|
||||
|
||||
def drop_foreignkey_sql(self):
|
||||
return ""
|
||||
|
||||
def pk_default_value(self):
|
||||
return "NULL"
|
||||
|
||||
def quote_name(self, name):
|
||||
if name.startswith('"') and name.endswith('"'):
|
||||
return name # Quoting once is enough.
|
||||
return '"%s"' % name
|
||||
|
||||
def no_limit_value(self):
|
||||
return -1
|
||||
|
||||
def sql_flush(self, style, tables, sequences, allow_cascade=False):
    """Return one DELETE statement per table.

    Plain DELETE FROM works on SQLite because constraints are not
    enforced here, and auto-increment counters need no explicit reset
    (unlike other backends' sql_flush() implementations).
    """
    delete_kw = style.SQL_KEYWORD('DELETE')
    from_kw = style.SQL_KEYWORD('FROM')
    return [
        '%s %s %s;' % (delete_kw, from_kw, style.SQL_FIELD(self.quote_name(table)))
        for table in tables
    ]
|
||||
|
||||
def value_to_db_datetime(self, value):
    """Convert *value* to the text form SQLite stores for datetimes.

    SQLite has no time-zone support: aware datetimes are shifted to UTC
    and stored naive when USE_TZ is on, and rejected when it is off.
    """
    if value is None:
        return None
    if timezone.is_aware(value):
        if not settings.USE_TZ:
            raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.")
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return six.text_type(value)
|
||||
|
||||
def value_to_db_time(self, value):
    """Convert *value* to the text form SQLite stores for times.

    SQLite has no time-zone support, so aware time values are rejected
    outright.
    """
    if value is None:
        return None
    if timezone.is_aware(value):
        raise ValueError("SQLite backend does not support timezone-aware times.")
    return six.text_type(value)
|
||||
|
||||
def get_db_converters(self, expression):
    """Extend the base converters with SQLite-specific ones.

    SQLite returns dates, times, decimals, and UUIDs as plain text or
    numbers, so a converter is appended per internal field type to turn
    them back into Python objects.
    """
    converters = super(DatabaseOperations, self).get_db_converters(expression)
    extra = {
        'DateTimeField': self.convert_datetimefield_value,
        'DateField': self.convert_datefield_value,
        'TimeField': self.convert_timefield_value,
        'DecimalField': self.convert_decimalfield_value,
        'UUIDField': self.convert_uuidfield_value,
    }.get(expression.output_field.get_internal_type())
    if extra is not None:
        converters.append(extra)
    return converters
|
||||
|
||||
def convert_decimalfield_value(self, value, expression, context):
    """Re-quantize a decimal read back from the database.

    SQLite stores decimals without precision information, so the raw
    value is reformatted to the field's max_digits/decimal_places and
    then typecast back to a Decimal.

    NULLs are passed through untouched, matching the other
    convert_*_value() converters instead of relying on format_number()
    and typecast_decimal() to tolerate None.
    """
    if value is None:
        return None
    return backend_utils.typecast_decimal(expression.output_field.format_number(value))
|
||||
|
||||
def convert_datefield_value(self, value, expression, context):
    # SQLite hands dates back as ISO strings; parse unless the driver
    # already produced a date object (or the value is NULL).
    if value is None or isinstance(value, datetime.date):
        return value
    return parse_date(value)
|
||||
|
||||
def convert_datetimefield_value(self, value, expression, context):
    # SQLite hands datetimes back as ISO strings; parse (attaching UTC
    # when USE_TZ is on) unless the driver already produced a datetime
    # object (or the value is NULL).
    if value is None or isinstance(value, datetime.datetime):
        return value
    return parse_datetime_with_timezone_support(value)
|
||||
|
||||
def convert_timefield_value(self, value, expression, context):
    # SQLite hands times back as ISO strings; parse unless the driver
    # already produced a time object (or the value is NULL).
    if value is None or isinstance(value, datetime.time):
        return value
    return parse_time(value)
|
||||
|
||||
def convert_uuidfield_value(self, value, expression, context):
    # SQLite stores UUIDs as text; rebuild the UUID object on the way
    # out, passing NULLs through untouched.
    return None if value is None else uuid.UUID(value)
|
||||
|
||||
def bulk_insert_sql(self, fields, num_values):
    """Build a multi-row INSERT body via SELECT ... UNION ALL SELECT.

    SQLite lacks a multi-row VALUES syntax here: the first SELECT
    aliases each placeholder with its column name so the row shape is
    explicit, and every subsequent row is appended as UNION ALL SELECT.
    """
    first_row = "SELECT %s" % ", ".join(
        "%%s AS %s" % self.quote_name(field.column) for field in fields
    )
    later_row = "UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))
    return " ".join([first_row] + [later_row] * (num_values - 1))
|
||||
|
||||
def combine_expression(self, connector, sub_expressions):
    # SQLite has no ^ (power) operator; emulate it with the user-defined
    # django_power() function registered in connect().  Everything else
    # is handled by the base implementation.
    if connector != '^':
        return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
    return 'django_power({})'.format(','.join(sub_expressions))
|
||||
|
||||
def combine_duration_expression(self, connector, sub_expressions):
    """Build a django_format_dtdelta() call for duration arithmetic.

    Only binary + and - make sense for timedeltas; any other connector
    (or more than two operands) is rejected rather than emitting
    silently-wrong SQL.
    """
    if connector not in ('+', '-'):
        raise utils.DatabaseError('Invalid connector for timedelta: %s.' % connector)
    fn_args = ["'%s'" % connector] + sub_expressions
    if len(fn_args) > 3:
        raise ValueError('Too many params for timedelta operations.')
    return "django_format_dtdelta(%s)" % ', '.join(fn_args)
|
||||
|
||||
def integer_field_range(self, internal_type):
    # SQLite doesn't enforce integer bounds on any column type, so the
    # range is reported as open on both ends.
    return (None, None)
|
|
@ -1,10 +1,13 @@
|
|||
import codecs
|
||||
import copy
|
||||
from decimal import Decimal
|
||||
from django.utils import six
|
||||
|
||||
from django.apps.registry import Apps
|
||||
from django.db.backends.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.models.fields.related import ManyToManyField
|
||||
from django.utils import six
|
||||
|
||||
import _sqlite3
|
||||
|
||||
|
||||
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
||||
|
@ -13,8 +16,6 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
|
|||
sql_create_inline_fk = "REFERENCES %(to_table)s (%(to_column)s)"
|
||||
|
||||
def quote_value(self, value):
|
||||
# Inner import to allow nice failure for backend if not present
|
||||
import _sqlite3
|
||||
try:
|
||||
value = _sqlite3.adapt(value)
|
||||
except _sqlite3.ProgrammingError:
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_datetime
|
||||
|
||||
|
||||
def parse_datetime_with_timezone_support(value):
    """Parse an ISO datetime string, attaching UTC when USE_TZ is on.

    Values parsed from the database are naive; under USE_TZ they are
    known to have been stored in UTC, so the tzinfo is reinstated here.
    Only confirmed-naive results are modified.
    """
    parsed = parse_datetime(value)
    if parsed is None or not settings.USE_TZ or timezone.is_aware(parsed):
        return parsed
    return parsed.replace(tzinfo=timezone.utc)
|
|
@ -272,7 +272,7 @@ if supplied) should be callable objects that accept two arguments; the first is
|
|||
an instance of ``django.apps.registry.Apps`` containing historical models that
|
||||
match the operation's place in the project history, and the second is an
|
||||
instance of :class:`SchemaEditor
|
||||
<django.db.backends.schema.BaseDatabaseSchemaEditor>`.
|
||||
<django.db.backends.base.schema.BaseDatabaseSchemaEditor>`.
|
||||
|
||||
The optional ``hints`` argument will be passed as ``**hints`` to the
|
||||
:meth:`allow_migrate` method of database routers to assist them in making a
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
``SchemaEditor``
|
||||
================
|
||||
|
||||
.. module:: django.db.backends.schema
|
||||
.. module:: django.db.backends.base.schema
|
||||
|
||||
.. class:: BaseDatabaseSchemaEditor
|
||||
|
||||
|
|
|
@ -942,6 +942,19 @@ Database backend API
|
|||
The following changes to the database backend API are documented to assist
|
||||
those writing third-party backends in updating their code:
|
||||
|
||||
* ``BaseDatabaseXXX`` classes have been moved to ``django.db.backends.base``.
|
||||
Please import them from the new locations::
|
||||
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.db.backends.base.client import BaseDatabaseClient
|
||||
from django.db.backends.base.creation import BaseDatabaseCreation
|
||||
from django.db.backends.base.features import BaseDatabaseFeatures
|
||||
from django.db.backends.base.introspection import BaseDatabaseIntrospection
|
||||
from django.db.backends.base.introspection import FieldInfo, TableInfo
|
||||
from django.db.backends.base.operations import BaseDatabaseOperations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.backends.base.validation import BaseDatabaseValidation
|
||||
|
||||
* The ``data_types``, ``data_types_suffix``, and
|
||||
``data_type_check_constraints`` attributes have moved from the
|
||||
``DatabaseCreation`` class to ``DatabaseWrapper``.
|
||||
|
|
|
@ -15,7 +15,7 @@ from django.core.exceptions import ImproperlyConfigured
|
|||
from django.core.management.color import no_style
|
||||
from django.db import (connection, connections, DEFAULT_DB_ALIAS,
|
||||
DatabaseError, IntegrityError, reset_queries, transaction)
|
||||
from django.db.backends import BaseDatabaseWrapper
|
||||
from django.db.backends.base.base import BaseDatabaseWrapper
|
||||
from django.db.backends.signals import connection_created
|
||||
from django.db.backends.postgresql_psycopg2 import version as pg_version
|
||||
from django.db.backends.utils import format_number, CursorWrapper
|
||||
|
|
Loading…
Reference in New Issue