import datetime
import decimal
from importlib import import_module

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.db.backends import utils
from django.utils import timezone
from django.utils.dateparse import parse_duration
from django.utils.encoding import force_text


class BaseDatabaseOperations:
    """
    Encapsulate backend-specific differences, such as the way a backend
    performs ordering or calculates the ID of a recently-inserted row.
    """
    compiler_module = "django.db.models.sql.compiler"

    # Integer field safe ranges by `internal_type` as documented
    # in docs/ref/models/fields.txt.
    integer_field_ranges = {
        'SmallIntegerField': (-32768, 32767),
        'IntegerField': (-2147483648, 2147483647),
        'BigIntegerField': (-9223372036854775808, 9223372036854775807),
        'PositiveSmallIntegerField': (0, 32767),
        'PositiveIntegerField': (0, 2147483647),
    }
    set_operators = {
        'union': 'UNION',
        'intersection': 'INTERSECT',
        'difference': 'EXCEPT',
    }

    def __init__(self, connection):
        self.connection = connection
        self._cache = None

    def autoinc_sql(self, table, column):
        """
        Return any SQL needed to support auto-incrementing primary keys, or
        None if no SQL is necessary.

        This SQL is executed when a table is created.
        """
        return None

    def bulk_batch_size(self, fields, objs):
        """
        Return the maximum allowed batch size for the backend. The fields
        are the fields going to be inserted in the batch; objs contains all
        the objects to be inserted.
        """
        return len(objs)

    def cache_key_culling_sql(self):
        """
        Return an SQL query that retrieves the first cache key greater than the
        n smallest.

        This is used by the 'db' cache backend to determine where to start
        culling.
        """
        return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"

    def unification_cast_sql(self, output_field):
        """
        Given a field instance, return the SQL that casts the result of a union
        to that type. The resulting string should contain a '%s' placeholder
        for the expression being cast.
        """
        return '%s'

    def date_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        extracts a value from the given date field field_name.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')

    def date_interval_sql(self, timedelta):
        """
        Implement the date interval functionality for expressions.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')

    def date_trunc_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        truncates the given date field field_name to a date object with only
        the given specificity.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method')

    def datetime_cast_date_sql(self, field_name, tzname):
        """
        Return the SQL to cast a datetime value to a date value.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_date_sql() method')

    def datetime_cast_time_sql(self, field_name, tzname):
        """
        Return the SQL to cast a datetime value to a time value.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method')

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that extracts a value from the given
        datetime field field_name.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method')

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        """
        Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that truncates the given datetime field
        field_name to a datetime object with only the given specificity.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method')

    def time_trunc_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
        that truncates the given time field field_name to a time object with
        only the given specificity.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a time_trunc_sql() method')

    def time_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
        that extracts a value from the given time field field_name.
        """
        return self.date_extract_sql(lookup_type, field_name)

    def deferrable_sql(self):
        """
        Return the SQL to make a constraint "initially deferred" during a
        CREATE TABLE statement.
        """
        return ''

    def distinct_sql(self, fields):
        """
        Return an SQL DISTINCT clause which removes duplicate rows from the
        result set. If any fields are given, only check the given fields for
        duplicates.
        """
        if fields:
            raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
        else:
            return 'DISTINCT'

    def fetch_returned_insert_id(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table that has an auto-incrementing ID, return the
        newly created ID.
        """
        return cursor.fetchone()[0]

    def field_cast_sql(self, db_type, internal_type):
        """
        Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
        (e.g. 'GenericIPAddressField'), return the SQL to cast it before using
        it in a WHERE statement. The resulting string should contain a '%s'
        placeholder for the column being searched against.
        """
        return '%s'

    def force_no_ordering(self):
        """
        Return a list used in the "ORDER BY" clause to force no ordering at
        all. Return an empty list to include nothing in the ordering.
        """
        return []

    def for_update_sql(self, nowait=False, skip_locked=False, of=()):
        """
        Return the FOR UPDATE SQL clause to lock rows for an update operation.
        """
        return 'FOR UPDATE%s%s%s' % (
            ' OF %s' % ', '.join(of) if of else '',
            ' NOWAIT' if nowait else '',
            ' SKIP LOCKED' if skip_locked else '',
        )
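
    # For illustration, what the base implementation renders (backends may
    # override the clause entirely):
    #
    #   connection.ops.for_update_sql()             -> 'FOR UPDATE'
    #   connection.ops.for_update_sql(nowait=True)  -> 'FOR UPDATE NOWAIT'
    #   connection.ops.for_update_sql(of=('self',), skip_locked=True)
    #                                               -> 'FOR UPDATE OF self SKIP LOCKED'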

    def last_executed_query(self, cursor, sql, params):
        """
        Return a string of the query last executed by the given cursor, with
        placeholders replaced with actual values.

        `sql` is the raw query containing placeholders and `params` is the
        sequence of parameters. These are used by default, but this method
        exists for database backends to provide a better implementation
        according to their own quoting schemes.
        """
        # Convert params to contain string values.
        def to_string(s):
            return force_text(s, strings_only=True, errors='replace')
        if isinstance(params, (list, tuple)):
            u_params = tuple(to_string(val) for val in params)
        elif params is None:
            u_params = ()
        else:
            u_params = {to_string(k): to_string(v) for k, v in params.items()}

        return "QUERY = %r - PARAMS = %r" % (sql, u_params)

    def last_insert_id(self, cursor, table_name, pk_name):
        """
        Given a cursor object that has just performed an INSERT statement into
        a table that has an auto-incrementing ID, return the newly created ID.

        `pk_name` is the name of the primary-key column.
        """
        return cursor.lastrowid

    def lookup_cast(self, lookup_type, internal_type=None):
        """
        Return the string to use in a query when performing lookups
        ("contains", "like", etc.). It should contain a '%s' placeholder for
        the column being searched against.
        """
        return "%s"

    def max_in_list_size(self):
        """
        Return the maximum number of items that can be passed in a single 'IN'
        list condition, or None if the backend does not impose a limit.
        """
        return None

    def max_name_length(self):
        """
        Return the maximum length of table and column names, or None if there
        is no limit.
        """
        return None

    def no_limit_value(self):
        """
        Return the value to use for the LIMIT when we are wanting "LIMIT
        infinity". Return None if the limit clause can be omitted in this case.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')

    def pk_default_value(self):
        """
        Return the value to use during an INSERT statement to specify that
        the field should use its default value.
        """
        return 'DEFAULT'

    def prepare_sql_script(self, sql):
        """
        Take an SQL script that may contain multiple lines and return a list
        of statements to feed to successive cursor.execute() calls.

        Since few databases are able to process raw SQL scripts in a single
        cursor.execute() call and PEP 249 doesn't talk about this use case,
        the default implementation is conservative.
        """
        try:
            import sqlparse
        except ImportError:
            raise ImproperlyConfigured(
                "sqlparse is required if you don't split your SQL "
                "statements manually."
            )
        else:
            return [sqlparse.format(statement, strip_comments=True)
                    for statement in sqlparse.split(sql) if statement]
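
    # For illustration, assuming sqlparse is installed, a two-statement
    # script is split roughly as follows:
    #
    #   connection.ops.prepare_sql_script(
    #       "CREATE TABLE a (i int); INSERT INTO a VALUES (1);")
    #   -> ['CREATE TABLE a (i int);', 'INSERT INTO a VALUES (1);']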

    def process_clob(self, value):
        """
        Return the value of a CLOB column, for backends that return a locator
        object that requires additional processing.
        """
        return value

    def return_insert_id(self):
        """
        For backends that support returning the last insert ID as part of an
        insert query, return the SQL and params to append to the INSERT query.
        The returned fragment should contain a format string to hold the
        appropriate column.
        """
        pass

    def compiler(self, compiler_name):
        """
        Return the SQLCompiler class corresponding to the given name,
        in the namespace corresponding to the `compiler_module` attribute
        on this backend.
        """
        if self._cache is None:
            self._cache = import_module(self.compiler_module)
        return getattr(self._cache, compiler_name)
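
    # For illustration: with the default `compiler_module`, names resolve
    # against django.db.models.sql.compiler, e.g.
    #
    #   connection.ops.compiler('SQLInsertCompiler')
    #   -> <class 'django.db.models.sql.compiler.SQLInsertCompiler'>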

    def quote_name(self, name):
        """
        Return a quoted version of the given table, index, or column name. Do
        not quote the given name if it's already been quoted.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')

    def random_function_sql(self):
        """Return an SQL expression that returns a random value."""
        return 'RANDOM()'

    def regex_lookup(self, lookup_type):
        """
        Return the string to use in a query when performing regular expression
        lookups (using "regex" or "iregex"). It should contain a '%s'
        placeholder for the column being searched against.

        If the feature is not supported (or part of it is not supported), raise
        NotImplementedError.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')

    def savepoint_create_sql(self, sid):
        """
        Return the SQL for starting a new savepoint. Only required if the
        "uses_savepoints" feature is True. The "sid" parameter is a string
        for the savepoint id.
        """
        return "SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_commit_sql(self, sid):
        """
        Return the SQL for committing the given savepoint.
        """
        return "RELEASE SAVEPOINT %s" % self.quote_name(sid)

    def savepoint_rollback_sql(self, sid):
        """
        Return the SQL for rolling back the given savepoint.
        """
        return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)

    def set_time_zone_sql(self):
        """
        Return the SQL that will set the connection's time zone.

        Return '' if the backend doesn't support time zones.
        """
        return ''

    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        """
        Return a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves) and the SQL statements required to reset the sequences
        passed in `sequences`.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.

        The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing to the tables being truncated.
        PostgreSQL requires a cascade even if these tables are empty.
        """
        raise NotImplementedError('subclasses of BaseDatabaseOperations must provide an sql_flush() method')

    def execute_sql_flush(self, using, sql_list):
        """Execute a list of SQL statements to flush the database."""
        with transaction.atomic(using=using, savepoint=self.connection.features.can_rollback_ddl):
            with self.connection.cursor() as cursor:
                for sql in sql_list:
                    cursor.execute(sql)
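
    # For illustration, a sketch of how the two flush hooks are typically
    # combined by management code (no_style() comes from
    # django.core.management.color, as mentioned in sql_flush() above):
    #
    #   sql_list = connection.ops.sql_flush(no_style(), tables, sequences)
    #   connection.ops.execute_sql_flush(connection.alias, sql_list)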

    def sequence_reset_by_name_sql(self, style, sequences):
        """
        Return a list of the SQL statements required to reset sequences
        passed in `sequences`.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []

    def sequence_reset_sql(self, style, model_list):
        """
        Return a list of the SQL statements required to reset sequences for
        the given models.

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []  # No sequence reset required by default.

    def start_transaction_sql(self):
        """Return the SQL statement required to start a transaction."""
        return "BEGIN;"

    def end_transaction_sql(self, success=True):
        """Return the SQL statement required to end a transaction."""
        if not success:
            return "ROLLBACK;"
        return "COMMIT;"

    def tablespace_sql(self, tablespace, inline=False):
        """
        Return the SQL that will be used in a query to define the tablespace.

        Return '' if the backend doesn't support tablespaces.

        If `inline` is True, append the SQL to a row; otherwise append it to
        the entire CREATE TABLE or CREATE INDEX statement.
        """
        return ''

    def prep_for_like_query(self, x):
        """Prepare a value for use in a LIKE query."""
        return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")

    # Same as prep_for_like_query(), but called for "iexact" matches, which
    # need not necessarily be implemented using "LIKE" in the backend.
    prep_for_iexact_query = prep_for_like_query
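
    # For illustration, the escaping performed above, so that LIKE wildcard
    # characters are matched literally:
    #
    #   connection.ops.prep_for_like_query('50%_off')  ->  r'50\%\_off'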

    def validate_autopk_value(self, value):
        """
        Certain backends do not accept some values for "serial" fields
        (for example zero in MySQL). Raise a ValueError if the value is
        invalid, otherwise return the validated value.
        """
        return value

    def adapt_unknown_value(self, value):
        """
        Transform a value to something compatible with the backend driver.

        This method only depends on the type of the value. It's designed for
        cases where the target type isn't known, such as .raw() SQL queries.
        As a consequence it may not work perfectly in all circumstances.
        """
        if isinstance(value, datetime.datetime):  # must be before date
            return self.adapt_datetimefield_value(value)
        elif isinstance(value, datetime.date):
            return self.adapt_datefield_value(value)
        elif isinstance(value, datetime.time):
            return self.adapt_timefield_value(value)
        elif isinstance(value, decimal.Decimal):
            return self.adapt_decimalfield_value(value)
        else:
            return value
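
    # For illustration: with the base adapters defined below, date/time and
    # decimal values come back as strings and anything else is untouched:
    #
    #   connection.ops.adapt_unknown_value(datetime.date(2017, 5, 1))  -> '2017-05-01'
    #   connection.ops.adapt_unknown_value(42)                         -> 42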

    def adapt_datefield_value(self, value):
        """
        Transform a date value to an object compatible with what is expected
        by the backend driver for date columns.
        """
        if value is None:
            return None
        return str(value)

    def adapt_datetimefield_value(self, value):
        """
        Transform a datetime value to an object compatible with what is
        expected by the backend driver for datetime columns.
        """
        if value is None:
            return None
        return str(value)

    def adapt_timefield_value(self, value):
        """
        Transform a time value to an object compatible with what is expected
        by the backend driver for time columns.
        """
        if value is None:
            return None
        if timezone.is_aware(value):
            raise ValueError("Django does not support timezone-aware times.")
        return str(value)

    def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
        """
        Transform a decimal.Decimal value to an object compatible with what is
        expected by the backend driver for decimal (numeric) columns.
        """
        return utils.format_number(value, max_digits, decimal_places)

    def adapt_ipaddressfield_value(self, value):
        """
        Transform a string representation of an IP address into the expected
        type for the backend driver.
        """
        return value or None

    def year_lookup_bounds_for_date_field(self, value):
        """
        Return a two-element list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        """
        first = datetime.date(value, 1, 1)
        second = datetime.date(value, 12, 31)
        first = self.adapt_datefield_value(first)
        second = self.adapt_datefield_value(second)
        return [first, second]
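
    # For illustration, the bounds produced for a __year lookup on a
    # DateField with the default date adapter:
    #
    #   connection.ops.year_lookup_bounds_for_date_field(2017)
    #   -> ['2017-01-01', '2017-12-31']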

    def year_lookup_bounds_for_datetime_field(self, value):
        """
        Return a two-element list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateTimeField value using a year
        lookup.

        `value` is an int, containing the looked-up year.
        """
        first = datetime.datetime(value, 1, 1)
        second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
        if settings.USE_TZ:
            tz = timezone.get_current_timezone()
            first = timezone.make_aware(first, tz)
            second = timezone.make_aware(second, tz)
        first = self.adapt_datetimefield_value(first)
        second = self.adapt_datetimefield_value(second)
        return [first, second]

    def get_db_converters(self, expression):
        """
        Return a list of functions needed to convert field data.

        Some field types on some backends do not provide data in the correct
        format; this is the hook for converter functions.
        """
        return []

    def convert_durationfield_value(self, value, expression, connection, context):
        if value is not None:
            value = str(decimal.Decimal(value) / decimal.Decimal(1000000))
            value = parse_duration(value)
        return value
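
    # For illustration: the conversion above assumes the backend stored the
    # duration as a number of microseconds, scales it to seconds, and hands
    # the string to parse_duration(), e.g. a stored value of 90000000
    # becomes a timedelta of 90 seconds.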

    def check_expression_support(self, expression):
        """
        Check that the backend supports the provided expression.

        This is used on specific backends to rule out known expressions
        that have problematic or nonexistent implementations. If the
        expression has a known problem, the backend should raise
        NotImplementedError.
        """
        pass

    def combine_expression(self, connector, sub_expressions):
        """
        Combine a list of subexpressions into a single expression, using
        the provided connecting operator. This is required because operators
        can vary between backends (e.g., Oracle with %% and &) and between
        subexpression types (e.g., date expressions).
        """
        conn = ' %s ' % connector
        return conn.join(sub_expressions)
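
    # For illustration, the default combination simply joins the fragments
    # with the operator:
    #
    #   connection.ops.combine_expression('+', ['"price"', '"tax"'])
    #   -> '"price" + "tax"'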

    def combine_duration_expression(self, connector, sub_expressions):
        return self.combine_expression(connector, sub_expressions)

    def binary_placeholder_sql(self, value):
        """
        Some backends require special syntax to insert binary content (MySQL
        for example uses '_binary %s').
        """
        return '%s'

    def modify_insert_params(self, placeholder, params):
        """
        Allow modification of insert parameters. Needed for Oracle Spatial
        backend due to #10888.
        """
        return params

    def integer_field_range(self, internal_type):
        """
        Given an integer field internal type (e.g. 'PositiveIntegerField'),
        return a tuple of the (min_value, max_value) form representing the
        range of the column type bound to the field.
        """
        return self.integer_field_ranges[internal_type]
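
    # For illustration, a lookup into the integer_field_ranges table defined
    # at the top of the class:
    #
    #   connection.ops.integer_field_range('PositiveSmallIntegerField')
    #   -> (0, 32767)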

    def subtract_temporals(self, internal_type, lhs, rhs):
        if self.connection.features.supports_temporal_subtraction:
            lhs_sql, lhs_params = lhs
            rhs_sql, rhs_params = rhs
            return "(%s - %s)" % (lhs_sql, rhs_sql), lhs_params + rhs_params
        raise NotImplementedError("This backend does not support %s subtraction." % internal_type)