import uuid

from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.utils import timezone
from django.utils.duration import duration_microseconds
from django.utils.encoding import force_text


class DatabaseOperations(BaseDatabaseOperations):
    compiler_module = "django.db.backends.mysql.compiler"

    # MySQL stores positive fields as UNSIGNED ints.
    integer_field_ranges = {
        **BaseDatabaseOperations.integer_field_ranges,
        'PositiveSmallIntegerField': (0, 65535),
        'PositiveIntegerField': (0, 4294967295),
    }
    cast_data_types = {
        'CharField': 'char(%(max_length)s)',
        'IntegerField': 'signed integer',
        'BigIntegerField': 'signed integer',
        'SmallIntegerField': 'signed integer',
        'PositiveIntegerField': 'unsigned integer',
        'PositiveSmallIntegerField': 'unsigned integer',
    }
    cast_char_field_without_max_length = 'char'

    def date_extract_sql(self, lookup_type, field_name):
        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
        if lookup_type == 'week_day':
            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
            # Note: WEEKDAY() returns 0-6, Monday=0.
            return "DAYOFWEEK(%s)" % field_name
        elif lookup_type == 'week':
            # Override the value of default_week_format for consistency with
            # other database backends.
            # Mode 3: Monday, 1-53, with 4 or more days this year.
            return "WEEK(%s, 3)" % field_name
        else:
            # EXTRACT returns 1-53 based on ISO-8601 for the week number.
            return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)

    def date_trunc_sql(self, lookup_type, field_name):
        fields = {
            'year': '%%Y-01-01',
            'month': '%%Y-%%m-01',
        }  # Use double percents to escape.
        if lookup_type in fields:
            format_str = fields[lookup_type]
            return "CAST(DATE_FORMAT(%s, '%s') AS DATE)" % (field_name, format_str)
        elif lookup_type == 'quarter':
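            # MAKEDATE(YEAR(d), 1) is January 1 of d's year; adding QUARTER(d)
            # quarters and then subtracting one quarter lands on the first day
            # of d's quarter, e.g. 2015-08-15 (quarter 3) truncates to 2015-07-01.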
            return "MAKEDATE(YEAR(%s), 1) + INTERVAL QUARTER(%s) QUARTER - INTERVAL 1 QUARTER" % (
                field_name, field_name
            )
        else:
            return "DATE(%s)" % field_name

    def _convert_field_to_tz(self, field_name, tzname):
        if settings.USE_TZ:
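            # CONVERT_TZ() with named zones such as 'UTC' requires MySQL's
            # time zone tables to be loaded (e.g. with mysql_tzinfo_to_sql);
            # if they aren't, the expression evaluates to NULL. For example,
            # a tzname of 'Europe/Paris' wraps the column as
            # CONVERT_TZ(<column>, 'UTC', 'Europe/Paris').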
            field_name = "CONVERT_TZ(%s, 'UTC', '%s')" % (field_name, tzname)
        return field_name

    def datetime_cast_date_sql(self, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return "DATE(%s)" % field_name

    def datetime_cast_time_sql(self, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return "TIME(%s)" % field_name

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return self.date_extract_sql(lookup_type, field_name)

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
        if lookup_type == 'quarter':
            return (
                "CAST(DATE_FORMAT(MAKEDATE(YEAR({field_name}), 1) + "
                "INTERVAL QUARTER({field_name}) QUARTER - "
                "INTERVAL 1 QUARTER, '%%Y-%%m-01 00:00:00') AS DATETIME)"
            ).format(field_name=field_name)
        try:
            i = fields.index(lookup_type) + 1
        except ValueError:
            sql = field_name
        else:
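            # Keep the format components up to and including lookup_type and
            # pad the rest with the defaults, e.g. 'hour' gives i == 4, so
            # format[:4] + format_def[4:] joins to '%%Y-%%m-%%d %%H:00:00'.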
            format_str = ''.join(format[:i] + format_def[i:])
            sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
        return sql

    def time_trunc_sql(self, lookup_type, field_name):
        fields = {
            'hour': '%%H:00:00',
            'minute': '%%H:%%i:00',
            'second': '%%H:%%i:%%s',
        }  # Use double percents to escape.
        if lookup_type in fields:
            format_str = fields[lookup_type]
            return "CAST(DATE_FORMAT(%s, '%s') AS TIME)" % (field_name, format_str)
        else:
            return "TIME(%s)" % field_name

    def date_interval_sql(self, timedelta):
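        # duration_microseconds() flattens the timedelta into a single
        # microsecond count, e.g. timedelta(days=1) yields
        # 'INTERVAL 86400000000 MICROSECOND'.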
        return 'INTERVAL %s MICROSECOND' % duration_microseconds(timedelta)

    def format_for_duration_arithmetic(self, sql):
        return 'INTERVAL %s MICROSECOND' % sql

    def force_no_ordering(self):
        """
        "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
        columns. If no ordering would otherwise be applied, we don't want any
        implicit sorting going on.
        """
        return [(None, ("NULL", [], False))]

    def last_executed_query(self, cursor, sql, params):
        # With MySQLdb, cursor objects have an (undocumented) "_last_executed"
        # attribute where the exact query sent to the database is saved.
        # See MySQLdb/cursors.py in the source distribution.
        return force_text(getattr(cursor, '_last_executed', None), errors='replace')

    def no_limit_value(self):
        # 2**64 - 1, as recommended by the MySQL documentation
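        # (e.g. "SELECT * FROM tbl LIMIT 95, 18446744073709551615" retrieves
        # every row from the 96th onward, which is how the MySQL manual
        # suggests expressing an offset without a limit).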
        return 18446744073709551615

    def quote_name(self, name):
        if name.startswith("`") and name.endswith("`"):
            return name  # Quoting once is enough.
        return "`%s`" % name

    def random_function_sql(self):
        return 'RAND()'

    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        # NB: The generated SQL below is specific to MySQL
        # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
        # to clear all tables of all data
        if tables:
            sql = ['SET FOREIGN_KEY_CHECKS = 0;']
            for table in tables:
                sql.append('%s %s;' % (
                    style.SQL_KEYWORD('TRUNCATE'),
                    style.SQL_FIELD(self.quote_name(table)),
                ))
            sql.append('SET FOREIGN_KEY_CHECKS = 1;')
            sql.extend(self.sequence_reset_by_name_sql(style, sequences))
            return sql
        else:
            return []

    def validate_autopk_value(self, value):
        # MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
        if value == 0:
            raise ValueError('The database backend does not accept 0 as a '
                             'value for AutoField.')
        return value

    def adapt_datetimefield_value(self, value):
        if value is None:
            return None

        # Expression values are adapted by the database.
        if hasattr(value, 'resolve_expression'):
            return value

        # MySQL doesn't support tz-aware datetimes
        if timezone.is_aware(value):
            if settings.USE_TZ:
                value = timezone.make_naive(value, self.connection.timezone)
            else:
                raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
        return str(value)

    def adapt_timefield_value(self, value):
        if value is None:
            return None

        # Expression values are adapted by the database.
        if hasattr(value, 'resolve_expression'):
            return value

        # MySQL doesn't support tz-aware times
        if timezone.is_aware(value):
            raise ValueError("MySQL backend does not support timezone-aware times.")

        return str(value)

    def max_name_length(self):
        return 64

    def bulk_insert_sql(self, fields, placeholder_rows):
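        # e.g. two rows of two placeholders each, [['%s', '%s'], ['%s', '%s']],
        # are rendered as "VALUES (%s, %s), (%s, %s)".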
        placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
        values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
        return "VALUES " + values_sql

    def combine_expression(self, connector, sub_expressions):
        if connector == '^':
            return 'POW(%s)' % ','.join(sub_expressions)
        # Convert the result to a signed integer since MySQL's binary operators
        # return an unsigned integer.
        elif connector in ('&', '|', '<<'):
            return 'CONVERT(%s, SIGNED)' % connector.join(sub_expressions)
        elif connector == '>>':
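            # Emulate the right shift with integer division, e.g. 8 >> 2
            # becomes FLOOR(8 / POW(2, 2)) = 2.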
            lhs, rhs = sub_expressions
            return 'FLOOR(%(lhs)s / POW(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
        return super().combine_expression(connector, sub_expressions)

    def get_db_converters(self, expression):
        converters = super().get_db_converters(expression)
        internal_type = expression.output_field.get_internal_type()
        if internal_type == 'TextField':
            converters.append(self.convert_textfield_value)
        elif internal_type in ['BooleanField', 'NullBooleanField']:
            converters.append(self.convert_booleanfield_value)
        elif internal_type == 'DateTimeField':
            if settings.USE_TZ:
                converters.append(self.convert_datetimefield_value)
        elif internal_type == 'UUIDField':
            converters.append(self.convert_uuidfield_value)
        return converters

    def convert_textfield_value(self, value, expression, connection):
        if value is not None:
            value = force_text(value)
        return value

    def convert_booleanfield_value(self, value, expression, connection):
        if value in (0, 1):
            value = bool(value)
        return value

    def convert_datetimefield_value(self, value, expression, connection):
        if value is not None:
            value = timezone.make_aware(value, self.connection.timezone)
        return value

    def convert_uuidfield_value(self, value, expression, connection):
        if value is not None:
            value = uuid.UUID(value)
        return value

    def binary_placeholder_sql(self, value):
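        # The _binary introducer makes MySQL treat the quoted value as a
        # binary string instead of decoding it in the connection's character
        # set; expressions (anything with as_sql) render their own SQL and
        # keep the plain placeholder.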
        return '_binary %s' if value is not None and not hasattr(value, 'as_sql') else '%s'

    def subtract_temporals(self, internal_type, lhs, rhs):
        lhs_sql, lhs_params = lhs
        rhs_sql, rhs_params = rhs
        if internal_type == 'TimeField':
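            # Compute the difference manually: each side is converted to
            # microseconds (TIME_TO_SEC() * 10**6 plus the MICROSECOND()
            # component) before subtracting; the params are doubled because
            # each side's SQL appears twice.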
            return (
                "((TIME_TO_SEC(%(lhs)s) * POW(10, 6) + MICROSECOND(%(lhs)s)) -"
                " (TIME_TO_SEC(%(rhs)s) * POW(10, 6) + MICROSECOND(%(rhs)s)))"
            ) % {'lhs': lhs_sql, 'rhs': rhs_sql}, lhs_params * 2 + rhs_params * 2
        else:
            return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), rhs_params + lhs_params