"""
SQLite backend for the sqlite3 module in the standard library.
"""
import datetime
import decimal
import functools
import hashlib
import json
import math
import operator
import re
import statistics
import warnings
from itertools import chain
from sqlite3 import dbapi2 as Database

import pytz

from django.core.exceptions import ImproperlyConfigured
from django.db import IntegrityError
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils import timezone
from django.utils.asyncio import async_unsafe
from django.utils.dateparse import parse_datetime, parse_time
from django.utils.duration import duration_microseconds
from django.utils.regex_helper import _lazy_re_compile
from django.utils.version import PY38

from .client import DatabaseClient  # isort:skip
from .creation import DatabaseCreation  # isort:skip
from .features import DatabaseFeatures  # isort:skip
from .introspection import DatabaseIntrospection  # isort:skip
from .operations import DatabaseOperations  # isort:skip
from .schema import DatabaseSchemaEditor  # isort:skip


def decoder(conv_func):
    """
    Convert bytestrings from Python's sqlite3 interface to a regular string.
    """
    return lambda s: conv_func(s.decode())


def none_guard(func):
    """
    Decorator that returns None if any of the arguments to the decorated
    function are None. Many SQL functions return NULL if any of their arguments
    are NULL. This decorator simplifies the implementation of this for the
    custom functions registered below.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return None if None in args else func(*args, **kwargs)
    return wrapper
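
# A minimal illustration (hypothetical callable, not taken from this module):
#   add = none_guard(lambda a, b: a + b)
#   add(1, 2)     -> 3
#   add(1, None)  -> None, mirroring SQL NULL propagation.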


def list_aggregate(function):
    """
    Return an aggregate class that accumulates values in a list and applies
    the provided function to the data.
    """
    return type('ListAggregate', (list,), {'finalize': function, 'step': list.append})
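
# Sketch of how this is used in get_new_connection() below, e.g.
#   conn.create_aggregate('STDDEV_POP', 1, list_aggregate(statistics.pstdev))
# sqlite3 calls step() once per row and finalize() once at the end, so the
# aggregate collects the column values in a list and hands them to the
# provided function (here statistics.pstdev).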


def check_sqlite_version():
    if Database.sqlite_version_info < (3, 8, 3):
        raise ImproperlyConfigured('SQLite 3.8.3 or later is required (found %s).' % Database.sqlite_version)


check_sqlite_version()
Database.register_converter("bool", b'1'.__eq__)
|
2017-01-20 17:20:53 +08:00
|
|
|
Database.register_converter("time", decoder(parse_time))
|
|
|
|
Database.register_converter("datetime", decoder(parse_datetime))
|
|
|
|
Database.register_converter("timestamp", decoder(parse_datetime))
|
|
|
|
Database.register_converter("TIMESTAMP", decoder(parse_datetime))
|
2012-08-13 02:43:01 +08:00
|
|
|
|
2018-07-03 01:36:40 +08:00
|
|
|
Database.register_adapter(decimal.Decimal, str)
|
2006-05-02 09:31:56 +08:00
|
|
|
|
2013-07-08 08:39:54 +08:00
|
|
|
|
2007-08-20 05:30:57 +08:00
|
|
|
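
# For reference: the "bool" converter maps the stored bytes b'1'/b'0' back to
# True/False (b'1'.__eq__(b'1') is True), while decoder() first decodes the
# raw bytes so parse_time/parse_datetime receive ordinary strings.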


class DatabaseWrapper(BaseDatabaseWrapper):
    vendor = 'sqlite'
    display_name = 'SQLite'
    # SQLite doesn't actually support most of these types, but it "does the right
    # thing" given more verbose field definitions, so leave them as is so that
    # schema inspection is more useful.
    data_types = {
        'AutoField': 'integer',
        'BigAutoField': 'integer',
        'BinaryField': 'BLOB',
        'BooleanField': 'bool',
        'CharField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'decimal',
        'DurationField': 'bigint',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'real',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'char(15)',
        'GenericIPAddressField': 'char(39)',
        'JSONField': 'text',
        'NullBooleanField': 'bool',
        'OneToOneField': 'integer',
        'PositiveBigIntegerField': 'bigint unsigned',
        'PositiveIntegerField': 'integer unsigned',
        'PositiveSmallIntegerField': 'smallint unsigned',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallAutoField': 'integer',
        'SmallIntegerField': 'smallint',
        'TextField': 'text',
        'TimeField': 'time',
        'UUIDField': 'char(32)',
    }
    data_type_check_constraints = {
        'PositiveBigIntegerField': '"%(column)s" >= 0',
        'JSONField': '(JSON_VALID("%(column)s") OR "%(column)s" IS NULL)',
        'PositiveIntegerField': '"%(column)s" >= 0',
        'PositiveSmallIntegerField': '"%(column)s" >= 0',
    }
    data_types_suffix = {
        'AutoField': 'AUTOINCREMENT',
        'BigAutoField': 'AUTOINCREMENT',
        'SmallAutoField': 'AUTOINCREMENT',
    }
    # SQLite requires LIKE statements to include an ESCAPE clause if the value
    # being escaped has a percent or underscore in it.
    # See https://www.sqlite.org/lang_expr.html for an explanation.
    operators = {
        'exact': '= %s',
        'iexact': "LIKE %s ESCAPE '\\'",
        'contains': "LIKE %s ESCAPE '\\'",
        'icontains': "LIKE %s ESCAPE '\\'",
        'regex': 'REGEXP %s',
        'iregex': "REGEXP '(?i)' || %s",
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': "LIKE %s ESCAPE '\\'",
        'endswith': "LIKE %s ESCAPE '\\'",
        'istartswith': "LIKE %s ESCAPE '\\'",
        'iendswith': "LIKE %s ESCAPE '\\'",
    }

    # The patterns below are used to generate SQL pattern lookup clauses when
    # the right-hand side of the lookup isn't a raw string (it might be an
    # expression or the result of a bilateral transformation). In those cases,
    # special characters for LIKE operators (e.g. \, *, _) should be escaped on
    # the database side.
    #
    # Note: we use str.format() here for readability as '%' is used as a
    # wildcard for the LIKE operator.
    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
    pattern_ops = {
        'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'",
        'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'",
        'startswith': r"LIKE {} || '%%' ESCAPE '\'",
        'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'",
        'endswith': r"LIKE '%%' || {} ESCAPE '\'",
        'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'",
    }

    Database = Database
    SchemaEditorClass = DatabaseSchemaEditor
    # Classes instantiated in __init__().
    client_class = DatabaseClient
    creation_class = DatabaseCreation
    features_class = DatabaseFeatures
    introspection_class = DatabaseIntrospection
    ops_class = DatabaseOperations

    def get_connection_params(self):
        settings_dict = self.settings_dict
        if not settings_dict['NAME']:
            raise ImproperlyConfigured(
                "settings.DATABASES is improperly configured. "
                "Please supply the NAME value.")
        kwargs = {
            # TODO: Remove str() when dropping support for PY36.
            # https://bugs.python.org/issue33496
            'database': str(settings_dict['NAME']),
            'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
            **settings_dict['OPTIONS'],
        }
        # Always allow the underlying SQLite connection to be shareable
        # between multiple threads. The safe-guarding will be handled at a
        # higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
        # property. This is necessary as the shareability is disabled by
        # default in pysqlite and it cannot be changed once a connection is
        # opened.
        if 'check_same_thread' in kwargs and kwargs['check_same_thread']:
            warnings.warn(
                'The `check_same_thread` option was provided and set to '
                'True. It will be overridden with False. Use the '
                '`DatabaseWrapper.allow_thread_sharing` property instead '
                'for controlling thread shareability.',
                RuntimeWarning
            )
        kwargs.update({'check_same_thread': False, 'uri': True})
        return kwargs

    @async_unsafe
    def get_new_connection(self, conn_params):
        conn = Database.connect(**conn_params)
        if PY38:
            create_deterministic_function = functools.partial(
                conn.create_function,
                deterministic=True,
            )
        else:
            create_deterministic_function = conn.create_function
        create_deterministic_function('django_date_extract', 2, _sqlite_datetime_extract)
        create_deterministic_function('django_date_trunc', 2, _sqlite_date_trunc)
        create_deterministic_function('django_datetime_cast_date', 3, _sqlite_datetime_cast_date)
        create_deterministic_function('django_datetime_cast_time', 3, _sqlite_datetime_cast_time)
        create_deterministic_function('django_datetime_extract', 4, _sqlite_datetime_extract)
        create_deterministic_function('django_datetime_trunc', 4, _sqlite_datetime_trunc)
        create_deterministic_function('django_time_extract', 2, _sqlite_time_extract)
        create_deterministic_function('django_time_trunc', 2, _sqlite_time_trunc)
        create_deterministic_function('django_time_diff', 2, _sqlite_time_diff)
        create_deterministic_function('django_timestamp_diff', 2, _sqlite_timestamp_diff)
        create_deterministic_function('django_format_dtdelta', 3, _sqlite_format_dtdelta)
        create_deterministic_function('regexp', 2, _sqlite_regexp)
        create_deterministic_function('ACOS', 1, none_guard(math.acos))
        create_deterministic_function('ASIN', 1, none_guard(math.asin))
        create_deterministic_function('ATAN', 1, none_guard(math.atan))
        create_deterministic_function('ATAN2', 2, none_guard(math.atan2))
        create_deterministic_function('BITXOR', 2, none_guard(operator.xor))
        create_deterministic_function('CEILING', 1, none_guard(math.ceil))
        create_deterministic_function('COS', 1, none_guard(math.cos))
        create_deterministic_function('COT', 1, none_guard(lambda x: 1 / math.tan(x)))
        create_deterministic_function('DEGREES', 1, none_guard(math.degrees))
        create_deterministic_function('EXP', 1, none_guard(math.exp))
        create_deterministic_function('FLOOR', 1, none_guard(math.floor))
        create_deterministic_function('JSON_CONTAINS', 2, _sqlite_json_contains)
        create_deterministic_function('LN', 1, none_guard(math.log))
        create_deterministic_function('LOG', 2, none_guard(lambda x, y: math.log(y, x)))
        create_deterministic_function('LPAD', 3, _sqlite_lpad)
        create_deterministic_function('MD5', 1, none_guard(lambda x: hashlib.md5(x.encode()).hexdigest()))
        create_deterministic_function('MOD', 2, none_guard(math.fmod))
        create_deterministic_function('PI', 0, lambda: math.pi)
        create_deterministic_function('POWER', 2, none_guard(operator.pow))
        create_deterministic_function('RADIANS', 1, none_guard(math.radians))
        create_deterministic_function('REPEAT', 2, none_guard(operator.mul))
        create_deterministic_function('REVERSE', 1, none_guard(lambda x: x[::-1]))
        create_deterministic_function('RPAD', 3, _sqlite_rpad)
        create_deterministic_function('SHA1', 1, none_guard(lambda x: hashlib.sha1(x.encode()).hexdigest()))
        create_deterministic_function('SHA224', 1, none_guard(lambda x: hashlib.sha224(x.encode()).hexdigest()))
        create_deterministic_function('SHA256', 1, none_guard(lambda x: hashlib.sha256(x.encode()).hexdigest()))
        create_deterministic_function('SHA384', 1, none_guard(lambda x: hashlib.sha384(x.encode()).hexdigest()))
        create_deterministic_function('SHA512', 1, none_guard(lambda x: hashlib.sha512(x.encode()).hexdigest()))
        create_deterministic_function('SIGN', 1, none_guard(lambda x: (x > 0) - (x < 0)))
        create_deterministic_function('SIN', 1, none_guard(math.sin))
        create_deterministic_function('SQRT', 1, none_guard(math.sqrt))
        create_deterministic_function('TAN', 1, none_guard(math.tan))
        conn.create_aggregate('STDDEV_POP', 1, list_aggregate(statistics.pstdev))
        conn.create_aggregate('STDDEV_SAMP', 1, list_aggregate(statistics.stdev))
        conn.create_aggregate('VAR_POP', 1, list_aggregate(statistics.pvariance))
        conn.create_aggregate('VAR_SAMP', 1, list_aggregate(statistics.variance))
        conn.execute('PRAGMA foreign_keys = ON')
        return conn
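
    # For illustration: the user-defined functions registered in
    # get_new_connection() above back SQL that SQLite does not provide
    # natively; e.g. SELECT POWER(2, 10) evaluates none_guard(operator.pow)
    # with (2, 10) in this process and returns 1024.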

    def init_connection_state(self):
        pass

    def create_cursor(self, name=None):
        return self.connection.cursor(factory=SQLiteCursorWrapper)

    @async_unsafe
    def close(self):
        self.validate_thread_sharing()
        # If database is in memory, closing the connection destroys the
        # database. To prevent accidental data loss, ignore close requests on
        # an in-memory db.
        if not self.is_in_memory_db():
            BaseDatabaseWrapper.close(self)

    def _savepoint_allowed(self):
        # When 'isolation_level' is not None, sqlite3 commits before each
        # savepoint; it's a bug. When it is None, savepoints don't make sense
        # because autocommit is enabled. The only exception is inside 'atomic'
        # blocks. To work around that bug, on SQLite, 'atomic' starts a
        # transaction explicitly rather than simply disable autocommit.
        return self.in_atomic_block

    def _set_autocommit(self, autocommit):
        if autocommit:
            level = None
        else:
            # sqlite3's internal default is ''. It's different from None.
            # See Modules/_sqlite/connection.c.
            level = ''
        # 'isolation_level' is a misleading API.
        # SQLite always runs at the SERIALIZABLE isolation level.
        with self.wrap_database_errors:
            self.connection.isolation_level = level

    def disable_constraint_checking(self):
        with self.cursor() as cursor:
            cursor.execute('PRAGMA foreign_keys = OFF')
            # Foreign key constraints cannot be turned off while in a multi-
            # statement transaction. Fetch the current state of the pragma
            # to determine if constraints are effectively disabled.
            enabled = cursor.execute('PRAGMA foreign_keys').fetchone()[0]
        return not bool(enabled)

    def enable_constraint_checking(self):
        with self.cursor() as cursor:
            cursor.execute('PRAGMA foreign_keys = ON')

    def check_constraints(self, table_names=None):
        """
        Check each table name in `table_names` for rows with invalid foreign
        key references. This method is intended to be used in conjunction with
        `disable_constraint_checking()` and `enable_constraint_checking()`, to
        determine if rows with invalid references were entered while constraint
        checks were off.
        """
        if self.features.supports_pragma_foreign_key_check:
            with self.cursor() as cursor:
                if table_names is None:
                    violations = cursor.execute('PRAGMA foreign_key_check').fetchall()
                else:
                    violations = chain.from_iterable(
                        cursor.execute('PRAGMA foreign_key_check(%s)' % table_name).fetchall()
                        for table_name in table_names
                    )
                # See https://www.sqlite.org/pragma.html#pragma_foreign_key_check
                for table_name, rowid, referenced_table_name, foreign_key_index in violations:
                    foreign_key = cursor.execute(
                        'PRAGMA foreign_key_list(%s)' % table_name
                    ).fetchall()[foreign_key_index]
                    column_name, referenced_column_name = foreign_key[3:5]
                    primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
                    primary_key_value, bad_value = cursor.execute(
                        'SELECT %s, %s FROM %s WHERE rowid = %%s' % (
                            primary_key_column_name, column_name, table_name
                        ),
                        (rowid,),
                    ).fetchone()
                    raise IntegrityError(
                        "The row in table '%s' with primary key '%s' has an "
                        "invalid foreign key: %s.%s contains a value '%s' that "
                        "does not have a corresponding value in %s.%s." % (
                            table_name, primary_key_value, table_name, column_name,
                            bad_value, referenced_table_name, referenced_column_name
                        )
                    )
        else:
            with self.cursor() as cursor:
                if table_names is None:
                    table_names = self.introspection.table_names(cursor)
                for table_name in table_names:
                    primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
                    if not primary_key_column_name:
                        continue
                    key_columns = self.introspection.get_key_columns(cursor, table_name)
                    for column_name, referenced_table_name, referenced_column_name in key_columns:
                        cursor.execute(
                            """
                            SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
                            LEFT JOIN `%s` as REFERRED
                            ON (REFERRING.`%s` = REFERRED.`%s`)
                            WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
                            """
                            % (
                                primary_key_column_name, column_name, table_name,
                                referenced_table_name, column_name, referenced_column_name,
                                column_name, referenced_column_name,
                            )
                        )
                        for bad_row in cursor.fetchall():
                            raise IntegrityError(
                                "The row in table '%s' with primary key '%s' has an "
                                "invalid foreign key: %s.%s contains a value '%s' that "
                                "does not have a corresponding value in %s.%s." % (
                                    table_name, bad_row[0], table_name, column_name,
                                    bad_row[1], referenced_table_name, referenced_column_name,
                                )
                            )

    def is_usable(self):
        return True

    def _start_transaction_under_autocommit(self):
        """
        Start a transaction explicitly in autocommit mode.

        Staying in autocommit mode works around a bug of sqlite3 that breaks
        savepoints when autocommit is disabled.
        """
        self.cursor().execute("BEGIN")

    def is_in_memory_db(self):
        return self.creation.is_in_memory_db(self.settings_dict['NAME'])


FORMAT_QMARK_REGEX = _lazy_re_compile(r'(?<!%)%s')


class SQLiteCursorWrapper(Database.Cursor):
    """
    Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
    This fixes it -- but note that if you want to use a literal "%s" in a query,
    you'll need to use "%%s".
    """
    def execute(self, query, params=None):
        if params is None:
            return Database.Cursor.execute(self, query)
        query = self.convert_query(query)
        return Database.Cursor.execute(self, query, params)

    def executemany(self, query, param_list):
        query = self.convert_query(query)
        return Database.Cursor.executemany(self, query, param_list)

    def convert_query(self, query):
        return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')
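
# Illustration (hypothetical query string): convert_query() turns the "format"
# style 'SELECT * FROM t WHERE a = %s AND b LIKE %%s' into the "qmark" style
# 'SELECT * FROM t WHERE a = ? AND b LIKE %s' expected by sqlite3: %s becomes
# ? unless it is escaped as %%s, and %% collapses back to a literal %.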


def _sqlite_datetime_parse(dt, tzname=None, conn_tzname=None):
    if dt is None:
        return None
    try:
        dt = backend_utils.typecast_timestamp(dt)
    except (TypeError, ValueError):
        return None
    if conn_tzname:
        dt = dt.replace(tzinfo=pytz.timezone(conn_tzname))
    if tzname is not None and tzname != conn_tzname:
        sign_index = tzname.find('+') + tzname.find('-') + 1
        if sign_index > -1:
            sign = tzname[sign_index]
            tzname, offset = tzname.split(sign)
            if offset:
                hours, minutes = offset.split(':')
                offset_delta = datetime.timedelta(hours=int(hours), minutes=int(minutes))
                dt += offset_delta if sign == '+' else -offset_delta
        dt = timezone.localtime(dt, pytz.timezone(tzname))
    return dt
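
# Sketch of the timezone handling above (hypothetical value): a tzname such as
# 'UTC+05:30' is split into the base zone 'UTC' and the offset '05:30'; the
# offset is added to (or subtracted from) dt first, and the result is then
# localized to the base zone with pytz.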


def _sqlite_date_trunc(lookup_type, dt):
    dt = _sqlite_datetime_parse(dt)
    if dt is None:
        return None
    if lookup_type == 'year':
        return "%i-01-01" % dt.year
    elif lookup_type == 'quarter':
        month_in_quarter = dt.month - (dt.month - 1) % 3
        return '%i-%02i-01' % (dt.year, month_in_quarter)
    elif lookup_type == 'month':
        return "%i-%02i-01" % (dt.year, dt.month)
    elif lookup_type == 'week':
        dt = dt - datetime.timedelta(days=dt.weekday())
        return "%i-%02i-%02i" % (dt.year, dt.month, dt.day)
    elif lookup_type == 'day':
        return "%i-%02i-%02i" % (dt.year, dt.month, dt.day)


def _sqlite_time_trunc(lookup_type, dt):
    if dt is None:
        return None
    try:
        dt = backend_utils.typecast_time(dt)
    except (ValueError, TypeError):
        return None
    if lookup_type == 'hour':
        return "%02i:00:00" % dt.hour
    elif lookup_type == 'minute':
        return "%02i:%02i:00" % (dt.hour, dt.minute)
    elif lookup_type == 'second':
        return "%02i:%02i:%02i" % (dt.hour, dt.minute, dt.second)


def _sqlite_datetime_cast_date(dt, tzname, conn_tzname):
    dt = _sqlite_datetime_parse(dt, tzname, conn_tzname)
    if dt is None:
        return None
    return dt.date().isoformat()


def _sqlite_datetime_cast_time(dt, tzname, conn_tzname):
    dt = _sqlite_datetime_parse(dt, tzname, conn_tzname)
    if dt is None:
        return None
    return dt.time().isoformat()


def _sqlite_datetime_extract(lookup_type, dt, tzname=None, conn_tzname=None):
    dt = _sqlite_datetime_parse(dt, tzname, conn_tzname)
    if dt is None:
        return None
    if lookup_type == 'week_day':
        return (dt.isoweekday() % 7) + 1
    elif lookup_type == 'iso_week_day':
        return dt.isoweekday()
    elif lookup_type == 'week':
        return dt.isocalendar()[1]
    elif lookup_type == 'quarter':
        return math.ceil(dt.month / 3)
    elif lookup_type == 'iso_year':
        return dt.isocalendar()[0]
    else:
        return getattr(dt, lookup_type)


def _sqlite_datetime_trunc(lookup_type, dt, tzname, conn_tzname):
    dt = _sqlite_datetime_parse(dt, tzname, conn_tzname)
    if dt is None:
        return None
    if lookup_type == 'year':
        return "%i-01-01 00:00:00" % dt.year
    elif lookup_type == 'quarter':
        month_in_quarter = dt.month - (dt.month - 1) % 3
        return '%i-%02i-01 00:00:00' % (dt.year, month_in_quarter)
    elif lookup_type == 'month':
        return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
    elif lookup_type == 'week':
        dt = dt - datetime.timedelta(days=dt.weekday())
        return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
    elif lookup_type == 'day':
        return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
    elif lookup_type == 'hour':
        return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour)
    elif lookup_type == 'minute':
        return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
    elif lookup_type == 'second':
        return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)


def _sqlite_time_extract(lookup_type, dt):
    if dt is None:
        return None
    try:
        dt = backend_utils.typecast_time(dt)
    except (ValueError, TypeError):
        return None
    return getattr(dt, lookup_type)


@none_guard
def _sqlite_format_dtdelta(conn, lhs, rhs):
    """
    LHS and RHS can be either:
    - An integer number of microseconds
    - A string representing a datetime
    """
    try:
        real_lhs = datetime.timedelta(0, 0, lhs) if isinstance(lhs, int) else backend_utils.typecast_timestamp(lhs)
        real_rhs = datetime.timedelta(0, 0, rhs) if isinstance(rhs, int) else backend_utils.typecast_timestamp(rhs)
        if conn.strip() == '+':
            out = real_lhs + real_rhs
        else:
            out = real_lhs - real_rhs
    except (ValueError, TypeError):
        return None
    # typecast_timestamp returns a date or a datetime without timezone.
    # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
    return str(out)


@none_guard
def _sqlite_time_diff(lhs, rhs):
    left = backend_utils.typecast_time(lhs)
    right = backend_utils.typecast_time(rhs)
    return (
        (left.hour * 60 * 60 * 1000000) +
        (left.minute * 60 * 1000000) +
        (left.second * 1000000) +
        (left.microsecond) -
        (right.hour * 60 * 60 * 1000000) -
        (right.minute * 60 * 1000000) -
        (right.second * 1000000) -
        (right.microsecond)
    )


@none_guard
def _sqlite_timestamp_diff(lhs, rhs):
    left = backend_utils.typecast_timestamp(lhs)
    right = backend_utils.typecast_timestamp(rhs)
    return duration_microseconds(left - right)


@none_guard
def _sqlite_regexp(re_pattern, re_string):
    return bool(re.search(re_pattern, str(re_string)))


@none_guard
def _sqlite_lpad(text, length, fill_text):
    if len(text) >= length:
        return text[:length]
    return (fill_text * length)[:length - len(text)] + text
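
# Illustration (hypothetical arguments): _sqlite_lpad('abc', 5, '.') pads on
# the left to '..abc', while _sqlite_lpad('abcdef', 5, '.') truncates to
# 'abcde'; _sqlite_rpad below pads or truncates on the right instead.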


@none_guard
def _sqlite_rpad(text, length, fill_text):
    return (text + fill_text * length)[:length]


@none_guard
def _sqlite_json_contains(haystack, needle):
    target, candidate = json.loads(haystack), json.loads(needle)
    if isinstance(target, dict) and isinstance(candidate, dict):
        return target.items() >= candidate.items()
    return target == candidate
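
# Illustration (hypothetical JSON values): with haystack '{"a": 1, "b": 2}' and
# needle '{"a": 1}' the dict-items superset check returns True; for non-dict
# values, e.g. '[1, 2]' and '[1, 2]', containment falls back to equality.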