Removed legacy transaction management per the deprecation timeline.

Aymeric Augustin 2014-03-21 14:21:43 +01:00
parent 907ac64641
commit 0f9560855e
29 changed files with 78 additions and 1454 deletions
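As context for the diffs below, a minimal sketch of the call-site migration this commit applies throughout: the transitory commit_on_success_unless_managed() helper is replaced by atomic(), the public API introduced in Django 1.6. The delete_queryset helper and the db alias are hypothetical, for illustration only.

from django.db import transaction

def delete_queryset(queryset, db='default'):
    # Legacy (removed in this commit):
    #     with transaction.commit_on_success_unless_managed(using=db, savepoint=False):
    # Replacement: atomic provides equivalent commit-on-success behavior at these
    # call sites; savepoint=False skips creating a savepoint when the block is
    # nested inside another atomic block.
    with transaction.atomic(using=db, savepoint=False):
        for obj in queryset:
            obj.delete()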

View File

@ -500,7 +500,7 @@ def create_generic_related_manager(superclass):
if bulk:
queryset.delete()
else:
with transaction.commit_on_success_unless_managed(using=db, savepoint=False):
with transaction.atomic(using=db, savepoint=False):
for obj in queryset:
obj.delete()
_clear.alters_data = True

View File

@ -71,7 +71,7 @@ class Command(BaseCommand):
for i, line in enumerate(table_output):
full_statement.append(' %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
full_statement.append(');')
with transaction.commit_on_success_unless_managed():
with transaction.atomic():
with connection.cursor() as curs:
try:
curs.execute("\n".join(full_statement))

View File

@ -63,7 +63,7 @@ Are you sure you want to do this?
if confirm == 'yes':
try:
with transaction.commit_on_success_unless_managed():
with transaction.atomic():
with connection.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)

View File

@ -56,7 +56,7 @@ class Command(BaseCommand):
self.verbosity = int(options.get('verbosity'))
with transaction.commit_on_success_unless_managed(using=self.using):
with transaction.atomic(using=self.using):
self.loaddata(fixture_labels)
# Close the DB connection -- unless we're still in a transaction. This

View File

@ -223,10 +223,6 @@ class Command(BaseCommand):
for statement in sql:
cursor.execute(statement)
tables.append(connection.introspection.table_name_converter(model._meta.db_table))
# We force a commit here, as that was the previous behavior.
# If you can prove we don't need this, remove it.
transaction.set_dirty(using=connection.alias)
finally:
cursor.close()
@ -245,7 +241,7 @@ class Command(BaseCommand):
if self.verbosity >= 2:
self.stdout.write(" Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
try:
with transaction.commit_on_success_unless_managed(using=connection.alias):
with transaction.atomic(using=connection.alias):
for sql in custom_sql:
cursor.execute(sql)
except Exception as e:
@ -268,7 +264,7 @@ class Command(BaseCommand):
if self.verbosity >= 2:
self.stdout.write(" Installing index for %s.%s model\n" % (app_name, model._meta.object_name))
try:
with transaction.commit_on_success_unless_managed(using=connection.alias):
with transaction.atomic(using=connection.alias):
for sql in index_sql:
cursor.execute(sql)
except Exception as e:

View File

@ -77,20 +77,6 @@ class DefaultBackendProxy(object):
backend = DefaultBackendProxy()
def close_connection(**kwargs):
warnings.warn(
"close_connection is superseded by close_old_connections.",
RemovedInDjango18Warning, stacklevel=2)
# Avoid circular imports
from django.db import transaction
for conn in connections:
# If an error happens here the connection will be left in broken
# state. Once a good db connection is again available, the
# connection state will be cleaned up.
transaction.abort(conn)
connections[conn].close()
# Register an event to reset saved queries when a Django request is started.
def reset_queries(**kwargs):
for conn in connections.all():
@ -99,14 +85,9 @@ signals.request_started.connect(reset_queries)
# Register an event to reset transaction state and close connections past
# their lifetime. NB: abort() doesn't do anything outside of a transaction.
# their lifetime.
def close_old_connections(**kwargs):
for conn in connections.all():
# Remove this when the legacy transaction management goes away.
try:
conn.abort()
except DatabaseError:
pass
conn.close_if_unusable_or_obsolete()
signals.request_started.connect(close_old_connections)
signals.request_finished.connect(close_old_connections)
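For orientation, Django's own receivers above are hooked up with the standard signals API; a hypothetical receiver would be registered the same way (the function below is illustrative only, not part of this commit):

from django.core import signals

def log_request_boundary(**kwargs):
    # Placeholder receiver; close_old_connections above is connected to the
    # same two signals to drop stale connections at request boundaries.
    pass

signals.request_started.connect(log_request_boundary)
signals.request_finished.connect(log_request_boundary)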

View File

@ -30,28 +30,25 @@ class BaseDatabaseWrapper(object):
def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS,
allow_thread_sharing=False):
# Connection related attributes.
self.connection = None
self.queries = []
# `settings_dict` should be a dictionary containing keys such as
# NAME, USER, etc. It's called `settings_dict` instead of `settings`
# to disambiguate it from Django settings modules.
self.connection = None
self.queries = []
self.settings_dict = settings_dict
self.alias = alias
self.use_debug_cursor = None
# Savepoint management related attributes
self.savepoint_state = 0
# Transaction management related attributes
# Transaction related attributes.
# Tracks if the connection is in autocommit mode. Per PEP 249, by
# default, it isn't.
self.autocommit = False
self.transaction_state = []
# Tracks if the connection is believed to be in transaction. This is
# set somewhat aggressively, as the DBAPI doesn't make it easy to
# deduce if the connection is in transaction or not.
self._dirty = False
# Tracks if the connection is in a transaction managed by 'atomic'.
self.in_atomic_block = False
# List of savepoints created by 'atomic'
# Increment to generate unique savepoint ids.
self.savepoint_state = 0
# List of savepoints created by 'atomic'.
self.savepoint_ids = []
# Tracks if the outermost 'atomic' block should commit on exit,
# ie. if autocommit was active on entry.
@ -60,11 +57,11 @@ class BaseDatabaseWrapper(object):
# available savepoint because of an exception in an inner block.
self.needs_rollback = False
# Connection termination related attributes
# Connection termination related attributes.
self.close_at = None
self.errors_occurred = False
# Thread-safety related attributes
# Thread-safety related attributes.
self.allow_thread_sharing = allow_thread_sharing
self._thread_ident = thread.get_ident()
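An illustrative sketch of how the attributes introduced above evolve for nested 'atomic' blocks; the states described in the comments are approximate and assume a backend with savepoint support:

from django.db import transaction

def sketch_nested_atomic():
    # Outside any block: in_atomic_block is False and autocommit is normally on.
    with transaction.atomic():
        # Outermost block: in_atomic_block becomes True; commit_on_exit records
        # whether autocommit was active on entry.
        with transaction.atomic():
            # Inner block: no new transaction is started; a savepoint id is
            # pushed onto savepoint_ids instead.
            pass
        # Leaving the inner block releases or rolls back to that savepoint.
    # Leaving the outermost block commits, or rolls back if needs_rollback was set.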
@ -166,7 +163,6 @@ class BaseDatabaseWrapper(object):
self.validate_thread_sharing()
self.validate_no_atomic_block()
self._commit()
self.set_clean()
def rollback(self):
"""
@ -175,7 +171,6 @@ class BaseDatabaseWrapper(object):
self.validate_thread_sharing()
self.validate_no_atomic_block()
self._rollback()
self.set_clean()
def close(self):
"""
@ -189,7 +184,6 @@ class BaseDatabaseWrapper(object):
self._close()
finally:
self.connection = None
self.set_clean()
##### Backend-specific savepoint management methods #####
@ -267,59 +261,6 @@ class BaseDatabaseWrapper(object):
##### Generic transaction management methods #####
def enter_transaction_management(self, managed=True, forced=False):
"""
Enters transaction management for a running thread. It must be balanced with
the appropriate leave_transaction_management call, since the actual state is
managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
If you switch off transaction management and there is a pending
commit/rollback, the data will be committed, unless "forced" is True.
"""
self.validate_no_atomic_block()
self.transaction_state.append(managed)
if not managed and self.is_dirty() and not forced:
self.commit()
self.set_clean()
if managed == self.get_autocommit():
self.set_autocommit(not managed)
def leave_transaction_management(self):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
self.validate_no_atomic_block()
if self.transaction_state:
del self.transaction_state[-1]
else:
raise TransactionManagementError(
"This code isn't under transaction management")
if self.transaction_state:
managed = self.transaction_state[-1]
else:
managed = not self.settings_dict['AUTOCOMMIT']
if self._dirty:
self.rollback()
if managed == self.get_autocommit():
self.set_autocommit(not managed)
raise TransactionManagementError(
"Transaction managed block ended with pending COMMIT/ROLLBACK")
if managed == self.get_autocommit():
self.set_autocommit(not managed)
def get_autocommit(self):
"""
Check the autocommit state.
@ -368,41 +309,6 @@ class BaseDatabaseWrapper(object):
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block.")
def abort(self):
"""
Roll back any ongoing transaction and clean the transaction state
stack.
"""
if self._dirty:
self.rollback()
while self.transaction_state:
self.leave_transaction_management()
def is_dirty(self):
"""
Returns True if the current transaction requires a commit for changes to
happen.
"""
return self._dirty
def set_dirty(self):
"""
Sets a dirty flag for the current thread and code streak. This can be used
in a managed block of code to decide whether there are open
changes waiting for commit.
"""
if not self.get_autocommit():
self._dirty = True
def set_clean(self):
"""
Resets a dirty flag for the current thread and code streak. This can be used
in a managed block of code to decide whether a commit or rollback
should happen.
"""
self._dirty = False
self.clean_savepoints()
##### Foreign key constraints checks handling #####
@contextmanager
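The "You can't execute queries until the end of the 'atomic' block" error above fires once needs_rollback is set. A common way to keep the outer transaction usable is to confine the risky statement to an inner atomic block, so only its savepoint is rolled back; a hedged sketch with a hypothetical model class:

from django.db import IntegrityError, transaction

def create_unless_duplicate(model_cls, **fields):
    # model_cls is any model with a uniqueness constraint (hypothetical).
    with transaction.atomic():
        try:
            # The inner atomic confines the failure to its savepoint, so the
            # outer block can keep issuing queries and commit normally.
            with transaction.atomic():
                model_cls.objects.create(**fields)
        except IntegrityError:
            pass  # duplicate row; the outer transaction is still usable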
@ -576,10 +482,6 @@ class BaseDatabaseFeatures(object):
# at the end of each save operation?
supports_forward_references = True
# Does a dirty transaction need to be rolled back
# before the cursor can be used again?
requires_rollback_on_dirty_transaction = False
# Does the backend allow very long model names without error?
supports_long_model_names = True
@ -682,28 +584,21 @@ class BaseDatabaseFeatures(object):
@cached_property
def supports_transactions(self):
"Confirm support for transactions"
try:
# Make sure to run inside a managed transaction block,
# otherwise autocommit will cause the confirmation to
# fail.
self.connection.enter_transaction_management()
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
self.connection.commit()
cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
self.connection.rollback()
cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
count, = cursor.fetchone()
cursor.execute('DROP TABLE ROLLBACK_TEST')
self.connection.commit()
finally:
self.connection.leave_transaction_management()
"""Confirm support for transactions."""
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
self.connection.set_autocommit(False)
cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
self.connection.rollback()
self.connection.set_autocommit(True)
cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
count, = cursor.fetchone()
cursor.execute('DROP TABLE ROLLBACK_TEST')
return count == 0
@cached_property
def supports_stddev(self):
"Confirm support for STDDEV and related stats functions"
"""Confirm support for STDDEV and related stats functions."""
class StdDevPop(object):
sql_function = 'STDDEV_POP'
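The rewritten supports_transactions probe above feeds the database-feature flags consumed across the test suite; a small sketch of the typical consumer (the test case below is hypothetical):

from django.db import transaction
from django.test import TransactionTestCase, skipUnlessDBFeature

class RollbackBehaviorTests(TransactionTestCase):
    available_apps = []

    @skipUnlessDBFeature('supports_transactions')
    def test_atomic_rolls_back_on_error(self):
        # Runs only when the backend passed the CREATE/INSERT/ROLLBACK probe
        # above; the atomic block rolls back as the exception propagates.
        with self.assertRaises(RuntimeError):
            with transaction.atomic():
                raise RuntimeError('force a rollback')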

View File

@ -67,8 +67,6 @@ class DatabaseWrapper(BaseDatabaseWrapper):
_savepoint_commit = complain
_savepoint_rollback = ignore
_set_autocommit = complain
set_dirty = complain
set_clean = complain
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)

View File

@ -48,7 +48,6 @@ def utc_tzinfo_factory(offset):
class DatabaseFeatures(BaseDatabaseFeatures):
needs_datetime_string_cast = False
can_return_id_from_insert = True
requires_rollback_on_dirty_transaction = True
has_real_datatype = True
can_defer_constraint_checks = True
has_select_for_update = True
@ -181,8 +180,6 @@ class DatabaseWrapper(BaseDatabaseWrapper):
exc_info=sys.exc_info()
)
raise
finally:
self.set_clean()
def _set_isolation_level(self, isolation_level):
assert isolation_level in range(1, 5) # Use set_autocommit for level = 0

View File

@ -44,7 +44,6 @@ class CursorWrapper(object):
def callproc(self, procname, params=None):
self.db.validate_no_broken_transaction()
self.db.set_dirty()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.callproc(procname)
@ -53,7 +52,6 @@ class CursorWrapper(object):
def execute(self, sql, params=None):
self.db.validate_no_broken_transaction()
self.db.set_dirty()
with self.db.wrap_database_errors:
if params is None:
return self.cursor.execute(sql)
@ -62,7 +60,6 @@ class CursorWrapper(object):
def executemany(self, sql, param_list):
self.db.validate_no_broken_transaction()
self.db.set_dirty()
with self.db.wrap_database_errors:
return self.cursor.executemany(sql, param_list)

View File

@ -625,7 +625,7 @@ class Model(six.with_metaclass(ModelBase)):
if not meta.auto_created:
signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
update_fields=update_fields)
with transaction.commit_on_success_unless_managed(using=using, savepoint=False):
with transaction.atomic(using=using, savepoint=False):
if not raw:
self._save_parents(cls, using, update_fields)
updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
@ -1417,7 +1417,7 @@ def method_set_order(ordered_obj, self, id_list, using=None):
order_name = ordered_obj._meta.order_with_respect_to.name
# FIXME: It would be nice if there was an "update many" version of update
# for situations like this.
with transaction.commit_on_success_unless_managed(using=using):
with transaction.atomic(using=using, savepoint=False):
for i, j in enumerate(id_list):
ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)

View File

@ -249,7 +249,7 @@ class Collector(object):
# end of a transaction.
self.sort()
with transaction.commit_on_success_unless_managed(using=self.using):
with transaction.atomic(using=self.using, savepoint=False):
# send pre_delete signals
for model, obj in self.instances_with_model():
if not model._meta.auto_created:

View File

@ -686,8 +686,7 @@ def create_foreign_related_manager(superclass, rel_field, rel_model):
def add(self, *objs):
objs = list(objs)
db = router.db_for_write(self.model, instance=self.instance)
with transaction.commit_on_success_unless_managed(
using=db, savepoint=False):
with transaction.atomic(using=db, savepoint=False):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected, got %r" %
@ -738,7 +737,7 @@ def create_foreign_related_manager(superclass, rel_field, rel_model):
if bulk:
queryset.update(**{rel_field.name: None})
else:
with transaction.commit_on_success_unless_managed(using=db, savepoint=False):
with transaction.atomic(using=db, savepoint=False):
for obj in queryset:
setattr(obj, rel_field.name, None)
obj.save(update_fields=[rel_field.name])

View File

@ -391,7 +391,7 @@ class QuerySet(object):
self._for_write = True
connection = connections[self.db]
fields = self.model._meta.local_concrete_fields
with transaction.commit_on_success_unless_managed(using=self.db):
with transaction.atomic(using=self.db, savepoint=False):
if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
and self.model._meta.has_auto_field):
self._batched_insert(objs, fields, batch_size)
@ -437,7 +437,7 @@ class QuerySet(object):
for k, v in six.iteritems(defaults):
setattr(obj, k, v)
with transaction.atomic(using=self.db):
with transaction.atomic(using=self.db, savepoint=False):
obj.save(using=self.db)
return obj, False
@ -574,7 +574,7 @@ class QuerySet(object):
self._for_write = True
query = self.query.clone(sql.UpdateQuery)
query.add_update_values(kwargs)
with transaction.commit_on_success_unless_managed(using=self.db):
with transaction.atomic(using=self.db, savepoint=False):
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows

View File

@ -1,26 +1,9 @@
"""
This module implements a transaction manager that can be used to define
transaction handling in a request or view function. It is used by transaction
control middleware and decorators.
The transaction manager can be in managed or in auto state. Auto state means the
system is using a commit-on-save strategy (actually it's more like
commit-on-change). As soon as the .save() or .delete() (or related) methods are
called, a commit is made.
Managed transactions don't do those commits, but will need some kind of manual
or implicit commits or rollbacks.
"""
import warnings
from functools import wraps
from django.db import (
connections, DEFAULT_DB_ALIAS,
DatabaseError, ProgrammingError)
from django.utils.decorators import available_attrs
from django.utils.deprecation import RemovedInDjango18Warning
class TransactionManagementError(ProgrammingError):
@ -30,109 +13,16 @@ class TransactionManagementError(ProgrammingError):
pass
################
# Private APIs #
################
def get_connection(using=None):
"""
Get a database connection by name, or the default database connection
if no name is provided.
if no name is provided. This is a private API.
"""
if using is None:
using = DEFAULT_DB_ALIAS
return connections[using]
###########################
# Deprecated private APIs #
###########################
def abort(using=None):
"""
Roll back any ongoing transactions and clean the transaction management
state of the connection.
This method is to be used only in cases where using balanced
leave_transaction_management() calls isn't possible. For example after a
request has finished, the transaction state isn't known, yet the connection
must be cleaned up for the next request.
"""
get_connection(using).abort()
def enter_transaction_management(managed=True, using=None, forced=False):
"""
Enters transaction management for a running thread. It must be balanced with
the appropriate leave_transaction_management call, since the actual state is
managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
"""
get_connection(using).enter_transaction_management(managed, forced)
def leave_transaction_management(using=None):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
get_connection(using).leave_transaction_management()
def is_dirty(using=None):
"""
Returns True if the current transaction requires a commit for changes to
happen.
"""
return get_connection(using).is_dirty()
def set_dirty(using=None):
"""
Sets a dirty flag for the current thread and code streak. This can be used
in a managed block of code to decide whether there are open
changes waiting for commit.
"""
get_connection(using).set_dirty()
def set_clean(using=None):
"""
Resets a dirty flag for the current thread and code streak. This can be used
in a managed block of code to decide whether a commit or rollback
should happen.
"""
get_connection(using).set_clean()
def is_managed(using=None):
warnings.warn("'is_managed' is deprecated.",
RemovedInDjango18Warning, stacklevel=2)
def managed(flag=True, using=None):
warnings.warn("'managed' no longer serves a purpose.",
RemovedInDjango18Warning, stacklevel=2)
def commit_unless_managed(using=None):
warnings.warn("'commit_unless_managed' is now a no-op.",
RemovedInDjango18Warning, stacklevel=2)
def rollback_unless_managed(using=None):
warnings.warn("'rollback_unless_managed' is now a no-op.",
RemovedInDjango18Warning, stacklevel=2)
###############
# Public APIs #
###############
def get_autocommit(using=None):
"""
Get the autocommit status of the connection.
@ -149,14 +39,14 @@ def set_autocommit(autocommit, using=None):
def commit(using=None):
"""
Commits a transaction and resets the dirty flag.
Commits a transaction.
"""
get_connection(using).commit()
def rollback(using=None):
"""
Rolls back a transaction and resets the dirty flag.
Rolls back a transaction.
"""
get_connection(using).rollback()
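With the dirty-flag bookkeeping gone, manual control reduces to the autocommit, commit and rollback primitives kept above; a brief sketch of that pattern, mirroring what the updated tests below do in setUp/tearDown (make_changes is a hypothetical callable):

from django.db import transaction

def run_in_manual_transaction(make_changes):
    transaction.set_autocommit(False)
    try:
        make_changes()          # any ORM work
        transaction.commit()
    except Exception:
        transaction.rollback()
        raise
    finally:
        transaction.set_autocommit(True)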
@ -244,6 +134,8 @@ class Atomic(object):
`with oa:` multiple times.
Since database connections are thread-local, this is thread-safe.
This is a private API.
"""
def __init__(self, using, savepoint):
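The Atomic docstring above notes that one instance can be entered with `with oa:` multiple times and that thread-local connections make this thread-safe; a short usage sketch of sequential reuse (rows and build_index are hypothetical):

from django.db import transaction

def import_then_index(rows, build_index):
    # rows: model instances; build_index: a callable (both hypothetical).
    block = transaction.atomic()
    with block:                 # first use: one transaction for the bulk save
        for row in rows:
            row.save()
    with block:                 # the same instance can be entered again later
        build_index()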
@ -388,149 +280,3 @@ def non_atomic_requests(using=None):
if using is None:
using = DEFAULT_DB_ALIAS
return lambda view: _non_atomic_requests(view, using)
############################################
# Deprecated decorators / context managers #
############################################
class Transaction(object):
"""
Acts as either a decorator, or a context manager. If it's a decorator it
takes a function and returns a wrapped function. If it's a contextmanager
it's used with the ``with`` statement. In either event entering/exiting
are called before and after, respectively, the function/block is executed.
autocommit, commit_on_success, and commit_manually contain the
implementations of entering and exiting.
"""
def __init__(self, entering, exiting, using):
self.entering = entering
self.exiting = exiting
self.using = using
def __enter__(self):
self.entering(self.using)
def __exit__(self, exc_type, exc_value, traceback):
self.exiting(exc_type, self.using)
def __call__(self, func):
@wraps(func)
def inner(*args, **kwargs):
with self:
return func(*args, **kwargs)
return inner
def _transaction_func(entering, exiting, using):
"""
Takes 3 things: an entering function (what to do to start this block of
transaction management), an exiting function (what to do to end it, on both
success and failure), and using, which can be None, indicating that using is
DEFAULT_DB_ALIAS, or a callable, indicating that using is DEFAULT_DB_ALIAS and
that the function should be returned already wrapped.
Returns either a Transaction object, which is both a decorator and a
context manager, or a wrapped function, if using is a callable.
"""
# Note that although the first argument is *called* `using`, it
# may actually be a function; @autocommit and @autocommit('foo')
# are both allowed forms.
if using is None:
using = DEFAULT_DB_ALIAS
if callable(using):
return Transaction(entering, exiting, DEFAULT_DB_ALIAS)(using)
return Transaction(entering, exiting, using)
def autocommit(using=None):
"""
Decorator that activates commit on save. This is Django's default behavior;
this decorator is useful if you globally activated transaction management in
your settings file and want the default behavior in some view functions.
"""
warnings.warn("autocommit is deprecated in favor of set_autocommit.",
RemovedInDjango18Warning, stacklevel=2)
def entering(using):
enter_transaction_management(managed=False, using=using)
def exiting(exc_type, using):
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
def commit_on_success(using=None):
"""
This decorator activates commit on response. This way, if the view function
runs successfully, a commit is made; if the view function produces an exception,
a rollback is made. This is one of the most common ways to do transaction
control in Web apps.
"""
warnings.warn("commit_on_success is deprecated in favor of atomic.",
RemovedInDjango18Warning, stacklevel=2)
def entering(using):
enter_transaction_management(using=using)
def exiting(exc_type, using):
try:
if exc_type is not None:
if is_dirty(using=using):
rollback(using=using)
else:
if is_dirty(using=using):
try:
commit(using=using)
except:
rollback(using=using)
raise
finally:
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
def commit_manually(using=None):
"""
Decorator that activates manual transaction control. It just disables
automatic transaction control and doesn't do any commit/rollback of its
own -- it's up to the user to call the commit and rollback functions
themselves.
"""
warnings.warn("commit_manually is deprecated in favor of set_autocommit.",
RemovedInDjango18Warning, stacklevel=2)
def entering(using):
enter_transaction_management(using=using)
def exiting(exc_type, using):
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
def commit_on_success_unless_managed(using=None, savepoint=False):
"""
Transitory API to preserve backwards-compatibility while refactoring.
Once the legacy transaction management is fully deprecated, this should
simply be replaced by atomic. Until then, it's necessary to guarantee that
a commit occurs on exit, which atomic doesn't do when it's nested.
Unlike atomic, savepoint defaults to False because that's closer to the
legacy behavior.
"""
connection = get_connection(using)
if connection.get_autocommit() or connection.in_atomic_block:
return atomic(using, savepoint)
else:
def entering(using):
pass
def exiting(exc_type, using):
set_dirty(using=using)
return _transaction_func(entering, exiting, using)
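The deprecation warnings in the removed decorators above name their replacements: autocommit and commit_manually point to set_autocommit, and commit_on_success points to atomic. A hedged sketch of the decorator migration (the view function is hypothetical):

from django.db import transaction

# was:
#   @transaction.commit_on_success
#   def add_band(request): ...
@transaction.atomic
def add_band(request):
    # On success the transaction commits; on an exception it rolls back,
    # matching the old commit_on_success contract described above.
    ...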

View File

@ -1,58 +0,0 @@
import warnings
from django.core.exceptions import MiddlewareNotUsed
from django.db import connection, transaction
from django.utils.deprecation import RemovedInDjango18Warning
class TransactionMiddleware(object):
"""
Transaction middleware. If this is enabled, each view function will be run
with commit_on_response activated - that way a save() doesn't do a direct
commit, the commit is done when a successful response is created. If an
exception happens, the database is rolled back.
"""
def __init__(self):
warnings.warn(
"TransactionMiddleware is deprecated in favor of ATOMIC_REQUESTS.",
RemovedInDjango18Warning, stacklevel=2)
if connection.settings_dict['ATOMIC_REQUESTS']:
raise MiddlewareNotUsed
def process_request(self, request):
"""Enters transaction management"""
transaction.enter_transaction_management()
def process_exception(self, request, exception):
"""Rolls back the database and leaves transaction management"""
if transaction.is_dirty():
# This rollback might fail because of network failure for example.
# If rollback isn't possible it is impossible to clean the
# connection's state. So leave the connection in dirty state and
# let request_finished signal deal with cleaning the connection.
transaction.rollback()
transaction.leave_transaction_management()
def process_response(self, request, response):
"""Commits and leaves transaction management."""
if not transaction.get_autocommit():
if transaction.is_dirty():
# Note: it is possible that the commit fails. If the reason is
# closed connection or some similar reason, then there is
# little hope to proceed nicely. However, in some cases (
# deferred foreign key checks for example) it is still possible
# to rollback().
try:
transaction.commit()
except Exception:
# If the rollback fails, the transaction state will be
# messed up. It doesn't matter, the connection will be set
# to clean state after the request finishes. And, we can't
# clean the state here properly even if we wanted to, the
# connection is in transaction but we can't rollback...
transaction.rollback()
transaction.leave_transaction_management()
raise
transaction.leave_transaction_management()
return response
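TransactionMiddleware's own warning points to ATOMIC_REQUESTS as the replacement; a hedged sketch of the settings-based equivalent (backend and database name are placeholders):

# settings.py
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',  # placeholder backend
        'NAME': 'example',                                    # placeholder name
        # Wrap each request's view in a transaction; roll back when the view
        # raises, much like the middleware removed above.
        'ATOMIC_REQUESTS': True,
    }
}

Individual views can still opt out with the non_atomic_requests decorator kept in django/db/transaction.py above.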

View File

@ -57,9 +57,6 @@ def to_list(value):
real_commit = transaction.commit
real_rollback = transaction.rollback
real_enter_transaction_management = transaction.enter_transaction_management
real_leave_transaction_management = transaction.leave_transaction_management
real_abort = transaction.abort
def nop(*args, **kwargs):
@ -69,17 +66,11 @@ def nop(*args, **kwargs):
def disable_transaction_methods():
transaction.commit = nop
transaction.rollback = nop
transaction.enter_transaction_management = nop
transaction.leave_transaction_management = nop
transaction.abort = nop
def restore_transaction_methods():
transaction.commit = real_commit
transaction.rollback = real_rollback
transaction.enter_transaction_management = real_enter_transaction_management
transaction.leave_transaction_management = real_leave_transaction_management
transaction.abort = real_abort
def assert_and_parse_html(self, html, user_msg, msg):
@ -772,7 +763,7 @@ class TransactionTestCase(SimpleTestCase):
sql_list = conn.ops.sequence_reset_by_name_sql(
no_style(), conn.introspection.sequence_list())
if sql_list:
with transaction.commit_on_success_unless_managed(using=db_name):
with transaction.atomic(using=db_name):
cursor = conn.cursor()
for sql in sql_list:
cursor.execute(sql)

View File

@ -371,7 +371,7 @@ class PostgresNewConnectionTests(TestCase):
# Fetch a new connection with the new_tz as default
# time zone, run a query and rollback.
new_connection.settings_dict['TIME_ZONE'] = new_tz
new_connection.enter_transaction_management()
new_connection.set_autocommit(False)
cursor = new_connection.cursor()
new_connection.rollback()

View File

@ -755,7 +755,6 @@ class ConcurrentSaveTests(TransactionTestCase):
try:
# Do not delete a directly - doing so alters its state.
Article.objects.filter(pk=a.pk).delete()
connections[DEFAULT_DB_ALIAS].commit_unless_managed()
except Exception as e:
exceptions.append(e)
finally:

View File

@ -20,17 +20,17 @@ class DeleteLockingTest(TransactionTestCase):
available_apps = ['delete_regress']
def setUp(self):
transaction.set_autocommit(False)
# Create a second connection to the default database
new_connections = ConnectionHandler(settings.DATABASES)
self.conn2 = new_connections[DEFAULT_DB_ALIAS]
# Put both DB connections into managed transaction mode
transaction.enter_transaction_management()
self.conn2.enter_transaction_management()
self.conn2.set_autocommit(False)
def tearDown(self):
transaction.rollback()
transaction.set_autocommit(True)
# Close down the second connection.
transaction.leave_transaction_management()
self.conn2.abort()
self.conn2.rollback()
self.conn2.close()
@skipUnlessDBFeature('test_db_allows_multiple_connections')
@ -38,15 +38,10 @@ class DeleteLockingTest(TransactionTestCase):
"Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
# Create some dummy data
b1 = Book(id=1, pagecount=100)
b2 = Book(id=2, pagecount=200)
b3 = Book(id=3, pagecount=300)
b1.save()
b2.save()
b3.save()
transaction.commit()
with transaction.atomic():
Book.objects.create(id=1, pagecount=100)
Book.objects.create(id=2, pagecount=200)
Book.objects.create(id=3, pagecount=300)
self.assertEqual(3, Book.objects.count())
# Delete something using connection 2.
@ -58,10 +53,9 @@ class DeleteLockingTest(TransactionTestCase):
# deleted in connection 2. This causes an infinite loop
# under MySQL InnoDB unless we keep track of already
# deleted objects.
Book.objects.filter(pagecount__lt=250).delete()
transaction.commit()
with transaction.atomic():
Book.objects.filter(pagecount__lt=250).delete()
self.assertEqual(1, Book.objects.count())
transaction.commit()
class DeleteCascadeTests(TestCase):

View File

@ -1,13 +0,0 @@
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Band(models.Model):
name = models.CharField(max_length=100)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name

View File

@ -9,22 +9,16 @@ from unittest import skipIf
from django.conf import settings
from django.core import mail
from django.db import (transaction, connections, DEFAULT_DB_ALIAS,
IntegrityError)
from django.http import HttpRequest, HttpResponse, StreamingHttpResponse
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import CommonMiddleware, BrokenLinkEmailsMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.middleware.gzip import GZipMiddleware
from django.middleware.transaction import TransactionMiddleware
from django.test import TransactionTestCase, TestCase, RequestFactory, override_settings
from django.test.utils import IgnoreDeprecationWarningsMixin
from django.test import TestCase, RequestFactory, override_settings
from django.utils import six
from django.utils.encoding import force_str
from django.utils.six.moves import xrange
from .models import Band
class CommonMiddlewareTest(TestCase):
urls = 'middleware.urls'
@ -666,64 +660,3 @@ class ETagGZipMiddlewareTest(TestCase):
nogzip_etag = response.get('ETag')
self.assertNotEqual(gzip_etag, nogzip_etag)
class TransactionMiddlewareTest(IgnoreDeprecationWarningsMixin, TransactionTestCase):
"""
Test the transaction middleware.
"""
available_apps = ['middleware']
def setUp(self):
super(TransactionMiddlewareTest, self).setUp()
self.request = HttpRequest()
self.request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
self.request.path = self.request.path_info = "/"
self.response = HttpResponse()
self.response.status_code = 200
def tearDown(self):
transaction.abort()
super(TransactionMiddlewareTest, self).tearDown()
def test_request(self):
TransactionMiddleware().process_request(self.request)
self.assertFalse(transaction.get_autocommit())
def test_managed_response(self):
transaction.enter_transaction_management()
Band.objects.create(name='The Beatles')
self.assertTrue(transaction.is_dirty())
TransactionMiddleware().process_response(self.request, self.response)
self.assertFalse(transaction.is_dirty())
self.assertEqual(Band.objects.count(), 1)
def test_exception(self):
transaction.enter_transaction_management()
Band.objects.create(name='The Beatles')
self.assertTrue(transaction.is_dirty())
TransactionMiddleware().process_exception(self.request, None)
self.assertFalse(transaction.is_dirty())
self.assertEqual(Band.objects.count(), 0)
def test_failing_commit(self):
# It is possible that connection.commit() fails. Check that
# TransactionMiddleware handles such cases correctly.
try:
def raise_exception():
raise IntegrityError()
connections[DEFAULT_DB_ALIAS].commit = raise_exception
transaction.enter_transaction_management()
Band.objects.create(name='The Beatles')
self.assertTrue(transaction.is_dirty())
with self.assertRaises(IntegrityError):
TransactionMiddleware().process_response(self.request, None)
self.assertFalse(transaction.is_dirty())
self.assertEqual(Band.objects.count(), 0)
self.assertFalse(transaction.is_managed())
finally:
del connections[DEFAULT_DB_ALIAS].commit

View File

@ -5,15 +5,12 @@ from datetime import datetime, timedelta
from io import BytesIO
from itertools import chain
import time
from unittest import skipIf
from django.db import connection, connections
from django.core import signals
from django.core.exceptions import SuspiciousOperation
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import (HttpRequest, HttpResponse, parse_cookie,
build_request_repr, UnreadablePostError, RawPostDataException)
from django.test import SimpleTestCase, TransactionTestCase, override_settings
from django.test import SimpleTestCase, override_settings
from django.test.client import FakePayload
from django.test.utils import str_prefix
from django.utils import six
@ -696,60 +693,3 @@ class HostValidationTests(SimpleTestCase):
msg_suggestion2 % "invalid_hostname.com",
request.get_host
)
@skipIf(connection.vendor == 'sqlite'
and connection.settings_dict['TEST']['NAME'] in (None, '', ':memory:'),
"Cannot establish two connections to an in-memory SQLite database.")
class DatabaseConnectionHandlingTests(TransactionTestCase):
available_apps = []
def setUp(self):
# Use a temporary connection to avoid messing with the main one.
self._old_default_connection = connections['default']
del connections['default']
def tearDown(self):
try:
connections['default'].close()
finally:
connections['default'] = self._old_default_connection
def test_request_finished_db_state(self):
# Force closing connection on request end
connection.settings_dict['CONN_MAX_AGE'] = 0
# The GET below will not succeed, but it will give a response with
# defined ._handler_class. That is needed for sending the
# request_finished signal.
response = self.client.get('/')
# Make sure there is an open connection
connection.ensure_connection()
connection.enter_transaction_management()
signals.request_finished.send(sender=response._handler_class)
self.assertEqual(len(connection.transaction_state), 0)
def test_request_finished_failed_connection(self):
# Force closing connection on request end
connection.settings_dict['CONN_MAX_AGE'] = 0
connection.enter_transaction_management()
connection.set_dirty()
# Test that the rollback doesn't succeed (for example network failure
# could cause this).
def fail_horribly():
raise Exception("Horrible failure!")
connection._rollback = fail_horribly
try:
with self.assertRaises(Exception):
signals.request_finished.send(sender=self.__class__)
# The connection's state wasn't cleaned up
self.assertEqual(len(connection.transaction_state), 1)
finally:
del connection._rollback
# The connection will be cleaned on next request where the conn
# works again.
signals.request_finished.send(sender=self.__class__)
self.assertEqual(len(connection.transaction_state), 0)

View File

@ -31,34 +31,24 @@ class SelectForUpdateTests(TransactionTestCase):
available_apps = ['select_for_update']
def setUp(self):
transaction.enter_transaction_management()
# This is executed in autocommit mode so that code in
# run_select_for_update can see this data.
self.person = Person.objects.create(name='Reinhardt')
# We have to commit here so that code in run_select_for_update can
# see this data.
transaction.commit()
# We need another database connection to test that one connection
# issuing a SELECT ... FOR UPDATE will block.
# We need another database connection in transaction to test that one
# connection issuing a SELECT ... FOR UPDATE will block.
new_connections = ConnectionHandler(settings.DATABASES)
self.new_connection = new_connections[DEFAULT_DB_ALIAS]
self.new_connection.enter_transaction_management()
def tearDown(self):
try:
# We don't really care if this fails - some of the tests will set
# this in the course of their run.
transaction.abort()
self.new_connection.abort()
except transaction.TransactionManagementError:
pass
self.new_connection.close()
try:
self.end_blocking_transaction()
except (DatabaseError, AttributeError):
pass
self.new_connection.close()
def start_blocking_transaction(self):
self.new_connection.set_autocommit(False)
# Start a blocking transaction. At some point,
# end_blocking_transaction() should be called.
self.cursor = self.new_connection.cursor()
@ -72,6 +62,7 @@ class SelectForUpdateTests(TransactionTestCase):
def end_blocking_transaction(self):
# Roll back the blocking transaction.
self.new_connection.rollback()
self.new_connection.set_autocommit(True)
def has_for_update_sql(self, tested_connection, nowait=False):
# Examine the SQL that was executed to determine whether it
@ -146,19 +137,17 @@ class SelectForUpdateTests(TransactionTestCase):
try:
# We need to enter transaction management again, as this is done on
# per-thread basis
transaction.enter_transaction_management()
people = list(
Person.objects.all().select_for_update(nowait=nowait)
)
people[0].name = 'Fred'
people[0].save()
transaction.commit()
with transaction.atomic():
people = list(
Person.objects.all().select_for_update(nowait=nowait)
)
people[0].name = 'Fred'
people[0].save()
except DatabaseError as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
transaction.abort()
connection.close()
@requires_threading
@ -245,16 +234,6 @@ class SelectForUpdateTests(TransactionTestCase):
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature('has_select_for_update')
def test_transaction_dirty_managed(self):
""" Check that a select_for_update sets the transaction to be
dirty when executed under txn management. Setting the txn dirty
means that it will be either committed or rolled back by Django,
which will release any locks held by the SELECT FOR UPDATE.
"""
list(Person.objects.select_for_update())
self.assertTrue(transaction.is_dirty())
@skipUnlessDBFeature('has_select_for_update')
def test_select_for_update_on_multidb(self):
old_routers = router.routers

View File

@ -272,16 +272,13 @@ class SerializersTransactionTestBase(object):
Tests that object ids can be referenced before they are
defined in the serialization data.
"""
# The deserialization process needs to be contained
# within a transaction in order to test forward reference
# handling.
transaction.enter_transaction_management()
objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
with connection.constraint_checks_disabled():
for obj in objs:
obj.save()
transaction.commit()
transaction.leave_transaction_management()
# The deserialization process needs to run in a transaction in order
# to test forward reference handling.
with transaction.atomic():
objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
with connection.constraint_checks_disabled():
for obj in objs:
obj.save()
for model_cls in (Category, Author, Article):
self.assertEqual(model_cls.objects.all().count(), 1)

View File

@ -4,8 +4,7 @@ import sys
from unittest import skipIf, skipUnless
from django.db import connection, transaction, DatabaseError, IntegrityError
from django.test import TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import IgnoreDeprecationWarningsMixin
from django.test import TransactionTestCase, skipIfDBFeature
from django.utils import six
from .models import Reporter
@ -238,20 +237,6 @@ class AtomicWithoutAutocommitTests(AtomicTests):
transaction.set_autocommit(True)
@skipIf(connection.features.autocommits_when_autocommit_is_off,
"This test requires a non-autocommit mode that doesn't autocommit.")
class AtomicInsideLegacyTransactionManagementTests(AtomicTests):
def setUp(self):
transaction.enter_transaction_management()
def tearDown(self):
# The tests access the database after exercising 'atomic', making the
# connection dirty; a rollback is required to make it clean.
transaction.rollback()
transaction.leave_transaction_management()
@skipUnless(connection.features.uses_savepoints,
"'atomic' requires transactions and savepoints.")
class AtomicMergeTests(TransactionTestCase):
@ -324,13 +309,6 @@ class AtomicErrorsTests(TransactionTestCase):
with self.assertRaises(transaction.TransactionManagementError):
transaction.rollback()
def test_atomic_prevents_calling_transaction_management_methods(self):
with transaction.atomic():
with self.assertRaises(transaction.TransactionManagementError):
transaction.enter_transaction_management()
with self.assertRaises(transaction.TransactionManagementError):
transaction.leave_transaction_management()
def test_atomic_prevents_queries_in_broken_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
@ -366,319 +344,3 @@ class AtomicMiscTests(TransactionTestCase):
pass
# Must not raise an exception
transaction.atomic(Callable())
class TransactionTests(IgnoreDeprecationWarningsMixin, TransactionTestCase):
available_apps = ['transactions']
def create_a_reporter_then_fail(self, first, last):
a = Reporter(first_name=first, last_name=last)
a.save()
raise Exception("I meant to do that")
def remove_a_reporter(self, first_name):
r = Reporter.objects.get(first_name="Alice")
r.delete()
def manually_managed(self):
r = Reporter(first_name="Dirk", last_name="Gently")
r.save()
transaction.commit()
def manually_managed_mistake(self):
r = Reporter(first_name="Edward", last_name="Woodward")
r.save()
# Oops, I forgot to commit/rollback!
@skipUnlessDBFeature('supports_transactions')
def test_autocommit(self):
"""
The default behavior is to autocommit after each save() action.
"""
self.assertRaises(
Exception,
self.create_a_reporter_then_fail,
"Alice", "Smith"
)
# The object created before the exception still exists
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_autocommit_decorator(self):
"""
The autocommit decorator works exactly the same as the default behavior.
"""
autocomitted_create_then_fail = transaction.autocommit(
self.create_a_reporter_then_fail
)
self.assertRaises(
Exception,
autocomitted_create_then_fail,
"Alice", "Smith"
)
# Again, the object created before the exception still exists
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_autocommit_decorator_with_using(self):
"""
The autocommit decorator also works with a using argument.
"""
autocomitted_create_then_fail = transaction.autocommit(using='default')(
self.create_a_reporter_then_fail
)
self.assertRaises(
Exception,
autocomitted_create_then_fail,
"Alice", "Smith"
)
# Again, the object created before the exception still exists
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success(self):
"""
With the commit_on_success decorator, the transaction is only committed
if the function doesn't throw an exception.
"""
committed_on_success = transaction.commit_on_success(
self.create_a_reporter_then_fail)
self.assertRaises(Exception, committed_on_success, "Dirk", "Gently")
# This time the object never got saved
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_with_using(self):
"""
The commit_on_success decorator also works with a using argument.
"""
using_committed_on_success = transaction.commit_on_success(using='default')(
self.create_a_reporter_then_fail
)
self.assertRaises(
Exception,
using_committed_on_success,
"Dirk", "Gently"
)
# This time the object never got saved
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_succeed(self):
"""
If there aren't any exceptions, the data will get saved.
"""
Reporter.objects.create(first_name="Alice", last_name="Smith")
remove_comitted_on_success = transaction.commit_on_success(
self.remove_a_reporter
)
remove_comitted_on_success("Alice")
self.assertEqual(list(Reporter.objects.all()), [])
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_exit(self):
@transaction.autocommit()
def gen_reporter():
@transaction.commit_on_success
def create_reporter():
Reporter.objects.create(first_name="Bobby", last_name="Tables")
create_reporter()
# Much more formal
r = Reporter.objects.get()
r.first_name = "Robert"
r.save()
gen_reporter()
r = Reporter.objects.get()
self.assertEqual(r.first_name, "Robert")
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed(self):
"""
You can manually manage transactions if you really want to, but you
have to remember to commit/rollback.
"""
manually_managed = transaction.commit_manually(self.manually_managed)
manually_managed()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed_mistake(self):
"""
If you forget, you'll get bad errors.
"""
manually_managed_mistake = transaction.commit_manually(
self.manually_managed_mistake
)
self.assertRaises(transaction.TransactionManagementError,
manually_managed_mistake)
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed_with_using(self):
"""
The commit_manually function also works with a using argument.
"""
using_manually_managed_mistake = transaction.commit_manually(using='default')(
self.manually_managed_mistake
)
self.assertRaises(
transaction.TransactionManagementError,
using_manually_managed_mistake
)
class TransactionRollbackTests(IgnoreDeprecationWarningsMixin, TransactionTestCase):
available_apps = ['transactions']
def execute_bad_sql(self):
with connection.cursor() as cursor:
cursor.execute("INSERT INTO transactions_reporter (first_name, last_name) VALUES ('Douglas', 'Adams');")
@skipUnlessDBFeature('requires_rollback_on_dirty_transaction')
def test_bad_sql(self):
"""
Regression for #11900: If a function wrapped by commit_on_success
writes a transaction that can't be committed, that transaction should
be rolled back. The bug is only visible using the psycopg2 backend,
though the fix is generally a good idea.
"""
execute_bad_sql = transaction.commit_on_success(self.execute_bad_sql)
self.assertRaises(IntegrityError, execute_bad_sql)
transaction.rollback()
class TransactionContextManagerTests(IgnoreDeprecationWarningsMixin, TransactionTestCase):
available_apps = ['transactions']
def create_reporter_and_fail(self):
Reporter.objects.create(first_name="Bob", last_name="Holtzman")
raise Exception
@skipUnlessDBFeature('supports_transactions')
def test_autocommit(self):
"""
The default behavior is to autocommit after each save() action.
"""
with self.assertRaises(Exception):
self.create_reporter_and_fail()
# The object created before the exception still exists
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_autocommit_context_manager(self):
"""
The autocommit context manager works exactly the same as the default
behavior.
"""
with self.assertRaises(Exception):
with transaction.autocommit():
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_autocommit_context_manager_with_using(self):
"""
The autocommit context manager also works with a using argument.
"""
with self.assertRaises(Exception):
with transaction.autocommit(using="default"):
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success(self):
"""
With the commit_on_success context manager, the transaction is only
committed if the block doesn't throw an exception.
"""
with self.assertRaises(Exception):
with transaction.commit_on_success():
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_with_using(self):
"""
The commit_on_success context manager also works with a using argument.
"""
with self.assertRaises(Exception):
with transaction.commit_on_success(using="default"):
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_succeed(self):
"""
If there aren't any exceptions, the data will get saved.
"""
Reporter.objects.create(first_name="Alice", last_name="Smith")
with transaction.commit_on_success():
Reporter.objects.filter(first_name="Alice").delete()
self.assertQuerysetEqual(Reporter.objects.all(), [])
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_exit(self):
with transaction.autocommit():
with transaction.commit_on_success():
Reporter.objects.create(first_name="Bobby", last_name="Tables")
# Much more formal
r = Reporter.objects.get()
r.first_name = "Robert"
r.save()
r = Reporter.objects.get()
self.assertEqual(r.first_name, "Robert")
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed(self):
"""
You can manually manage transactions if you really want to, but you
have to remember to commit/rollback.
"""
with transaction.commit_manually():
Reporter.objects.create(first_name="Libby", last_name="Holtzman")
transaction.commit()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed_mistake(self):
"""
If you forget, you'll get bad errors.
"""
with self.assertRaises(transaction.TransactionManagementError):
with transaction.commit_manually():
Reporter.objects.create(first_name="Scott", last_name="Browning")
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed_with_using(self):
"""
The commit_manually function also works with a using argument.
"""
with self.assertRaises(transaction.TransactionManagementError):
with transaction.commit_manually(using="default"):
Reporter.objects.create(first_name="Walter", last_name="Cronkite")
@skipUnlessDBFeature('requires_rollback_on_dirty_transaction')
def test_bad_sql(self):
"""
Regression for #11900: If a block wrapped by commit_on_success
writes a transaction that can't be committed, that transaction should
be rolled back. The bug is only visible using the psycopg2 backend,
though the fix is generally a good idea.
"""
with self.assertRaises(IntegrityError):
with transaction.commit_on_success():
with connection.cursor() as cursor:
cursor.execute("INSERT INTO transactions_reporter (first_name, last_name) VALUES ('Douglas', 'Adams');")
transaction.rollback()

View File

@ -1,17 +0,0 @@
from django.db import models
class Mod(models.Model):
fld = models.IntegerField()
class SubMod(Mod):
cnt = models.IntegerField(unique=True)
class M2mA(models.Model):
others = models.ManyToManyField('M2mB')
class M2mB(models.Model):
fld = models.IntegerField()

View File

@ -1,392 +0,0 @@
from __future__ import unicode_literals
from unittest import skipIf, skipUnless, SkipTest
from django.db import (connection, connections, transaction, DEFAULT_DB_ALIAS, DatabaseError,
IntegrityError)
from django.db.transaction import commit_on_success, commit_manually, TransactionManagementError
from django.test import TransactionTestCase, override_settings, skipUnlessDBFeature
from django.test.utils import IgnoreDeprecationWarningsMixin
from .models import Mod, M2mA, M2mB, SubMod
class ModelInheritanceTests(TransactionTestCase):
available_apps = ['transactions_regress']
def test_save(self):
# First, create a SubMod, then try to save another with conflicting
# cnt field. The problem was that transactions were committed after
# every parent save when not in managed transaction. As the cnt
# conflict is in the second model, we can check if the first save
# was committed or not.
SubMod(fld=1, cnt=1).save()
# We should have committed the transaction for the above - assert this.
connection.rollback()
self.assertEqual(SubMod.objects.count(), 1)
try:
SubMod(fld=2, cnt=1).save()
except IntegrityError:
connection.rollback()
self.assertEqual(SubMod.objects.count(), 1)
self.assertEqual(Mod.objects.count(), 1)
class TestTransactionClosing(IgnoreDeprecationWarningsMixin, TransactionTestCase):
"""
Tests to make sure that transactions are properly closed
when they should be, and aren't left pending after operations
have been performed in them. Refs #9964.
"""
available_apps = [
'transactions_regress',
'django.contrib.auth',
'django.contrib.contenttypes',
]
def test_raw_committed_on_success(self):
"""
Make sure a transaction consisting of raw SQL execution gets
committed by the commit_on_success decorator.
"""
@commit_on_success
def raw_sql():
"Write a record using raw sql under a commit_on_success decorator"
with connection.cursor() as cursor:
cursor.execute("INSERT into transactions_regress_mod (fld) values (18)")
raw_sql()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
self.assertEqual(Mod.objects.count(), 1)
# Check that the record is in the DB
obj = Mod.objects.all()[0]
self.assertEqual(obj.fld, 18)
def test_commit_manually_enforced(self):
"""
Make sure that under commit_manually, even "read-only" transactions require closure
(commit or rollback), and a transaction left pending is treated as an error.
"""
@commit_manually
def non_comitter():
"Execute a managed transaction with read-only operations and fail to commit"
Mod.objects.count()
self.assertRaises(TransactionManagementError, non_comitter)
def test_commit_manually_commit_ok(self):
"""
Test that under commit_manually, a committed transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def committer():
"""
Perform a database query, then commit the transaction
"""
Mod.objects.count()
transaction.commit()
try:
committer()
except TransactionManagementError:
self.fail("Commit did not clear the transaction state")
def test_commit_manually_rollback_ok(self):
"""
Test that under commit_manually, a rolled-back transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def roller_back():
"""
Perform a database query, then rollback the transaction
"""
Mod.objects.count()
transaction.rollback()
try:
roller_back()
except TransactionManagementError:
self.fail("Rollback did not clear the transaction state")
def test_commit_manually_enforced_after_commit(self):
"""
Test that under commit_manually, if a transaction is committed and an operation is
performed later, we still require the new transaction to be closed
"""
@commit_manually
def fake_committer():
"Query, commit, then query again, leaving with a pending transaction"
Mod.objects.count()
transaction.commit()
Mod.objects.count()
self.assertRaises(TransactionManagementError, fake_committer)
@skipUnlessDBFeature('supports_transactions')
def test_reuse_cursor_reference(self):
"""
Make sure transaction closure is enforced even when the queries are performed
through a single cursor reference retrieved in the beginning
(this is to show why it is wrong to set the transaction dirty only when a cursor
is fetched from the connection).
"""
@commit_on_success
def reuse_cursor_ref():
"""
Fetch a cursor, perform a query, rollback to close the transaction,
then write a record (in a new transaction) using the same cursor object
(reference). All this under commit_on_success, so the second insert should
be committed.
"""
with connection.cursor() as cursor:
cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
transaction.rollback()
cursor.execute("INSERT into transactions_regress_mod (fld) values (2)")
reuse_cursor_ref()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
self.assertEqual(Mod.objects.count(), 1)
obj = Mod.objects.all()[0]
self.assertEqual(obj.fld, 2)
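        # The point being made: "dirty" tracking has to happen when a query is
        # executed, not when a cursor is fetched, or the second INSERT issued
        # through the previously obtained cursor would never get committed.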
def test_failing_query_transaction_closed(self):
"""
Make sure that under commit_on_success, a transaction is rolled back even if
the first database-modifying operation fails.
This is prompted by http://code.djangoproject.com/ticket/6669 (and based on sample
code posted there to exemplify the problem): Before Django 1.3,
transactions were only marked "dirty" by the save() function after it successfully
wrote the object to the database.
"""
from django.contrib.auth.models import User
@transaction.commit_on_success
def create_system_user():
"Create a user in a transaction"
user = User.objects.create_user(username='system', password='iamr00t',
email='root@SITENAME.com')
            # Redundant, but makes sure the user id was read back from the DB
Mod.objects.create(fld=user.pk)
# Create a user
create_system_user()
with self.assertRaises(DatabaseError):
# The second call to create_system_user should fail for violating
# a unique constraint (it's trying to re-create the same user)
create_system_user()
# Try to read the database. If the last transaction was indeed closed,
# this should cause no problems
User.objects.all()[0]
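        # With the non-deprecated API the same guarantee reads roughly like this
        # (a sketch, reusing the data from create_system_user above):
        #
        #     with self.assertRaises(DatabaseError), transaction.atomic():
        #         User.objects.create_user(username='system', password='iamr00t',
        #                                  email='root@SITENAME.com')
        #     User.objects.all()[0]  # the failed block was rolled back; the connection is usable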
@override_settings(DEBUG=True)
def test_failing_query_transaction_closed_debug(self):
"""
Regression for #6669. Same test as above, with DEBUG=True.
"""
self.test_failing_query_transaction_closed()
@skipIf(connection.vendor == 'sqlite'
and connection.settings_dict['TEST']['NAME'] in (None, '', ':memory:'),
"Cannot establish two connections to an in-memory SQLite database.")
class TestNewConnection(IgnoreDeprecationWarningsMixin, TransactionTestCase):
"""
Check that new connections don't have special behavior.
"""
available_apps = ['transactions_regress']
def setUp(self):
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
try:
connections[DEFAULT_DB_ALIAS].abort()
connections[DEFAULT_DB_ALIAS].close()
finally:
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_commit(self):
"""
Users are allowed to commit and rollback connections.
"""
connection.set_autocommit(False)
try:
# The starting value is False, not None.
self.assertIs(connection._dirty, False)
list(Mod.objects.all())
self.assertTrue(connection.is_dirty())
connection.commit()
self.assertFalse(connection.is_dirty())
list(Mod.objects.all())
self.assertTrue(connection.is_dirty())
connection.rollback()
self.assertFalse(connection.is_dirty())
finally:
connection.set_autocommit(True)
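        # Note: only the _dirty / is_dirty() bookkeeping checked above belongs
        # to the legacy transaction management; set_autocommit(), commit() and
        # rollback() on the connection remain the supported low-level API.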
def test_enter_exit_management(self):
orig_dirty = connection._dirty
connection.enter_transaction_management()
connection.leave_transaction_management()
self.assertEqual(orig_dirty, connection._dirty)
@skipUnless(connection.vendor == 'postgresql',
"This test only valid for PostgreSQL")
class TestPostgresAutocommitAndIsolation(IgnoreDeprecationWarningsMixin, TransactionTestCase):
"""
    Tests to make sure psycopg2's autocommit mode and isolation level
    are restored after entering and leaving transaction management.
Refs #16047, #18130.
"""
available_apps = ['transactions_regress']
def setUp(self):
from psycopg2.extensions import (ISOLATION_LEVEL_AUTOCOMMIT,
ISOLATION_LEVEL_SERIALIZABLE,
TRANSACTION_STATUS_IDLE)
self._autocommit = ISOLATION_LEVEL_AUTOCOMMIT
self._serializable = ISOLATION_LEVEL_SERIALIZABLE
self._idle = TRANSACTION_STATUS_IDLE
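        # These psycopg2.extensions constants describe the raw connection
        # state: ISOLATION_LEVEL_SERIALIZABLE matches the 'isolation_level'
        # option set below, and TRANSACTION_STATUS_IDLE means no transaction
        # is currently open on the connection.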
        # We want a clean backend with autocommit = True, so we need to do
        # a bit of setup work first.
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
opts = settings['OPTIONS'].copy()
opts['isolation_level'] = ISOLATION_LEVEL_SERIALIZABLE
settings['OPTIONS'] = opts
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
try:
connections[DEFAULT_DB_ALIAS].abort()
finally:
connections[DEFAULT_DB_ALIAS].close()
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_initial_autocommit_state(self):
# Autocommit is activated when the connection is created.
connection.cursor().close()
self.assertTrue(connection.autocommit)
def test_transaction_management(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
def test_transaction_stacking(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
def test_enter_autocommit(self):
transaction.enter_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
list(Mod.objects.all())
self.assertTrue(transaction.is_dirty())
# Enter autocommit mode again.
transaction.enter_transaction_management(False)
self.assertFalse(transaction.is_dirty())
self.assertEqual(
connection.connection.get_transaction_status(),
self._idle)
list(Mod.objects.all())
self.assertFalse(transaction.is_dirty())
transaction.leave_transaction_management()
self.assertFalse(connection.autocommit)
self.assertEqual(connection.isolation_level, self._serializable)
transaction.leave_transaction_management()
self.assertTrue(connection.autocommit)
class TestManyToManyAddTransaction(IgnoreDeprecationWarningsMixin, TransactionTestCase):
available_apps = ['transactions_regress']
def test_manyrelated_add_commit(self):
"Test for https://code.djangoproject.com/ticket/16818"
a = M2mA.objects.create()
b = M2mB.objects.create(fld=10)
a.others.add(b)
        # We're in a TransactionTestCase and haven't changed the transaction
        # behavior from the default of "autocommit", so this rollback should
        # not actually do anything. If it does in fact undo our add, that's a
        # bug: the bulk insert was not auto-committed.
transaction.rollback()
self.assertEqual(a.others.count(), 1)
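        # add() writes the through-table rows with a bulk insert; the count
        # surviving the explicit rollback shows that insert really was
        # committed under autocommit, which is what #16818 was about.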
class SavepointTest(IgnoreDeprecationWarningsMixin, TransactionTestCase):
available_apps = ['transactions_regress']
@skipIf(connection.vendor == 'sqlite',
"SQLite doesn't support savepoints in managed mode")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_commit(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
Mod.objects.filter(pk=pk).update(fld=10)
transaction.savepoint_commit(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 10)
work()
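        # For reference, a sketch of the non-deprecated way to exercise a
        # savepoint, via a nested atomic() block (assuming savepoints are
        # enabled for the backend):
        #
        #     with transaction.atomic():
        #         mod = Mod.objects.create(fld=1)
        #         with transaction.atomic():  # creates and releases a savepoint
        #             Mod.objects.filter(pk=mod.pk).update(fld=10)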
@skipIf(connection.vendor == 'sqlite',
"SQLite doesn't support savepoints in managed mode")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_rollback(self):
        # _mysql_storage_engine issues a query, so it can't be used in a
        # skipIf decorator; that would execute the query at module load time.
if (connection.vendor == 'mysql' and
connection.features._mysql_storage_engine == 'MyISAM'):
raise SkipTest("MyISAM MySQL storage engine doesn't support savepoints")
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
Mod.objects.filter(pk=pk).update(fld=20)
transaction.savepoint_rollback(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 1)
work()