Refs #29444 -- Renamed DatabaseFeatures.can_return_id* to be generic for other columns.
This commit is contained in:
parent
16a5a2a2c8
commit
b131f9c79f
|
@@ -22,8 +22,8 @@ class BaseDatabaseFeatures:
|
|||
supports_partially_nullable_unique_constraints = True
|
||||
|
||||
can_use_chunked_reads = True
|
||||
can_return_id_from_insert = False
|
||||
can_return_ids_from_bulk_insert = False
|
||||
can_return_columns_from_insert = False
|
||||
can_return_rows_from_bulk_insert = False
|
||||
has_bulk_insert = True
|
||||
uses_savepoints = True
|
||||
can_release_savepoints = False
|
||||
|
|
|
@@ -202,7 +202,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
|
|||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True)
|
||||
self.features.can_return_id_from_insert = use_returning_into
|
||||
self.features.can_return_columns_from_insert = use_returning_into
|
||||
|
||||
def _dsn(self):
|
||||
settings_dict = self.settings_dict
|
||||
|
|
|
@@ -10,7 +10,7 @@ class DatabaseFeatures(BaseDatabaseFeatures):
|
|||
has_select_for_update_skip_locked = True
|
||||
has_select_for_update_of = True
|
||||
select_for_update_of_column = True
|
||||
can_return_id_from_insert = True
|
||||
can_return_columns_from_insert = True
|
||||
can_introspect_autofield = True
|
||||
supports_subqueries_in_group_by = False
|
||||
supports_transactions = True
|
||||
|
|
|
@@ -7,8 +7,8 @@ from django.utils.functional import cached_property
|
|||
|
||||
class DatabaseFeatures(BaseDatabaseFeatures):
|
||||
allows_group_by_selected_pks = True
|
||||
can_return_id_from_insert = True
|
||||
can_return_ids_from_bulk_insert = True
|
||||
can_return_columns_from_insert = True
|
||||
can_return_rows_from_bulk_insert = True
|
||||
has_real_datatype = True
|
||||
has_native_uuid_field = True
|
||||
has_native_duration_field = True
|
||||
|
|
|
@@ -431,11 +431,11 @@ class QuerySet:
|
|||
Insert each of the instances into the database. Do *not* call
|
||||
save() on each of the instances, do not send any pre/post_save
|
||||
signals, and do not set the primary key attribute if it is an
|
||||
autoincrement field (except if features.can_return_ids_from_bulk_insert=True).
|
||||
autoincrement field (except if features.can_return_rows_from_bulk_insert=True).
|
||||
Multi-table models are not supported.
|
||||
"""
|
||||
# When you bulk insert you don't get the primary keys back (if it's an
|
||||
# autoincrement, except if can_return_ids_from_bulk_insert=True), so
|
||||
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
|
||||
# you can't insert into the child tables which references this. There
|
||||
# are two workarounds:
|
||||
# 1) This could be implemented if you didn't have an autoincrement pk
|
||||
|
@@ -471,7 +471,7 @@ class QuerySet:
|
|||
if objs_without_pk:
|
||||
fields = [f for f in fields if not isinstance(f, AutoField)]
|
||||
ids = self._batched_insert(objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts)
|
||||
if connection.features.can_return_ids_from_bulk_insert and not ignore_conflicts:
|
||||
if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
|
||||
assert len(ids) == len(objs_without_pk)
|
||||
for obj_without_pk, pk in zip(objs_without_pk, ids):
|
||||
obj_without_pk.pk = pk
|
||||
|
@@ -1185,7 +1185,7 @@ class QuerySet:
|
|||
ops = connections[self.db].ops
|
||||
batch_size = (batch_size or max(ops.bulk_batch_size(fields, objs), 1))
|
||||
inserted_ids = []
|
||||
bulk_return = connections[self.db].features.can_return_ids_from_bulk_insert
|
||||
bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
|
||||
for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
|
||||
if bulk_return and not ignore_conflicts:
|
||||
inserted_id = self._insert(
|
||||
|
|
|
@@ -1273,8 +1273,8 @@ class SQLInsertCompiler(SQLCompiler):
|
|||
ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql(
|
||||
ignore_conflicts=self.query.ignore_conflicts
|
||||
)
|
||||
if self.return_id and self.connection.features.can_return_id_from_insert:
|
||||
if self.connection.features.can_return_ids_from_bulk_insert:
|
||||
if self.return_id and self.connection.features.can_return_columns_from_insert:
|
||||
if self.connection.features.can_return_rows_from_bulk_insert:
|
||||
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
|
||||
params = param_rows
|
||||
else:
|
||||
|
@@ -1307,7 +1307,7 @@ class SQLInsertCompiler(SQLCompiler):
|
|||
def execute_sql(self, return_id=False):
|
||||
assert not (
|
||||
return_id and len(self.query.objs) != 1 and
|
||||
not self.connection.features.can_return_ids_from_bulk_insert
|
||||
not self.connection.features.can_return_rows_from_bulk_insert
|
||||
)
|
||||
self.return_id = return_id
|
||||
with self.connection.cursor() as cursor:
|
||||
|
@@ -1315,9 +1315,9 @@ class SQLInsertCompiler(SQLCompiler):
|
|||
cursor.execute(sql, params)
|
||||
if not return_id:
|
||||
return
|
||||
if self.connection.features.can_return_ids_from_bulk_insert and len(self.query.objs) > 1:
|
||||
if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1:
|
||||
return self.connection.ops.fetch_returned_insert_ids(cursor)
|
||||
if self.connection.features.can_return_id_from_insert:
|
||||
if self.connection.features.can_return_columns_from_insert:
|
||||
assert len(self.query.objs) == 1
|
||||
return self.connection.ops.fetch_returned_insert_id(cursor)
|
||||
return self.connection.ops.last_insert_id(
|
||||
|
|
|
@@ -218,6 +218,10 @@ backends.
|
|||
field, add ``SchemaEditor.sql_create_column_inline_fk`` with the appropriate
|
||||
SQL; otherwise, set ``DatabaseFeatures.can_create_inline_fk = False``.
|
||||
|
||||
* ``DatabaseFeatures.can_return_id_from_insert`` and
|
||||
``can_return_ids_from_bulk_insert`` are renamed to
|
||||
``can_return_columns_from_insert`` and ``can_return_rows_from_bulk_insert``.
|
||||
|
||||
Miscellaneous
|
||||
-------------
|
||||
|
||||
|
|
|
@@ -226,14 +226,14 @@ class BulkCreateTests(TestCase):
|
|||
field_value = '' if isinstance(field, FileField) else None
|
||||
self.assertEqual(NullableFields.objects.filter(**{field.name: field_value}).count(), 1)
|
||||
|
||||
@skipUnlessDBFeature('can_return_ids_from_bulk_insert')
|
||||
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
|
||||
def test_set_pk_and_insert_single_item(self):
|
||||
with self.assertNumQueries(1):
|
||||
countries = Country.objects.bulk_create([self.data[0]])
|
||||
self.assertEqual(len(countries), 1)
|
||||
self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0])
|
||||
|
||||
@skipUnlessDBFeature('can_return_ids_from_bulk_insert')
|
||||
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
|
||||
def test_set_pk_and_query_efficiency(self):
|
||||
with self.assertNumQueries(1):
|
||||
countries = Country.objects.bulk_create(self.data)
|
||||
|
@@ -243,7 +243,7 @@ class BulkCreateTests(TestCase):
|
|||
self.assertEqual(Country.objects.get(pk=countries[2].pk), countries[2])
|
||||
self.assertEqual(Country.objects.get(pk=countries[3].pk), countries[3])
|
||||
|
||||
@skipUnlessDBFeature('can_return_ids_from_bulk_insert')
|
||||
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
|
||||
def test_set_state(self):
|
||||
country_nl = Country(name='Netherlands', iso_two_letter='NL')
|
||||
country_be = Country(name='Belgium', iso_two_letter='BE')
|
||||
|
|
Loading…
Reference in New Issue