@@ -498,7 +498,6 @@ class QuerySet:
         if not objs:
             return objs
         self._for_write = True
-        connection = connections[self.db]
         opts = self.model._meta
         fields = opts.concrete_fields
         objs = list(objs)
@@ -521,6 +520,7 @@ class QuerySet:
                 returned_columns = self._batched_insert(
                     objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
                 )
+                connection = connections[self.db]
                 if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
                     assert len(returned_columns) == len(objs_without_pk)
                 for obj_without_pk, results in zip(objs_without_pk, returned_columns):
@@ -551,9 +551,10 @@ class QuerySet:
             return 0
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
-        max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
+        connection = connections[self.db]
+        max_batch_size = connection.ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
-        requires_casting = connections[self.db].features.requires_casted_case_in_updates
+        requires_casting = connection.features.requires_casted_case_in_updates
         batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
         updates = []
         for batch_objs in batches:
@@ -1308,13 +1309,14 @@ class QuerySet:
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
-        if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
+        connection = connections[self.db]
+        if ignore_conflicts and not connection.features.supports_ignore_conflicts:
             raise NotSupportedError('This database backend does not support ignoring conflicts.')
-        ops = connections[self.db].ops
+        ops = connection.ops
         max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         inserted_rows = []
-        bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
+        bulk_return = connection.features.can_return_rows_from_bulk_insert
         for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
             if bulk_return and not ignore_conflicts:
                 inserted_rows.extend(self._insert(
@@ -1523,10 +1525,8 @@ class RawQuerySet:
     def iterator(self):
         # Cache some things for performance reasons outside the loop.
         db = self.db
-        compiler = connections[db].ops.compiler('SQLCompiler')(
-            self.query, connections[db], db
-        )
-
+        connection = connections[db]
+        compiler = connection.ops.compiler('SQLCompiler')(self.query, connection, db)
         query = iter(self.query)

         try:
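
For context, every hunk above applies the same micro-optimization: each `connections[self.db]` subscript goes through the connection handler's `__getitem__`, so the patch performs the lookup once, caches the result in a local `connection` variable, and reuses it (in bulk_create, the assignment is also moved down so it only happens on the branch that needs it). Below is a minimal, self-contained sketch of the effect; `ToyConnectionHandler` is a hypothetical stand-in that counts lookups, not Django's real class.

    class ToyConnectionHandler:
        """Hypothetical stand-in for django.db.connections; counts lookups."""

        def __init__(self):
            self.lookups = 0
            self._conns = {'default': object()}

        def __getitem__(self, alias):
            self.lookups += 1  # runs on every `connections[alias]` subscript
            return self._conns[alias]


    connections = ToyConnectionHandler()


    def repeated_lookups(db):
        # Mirrors the removed lines: three separate subscript accesses.
        return connections[db], connections[db], connections[db]


    def cached_lookup(db):
        # Mirrors the added lines: one access, cached in a local variable.
        connection = connections[db]
        return connection, connection, connection


    repeated_lookups('default')
    print(connections.lookups)  # 3
    connections.lookups = 0
    cached_lookup('default')
    print(connections.lookups)  # 1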