|
@@ -411,17 +411,21 @@ class QuerySet(object):
|
|
|
Inserts each of the instances into the database. This does *not* call
|
|
|
save() on each of the instances, does not send any pre/post save
|
|
|
signals, and does not set the primary key attribute if it is an
|
|
|
- autoincrement field. Multi-table models are not supported.
|
|
|
- """
|
|
|
- # So this case is fun. When you bulk insert you don't get the primary
|
|
|
- # keys back (if it's an autoincrement), so you can't insert into the
|
|
|
- # child tables which references this. There are two workarounds, 1)
|
|
|
- # this could be implemented if you didn't have an autoincrement pk,
|
|
|
- # and 2) you could do it by doing O(n) normal inserts into the parent
|
|
|
- # tables to get the primary keys back, and then doing a single bulk
|
|
|
- # insert into the childmost table. Some databases might allow doing
|
|
|
- # this by using RETURNING clause for the insert query. We're punting
|
|
|
- # on these for now because they are relatively rare cases.
|
|
|
+ autoincrement field (except if features.can_return_ids_from_bulk_insert=True).
|
|
|
+ Multi-table models are not supported.
|
|
|
+ """
|
|
|
+ # When you bulk insert you don't get the primary keys back (if it's an
|
|
|
+ # autoincrement, except if can_return_ids_from_bulk_insert=True), so
|
|
|
+ # you can't insert into the child tables which reference this. There
|
|
|
+ # are two workarounds:
|
|
|
+ # 1) This could be implemented if you didn't have an autoincrement pk
|
|
|
+ # 2) You could do it by doing O(n) normal inserts into the parent
|
|
|
+ # tables to get the primary keys back and then doing a single bulk
|
|
|
+ # insert into the childmost table.
|
|
|
+ # We currently set the primary keys on the objects when using
|
|
|
+ # PostgreSQL via the RETURNING ID clause. It should be possible for
|
|
|
+ # Oracle as well, but the semantics for extracting the primary keys are
|
|
|
+ # trickier so it's not done yet.
|
|
|
assert batch_size is None or batch_size > 0
|
|
|
# Check that the parents share the same concrete model with the our
|
|
|
# model to detect the inheritance pattern ConcreteGrandParent ->
|
|
@@ -447,7 +451,11 @@ class QuerySet(object):
|
|
|
self._batched_insert(objs_with_pk, fields, batch_size)
|
|
|
if objs_without_pk:
|
|
|
fields = [f for f in fields if not isinstance(f, AutoField)]
|
|
|
- self._batched_insert(objs_without_pk, fields, batch_size)
|
|
|
+ ids = self._batched_insert(objs_without_pk, fields, batch_size)
|
|
|
+ if connection.features.can_return_ids_from_bulk_insert:
|
|
|
+ assert len(ids) == len(objs_without_pk)
|
|
|
+ for i in range(len(ids)):
|
|
|
+ objs_without_pk[i].pk = ids[i]
|
|
|
|
|
|
return objs
|
|
|
|
|
@@ -1051,10 +1059,19 @@ class QuerySet(object):
|
|
|
return
|
|
|
ops = connections[self.db].ops
|
|
|
batch_size = (batch_size or max(ops.bulk_batch_size(fields, objs), 1))
|
|
|
- for batch in [objs[i:i + batch_size]
|
|
|
- for i in range(0, len(objs), batch_size)]:
|
|
|
- self.model._base_manager._insert(batch, fields=fields,
|
|
|
- using=self.db)
|
|
|
+ inserted_ids = []
|
|
|
+ for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
|
|
|
+ if connections[self.db].features.can_return_ids_from_bulk_insert:
|
|
|
+ inserted_id = self.model._base_manager._insert(
|
|
|
+ item, fields=fields, using=self.db, return_id=True
|
|
|
+ )
|
|
|
+ if len(objs) > 1:
|
|
|
+ inserted_ids.extend(inserted_id)
|
|
|
+ if len(objs) == 1:
|
|
|
+ inserted_ids.append(inserted_id)
|
|
|
+ else:
|
|
|
+ self.model._base_manager._insert(item, fields=fields, using=self.db)
|
|
|
+ return inserted_ids
|
|
|
|
|
|
def _clone(self, **kwargs):
|
|
|
query = self.query.clone()
|