12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229 |
- import collections
- import json
- import re
- from functools import partial
- from itertools import chain
- from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet
- from django.db import DatabaseError, NotSupportedError
- from django.db.models.constants import LOOKUP_SEP
- from django.db.models.expressions import ColPairs, F, OrderBy, RawSQL, Ref, Value
- from django.db.models.fields import AutoField, composite
- from django.db.models.functions import Cast, Random
- from django.db.models.lookups import Lookup
- from django.db.models.query_utils import select_related_descend
- from django.db.models.sql.constants import (
- CURSOR,
- GET_ITERATOR_CHUNK_SIZE,
- MULTI,
- NO_RESULTS,
- ORDER_DIR,
- ROW_COUNT,
- SINGLE,
- )
- from django.db.models.sql.query import Query, get_order_dir
- from django.db.models.sql.where import AND
- from django.db.transaction import TransactionManagementError
- from django.utils.functional import cached_property
- from django.utils.hashable import make_hashable
- from django.utils.regex_helper import _lazy_re_compile
class PositionRef(Ref):
    """A Ref that renders as a 1-based ordinal position in the SELECT list."""

    def __init__(self, ordinal, refs, source):
        self.ordinal = ordinal
        super().__init__(refs, source)

    def as_sql(self, compiler, connection):
        # Positional references are emitted literally and carry no params.
        return f"{self.ordinal}", ()
class SQLCompiler:
    """Translate a Query object into SQL and parameters for one connection."""

    # Multiline ordering SQL clause may appear from RawSQL. Group 1 captures
    # the expression without its trailing ASC/DESC modifier.
    ordering_parts = _lazy_re_compile(
        r"^(.*)\s(?:ASC|DESC).*",
        re.MULTILINE | re.DOTALL,
    )
- def __init__(self, query, connection, using, elide_empty=True):
- self.query = query
- self.connection = connection
- self.using = using
- # Some queries, e.g. coalesced aggregation, need to be executed even if
- # they would return an empty result set.
- self.elide_empty = elide_empty
- self.quote_cache = {"*": "*"}
- # The select, klass_info, and annotations are needed by QuerySet.iterator()
- # these are set as a side-effect of executing the query. Note that we calculate
- # separately a list of extra select columns needed for grammatical correctness
- # of the query, but these columns are not included in self.select.
- self.select = None
- self.annotation_col_map = None
- self.klass_info = None
- self._meta_ordering = None
- def __repr__(self):
- return (
- f"<{self.__class__.__qualname__} "
- f"model={self.query.model.__qualname__} "
- f"connection={self.connection!r} using={self.using!r}>"
- )
- def setup_query(self, with_col_aliases=False):
- if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
- self.query.get_initial_alias()
- self.select, self.klass_info, self.annotation_col_map = self.get_select(
- with_col_aliases=with_col_aliases,
- )
- self.col_count = len(self.select)
    def pre_sql_setup(self, with_col_aliases=False):
        """
        Do any necessary class setup immediately prior to producing SQL. This
        is for things that can't necessarily be done in __init__ because we
        might not have all the pieces in place at that time.

        Return an (extra_select, order_by, group_by) triple for as_sql().
        """
        self.setup_query(with_col_aliases=with_col_aliases)
        order_by = self.get_order_by()
        # Split the WHERE tree into WHERE / HAVING / QUALIFY parts; grouping
        # forces predicates on aggregates into HAVING.
        self.where, self.having, self.qualify = self.query.where.split_having_qualify(
            must_group_by=self.query.group_by is not None
        )
        extra_select = self.get_extra_select(order_by, self.select)
        self.has_extra_select = bool(extra_select)
        group_by = self.get_group_by(self.select + extra_select, order_by)
        return extra_select, order_by, group_by
    def get_group_by(self, select, order_by):
        """
        Return a list of 2-tuples of form (sql, params) for the GROUP BY
        clause.

        The logic of what exactly the GROUP BY clause contains is hard
        to describe in other words than "if it passes the test suite,
        then it is correct".
        """
        # Some examples:
        #     SomeModel.objects.annotate(Count('somecol'))
        #     GROUP BY: all fields of the model
        #
        #    SomeModel.objects.values('name').annotate(Count('somecol'))
        #    GROUP BY: name
        #
        #    SomeModel.objects.annotate(Count('somecol')).values('name')
        #    GROUP BY: all cols of the model
        #
        #    SomeModel.objects.values('name', 'pk')
        #    .annotate(Count('somecol')).values('pk')
        #    GROUP BY: name, pk
        #
        #    SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
        #    GROUP BY: name, pk
        #
        # In fact, the self.query.group_by is the minimal set to GROUP BY. It
        # can't be ever restricted to a smaller set, but additional columns in
        # HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
        # the end result is that it is impossible to force the query to have
        # a chosen GROUP BY clause - you can almost do this by using the form:
        #     .values(*wanted_cols).annotate(AnAggregate())
        # but any later annotations, extra selects, values calls that
        # refer some column outside of the wanted_cols, order_by, or even
        # filter calls can alter the GROUP BY clause.

        # The query.group_by is either None (no GROUP BY at all), True
        # (group by select fields), or a list of expressions to be added
        # to the group by.
        if self.query.group_by is None:
            return []
        expressions = []
        group_by_refs = set()
        if self.query.group_by is not True:
            # If the group by is set to a list (by .values() call most likely),
            # then we need to add everything in it to the GROUP BY clause.
            # Backwards compatibility hack for setting query.group_by. Remove
            # when we have public API way of forcing the GROUP BY clause.
            # Converts string references to expressions.
            for expr in self.query.group_by:
                if not hasattr(expr, "as_sql"):
                    expr = self.query.resolve_ref(expr)
                if isinstance(expr, Ref):
                    if expr.refs not in group_by_refs:
                        group_by_refs.add(expr.refs)
                        expressions.append(expr.source)
                else:
                    expressions.append(expr)
        # Note that even if the group_by is set, it is only the minimal
        # set to group by. So, we need to add cols in select, order_by, and
        # having into the select in any case.
        selected_expr_positions = {}
        for ordinal, (expr, _, alias) in enumerate(select, start=1):
            if alias:
                selected_expr_positions[expr] = ordinal
            # Skip members of the select clause that are already explicitly
            # grouped against.
            if alias in group_by_refs:
                continue
            expressions.extend(expr.get_group_by_cols())
        if not self._meta_ordering:
            for expr, (sql, params, is_ref) in order_by:
                # Skip references to the SELECT clause, as all expressions in
                # the SELECT clause are already part of the GROUP BY.
                if not is_ref:
                    expressions.extend(expr.get_group_by_cols())
        having_group_by = self.having.get_group_by_cols() if self.having else ()
        for expr in having_group_by:
            expressions.append(expr)
        result = []
        seen = set()
        expressions = self.collapse_group_by(expressions, having_group_by)

        allows_group_by_select_index = (
            self.connection.features.allows_group_by_select_index
        )
        for expr in expressions:
            try:
                sql, params = self.compile(expr)
            except (EmptyResultSet, FullResultSet):
                continue
            # Prefer grouping by the SELECT ordinal when the backend allows
            # it; otherwise emit the full (possibly wrapped) expression SQL.
            if (
                allows_group_by_select_index
                and (position := selected_expr_positions.get(expr)) is not None
            ):
                sql, params = str(position), ()
            else:
                sql, params = expr.select_format(self, sql, params)
            # Deduplicate on (sql, hashable params) so equivalent expressions
            # appear in the GROUP BY clause only once.
            params_hash = make_hashable(params)
            if (sql, params_hash) not in seen:
                result.append((sql, params))
                seen.add((sql, params_hash))
        return result
- def collapse_group_by(self, expressions, having):
- # If the database supports group by functional dependence reduction,
- # then the expressions can be reduced to the set of selected table
- # primary keys as all other columns are functionally dependent on them.
- if self.connection.features.allows_group_by_selected_pks:
- # Filter out all expressions associated with a table's primary key
- # present in the grouped columns. This is done by identifying all
- # tables that have their primary key included in the grouped
- # columns and removing non-primary key columns referring to them.
- # Unmanaged models are excluded because they could be representing
- # database views on which the optimization might not be allowed.
- pks = {
- expr
- for expr in expressions
- if (
- hasattr(expr, "target")
- and expr.target.primary_key
- and self.connection.features.allows_group_by_selected_pks_on_model(
- expr.target.model
- )
- )
- }
- aliases = {expr.alias for expr in pks}
- expressions = [
- expr
- for expr in expressions
- if expr in pks
- or expr in having
- or getattr(expr, "alias", None) not in aliases
- ]
- return expressions
    def get_select(self, with_col_aliases=False):
        """
        Return three values:
        - a list of 3-tuples of (expression, (sql, params), alias)
        - a klass_info structure,
        - a dictionary of annotations

        The (sql, params) is what the expression will produce, and alias is the
        "AS alias" for the column (possibly None).

        The klass_info structure contains the following information:
        - The base model of the query.
        - Which columns for that model are present in the query (by
          position of the select clause).
        - related_klass_infos: [f, klass_info] to descent into

        The annotations is a dictionary of {'attname': column position} values.
        """
        select = []
        klass_info = None
        annotations = {}
        # An explicit select list and default model columns are mutually
        # exclusive.
        assert not (self.query.select and self.query.default_cols)
        select_mask = self.query.get_select_mask()
        if self.query.default_cols:
            cols = self.get_default_columns(select_mask)
        else:
            # self.query.select is a special case. These columns never go to
            # any model.
            cols = self.query.select
        if cols:
            klass_info = {
                "model": self.query.model,
                "select_fields": list(
                    range(
                        len(self.query.extra_select),
                        len(self.query.extra_select) + len(cols),
                    )
                ),
            }
        selected = []
        if self.query.selected is None:
            # Default layout: extra selects first, then columns, then
            # annotations.
            selected = [
                *(
                    (alias, RawSQL(*args))
                    for alias, args in self.query.extra_select.items()
                ),
                *((None, col) for col in cols),
                *self.query.annotation_select.items(),
            ]
        else:
            for alias, expression in self.query.selected.items():
                # Reference to an annotation.
                if isinstance(expression, str):
                    expression = self.query.annotations[expression]
                # Reference to a column.
                elif isinstance(expression, int):
                    expression = cols[expression]
                # ColPairs cannot be aliased.
                if isinstance(expression, ColPairs):
                    alias = None
                selected.append((alias, expression))
        for select_idx, (alias, expression) in enumerate(selected):
            if alias:
                annotations[alias] = select_idx
            select.append((expression, alias))

        if self.query.select_related:
            related_klass_infos = self.get_related_selections(select, select_mask)
            klass_info["related_klass_infos"] = related_klass_infos

            def get_select_from_parent(klass_info):
                # Prepend parent select fields onto children selected "from
                # parent" so positions stay consistent, recursively.
                for ki in klass_info["related_klass_infos"]:
                    if ki["from_parent"]:
                        ki["select_fields"] = (
                            klass_info["select_fields"] + ki["select_fields"]
                        )
                    get_select_from_parent(ki)

            get_select_from_parent(klass_info)

        ret = []
        col_idx = 1
        for col, alias in select:
            try:
                sql, params = self.compile(col)
            except EmptyResultSet:
                empty_result_set_value = getattr(
                    col, "empty_result_set_value", NotImplemented
                )
                if empty_result_set_value is NotImplemented:
                    # Select a predicate that's always False.
                    sql, params = "0", ()
                else:
                    sql, params = self.compile(Value(empty_result_set_value))
            except FullResultSet:
                sql, params = self.compile(Value(True))
            else:
                sql, params = col.select_format(self, sql, params)
            # Generate "colN" aliases only for columns that have none.
            if alias is None and with_col_aliases:
                alias = f"col{col_idx}"
                col_idx += 1
            ret.append((col, (sql, params), alias))
        return ret, klass_info, annotations
    def _order_by_pairs(self):
        """
        Yield (OrderBy expression, is_ref) pairs for the query's ordering,
        where is_ref indicates the expression references the SELECT clause.
        """
        if self.query.extra_order_by:
            ordering = self.query.extra_order_by
        elif not self.query.default_ordering:
            ordering = self.query.order_by
        elif self.query.order_by:
            ordering = self.query.order_by
        elif (meta := self.query.get_meta()) and meta.ordering:
            ordering = meta.ordering
            self._meta_ordering = ordering
        else:
            ordering = []
        if self.query.standard_ordering:
            default_order, _ = ORDER_DIR["ASC"]
        else:
            default_order, _ = ORDER_DIR["DESC"]

        selected_exprs = {}
        # Avoid computing `selected_exprs` if there is no `ordering` as it's
        # relatively expensive.
        if ordering and (select := self.select):
            for ordinal, (expr, _, alias) in enumerate(select, start=1):
                pos_expr = PositionRef(ordinal, alias, expr)
                if alias:
                    selected_exprs[alias] = pos_expr
                selected_exprs[expr] = pos_expr

        for field in ordering:
            if hasattr(field, "resolve_expression"):
                if isinstance(field, Value):
                    # output_field must be resolved for constants.
                    field = Cast(field, field.output_field)
                if not isinstance(field, OrderBy):
                    field = field.asc()
                if not self.query.standard_ordering:
                    field = field.copy()
                    field.reverse_ordering()
                select_ref = selected_exprs.get(field.expression)
                if select_ref or (
                    isinstance(field.expression, F)
                    and (select_ref := selected_exprs.get(field.expression.name))
                ):
                    # Emulation of NULLS (FIRST|LAST) cannot be combined with
                    # the usage of ordering by position.
                    if (
                        field.nulls_first is None and field.nulls_last is None
                    ) or self.connection.features.supports_order_by_nulls_modifier:
                        field = field.copy()
                        field.expression = select_ref
                    # Alias collisions are not possible when dealing with
                    # combined queries so fallback to it if emulation of NULLS
                    # handling is required.
                    elif self.query.combinator:
                        field = field.copy()
                        field.expression = Ref(select_ref.refs, select_ref.source)
                yield field, select_ref is not None
                continue
            if field == "?":  # random
                yield OrderBy(Random()), False
                continue

            col, order = get_order_dir(field, default_order)
            descending = order == "DESC"

            if select_ref := selected_exprs.get(col):
                # Reference to expression in SELECT clause
                yield (
                    OrderBy(
                        select_ref,
                        descending=descending,
                    ),
                    True,
                )
                continue

            if expr := self.query.annotations.get(col):
                ref = col
                transforms = []
            else:
                ref, *transforms = col.split(LOOKUP_SEP)
                expr = self.query.annotations.get(ref)
            if expr:
                if self.query.combinator and self.select:
                    if transforms:
                        raise NotImplementedError(
                            "Ordering combined queries by transforms is not "
                            "implemented."
                        )
                    # Don't use the resolved annotation because other
                    # combined queries might define it differently.
                    expr = F(ref)
                if transforms:
                    for name in transforms:
                        expr = self.query.try_transform(expr, name)
                if isinstance(expr, Value):
                    # output_field must be resolved for constants.
                    expr = Cast(expr, expr.output_field)
                yield OrderBy(expr, descending=descending), False
                continue

            if "." in field:
                # This came in through an extra(order_by=...) addition. Pass it
                # on verbatim.
                table, col = col.split(".", 1)
                yield (
                    OrderBy(
                        RawSQL(
                            "%s.%s" % (self.quote_name_unless_alias(table), col), []
                        ),
                        descending=descending,
                    ),
                    False,
                )
                continue

            if self.query.extra and col in self.query.extra:
                if col in self.query.extra_select:
                    yield (
                        OrderBy(
                            Ref(col, RawSQL(*self.query.extra[col])),
                            descending=descending,
                        ),
                        True,
                    )
                else:
                    yield (
                        OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
                        False,
                    )
            else:
                if self.query.combinator and self.select:
                    # Don't use the first model's field because other
                    # combined queries might define it differently.
                    yield OrderBy(F(col), descending=descending), False
                else:
                    # 'col' is of the form 'field' or 'field1__field2' or
                    # '-field1__field2__field', etc.
                    yield from self.find_ordering_name(
                        field,
                        self.query.get_meta(),
                        default_order=default_order,
                    )
    def get_order_by(self):
        """
        Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for
        the ORDER BY clause.

        The order_by clause can alter the select clause (for example it can add
        aliases to clauses that do not yet have one, or it can add totally new
        select clauses).
        """
        result = []
        seen = set()
        for expr, is_ref in self._order_by_pairs():
            resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None)
            if not is_ref and self.query.combinator and self.select:
                # Combined queries must reference ORDER BY columns through
                # their SELECT-list aliases.
                src = resolved.expression
                expr_src = expr.expression
                for sel_expr, _, col_alias in self.select:
                    if src == sel_expr:
                        # When values() is used the exact alias must be used to
                        # reference annotations.
                        if (
                            self.query.has_select_fields
                            and col_alias in self.query.annotation_select
                            and not (
                                isinstance(expr_src, F) and col_alias == expr_src.name
                            )
                        ):
                            continue
                        resolved.set_source_expressions(
                            [Ref(col_alias if col_alias else src.target.column, src)]
                        )
                        break
                else:
                    # Add column used in ORDER BY clause to the selected
                    # columns and to each combined query.
                    order_by_idx = len(self.query.select) + 1
                    col_alias = f"__orderbycol{order_by_idx}"
                    for q in self.query.combined_queries:
                        # If fields were explicitly selected through values()
                        # combined queries cannot be augmented.
                        if q.has_select_fields:
                            raise DatabaseError(
                                "ORDER BY term does not match any column in "
                                "the result set."
                            )
                        q.add_annotation(expr_src, col_alias)
                    self.query.add_select_col(resolved, col_alias)
                    resolved.set_source_expressions([Ref(col_alias, src)])
            sql, params = self.compile(resolved)
            # Don't add the same column twice, but the order direction is
            # not taken into account so we strip it. When this entire method
            # is refactored into expressions, then we can check each part as we
            # generate it.
            without_ordering = self.ordering_parts.search(sql)[1]
            params_hash = make_hashable(params)
            if (without_ordering, params_hash) in seen:
                continue
            seen.add((without_ordering, params_hash))
            result.append((resolved, (sql, params, is_ref)))
        return result
- def get_extra_select(self, order_by, select):
- extra_select = []
- if self.query.distinct and not self.query.distinct_fields:
- select_sql = [t[1] for t in select]
- for expr, (sql, params, is_ref) in order_by:
- without_ordering = self.ordering_parts.search(sql)[1]
- if not is_ref and (without_ordering, params) not in select_sql:
- extra_select.append((expr, (without_ordering, params), None))
- return extra_select
- def quote_name_unless_alias(self, name):
- """
- A wrapper around connection.ops.quote_name that doesn't quote aliases
- for table names. This avoids problems with some SQL dialects that treat
- quoted strings specially (e.g. PostgreSQL).
- """
- if name in self.quote_cache:
- return self.quote_cache[name]
- if (
- (name in self.query.alias_map and name not in self.query.table_map)
- or name in self.query.extra_select
- or (
- self.query.external_aliases.get(name)
- and name not in self.query.table_map
- )
- ):
- self.quote_cache[name] = name
- return name
- r = self.connection.ops.quote_name(name)
- self.quote_cache[name] = r
- return r
- def compile(self, node):
- vendor_impl = getattr(node, "as_" + self.connection.vendor, None)
- if vendor_impl:
- sql, params = vendor_impl(self, self.connection)
- else:
- sql, params = node.as_sql(self, self.connection)
- return sql, params
    def get_combinator_sql(self, combinator, all):
        """
        Return (sql_parts, params) for a combined query (UNION, INTERSECTION,
        or DIFFERENCE); `all` selects UNION ALL semantics.
        """
        features = self.connection.features
        compilers = [
            query.get_compiler(self.using, self.connection, self.elide_empty)
            for query in self.query.combined_queries
        ]
        if not features.supports_slicing_ordering_in_compound:
            for compiler in compilers:
                if compiler.query.is_sliced:
                    raise DatabaseError(
                        "LIMIT/OFFSET not allowed in subqueries of compound statements."
                    )
                if compiler.get_order_by():
                    raise DatabaseError(
                        "ORDER BY not allowed in subqueries of compound statements."
                    )
        parts = []
        empty_compiler = None
        for compiler in compilers:
            try:
                parts.append(self._get_combinator_part_sql(compiler))
            except EmptyResultSet:
                # Omit the empty queryset with UNION and with DIFFERENCE if the
                # first queryset is nonempty.
                if combinator == "union" or (combinator == "difference" and parts):
                    empty_compiler = compiler
                    continue
                raise
        if not parts:
            raise EmptyResultSet
        elif len(parts) == 1 and combinator == "union" and self.query.is_sliced:
            # A sliced union cannot be composed of a single component because
            # in the event the later is also sliced it might result in invalid
            # SQL due to the usage of multiple LIMIT clauses. Prevent that from
            # happening by always including an empty resultset query to force
            # the creation of an union.
            empty_compiler.elide_empty = False
            parts.append(self._get_combinator_part_sql(empty_compiler))
        combinator_sql = self.connection.ops.set_operators[combinator]
        if all and combinator == "union":
            combinator_sql += " ALL"
        braces = "{}"
        if not self.query.subquery and features.supports_slicing_ordering_in_compound:
            braces = "({})"
        sql_parts, args_parts = zip(
            *((braces.format(sql), args) for sql, args in parts)
        )
        result = [" {} ".format(combinator_sql).join(sql_parts)]
        params = []
        for part in args_parts:
            params.extend(part)
        return result, params
    def _get_combinator_part_sql(self, compiler):
        """
        Return (sql, params) for one member query of a combined statement,
        wrapping it in parentheses or a subquery as required by the backend.
        """
        features = self.connection.features
        # If the columns list is limited, then all combined queries
        # must have the same columns list. Set the selects defined on
        # the query on all combined queries, if not already set.
        selected = self.query.selected
        if selected is not None and compiler.query.selected is None:
            compiler.query = compiler.query.clone()
            compiler.query.set_values(selected)
        part_sql, part_args = compiler.as_sql(with_col_aliases=True)
        if compiler.query.combinator:
            # Wrap in a subquery if wrapping in parentheses isn't
            # supported.
            if not features.supports_parentheses_in_compound:
                part_sql = "SELECT * FROM ({})".format(part_sql)
            # Add parentheses when combining with compound query if not
            # already added for all compound queries.
            elif (
                self.query.subquery
                or not features.supports_slicing_ordering_in_compound
            ):
                part_sql = "({})".format(part_sql)
        elif self.query.subquery and features.supports_slicing_ordering_in_compound:
            part_sql = "({})".format(part_sql)
        return part_sql, part_args
    def get_qualify_sql(self):
        """
        Return (sql_parts, params) emulating a QUALIFY clause: wrap the query
        in a subquery and filter on window expressions in the outer query.
        """
        where_parts = []
        if self.where:
            where_parts.append(self.where)
        if self.having:
            where_parts.append(self.having)
        inner_query = self.query.clone()
        inner_query.subquery = True
        inner_query.where = inner_query.where.__class__(where_parts)
        # Augment the inner query with any window function references that
        # might have been masked via values() and alias(). If any masked
        # aliases are added they'll be masked again to avoid fetching
        # the data in the `if qual_aliases` branch below.
        select = {
            expr: alias for expr, _, alias in self.get_select(with_col_aliases=True)[0]
        }
        select_aliases = set(select.values())
        qual_aliases = set()
        replacements = {}

        def collect_replacements(expressions):
            # Map each expression to a select alias, annotating the inner
            # query with fresh "qualN" aliases for unselected expressions.
            while expressions:
                expr = expressions.pop()
                if expr in replacements:
                    continue
                elif select_alias := select.get(expr):
                    replacements[expr] = select_alias
                elif isinstance(expr, Lookup):
                    expressions.extend(expr.get_source_expressions())
                elif isinstance(expr, Ref):
                    if expr.refs not in select_aliases:
                        expressions.extend(expr.get_source_expressions())
                else:
                    num_qual_alias = len(qual_aliases)
                    select_alias = f"qual{num_qual_alias}"
                    qual_aliases.add(select_alias)
                    inner_query.add_annotation(expr, select_alias)
                    replacements[expr] = select_alias

        collect_replacements(list(self.qualify.leaves()))
        self.qualify = self.qualify.replace_expressions(
            {expr: Ref(alias, expr) for expr, alias in replacements.items()}
        )
        order_by = []
        for order_by_expr, *_ in self.get_order_by():
            collect_replacements(order_by_expr.get_source_expressions())
            order_by.append(
                order_by_expr.replace_expressions(
                    {expr: Ref(alias, expr) for expr, alias in replacements.items()}
                )
            )
        inner_query_compiler = inner_query.get_compiler(
            self.using, connection=self.connection, elide_empty=self.elide_empty
        )
        inner_sql, inner_params = inner_query_compiler.as_sql(
            # The limits must be applied to the outer query to avoid pruning
            # results too eagerly.
            with_limits=False,
            # Force unique aliasing of selected columns to avoid collisions
            # and make rhs predicates referencing easier.
            with_col_aliases=True,
        )
        qualify_sql, qualify_params = self.compile(self.qualify)
        result = [
            "SELECT * FROM (",
            inner_sql,
            ")",
            self.connection.ops.quote_name("qualify"),
            "WHERE",
            qualify_sql,
        ]
        if qual_aliases:
            # If some select aliases were unmasked for filtering purposes they
            # must be masked back.
            cols = [self.connection.ops.quote_name(alias) for alias in select.values()]
            result = [
                "SELECT",
                ", ".join(cols),
                "FROM (",
                *result,
                ")",
                self.connection.ops.quote_name("qualify_mask"),
            ]
        params = list(inner_params) + qualify_params
        # As the SQL spec is unclear on whether or not derived tables
        # ordering must propagate it has to be explicitly repeated on the
        # outer-most query to ensure it's preserved.
        if order_by:
            ordering_sqls = []
            for ordering in order_by:
                ordering_sql, ordering_params = self.compile(ordering)
                ordering_sqls.append(ordering_sql)
                params.extend(ordering_params)
            result.extend(["ORDER BY", ", ".join(ordering_sqls)])
        return result, params
    def as_sql(self, with_limits=True, with_col_aliases=False):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.

        If 'with_limits' is False, any limit/offset information is not included
        in the query.
        """
        # Snapshot alias refcounts so any joins promoted while compiling can
        # be released again in the finally clause below.
        refcounts_before = self.query.alias_refcount.copy()
        try:
            combinator = self.query.combinator
            extra_select, order_by, group_by = self.pre_sql_setup(
                with_col_aliases=with_col_aliases or bool(combinator),
            )
            for_update_part = None
            # Is a LIMIT/OFFSET clause needed?
            with_limit_offset = with_limits and self.query.is_sliced
            combinator = self.query.combinator
            features = self.connection.features
            if combinator:
                # UNION/INTERSECT/EXCEPT: delegate to the combinator compiler.
                if not getattr(features, "supports_select_{}".format(combinator)):
                    raise NotSupportedError(
                        "{} is not supported on this database backend.".format(
                            combinator
                        )
                    )
                result, params = self.get_combinator_sql(
                    combinator, self.query.combinator_all
                )
            elif self.qualify:
                # Window-function filtering: emulated via a wrapping subquery;
                # get_qualify_sql() handles ordering itself, so drop order_by.
                result, params = self.get_qualify_sql()
                order_by = None
            else:
                distinct_fields, distinct_params = self.get_distinct()
                # This must come after 'select', 'ordering', and 'distinct'
                # (see docstring of get_from_clause() for details).
                from_, f_params = self.get_from_clause()
                try:
                    where, w_params = (
                        self.compile(self.where) if self.where is not None else ("", [])
                    )
                except EmptyResultSet:
                    if self.elide_empty:
                        raise
                    # Use a predicate that's always False.
                    where, w_params = "0 = 1", []
                except FullResultSet:
                    where, w_params = "", []
                try:
                    having, h_params = (
                        self.compile(self.having)
                        if self.having is not None
                        else ("", [])
                    )
                except FullResultSet:
                    having, h_params = "", []
                result = ["SELECT"]
                params = []
                if self.query.distinct:
                    distinct_result, distinct_params = self.connection.ops.distinct_sql(
                        distinct_fields,
                        distinct_params,
                    )
                    result += distinct_result
                    params += distinct_params
                out_cols = []
                for _, (s_sql, s_params), alias in self.select + extra_select:
                    if alias:
                        s_sql = "%s AS %s" % (
                            s_sql,
                            self.connection.ops.quote_name(alias),
                        )
                    params.extend(s_params)
                    out_cols.append(s_sql)
                result += [", ".join(out_cols)]
                if from_:
                    result += ["FROM", *from_]
                elif self.connection.features.bare_select_suffix:
                    # e.g. Oracle's "FROM DUAL" for FROM-less selects.
                    result += [self.connection.features.bare_select_suffix]
                params.extend(f_params)
                if self.query.select_for_update and features.has_select_for_update:
                    if (
                        self.connection.get_autocommit()
                        # Don't raise an exception when database doesn't
                        # support transactions, as it's a noop.
                        and features.supports_transactions
                    ):
                        raise TransactionManagementError(
                            "select_for_update cannot be used outside of a transaction."
                        )
                    if (
                        with_limit_offset
                        and not features.supports_select_for_update_with_limit
                    ):
                        raise NotSupportedError(
                            "LIMIT/OFFSET is not supported with "
                            "select_for_update on this database backend."
                        )
                    nowait = self.query.select_for_update_nowait
                    skip_locked = self.query.select_for_update_skip_locked
                    of = self.query.select_for_update_of
                    no_key = self.query.select_for_no_key_update
                    # If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
                    # backend doesn't support it, raise NotSupportedError to
                    # prevent a possible deadlock.
                    if nowait and not features.has_select_for_update_nowait:
                        raise NotSupportedError(
                            "NOWAIT is not supported on this database backend."
                        )
                    elif skip_locked and not features.has_select_for_update_skip_locked:
                        raise NotSupportedError(
                            "SKIP LOCKED is not supported on this database backend."
                        )
                    elif of and not features.has_select_for_update_of:
                        raise NotSupportedError(
                            "FOR UPDATE OF is not supported on this database backend."
                        )
                    elif no_key and not features.has_select_for_no_key_update:
                        raise NotSupportedError(
                            "FOR NO KEY UPDATE is not supported on this "
                            "database backend."
                        )
                    for_update_part = self.connection.ops.for_update_sql(
                        nowait=nowait,
                        skip_locked=skip_locked,
                        of=self.get_select_for_update_of_arguments(),
                        no_key=no_key,
                    )
                if for_update_part and features.for_update_after_from:
                    result.append(for_update_part)
                if where:
                    result.append("WHERE %s" % where)
                    params.extend(w_params)
                grouping = []
                for g_sql, g_params in group_by:
                    grouping.append(g_sql)
                    params.extend(g_params)
                if grouping:
                    if distinct_fields:
                        raise NotImplementedError(
                            "annotate() + distinct(fields) is not implemented."
                        )
                    order_by = order_by or self.connection.ops.force_no_ordering()
                    result.append("GROUP BY %s" % ", ".join(grouping))
                    if self._meta_ordering:
                        order_by = None
                if having:
                    if not grouping:
                        result.extend(self.connection.ops.force_group_by())
                    result.append("HAVING %s" % having)
                    params.extend(h_params)
            if self.query.explain_info:
                # Prepend the backend-specific EXPLAIN prefix.
                result.insert(
                    0,
                    self.connection.ops.explain_query_prefix(
                        self.query.explain_info.format,
                        **self.query.explain_info.options,
                    ),
                )
            if order_by:
                ordering = []
                for _, (o_sql, o_params, _) in order_by:
                    ordering.append(o_sql)
                    params.extend(o_params)
                order_by_sql = "ORDER BY %s" % ", ".join(ordering)
                if combinator and features.requires_compound_order_by_subquery:
                    result = ["SELECT * FROM (", *result, ")", order_by_sql]
                else:
                    result.append(order_by_sql)
            if with_limit_offset:
                result.append(
                    self.connection.ops.limit_offset_sql(
                        self.query.low_mark, self.query.high_mark
                    )
                )
            if for_update_part and not features.for_update_after_from:
                result.append(for_update_part)
            if self.query.subquery and extra_select:
                # If the query is used as a subquery, the extra selects would
                # result in more columns than the left-hand side expression is
                # expecting. This can happen when a subquery uses a combination
                # of order_by() and distinct(), forcing the ordering expressions
                # to be selected as well. Wrap the query in another subquery
                # to exclude extraneous selects.
                sub_selects = []
                sub_params = []
                for index, (select, _, alias) in enumerate(self.select, start=1):
                    if alias:
                        sub_selects.append(
                            "%s.%s"
                            % (
                                self.connection.ops.quote_name("subquery"),
                                self.connection.ops.quote_name(alias),
                            )
                        )
                    else:
                        select_clone = select.relabeled_clone(
                            {select.alias: "subquery"}
                        )
                        subselect, subparams = select_clone.as_sql(
                            self, self.connection
                        )
                        sub_selects.append(subselect)
                        sub_params.extend(subparams)
                return "SELECT %s FROM (%s) subquery" % (
                    ", ".join(sub_selects),
                    " ".join(result),
                ), tuple(sub_params + params)
            return " ".join(result), tuple(params)
        finally:
            # Finally do cleanup - get rid of the joins we created above.
            self.query.reset_refcounts(refcounts_before)
    def get_default_columns(
        self, select_mask, start_alias=None, opts=None, from_parent=None
    ):
        """
        Compute the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.

        Return a list of column expressions (one per selected concrete
        field), suitable for compilation into the SELECT clause. When
        select_mask is non-empty, only fields present in the mask are
        included.
        """
        result = []
        if opts is None:
            if (opts := self.query.get_meta()) is None:
                return result
        start_alias = start_alias or self.query.get_initial_alias()
        # The 'seen_models' is used to optimize checking the needed parent
        # alias for a given field. This also includes None -> start_alias to
        # be used by local fields.
        seen_models = {None: start_alias}
        # Unnest the mask so membership tests below compare against the
        # individual fields (composite entries expand to their members).
        select_mask_fields = set(composite.unnest(select_mask))
        for field in opts.concrete_fields:
            model = field.model._meta.concrete_model
            # A proxy model will have a different model and concrete_model. We
            # will assign None if the field belongs to this model.
            if model == opts.model:
                model = None
            if (
                from_parent
                and model is not None
                and issubclass(
                    from_parent._meta.concrete_model, model._meta.concrete_model
                )
            ):
                # Avoid loading data for already loaded parents.
                # We end up here in the case select_related() resolution
                # proceeds from parent model to child model. In that case the
                # parent model data is already present in the SELECT clause,
                # and we want to avoid reloading the same data again.
                continue
            if select_mask and field not in select_mask_fields:
                continue
            alias = self.query.join_parent_model(opts, model, start_alias, seen_models)
            column = field.get_col(alias)
            result.append(column)
        return result
    def get_distinct(self):
        """
        Return a quoted list of fields to use in DISTINCT ON part of the query.

        This method can alter the tables in the query, and thus it must be
        called before get_from_clause().
        """
        result = []
        params = []
        opts = self.query.get_meta()
        for name in self.query.distinct_fields:
            parts = name.split(LOOKUP_SEP)
            _, targets, alias, joins, path, _, transform_function = self._setup_joins(
                parts, opts, None
            )
            # Drop any trailing joins not needed to reach the target columns.
            targets, alias, _ = self.query.trim_joins(targets, joins, path)
            for target in targets:
                if name in self.query.annotation_select:
                    # Annotations are referenced by their quoted alias, not
                    # by a compiled column expression.
                    result.append(self.connection.ops.quote_name(name))
                else:
                    r, p = self.compile(transform_function(target, alias))
                    result.append(r)
                    # NOTE: p is the whole param list for this expression, so
                    # params is a list of lists; presumably the backend's
                    # distinct_sql() flattens it — confirm against the ops
                    # implementation.
                    params.append(p)
        return result, params
    def find_ordering_name(
        self, name, opts, alias=None, default_order="ASC", already_seen=None
    ):
        """
        Resolve an ordering reference of the form
        'field1__field2__...__fieldN' into concrete ordering expressions.

        Return a list of (OrderBy expression, is_ref) pairs. When 'name'
        refers to a relation with a default Meta.ordering, that ordering is
        expanded recursively (with cycle detection via 'already_seen').
        """
        name, order = get_order_dir(name, default_order)
        descending = order == "DESC"
        pieces = name.split(LOOKUP_SEP)
        (
            field,
            targets,
            alias,
            joins,
            path,
            opts,
            transform_function,
        ) = self._setup_joins(pieces, opts, alias)
        # If we get to this point and the field is a relation to another model,
        # append the default ordering for that model unless it is the pk
        # shortcut or the attribute name of the field that is specified or
        # there are transforms to process.
        if (
            field.is_relation
            and opts.ordering
            and getattr(field, "attname", None) != pieces[-1]
            and name != "pk"
            and not getattr(transform_function, "has_transforms", False)
        ):
            # Firstly, avoid infinite loops.
            already_seen = already_seen or set()
            join_tuple = tuple(
                getattr(self.query.alias_map[j], "join_cols", None) for j in joins
            )
            if join_tuple in already_seen:
                raise FieldError("Infinite loop caused by ordering.")
            already_seen.add(join_tuple)
            results = []
            for item in opts.ordering:
                if hasattr(item, "resolve_expression") and not isinstance(
                    item, OrderBy
                ):
                    # Bare expressions inherit the requested direction.
                    item = item.desc() if descending else item.asc()
                if isinstance(item, OrderBy):
                    # Re-anchor references so they resolve relative to the
                    # relation being ordered through.
                    results.append(
                        (item.prefix_references(f"{name}{LOOKUP_SEP}"), False)
                    )
                    continue
                results.extend(
                    (expr.prefix_references(f"{name}{LOOKUP_SEP}"), is_ref)
                    for expr, is_ref in self.find_ordering_name(
                        item, opts, alias, order, already_seen
                    )
                )
            return results
        targets, alias, _ = self.query.trim_joins(targets, joins, path)
        return [
            (OrderBy(transform_function(t, alias), descending=descending), False)
            for t in targets
        ]
- def _setup_joins(self, pieces, opts, alias):
- """
- Helper method for get_order_by() and get_distinct().
- get_ordering() and get_distinct() must produce same target columns on
- same input, as the prefixes of get_ordering() and get_distinct() must
- match. Executing SQL where this is not true is an error.
- """
- alias = alias or self.query.get_initial_alias()
- field, targets, opts, joins, path, transform_function = self.query.setup_joins(
- pieces, opts, alias
- )
- alias = joins[-1]
- return field, targets, alias, joins, path, opts, transform_function
- def get_from_clause(self):
- """
- Return a list of strings that are joined together to go after the
- "FROM" part of the query, as well as a list any extra parameters that
- need to be included. Subclasses, can override this to create a
- from-clause via a "select".
- This should only be called after any SQL construction methods that
- might change the tables that are needed. This means the select columns,
- ordering, and distinct must be done first.
- """
- result = []
- params = []
- # Copy alias_map to a tuple in case Join.as_sql() subclasses (objects
- # in alias_map) alter compiler.query.alias_map. That would otherwise
- # raise "RuntimeError: dictionary changed size during iteration".
- for alias, from_clause in tuple(self.query.alias_map.items()):
- if not self.query.alias_refcount[alias]:
- continue
- clause_sql, clause_params = self.compile(from_clause)
- result.append(clause_sql)
- params.extend(clause_params)
- for t in self.query.extra_tables:
- alias, _ = self.query.table_alias(t)
- # Only add the alias if it's not already present (the table_alias()
- # call increments the refcount, so an alias refcount of one means
- # this is the only reference).
- if (
- alias not in self.query.alias_map
- or self.query.alias_refcount[alias] == 1
- ):
- result.append(", %s" % self.quote_name_unless_alias(alias))
- return result, params
    def get_related_selections(
        self,
        select,
        select_mask,
        opts=None,
        root_alias=None,
        cur_depth=1,
        requested=None,
        restricted=None,
    ):
        """
        Fill in the information needed for a select_related query. The current
        depth is measured as the number of connections away from the root model
        (for example, cur_depth=1 means we are looking at models with direct
        connections to the root model).

        Appends (column, None) entries to 'select' in place and returns a list
        of klass_info dicts describing each followed relation.
        """

        def _get_field_choices():
            # Names valid in select_related(): forward relations, unique
            # reverse relations, and declared filtered relations.
            direct_choices = (f.name for f in opts.fields if f.is_relation)
            reverse_choices = (
                f.field.related_query_name()
                for f in opts.related_objects
                if f.field.unique
            )
            return chain(
                direct_choices, reverse_choices, self.query._filtered_relations
            )

        related_klass_infos = []
        if not restricted and cur_depth > self.query.max_depth:
            # We've recursed far enough; bail out.
            return related_klass_infos
        if not opts:
            opts = self.query.get_meta()
            root_alias = self.query.get_initial_alias()
        # Setup for the case when only particular related fields should be
        # included in the related selection.
        fields_found = set()
        if requested is None:
            restricted = isinstance(self.query.select_related, dict)
            if restricted:
                requested = self.query.select_related

        def get_related_klass_infos(klass_info, related_klass_infos):
            klass_info["related_klass_infos"] = related_klass_infos

        # Forward (direct) relations on this model.
        for f in opts.fields:
            fields_found.add(f.name)
            if restricted:
                next = requested.get(f.name, {})
                if not f.is_relation:
                    # If a non-related field is used like a relation,
                    # or if a single non-relational field is given.
                    if next or f.name in requested:
                        raise FieldError(
                            "Non-relational field given in select_related: '%s'. "
                            "Choices are: %s"
                            % (
                                f.name,
                                ", ".join(_get_field_choices()) or "(none)",
                            )
                        )
                else:
                    next = False
            if not select_related_descend(f, restricted, requested, select_mask):
                continue
            related_select_mask = select_mask.get(f) or {}
            klass_info = {
                "model": f.remote_field.model,
                "field": f,
                "reverse": False,
                "local_setter": f.set_cached_value,
                "remote_setter": (
                    f.remote_field.set_cached_value if f.unique else lambda x, y: None
                ),
                "from_parent": False,
            }
            related_klass_infos.append(klass_info)
            select_fields = []
            _, _, _, joins, _, _ = self.query.setup_joins([f.name], opts, root_alias)
            alias = joins[-1]
            columns = self.get_default_columns(
                related_select_mask, start_alias=alias, opts=f.remote_field.model._meta
            )
            # Record the indexes of the related columns within 'select'.
            for col in columns:
                select_fields.append(len(select))
                select.append((col, None))
            klass_info["select_fields"] = select_fields
            next_klass_infos = self.get_related_selections(
                select,
                related_select_mask,
                f.remote_field.model._meta,
                alias,
                cur_depth + 1,
                next,
                restricted,
            )
            get_related_klass_infos(klass_info, next_klass_infos)
        if restricted:
            # Unique reverse (one-to-one style) relations, only followed when
            # select_related() was called with explicit names.
            related_fields = [
                (o, o.field, o.related_model)
                for o in opts.related_objects
                if o.field.unique and not o.many_to_many
            ]
            for related_object, related_field, model in related_fields:
                if not select_related_descend(
                    related_object,
                    restricted,
                    requested,
                    select_mask,
                ):
                    continue
                related_select_mask = select_mask.get(related_object) or {}
                related_field_name = related_field.related_query_name()
                fields_found.add(related_field_name)
                join_info = self.query.setup_joins(
                    [related_field_name], opts, root_alias
                )
                alias = join_info.joins[-1]
                from_parent = issubclass(model, opts.model) and model is not opts.model
                klass_info = {
                    "model": model,
                    "field": related_field,
                    "reverse": True,
                    "local_setter": related_object.set_cached_value,
                    "remote_setter": related_field.set_cached_value,
                    "from_parent": from_parent,
                }
                related_klass_infos.append(klass_info)
                select_fields = []
                columns = self.get_default_columns(
                    related_select_mask,
                    start_alias=alias,
                    opts=model._meta,
                    from_parent=opts.model,
                )
                for col in columns:
                    select_fields.append(len(select))
                    select.append((col, None))
                klass_info["select_fields"] = select_fields
                next = requested.get(related_field_name, {})
                next_klass_infos = self.get_related_selections(
                    select,
                    related_select_mask,
                    model._meta,
                    alias,
                    cur_depth + 1,
                    next,
                    restricted,
                )
                get_related_klass_infos(klass_info, next_klass_infos)

            def local_setter(final_field, obj, from_obj):
                # Set a reverse fk object when relation is non-empty.
                if from_obj:
                    final_field.remote_field.set_cached_value(from_obj, obj)

            def local_setter_noop(obj, from_obj):
                pass

            def remote_setter(name, obj, from_obj):
                setattr(from_obj, name, obj)

            for name in list(requested):
                # Filtered relations work only on the topmost level.
                if cur_depth > 1:
                    break
                if name in self.query._filtered_relations:
                    fields_found.add(name)
                    final_field, _, join_opts, joins, _, _ = self.query.setup_joins(
                        [name], opts, root_alias
                    )
                    model = join_opts.model
                    alias = joins[-1]
                    from_parent = (
                        issubclass(model, opts.model) and model is not opts.model
                    )
                    klass_info = {
                        "model": model,
                        "field": final_field,
                        "reverse": True,
                        "local_setter": (
                            partial(local_setter, final_field)
                            if len(joins) <= 2
                            else local_setter_noop
                        ),
                        "remote_setter": partial(remote_setter, name),
                        "from_parent": from_parent,
                    }
                    related_klass_infos.append(klass_info)
                    select_fields = []
                    field_select_mask = select_mask.get((name, final_field)) or {}
                    columns = self.get_default_columns(
                        field_select_mask,
                        start_alias=alias,
                        opts=model._meta,
                        from_parent=opts.model,
                    )
                    for col in columns:
                        select_fields.append(len(select))
                        select.append((col, None))
                    klass_info["select_fields"] = select_fields
                    next_requested = requested.get(name, {})
                    next_klass_infos = self.get_related_selections(
                        select,
                        field_select_mask,
                        opts=model._meta,
                        root_alias=alias,
                        cur_depth=cur_depth + 1,
                        requested=next_requested,
                        restricted=restricted,
                    )
                    get_related_klass_infos(klass_info, next_klass_infos)
            fields_not_found = set(requested).difference(fields_found)
            if fields_not_found:
                invalid_fields = ("'%s'" % s for s in fields_not_found)
                raise FieldError(
                    "Invalid field name(s) given in select_related: %s. "
                    "Choices are: %s"
                    % (
                        ", ".join(invalid_fields),
                        ", ".join(_get_field_choices()) or "(none)",
                    )
                )
        return related_klass_infos
    def get_select_for_update_of_arguments(self):
        """
        Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
        the query.

        Each entry is either a compiled column or a quoted table alias,
        depending on whether the backend locks by column or by table.
        """

        def _get_parent_klass_info(klass_info):
            # Yield synthetic klass_info dicts for each inherited parent link
            # so that 'of' paths can address parent models.
            concrete_model = klass_info["model"]._meta.concrete_model
            for parent_model, parent_link in concrete_model._meta.parents.items():
                all_parents = parent_model._meta.all_parents
                yield {
                    "model": parent_model,
                    "field": parent_link,
                    "reverse": False,
                    "select_fields": [
                        select_index
                        for select_index in klass_info["select_fields"]
                        # Selected columns from a model or its parents.
                        if (
                            self.select[select_index][0].target.model == parent_model
                            or self.select[select_index][0].target.model in all_parents
                        )
                    ],
                }

        def _get_first_selected_col_from_model(klass_info):
            """
            Find the first selected column from a model. If it doesn't exist,
            don't lock a model.

            select_fields is filled recursively, so it also contains fields
            from the parent models.
            """
            concrete_model = klass_info["model"]._meta.concrete_model
            for select_index in klass_info["select_fields"]:
                if self.select[select_index][0].target.model == concrete_model:
                    return self.select[select_index][0]

        def _get_field_choices():
            """Yield all allowed field paths in breadth-first search order."""
            queue = collections.deque([(None, self.klass_info)])
            while queue:
                parent_path, klass_info = queue.popleft()
                if parent_path is None:
                    path = []
                    yield "self"
                else:
                    field = klass_info["field"]
                    if klass_info["reverse"]:
                        field = field.remote_field
                    path = parent_path + [field.name]
                    yield LOOKUP_SEP.join(path)
                queue.extend(
                    (path, klass_info)
                    for klass_info in _get_parent_klass_info(klass_info)
                )
                queue.extend(
                    (path, klass_info)
                    for klass_info in klass_info.get("related_klass_infos", [])
                )

        if not self.klass_info:
            return []
        result = []
        invalid_names = []
        for name in self.query.select_for_update_of:
            klass_info = self.klass_info
            if name == "self":
                col = _get_first_selected_col_from_model(klass_info)
            else:
                # Walk the path one part at a time through related and parent
                # klass_infos; an unmatched part invalidates the whole name.
                for part in name.split(LOOKUP_SEP):
                    klass_infos = (
                        *klass_info.get("related_klass_infos", []),
                        *_get_parent_klass_info(klass_info),
                    )
                    for related_klass_info in klass_infos:
                        field = related_klass_info["field"]
                        if related_klass_info["reverse"]:
                            field = field.remote_field
                        if field.name == part:
                            klass_info = related_klass_info
                            break
                    else:
                        klass_info = None
                        break
                if klass_info is None:
                    invalid_names.append(name)
                    continue
                col = _get_first_selected_col_from_model(klass_info)
            if col is not None:
                if self.connection.features.select_for_update_of_column:
                    result.append(self.compile(col)[0])
                else:
                    result.append(self.quote_name_unless_alias(col.alias))
        if invalid_names:
            raise FieldError(
                "Invalid field name(s) given in select_for_update(of=(...)): %s. "
                "Only relational fields followed in the query are allowed. "
                "Choices are: %s."
                % (
                    ", ".join(invalid_names),
                    ", ".join(_get_field_choices()),
                )
            )
        return result
- def get_converters(self, expressions):
- i = 0
- converters = {}
- for expression in expressions:
- if isinstance(expression, ColPairs):
- cols = expression.get_source_expressions()
- cols_converters = self.get_converters(cols)
- for j, (convs, col) in cols_converters.items():
- converters[i + j] = (convs, col)
- i += len(expression)
- elif expression:
- backend_converters = self.connection.ops.get_db_converters(expression)
- field_converters = expression.get_db_converters(self.connection)
- if backend_converters or field_converters:
- converters[i] = (backend_converters + field_converters, expression)
- i += 1
- else:
- i += 1
- return converters
- def apply_converters(self, rows, converters):
- connection = self.connection
- converters = list(converters.items())
- for row in map(list, rows):
- for pos, (convs, expression) in converters:
- value = row[pos]
- for converter in convs:
- value = converter(value, expression, connection)
- row[pos] = value
- yield row
- def has_composite_fields(self, expressions):
- # Check for composite fields before calling the relatively costly
- # composite_fields_to_tuples.
- return any(isinstance(expression, ColPairs) for expression in expressions)
- def composite_fields_to_tuples(self, rows, expressions):
- col_pair_slices = [
- slice(i, i + len(expression))
- for i, expression in enumerate(expressions)
- if isinstance(expression, ColPairs)
- ]
- for row in map(list, rows):
- for pos in col_pair_slices:
- row[pos] = (tuple(row[pos]),)
- yield row
- def results_iter(
- self,
- results=None,
- tuple_expected=False,
- chunked_fetch=False,
- chunk_size=GET_ITERATOR_CHUNK_SIZE,
- ):
- """Return an iterator over the results from executing this query."""
- if results is None:
- results = self.execute_sql(
- MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size
- )
- fields = [s[0] for s in self.select[0 : self.col_count]]
- converters = self.get_converters(fields)
- rows = chain.from_iterable(results)
- if converters:
- rows = self.apply_converters(rows, converters)
- if self.has_composite_fields(fields):
- rows = self.composite_fields_to_tuples(rows, fields)
- if tuple_expected:
- rows = map(tuple, rows)
- return rows
- def has_results(self):
- """
- Backends (e.g. NoSQL) can override this in order to use optimized
- versions of "query has any results."
- """
- return bool(self.execute_sql(SINGLE))
    def execute_sql(
        self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
    ):
        """
        Run the query against the database and return the result(s). The
        return value depends on the value of result_type.

        When result_type is:
        - MULTI: Retrieves all rows using fetchmany(). Wraps in an iterator for
          chunked reads when supported.
        - SINGLE: Retrieves a single row using fetchone().
        - ROW_COUNT: Retrieves the number of rows in the result.
        - CURSOR: Runs the query, and returns the cursor object. It is the
          caller's responsibility to close the cursor.
        """
        result_type = result_type or NO_RESULTS
        try:
            sql, params = self.as_sql()
            if not sql:
                raise EmptyResultSet
        except EmptyResultSet:
            # An empty result set maps to an empty iterator (MULTI) or None.
            if result_type == MULTI:
                return iter([])
            else:
                return
        if chunked_fetch:
            cursor = self.connection.chunked_cursor()
        else:
            cursor = self.connection.cursor()
        try:
            cursor.execute(sql, params)
        except Exception:
            # Might fail for server-side cursors (e.g. connection closed)
            cursor.close()
            raise
        if result_type == ROW_COUNT:
            try:
                return cursor.rowcount
            finally:
                cursor.close()
        if result_type == CURSOR:
            # Give the caller the cursor to process and close.
            return cursor
        if result_type == SINGLE:
            try:
                val = cursor.fetchone()
                if val:
                    # Trim any trailing columns beyond the selected ones.
                    return val[0 : self.col_count]
                return val
            finally:
                # done with the cursor
                cursor.close()
        if result_type == NO_RESULTS:
            cursor.close()
            return
        # MULTI: hand the cursor to a chunked iterator.
        result = cursor_iter(
            cursor,
            self.connection.features.empty_fetchmany_value,
            self.col_count if self.has_extra_select else None,
            chunk_size,
        )
        if not chunked_fetch or not self.connection.features.can_use_chunked_reads:
            # If we are using non-chunked reads, we return the same data
            # structure as normally, but ensure it is all read into memory
            # before going any further. Use chunked_fetch if requested,
            # unless the database doesn't support it.
            return list(result)
        return result
- def as_subquery_condition(self, alias, columns, compiler):
- qn = compiler.quote_name_unless_alias
- qn2 = self.connection.ops.quote_name
- query = self.query.clone()
- for index, select_col in enumerate(query.select):
- lhs_sql, lhs_params = self.compile(select_col)
- rhs = "%s.%s" % (qn(alias), qn2(columns[index]))
- query.where.add(RawSQL("%s = %s" % (lhs_sql, rhs), lhs_params), AND)
- sql, params = query.as_sql(compiler, self.connection)
- return "EXISTS %s" % sql, params
- def explain_query(self):
- result = list(self.execute_sql())
- # Some backends return 1 item tuples with strings, and others return
- # tuples with integers and strings. Flatten them out into strings.
- format_ = self.query.explain_info.format
- output_formatter = json.dumps if format_ and format_.lower() == "json" else str
- for row in result:
- for value in row:
- if not isinstance(value, str):
- yield " ".join([output_formatter(c) for c in value])
- else:
- yield value
class SQLInsertCompiler(SQLCompiler):
    # Fields whose database-generated values the INSERT should return;
    # None by default — presumably set elsewhere when returned columns are
    # requested (TODO confirm against execute_sql()).
    returning_fields = None
    # Extra params consumed by the returning clause, if any.
    returning_params = ()
- def field_as_sql(self, field, get_placeholder, val):
- """
- Take a field and a value intended to be saved on that field, and
- return placeholder SQL and accompanying params. Check for raw values,
- expressions, and fields with get_placeholder() defined in that order.
- When field is None, consider the value raw and use it as the
- placeholder, with no corresponding parameters returned.
- """
- if field is None:
- # A field value of None means the value is raw.
- sql, params = val, []
- elif hasattr(val, "as_sql"):
- # This is an expression, let's compile it.
- sql, params = self.compile(val)
- elif get_placeholder is not None:
- # Some fields (e.g. geo fields) need special munging before
- # they can be inserted.
- sql, params = get_placeholder(val, self, self.connection), [val]
- else:
- # Return the common case for the placeholder
- sql, params = "%s", [val]
- # The following hook is only used by Oracle Spatial, which sometimes
- # needs to yield 'NULL' and [] as its placeholder and params instead
- # of '%s' and [None]. The 'NULL' placeholder is produced earlier by
- # OracleOperations.get_geom_placeholder(). The following line removes
- # the corresponding None parameter. See ticket #10888.
- params = self.connection.ops.modify_insert_params(sql, params)
- return sql, params
- def prepare_value(self, field, value):
- """
- Prepare a value to be used in a query by resolving it if it is an
- expression and otherwise calling the field's get_db_prep_save().
- """
- if hasattr(value, "resolve_expression"):
- value = value.resolve_expression(
- self.query, allow_joins=False, for_save=True
- )
- # Don't allow values containing Col expressions. They refer to
- # existing columns on a row, but in the case of insert the row
- # doesn't exist yet.
- if value.contains_column_references:
- raise ValueError(
- 'Failed to insert expression "%s" on %s. F() expressions '
- "can only be used to update, not to insert." % (value, field)
- )
- if value.contains_aggregate:
- raise FieldError(
- "Aggregate functions are not allowed in this query "
- "(%s=%r)." % (field.name, value)
- )
- if value.contains_over_clause:
- raise FieldError(
- "Window expressions are not allowed in this query (%s=%r)."
- % (field.name, value)
- )
- return field.get_db_prep_save(value, connection=self.connection)
- def pre_save_val(self, field, obj):
- """
- Get the given field's value off the given obj. pre_save() is used for
- things like auto_now on DateTimeField. Skip it if this is a raw query.
- """
- if self.query.raw:
- return getattr(obj, field.attname)
- return field.pre_save(obj, add=True)
    def assemble_as_sql(self, fields, value_rows):
        """
        Take a sequence of N fields and a sequence of M rows of values, and
        generate placeholder SQL and parameters for each field and value.
        Return a pair containing:
        * a sequence of M rows of N SQL placeholder strings, and
        * a sequence of M rows of corresponding parameter values.

        Each placeholder string may contain any number of '%s' interpolation
        strings, and each parameter row will contain exactly as many params
        as the total number of '%s's in the corresponding placeholder row.
        """
        if not value_rows:
            return [], []
        # list of (sql, [params]) tuples for each object to be saved
        # Shape: [n_objs][n_fields][2]
        get_placeholders = [getattr(field, "get_placeholder", None) for field in fields]
        rows_of_fields_as_sql = (
            (
                self.field_as_sql(field, get_placeholder, value)
                for field, get_placeholder, value in zip(fields, get_placeholders, row)
            )
            for row in value_rows
        )
        # tuple like ([sqls], [[params]s]) for each object to be saved
        # Shape: [n_objs][2][n_fields]
        sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
        # Extract separate lists for placeholders and params.
        # Each of these has shape [n_objs][n_fields]
        # Note: this zip consumes the lazy generators above, so field_as_sql()
        # is only invoked here.
        placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
        # Params for each field are still lists, and need to be flattened.
        param_rows = [[p for ps in row for p in ps] for row in param_rows]
        return placeholder_rows, param_rows
def as_sql(self):
    """
    Build the INSERT statement(s) for this query.

    Return a list of (sql, params) pairs: a single pair when the backend
    can bulk-insert or return columns, otherwise one pair per row.
    """
    # We don't need quote_name_unless_alias() here, since these are all
    # going to be column names (so we can avoid the extra overhead).
    qn = self.connection.ops.quote_name
    opts = self.query.get_meta()
    insert_statement = self.connection.ops.insert_statement(
        on_conflict=self.query.on_conflict,
    )
    result = ["%s %s" % (insert_statement, qn(opts.db_table))]
    if fields := list(self.query.fields):
        from django.db.models.expressions import DatabaseDefault

        supports_default_keyword_in_bulk_insert = (
            self.connection.features.supports_default_keyword_in_bulk_insert
        )
        # value_cols is built column-wise: one list of per-object values
        # for each field that survives the DEFAULT-elision pass below.
        value_cols = []
        # Iterate over a copy since `fields` may be mutated in the loop.
        for field in list(fields):
            field_prepare = partial(self.prepare_value, field)
            field_pre_save = partial(self.pre_save_val, field)
            field_values = [
                field_prepare(field_pre_save(obj)) for obj in self.query.objs
            ]
            if not field.has_db_default():
                value_cols.append(field_values)
                continue
            # If all values are DEFAULT don't include the field and its
            # values in the query as they are redundant and could prevent
            # optimizations. This cannot be done if we're dealing with the
            # last field as INSERT statements require at least one.
            if len(fields) > 1 and all(
                isinstance(value, DatabaseDefault) for value in field_values
            ):
                fields.remove(field)
                continue
            if supports_default_keyword_in_bulk_insert:
                value_cols.append(field_values)
                continue
            # If the field cannot be excluded from the INSERT for the
            # reasons listed above and the backend doesn't support the
            # DEFAULT keyword each values must be expanded into their
            # underlying expressions.
            prepared_db_default = field_prepare(field.db_default)
            field_values = [
                (
                    prepared_db_default
                    if isinstance(value, DatabaseDefault)
                    else value
                )
                for value in field_values
            ]
            value_cols.append(field_values)
        # Transpose the column-wise values into row-wise value tuples.
        value_rows = list(zip(*value_cols))
        result.append("(%s)" % ", ".join(qn(f.column) for f in fields))
    else:
        # No fields were specified but an INSERT statement must include at
        # least one column. This can only happen when the model's primary
        # key is composed of a single auto-field so default to including it
        # as a placeholder to generate a valid INSERT statement.
        value_rows = [
            [self.connection.ops.pk_default_value()] for _ in self.query.objs
        ]
        fields = [None]
        result.append("(%s)" % qn(opts.pk.column))

    # Currently the backends just accept values when generating bulk
    # queries and generate their own placeholders. Doing that isn't
    # necessary and it should be possible to use placeholders and
    # expressions in bulk inserts too.
    can_bulk = (
        not self.returning_fields and self.connection.features.has_bulk_insert
    )
    placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
    on_conflict_suffix_sql = self.connection.ops.on_conflict_suffix_sql(
        fields,
        self.query.on_conflict,
        (f.column for f in self.query.update_fields),
        (f.column for f in self.query.unique_fields),
    )
    if (
        self.returning_fields
        and self.connection.features.can_return_columns_from_insert
    ):
        if self.connection.features.can_return_rows_from_bulk_insert:
            result.append(
                self.connection.ops.bulk_insert_sql(fields, placeholder_rows)
            )
            params = param_rows
        else:
            # Single-row insert: use the first (only) placeholder row.
            result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
            params = [param_rows[0]]
        if on_conflict_suffix_sql:
            result.append(on_conflict_suffix_sql)
        # Skip empty r_sql to allow subclasses to customize behavior for
        # 3rd party backends. Refs #19096.
        r_sql, self.returning_params = self.connection.ops.return_insert_columns(
            self.returning_fields
        )
        if r_sql:
            result.append(r_sql)
            params += [self.returning_params]
        return [(" ".join(result), tuple(chain.from_iterable(params)))]

    if can_bulk:
        # One statement covering all rows.
        result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
        if on_conflict_suffix_sql:
            result.append(on_conflict_suffix_sql)
        return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
    else:
        if on_conflict_suffix_sql:
            result.append(on_conflict_suffix_sql)
        # No bulk support: emit one INSERT per row.
        return [
            (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
            for p, vals in zip(placeholder_rows, param_rows)
        ]
def execute_sql(self, returning_fields=None):
    """
    Run the INSERT statement(s) and return values for `returning_fields`.

    Return [] when no returning fields were requested or the backend
    cannot retrieve them; otherwise a list of row tuples (one per
    inserted object) run through any registered field converters.
    """
    # Multiple objects with returning fields is only valid when the
    # backend can return rows from a bulk insert.
    assert not (
        returning_fields
        and len(self.query.objs) != 1
        and not self.connection.features.can_return_rows_from_bulk_insert
    )
    opts = self.query.get_meta()
    self.returning_fields = returning_fields
    # Default so get_converters() below sees an empty list when no branch
    # populates cols.
    cols = []
    with self.connection.cursor() as cursor:
        for sql, params in self.as_sql():
            cursor.execute(sql, params)
        if not self.returning_fields:
            return []
        if (
            self.connection.features.can_return_rows_from_bulk_insert
            and len(self.query.objs) > 1
        ):
            # Bulk path: backend hands back one row per inserted object.
            rows = self.connection.ops.fetch_returned_insert_rows(cursor)
            cols = [field.get_col(opts.db_table) for field in self.returning_fields]
        elif self.connection.features.can_return_columns_from_insert:
            assert len(self.query.objs) == 1
            rows = [
                self.connection.ops.fetch_returned_insert_columns(
                    cursor,
                    self.returning_params,
                )
            ]
            cols = [field.get_col(opts.db_table) for field in self.returning_fields]
        elif returning_fields and isinstance(
            returning_field := returning_fields[0], AutoField
        ):
            # Fallback: only an auto-field's value can be recovered, via
            # the backend's last_insert_id().
            cols = [returning_field.get_col(opts.db_table)]
            rows = [
                (
                    self.connection.ops.last_insert_id(
                        cursor,
                        opts.db_table,
                        returning_field.column,
                    ),
                )
            ]
        else:
            # Backend doesn't support returning fields and no auto-field
            # that can be retrieved from `last_insert_id` was specified.
            return []
    converters = self.get_converters(cols)
    if converters:
        rows = self.apply_converters(rows, converters)
    return list(rows)
class SQLDeleteCompiler(SQLCompiler):
    @cached_property
    def single_alias(self):
        """True if exactly one table alias is active in the query."""
        # Make sure the base table has been added to the alias map first.
        self.query.get_initial_alias()
        active = [
            alias
            for alias in self.query.alias_map
            if self.query.alias_refcount[alias] > 0
        ]
        return len(active) == 1

    @classmethod
    def _expr_refs_base_model(cls, expr, base_model):
        """Recursively check whether expr contains a subquery on base_model."""
        if isinstance(expr, Query):
            return expr.model == base_model
        if not hasattr(expr, "get_source_expressions"):
            return False
        return any(
            cls._expr_refs_base_model(source, base_model)
            for source in expr.get_source_expressions()
        )

    @cached_property
    def contains_self_reference_subquery(self):
        """True if an annotation or filter subqueries the deleted model."""
        candidates = chain(
            self.query.annotations.values(), self.query.where.children
        )
        return any(
            self._expr_refs_base_model(expr, self.query.model)
            for expr in candidates
        )

    def _as_sql(self, query):
        """Compile query into a plain single-table DELETE statement."""
        base_sql = "DELETE FROM %s" % self.quote_name_unless_alias(
            query.base_table
        )
        try:
            where_sql, where_params = self.compile(query.where)
        except FullResultSet:
            # No WHERE clause: delete every row.
            return base_sql, ()
        return "%s WHERE %s" % (base_sql, where_sql), tuple(where_params)

    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        can_delete_directly = self.single_alias and (
            self.connection.features.delete_can_self_reference_subquery
            or not self.contains_self_reference_subquery
        )
        if can_delete_directly:
            return self._as_sql(self.query)
        # Otherwise fall back to DELETE ... WHERE pk IN (<subquery>).
        innerq = self.query.clone()
        innerq.__class__ = Query
        innerq.clear_select_clause()
        pk = self.query.model._meta.pk
        innerq.select = [pk.get_col(self.query.get_initial_alias())]
        outerq = Query(self.query.model)
        if not self.connection.features.update_can_self_select:
            # Force the materialization of the inner query to allow reference
            # to the target table on MySQL.
            inner_sql, inner_params = innerq.get_compiler(
                connection=self.connection
            ).as_sql()
            innerq = RawSQL("SELECT * FROM (%s) subquery" % inner_sql, inner_params)
        outerq.add_filter("pk__in", innerq)
        return self._as_sql(outerq)
class SQLUpdateCompiler(SQLCompiler):
    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        self.pre_sql_setup()
        if not self.query.values:
            # Nothing to update; callers treat ("", ()) as a no-op.
            return "", ()
        qn = self.quote_name_unless_alias
        values, update_params = [], []
        for field, model, val in self.query.values:
            if hasattr(val, "resolve_expression"):
                val = val.resolve_expression(
                    self.query, allow_joins=False, for_save=True
                )
                if val.contains_aggregate:
                    raise FieldError(
                        "Aggregate functions are not allowed in this query "
                        "(%s=%r)." % (field.name, val)
                    )
                if val.contains_over_clause:
                    raise FieldError(
                        "Window expressions are not allowed in this query "
                        "(%s=%r)." % (field.name, val)
                    )
                if isinstance(val, ColPairs):
                    raise FieldError(
                        "Composite primary keys expressions are not allowed "
                        "in this query (%s=F('pk'))." % field.name
                    )
            elif hasattr(val, "prepare_database_save"):
                # Model instance assigned to a relation: convert it to the
                # related field's value.
                if field.remote_field:
                    val = val.prepare_database_save(field)
                else:
                    raise TypeError(
                        "Tried to update field %s with a model instance, %r. "
                        "Use a value compatible with %s."
                        % (field, val, field.__class__.__name__)
                    )
            else:
                val = field.get_db_prep_save(val, connection=self.connection)

            # Getting the placeholder for the field.
            if hasattr(field, "get_placeholder"):
                placeholder = field.get_placeholder(val, self, self.connection)
            else:
                placeholder = "%s"
            name = field.column
            if hasattr(val, "as_sql"):
                # Expression values are compiled inline into the SET clause.
                sql, params = self.compile(val)
                values.append("%s = %s" % (qn(name), placeholder % sql))
                update_params.extend(params)
            elif val is not None:
                values.append("%s = %s" % (qn(name), placeholder))
                update_params.append(val)
            else:
                # Emit NULL directly rather than binding None as a parameter.
                values.append("%s = NULL" % qn(name))
        table = self.query.base_table
        result = [
            "UPDATE %s SET" % qn(table),
            ", ".join(values),
        ]
        try:
            where, params = self.compile(self.query.where)
        except FullResultSet:
            params = []
        else:
            result.append("WHERE %s" % where)
        # SET-clause params precede WHERE-clause params in the statement.
        return " ".join(result), tuple(update_params + params)

    def execute_sql(self, result_type):
        """
        Execute the specified update. Return the number of rows affected by
        the primary update query. The "primary update query" is the first
        non-empty query that is executed. Row counts for any subsequent,
        related queries are not available.
        """
        row_count = super().execute_sql(result_type)
        # row_count is None when the main query was empty/skipped.
        is_empty = row_count is None
        row_count = row_count or 0
        for query in self.query.get_related_updates():
            # If the result_type is NO_RESULTS then the aux_row_count is None.
            aux_row_count = query.get_compiler(self.using).execute_sql(result_type)
            if is_empty and aux_row_count:
                # Returns the row count for any related updates as the number
                # of rows updated.
                row_count = aux_row_count
                is_empty = False
        return row_count

    def pre_sql_setup(self):
        """
        If the update depends on results from other tables, munge the "where"
        conditions to match the format required for (portable) SQL updates.

        If multiple updates are required, pull out the id values to update at
        this point so that they don't change as a result of the progressive
        updates.
        """
        refcounts_before = self.query.alias_refcount.copy()
        # Ensure base table is in the query
        self.query.get_initial_alias()
        count = self.query.count_active_tables()
        if not self.query.related_updates and count == 1:
            # Single-table update with no parent updates: nothing to munge.
            return
        # Build a SELECT of the pks to update, as a plain Query clone.
        query = self.query.chain(klass=Query)
        query.select_related = False
        query.clear_ordering(force=True)
        query.extra = {}
        query.select = []
        meta = query.get_meta()
        fields = [meta.pk.name]
        related_ids_index = []
        for related in self.query.related_updates:
            if all(
                path.join_field.primary_key for path in meta.get_path_to_parent(related)
            ):
                # If a primary key chain exists to the targeted related update,
                # then the meta.pk value can be used for it.
                related_ids_index.append((related, 0))
            else:
                # This branch will only be reached when updating a field of an
                # ancestor that is not part of the primary key chain of a MTI
                # tree.
                related_ids_index.append((related, len(fields)))
                fields.append(related._meta.pk.name)
        query.add_fields(fields)
        super().pre_sql_setup()

        is_composite_pk = meta.is_composite_pk
        must_pre_select = (
            count > 1 and not self.connection.features.update_can_self_select
        )

        # Now we adjust the current query: reset the where clause and get rid
        # of all the tables we don't need (since they're in the sub-select).
        self.query.clear_where()
        if self.query.related_updates or must_pre_select:
            # Either we're using the idents in multiple update queries (so
            # don't want them to change), or the db backend doesn't support
            # selecting from the updating table (e.g. MySQL).
            idents = []
            related_ids = collections.defaultdict(list)
            for rows in query.get_compiler(self.using).execute_sql(MULTI):
                # Composite pks arrive as whole rows; simple pks as column 0.
                pks = [row if is_composite_pk else row[0] for row in rows]
                idents.extend(pks)
                for parent, index in related_ids_index:
                    related_ids[parent].extend(r[index] for r in rows)
            self.query.add_filter("pk__in", idents)
            self.query.related_ids = related_ids
        else:
            # The fast path. Filters and updates in one query.
            self.query.add_filter("pk__in", query)
        self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
    def as_sql(self):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.
        """
        # Compile every selected annotation and collect its SQL and params.
        annotation_sqls = []
        params = []
        for annotation in self.query.annotation_select.values():
            ann_sql, ann_params = self.compile(annotation)
            ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params)
            annotation_sqls.append(ann_sql)
            params.extend(ann_params)
        self.col_count = len(self.query.annotation_select)
        # Compile the wrapped query and select the aggregates from it as a
        # subquery.
        inner_query_sql, inner_query_params = self.query.inner_query.get_compiler(
            self.using,
            elide_empty=self.elide_empty,
        ).as_sql(with_col_aliases=True)
        sql = "SELECT %s FROM (%s) subquery" % (
            ", ".join(annotation_sqls),
            inner_query_sql,
        )
        return sql, tuple(params) + inner_query_params
def cursor_iter(cursor, sentinel, col_count, itersize):
    """
    Yield blocks of rows from a cursor and ensure the cursor is closed when
    done.
    """
    try:
        while True:
            rows = cursor.fetchmany(itersize)
            # The sentinel value marks an exhausted cursor.
            if rows == sentinel:
                break
            if col_count is None:
                yield rows
            else:
                # Trim each row to the first col_count columns.
                yield [row[:col_count] for row in rows]
    finally:
        cursor.close()
|