plain.models 0.49.2__py3-none-any.whl → 0.50.0__py3-none-any.whl
This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- plain/models/CHANGELOG.md +13 -0
- plain/models/aggregates.py +42 -19
- plain/models/backends/base/base.py +125 -105
- plain/models/backends/base/client.py +11 -3
- plain/models/backends/base/creation.py +22 -12
- plain/models/backends/base/features.py +10 -4
- plain/models/backends/base/introspection.py +29 -16
- plain/models/backends/base/operations.py +187 -91
- plain/models/backends/base/schema.py +267 -165
- plain/models/backends/base/validation.py +12 -3
- plain/models/backends/ddl_references.py +85 -43
- plain/models/backends/mysql/base.py +29 -26
- plain/models/backends/mysql/client.py +7 -2
- plain/models/backends/mysql/compiler.py +12 -3
- plain/models/backends/mysql/creation.py +5 -2
- plain/models/backends/mysql/features.py +24 -22
- plain/models/backends/mysql/introspection.py +22 -13
- plain/models/backends/mysql/operations.py +106 -39
- plain/models/backends/mysql/schema.py +48 -24
- plain/models/backends/mysql/validation.py +13 -6
- plain/models/backends/postgresql/base.py +41 -34
- plain/models/backends/postgresql/client.py +7 -2
- plain/models/backends/postgresql/creation.py +10 -5
- plain/models/backends/postgresql/introspection.py +15 -8
- plain/models/backends/postgresql/operations.py +109 -42
- plain/models/backends/postgresql/schema.py +85 -46
- plain/models/backends/sqlite3/_functions.py +151 -115
- plain/models/backends/sqlite3/base.py +37 -23
- plain/models/backends/sqlite3/client.py +7 -1
- plain/models/backends/sqlite3/creation.py +9 -5
- plain/models/backends/sqlite3/features.py +5 -3
- plain/models/backends/sqlite3/introspection.py +32 -16
- plain/models/backends/sqlite3/operations.py +125 -42
- plain/models/backends/sqlite3/schema.py +82 -58
- plain/models/backends/utils.py +52 -29
- plain/models/backups/cli.py +8 -6
- plain/models/backups/clients.py +16 -7
- plain/models/backups/core.py +24 -13
- plain/models/base.py +113 -74
- plain/models/cli.py +94 -63
- plain/models/config.py +1 -1
- plain/models/connections.py +23 -7
- plain/models/constraints.py +65 -47
- plain/models/database_url.py +1 -1
- plain/models/db.py +6 -2
- plain/models/deletion.py +66 -43
- plain/models/entrypoints.py +1 -1
- plain/models/enums.py +22 -11
- plain/models/exceptions.py +23 -8
- plain/models/expressions.py +440 -257
- plain/models/fields/__init__.py +253 -202
- plain/models/fields/json.py +120 -54
- plain/models/fields/mixins.py +12 -8
- plain/models/fields/related.py +284 -252
- plain/models/fields/related_descriptors.py +31 -22
- plain/models/fields/related_lookups.py +23 -11
- plain/models/fields/related_managers.py +81 -47
- plain/models/fields/reverse_related.py +58 -55
- plain/models/forms.py +89 -63
- plain/models/functions/comparison.py +71 -18
- plain/models/functions/datetime.py +79 -29
- plain/models/functions/math.py +43 -10
- plain/models/functions/mixins.py +24 -7
- plain/models/functions/text.py +104 -25
- plain/models/functions/window.py +12 -6
- plain/models/indexes.py +52 -28
- plain/models/lookups.py +228 -153
- plain/models/migrations/autodetector.py +86 -43
- plain/models/migrations/exceptions.py +7 -3
- plain/models/migrations/executor.py +33 -7
- plain/models/migrations/graph.py +79 -50
- plain/models/migrations/loader.py +45 -22
- plain/models/migrations/migration.py +23 -18
- plain/models/migrations/operations/base.py +37 -19
- plain/models/migrations/operations/fields.py +89 -42
- plain/models/migrations/operations/models.py +245 -143
- plain/models/migrations/operations/special.py +82 -25
- plain/models/migrations/optimizer.py +7 -2
- plain/models/migrations/questioner.py +58 -31
- plain/models/migrations/recorder.py +18 -11
- plain/models/migrations/serializer.py +50 -39
- plain/models/migrations/state.py +220 -133
- plain/models/migrations/utils.py +29 -13
- plain/models/migrations/writer.py +17 -14
- plain/models/options.py +63 -56
- plain/models/otel.py +16 -6
- plain/models/preflight.py +35 -12
- plain/models/query.py +323 -228
- plain/models/query_utils.py +93 -58
- plain/models/registry.py +34 -16
- plain/models/sql/compiler.py +146 -97
- plain/models/sql/datastructures.py +38 -25
- plain/models/sql/query.py +255 -169
- plain/models/sql/subqueries.py +32 -21
- plain/models/sql/where.py +54 -29
- plain/models/test/pytest.py +15 -11
- plain/models/test/utils.py +4 -2
- plain/models/transaction.py +20 -7
- plain/models/utils.py +13 -5
- {plain_models-0.49.2.dist-info → plain_models-0.50.0.dist-info}/METADATA +1 -1
- plain_models-0.50.0.dist-info/RECORD +122 -0
- plain_models-0.49.2.dist-info/RECORD +0 -122
- {plain_models-0.49.2.dist-info → plain_models-0.50.0.dist-info}/WHEEL +0 -0
- {plain_models-0.49.2.dist-info → plain_models-0.50.0.dist-info}/entry_points.txt +0 -0
- {plain_models-0.49.2.dist-info → plain_models-0.50.0.dist-info}/licenses/LICENSE +0 -0
plain/models/sql/compiler.py
CHANGED
@@ -1,8 +1,12 @@
+from __future__ import annotations
+
 import collections
 import json
 import re
+from collections.abc import Generator, Iterable
 from functools import cached_property, partial
 from itertools import chain
+from typing import TYPE_CHECKING, Any
 
 from plain.models.constants import LOOKUP_SEP
 from plain.models.db import DatabaseError, NotSupportedError
@@ -25,13 +29,18 @@ from plain.models.transaction import TransactionManagementError
 from plain.utils.hashable import make_hashable
 from plain.utils.regex_helper import _lazy_re_compile
 
+if TYPE_CHECKING:
+    from plain.models.backends.base.base import BaseDatabaseWrapper
+
 
 class PositionRef(Ref):
-    def __init__(self, ordinal, refs, source):
+    def __init__(self, ordinal: int, refs: str, source: Any):
         self.ordinal = ordinal
         super().__init__(refs, source)
 
-    def as_sql(
+    def as_sql(
+        self, compiler: SQLCompiler, connection: BaseDatabaseWrapper
+    ) -> tuple[str, tuple]:
         return str(self.ordinal), ()
 
 
@@ -42,30 +51,32 @@ class SQLCompiler:
         re.MULTILINE | re.DOTALL,
     )
 
-    def __init__(
+    def __init__(
+        self, query: Query, connection: BaseDatabaseWrapper, elide_empty: bool = True
+    ):
         self.query = query
         self.connection = connection
         # Some queries, e.g. coalesced aggregation, need to be executed even if
         # they would return an empty result set.
         self.elide_empty = elide_empty
-        self.quote_cache = {"*": "*"}
+        self.quote_cache: dict[str, str] = {"*": "*"}
         # The select, klass_info, and annotations are needed by QuerySet.iterator()
         # these are set as a side-effect of executing the query. Note that we calculate
         # separately a list of extra select columns needed for grammatical correctness
         # of the query, but these columns are not included in self.select.
-        self.select = None
-        self.annotation_col_map = None
-        self.klass_info = None
-        self._meta_ordering = None
+        self.select: list[tuple[Any, tuple[str, tuple], str | None]] | None = None
+        self.annotation_col_map: dict[str, int] | None = None
+        self.klass_info: dict[str, Any] | None = None
+        self._meta_ordering: list[str] | None = None
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return (
             f"<{self.__class__.__qualname__} "
             f"model={self.query.model.__qualname__} "
             f"connection={self.connection!r}>"
         )
 
-    def setup_query(self, with_col_aliases=False):
+    def setup_query(self, with_col_aliases: bool = False) -> None:
         if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
             self.query.get_initial_alias()
         self.select, self.klass_info, self.annotation_col_map = self.get_select(
@@ -73,7 +84,9 @@ class SQLCompiler:
         )
         self.col_count = len(self.select)
 
-    def pre_sql_setup(
+    def pre_sql_setup(
+        self, with_col_aliases: bool = False
+    ) -> tuple[list[Any], list[Any], list[tuple[str, tuple]]]:
         """
         Do any necessary class setup immediately prior to producing SQL. This
         is for things that can't necessarily be done in __init__ because we
@@ -81,7 +94,7 @@ class SQLCompiler:
         """
         self.setup_query(with_col_aliases=with_col_aliases)
         order_by = self.get_order_by()
-        self.where, self.having, self.qualify = self.query.where.split_having_qualify(
+        self.where, self.having, self.qualify = self.query.where.split_having_qualify(  # type: ignore[attr-defined]
             must_group_by=self.query.group_by is not None
         )
         extra_select = self.get_extra_select(order_by, self.select)
@@ -89,7 +102,9 @@ class SQLCompiler:
         group_by = self.get_group_by(self.select + extra_select, order_by)
         return extra_select, order_by, group_by
 
-    def get_group_by(
+    def get_group_by(
+        self, select: list[Any], order_by: list[Any]
+    ) -> list[tuple[str, tuple]]:
         """
         Return a list of 2-tuples of form (sql, params).
 
@@ -192,7 +207,7 @@ class SQLCompiler:
             seen.add((sql, params_hash))
         return result
 
-    def collapse_group_by(self, expressions, having):
+    def collapse_group_by(self, expressions: list[Any], having: list[Any]) -> list[Any]:
         # If the database supports group by functional dependence reduction,
         # then the expressions can be reduced to the set of selected table
         # primary keys as all other columns are functionally dependent on them.
@@ -222,7 +237,13 @@ class SQLCompiler:
             ]
         return expressions
 
-    def get_select(
+    def get_select(
+        self, with_col_aliases: bool = False
+    ) -> tuple[
+        list[tuple[Any, tuple[str, tuple], str | None]],
+        dict[str, Any] | None,
+        dict[str, int],
+    ]:
         """
         Return three values:
         - a list of 3-tuples of (expression, (sql, params), alias)
@@ -273,7 +294,7 @@ class SQLCompiler:
 
         if self.query.select_related:
             related_klass_infos = self.get_related_selections(select, select_mask)
-            klass_info["related_klass_infos"] = related_klass_infos
+            klass_info["related_klass_infos"] = related_klass_infos  # type: ignore[index]
 
         ret = []
         col_idx = 1
@@ -299,7 +320,7 @@ class SQLCompiler:
             ret.append((col, (sql, params), alias))
         return ret, klass_info, annotations
 
-    def _order_by_pairs(self):
+    def _order_by_pairs(self) -> Generator[tuple[OrderBy, bool], None, None]:
         if self.query.extra_order_by:
             ordering = self.query.extra_order_by
         elif not self.query.default_ordering:
@@ -334,24 +355,24 @@ class SQLCompiler:
                 if not self.query.standard_ordering:
                     field = field.copy()
                     field.reverse_ordering()
-                select_ref = selected_exprs.get(field.expression)
+                select_ref = selected_exprs.get(field.expression)  # type: ignore[attr-defined]
                 if select_ref or (
-                    isinstance(field.expression, F)
-                    and (select_ref := selected_exprs.get(field.expression.name))
+                    isinstance(field.expression, F)  # type: ignore[attr-defined]
+                    and (select_ref := selected_exprs.get(field.expression.name))  # type: ignore[attr-defined]
                 ):
                     # Emulation of NULLS (FIRST|LAST) cannot be combined with
                     # the usage of ordering by position.
                     if (
-                        field.nulls_first is None and field.nulls_last is None
+                        field.nulls_first is None and field.nulls_last is None  # type: ignore[attr-defined]
                     ) or self.connection.features.supports_order_by_nulls_modifier:
                         field = field.copy()
-                        field.expression = select_ref
+                        field.expression = select_ref  # type: ignore[assignment]
                     # Alias collisions are not possible when dealing with
                     # combined queries so fallback to it if emulation of NULLS
                     # handling is required.
                     elif self.query.combinator:
                         field = field.copy()
-                        field.expression = Ref(select_ref.refs, select_ref.source)
+                        field.expression = Ref(select_ref.refs, select_ref.source)  # type: ignore[assignment]
                     yield field, select_ref is not None
                     continue
             if field == "?":  # random
@@ -427,7 +448,7 @@ class SQLCompiler:
                 default_order=default_order,
             )
 
-    def get_order_by(self):
+    def get_order_by(self) -> list[tuple[Any, tuple[str, tuple, bool]]]:
         """
         Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for
         the ORDER BY clause.
@@ -488,7 +509,9 @@ class SQLCompiler:
             result.append((resolved, (sql, params, is_ref)))
         return result
 
-    def get_extra_select(
+    def get_extra_select(
+        self, order_by: list[Any], select: list[Any]
+    ) -> list[tuple[Any, tuple[str, tuple], None]]:
         extra_select = []
         if self.query.distinct and not self.query.distinct_fields:
             select_sql = [t[1] for t in select]
@@ -498,7 +521,7 @@ class SQLCompiler:
                     extra_select.append((expr, (without_ordering, params), None))
         return extra_select
 
-    def quote_name_unless_alias(self, name):
+    def quote_name_unless_alias(self, name: str) -> str:
         """
         A wrapper around connection.ops.quote_name that doesn't quote aliases
         for table names. This avoids problems with some SQL dialects that treat
@@ -520,7 +543,7 @@ class SQLCompiler:
         self.quote_cache[name] = r
         return r
 
-    def compile(self, node):
+    def compile(self, node: Any) -> tuple[str, tuple]:
         vendor_impl = getattr(node, "as_" + self.connection.vendor, None)
         if vendor_impl:
             sql, params = vendor_impl(self, self.connection)
@@ -528,7 +551,7 @@ class SQLCompiler:
             sql, params = node.as_sql(self, self.connection)
         return sql, params
 
-    def get_combinator_sql(self, combinator, all):
+    def get_combinator_sql(self, combinator: str, all: bool) -> tuple[list[str], list]:
         features = self.connection.features
         compilers = [
             query.get_compiler(elide_empty=self.elide_empty)
@@ -608,7 +631,7 @@ class SQLCompiler:
             params.extend(part)
         return result, params
 
-    def get_qualify_sql(self):
+    def get_qualify_sql(self) -> tuple[list[str], list]:
         where_parts = []
         if self.where:
             where_parts.append(self.where)
@@ -628,7 +651,7 @@ class SQLCompiler:
         qual_aliases = set()
         replacements = {}
 
-        def collect_replacements(expressions):
+        def collect_replacements(expressions: list[Any]) -> None:
             while expressions:
                 expr = expressions.pop()
                 if expr in replacements:
@@ -702,7 +725,9 @@ class SQLCompiler:
             result.extend(["ORDER BY", ", ".join(ordering_sqls)])
         return result, params
 
-    def as_sql(
+    def as_sql(
+        self, with_limits: bool = True, with_col_aliases: bool = False
+    ) -> tuple[str, tuple]:
         """
         Create the SQL for this query. Return the SQL string and list of
         parameters.
@@ -921,7 +946,9 @@ class SQLCompiler:
             # Finally do cleanup - get rid of the joins we created above.
             self.query.reset_refcounts(refcounts_before)
 
-    def get_default_columns(
+    def get_default_columns(
+        self, select_mask: Any, start_alias: str | None = None, opts: Any = None
+    ) -> list[Any]:
         """
         Compute the default columns for selecting every field in the base
         model. Will sometimes be called to pull in related models (e.g. via
@@ -952,7 +979,7 @@ class SQLCompiler:
             result.append(column)
         return result
 
-    def get_distinct(self):
+    def get_distinct(self) -> tuple[list[str], list]:
         """
         Return a quoted list of fields to use in DISTINCT ON part of the query.
 
@@ -979,8 +1006,13 @@ class SQLCompiler:
         return result, params
 
     def find_ordering_name(
-        self,
-
+        self,
+        name: str,
+        opts: Any,
+        alias: str | None = None,
+        default_order: str = "ASC",
+        already_seen: set | None = None,
+    ) -> list[tuple[OrderBy, bool]]:
         """
         Return the table alias (the name might be ambiguous, the alias will
         not be) and column name for ordering by the given 'name' parameter.
@@ -1042,7 +1074,9 @@ class SQLCompiler:
             for t in targets
         ]
 
-    def _setup_joins(
+    def _setup_joins(
+        self, pieces: list[str], opts: Any, alias: str | None
+    ) -> tuple[Any, Any, str, list, Any, Any, Any]:
         """
         Helper method for get_order_by() and get_distinct().
 
@@ -1057,7 +1091,7 @@ class SQLCompiler:
             alias = joins[-1]
         return field, targets, alias, joins, path, opts, transform_function
 
-    def get_from_clause(self):
+    def get_from_clause(self) -> tuple[list[str], list]:
         """
         Return a list of strings that are joined together to go after the
         "FROM" part of the query, as well as a list any extra parameters that
@@ -1096,14 +1130,14 @@ class SQLCompiler:
 
     def get_related_selections(
         self,
-        select,
-        select_mask,
-        opts=None,
-        root_alias=None,
-        cur_depth=1,
-        requested=None,
-        restricted=None,
-    ):
+        select: list[Any],
+        select_mask: Any,
+        opts: Any = None,
+        root_alias: str | None = None,
+        cur_depth: int = 1,
+        requested: dict | None = None,
+        restricted: bool | None = None,
+    ) -> list[dict[str, Any]]:
         """
         Fill in the information needed for a select_related query. The current
        depth is measured as the number of connections away from the root model
@@ -1111,11 +1145,11 @@ class SQLCompiler:
         connections to the root model).
         """
 
-        def _get_field_choices():
-            direct_choices = (f.name for f in opts.fields if f.is_relation)
+        def _get_field_choices() -> chain:
+            direct_choices = (f.name for f in opts.fields if f.is_relation)  # type: ignore[attr-defined]
             reverse_choices = (
                 f.field.related_query_name()
-                for f in opts.related_objects
+                for f in opts.related_objects  # type: ignore[attr-defined]
                 if f.field.primary_key
             )
             return chain(
@@ -1139,18 +1173,20 @@ class SQLCompiler:
         if restricted:
             requested = self.query.select_related
 
-        def get_related_klass_infos(
+        def get_related_klass_infos(
+            klass_info: dict, related_klass_infos: list
+        ) -> None:
             klass_info["related_klass_infos"] = related_klass_infos
 
         for f in opts.fields:
             fields_found.add(f.name)
 
             if restricted:
-                next = requested.get(f.name, {})
+                next = requested.get(f.name, {})  # type: ignore[union-attr]
                 if not f.is_relation:
                     # If a non-related field is used like a relation,
                     # or if a single non-relational field is given.
-                    if next or f.name in requested:
+                    if next or f.name in requested:  # type: ignore[operator]
                         raise FieldError(
                             "Non-relational field given in select_related: '{}'. "
                             "Choices are: {}".format(
@@ -1161,7 +1197,7 @@ class SQLCompiler:
             else:
                 next = False
 
-            if not select_related_descend(f, restricted, requested, select_mask):
+            if not select_related_descend(f, restricted, requested, select_mask):  # type: ignore[arg-type]
                 continue
             related_select_mask = select_mask.get(f) or {}
             klass_info = {
@@ -1203,10 +1239,11 @@ class SQLCompiler:
         ]
         for related_field, model in related_fields:
             related_select_mask = select_mask.get(related_field) or {}
+            # type: ignore[arg-type]
             if not select_related_descend(
                 related_field,
                 restricted,
-                requested,
+                requested,  # type: ignore[arg-type]
                 related_select_mask,
                 reverse=True,
             ):
@@ -1237,7 +1274,7 @@ class SQLCompiler:
                 select_fields.append(len(select))
                 select.append((col, None))
             klass_info["select_fields"] = select_fields
-            next = requested.get(related_field.related_query_name(), {})
+            next = requested.get(related_field.related_query_name(), {})  # type: ignore[union-attr]
             next_klass_infos = self.get_related_selections(
                 select,
                 related_select_mask,
@@ -1249,18 +1286,18 @@ class SQLCompiler:
             )
             get_related_klass_infos(klass_info, next_klass_infos)
 
-        def local_setter(final_field, obj, from_obj):
+        def local_setter(final_field: Any, obj: Any, from_obj: Any) -> None:
             # Set a reverse fk object when relation is non-empty.
             if from_obj:
                 final_field.remote_field.set_cached_value(from_obj, obj)
 
-        def local_setter_noop(obj, from_obj):
+        def local_setter_noop(obj: Any, from_obj: Any) -> None:
             pass
 
-        def remote_setter(name, obj, from_obj):
+        def remote_setter(name: str, obj: Any, from_obj: Any) -> None:
             setattr(from_obj, name, obj)
 
-        for name in list(requested):
+        for name in list(requested):  # type: ignore[arg-type]
             # Filtered relations work only on the topmost level.
             if cur_depth > 1:
                 break
@@ -1294,7 +1331,7 @@ class SQLCompiler:
                 select_fields.append(len(select))
                 select.append((col, None))
             klass_info["select_fields"] = select_fields
-            next_requested = requested.get(name, {})
+            next_requested = requested.get(name, {})  # type: ignore[union-attr]
             next_klass_infos = self.get_related_selections(
                 select,
                 field_select_mask,
@@ -1305,7 +1342,7 @@ class SQLCompiler:
                 restricted=restricted,
             )
             get_related_klass_infos(klass_info, next_klass_infos)
-        fields_not_found = set(requested).difference(fields_found)
+        fields_not_found = set(requested).difference(fields_found)  # type: ignore[arg-type]
         if fields_not_found:
             invalid_fields = (f"'{s}'" for s in fields_not_found)
             raise FieldError(
@@ -1317,13 +1354,13 @@ class SQLCompiler:
             )
         return related_klass_infos
 
-    def get_select_for_update_of_arguments(self):
+    def get_select_for_update_of_arguments(self) -> list[str]:
         """
         Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
         the query.
         """
 
-        def _get_first_selected_col_from_model(klass_info):
+        def _get_first_selected_col_from_model(klass_info: dict) -> Any | None:
             """
             Find the first selected column from a model. If it doesn't exist,
             don't lock a model.
@@ -1333,10 +1370,11 @@ class SQLCompiler:
             """
             model = klass_info["model"]
             for select_index in klass_info["select_fields"]:
-                if self.select[select_index][0].target.model == model:
-                    return self.select[select_index][0]
+                if self.select[select_index][0].target.model == model:  # type: ignore[index]
+                    return self.select[select_index][0]  # type: ignore[index]
+            return None
 
-        def _get_field_choices():
+        def _get_field_choices() -> Generator[str, None, None]:
             """Yield all allowed field paths in breadth-first search order."""
             queue = collections.deque([(None, self.klass_info)])
             while queue:
@@ -1396,7 +1434,7 @@ class SQLCompiler:
             )
         return result
 
-    def get_converters(self, expressions):
+    def get_converters(self, expressions: list[Any]) -> dict[int, tuple[list, Any]]:
         converters = {}
         for i, expression in enumerate(expressions):
             if expression:
@@ -1406,11 +1444,13 @@ class SQLCompiler:
                 converters[i] = (backend_converters + field_converters, expression)
         return converters
 
-    def apply_converters(
+    def apply_converters(
+        self, rows: Iterable, converters: dict
+    ) -> Generator[list, None, None]:
         connection = self.connection
-
+        converters_list = list(converters.items())
         for row in map(list, rows):
-            for pos, (convs, expression) in
+            for pos, (convs, expression) in converters_list:
                 value = row[pos]
                 for converter in convs:
                     value = converter(value, expression, connection)
@@ -1419,11 +1459,11 @@ class SQLCompiler:
 
     def results_iter(
         self,
-        results=None,
-        tuple_expected=False,
-        chunked_fetch=False,
-        chunk_size=GET_ITERATOR_CHUNK_SIZE,
-    ):
+        results: Any = None,
+        tuple_expected: bool = False,
+        chunked_fetch: bool = False,
+        chunk_size: int = GET_ITERATOR_CHUNK_SIZE,
+    ) -> Iterable:
         """Return an iterator over the results from executing this query."""
         if results is None:
             results = self.execute_sql(
@@ -1438,7 +1478,7 @@ class SQLCompiler:
             rows = map(tuple, rows)
         return rows
 
-    def has_results(self):
+    def has_results(self) -> bool:
         """
         Backends (e.g. NoSQL) can override this in order to use optimized
         versions of "query has any results."
@@ -1446,8 +1486,11 @@ class SQLCompiler:
         return bool(self.execute_sql(SINGLE))
 
     def execute_sql(
-        self,
-
+        self,
+        result_type: str = MULTI,
+        chunked_fetch: bool = False,
+        chunk_size: int = GET_ITERATOR_CHUNK_SIZE,
+    ) -> Any:
         """
         Run the query against the database and return the result(s). The
         return value is a single data item if result_type is SINGLE, or an
@@ -1511,7 +1554,9 @@ class SQLCompiler:
                 return list(result)
             return result
 
-    def as_subquery_condition(
+    def as_subquery_condition(
+        self, alias: str, columns: list[str], compiler: SQLCompiler
+    ) -> tuple[str, tuple]:
         qn = compiler.quote_name_unless_alias
         qn2 = self.connection.ops.quote_name
 
@@ -1523,7 +1568,7 @@ class SQLCompiler:
         sql, params = self.as_sql()
         return f"EXISTS ({sql})", params
 
-    def explain_query(self):
+    def explain_query(self) -> Generator[str, None, None]:
         result = list(self.execute_sql())
         # Some backends return 1 item tuples with strings, and others return
         # tuples with integers and strings. Flatten them out into strings.
@@ -1537,10 +1582,10 @@ class SQLCompiler:
 
 
 class SQLInsertCompiler(SQLCompiler):
-    returning_fields = None
-    returning_params = ()
+    returning_fields: list | None = None
+    returning_params: tuple = ()
 
-    def field_as_sql(self, field, val):
+    def field_as_sql(self, field: Any, val: Any) -> tuple[str, list]:
         """
         Take a field and a value intended to be saved on that field, and
         return placeholder SQL and accompanying params. Check for raw values,
@@ -1572,7 +1617,7 @@ class SQLInsertCompiler(SQLCompiler):
 
         return sql, params
 
-    def prepare_value(self, field, value):
+    def prepare_value(self, field: Any, value: Any) -> Any:
         """
         Prepare a value to be used in a query by resolving it if it is an
         expression and otherwise calling the field's get_db_prep_save().
@@ -1600,7 +1645,7 @@ class SQLInsertCompiler(SQLCompiler):
             )
         return field.get_db_prep_save(value, connection=self.connection)
 
-    def pre_save_val(self, field, obj):
+    def pre_save_val(self, field: Any, obj: Any) -> Any:
        """
         Get the given field's value off the given obj. pre_save() is used for
         things like auto_now on DateTimeField. Skip it if this is a raw query.
@@ -1609,7 +1654,9 @@ class SQLInsertCompiler(SQLCompiler):
             return getattr(obj, field.attname)
         return field.pre_save(obj, add=True)
 
-    def assemble_as_sql(
+    def assemble_as_sql(
+        self, fields: list[Any], value_rows: list[list[Any]]
+    ) -> tuple[list[list[str]], list[list]]:
         """
         Take a sequence of N fields and a sequence of M rows of values, and
         generate placeholder SQL and parameters for each field and value.
@@ -1644,7 +1691,7 @@ class SQLInsertCompiler(SQLCompiler):
 
         return placeholder_rows, param_rows
 
-    def as_sql(self):
+    def as_sql(self) -> list[tuple[str, tuple]]:
         # We don't need quote_name_unless_alias() here, since these are all
         # going to be column names (so we can avoid the extra overhead).
         qn = self.connection.ops.quote_name
@@ -1724,7 +1771,7 @@ class SQLInsertCompiler(SQLCompiler):
             for p, vals in zip(placeholder_rows, param_rows)
         ]
 
-    def execute_sql(self, returning_fields=None):
+    def execute_sql(self, returning_fields: list | None = None) -> list:
         assert not (
             returning_fields
             and len(self.query.objs) != 1
@@ -1769,13 +1816,13 @@ class SQLInsertCompiler(SQLCompiler):
 
 class SQLDeleteCompiler(SQLCompiler):
     @cached_property
-    def single_alias(self):
+    def single_alias(self) -> bool:
         # Ensure base table is in aliases.
         self.query.get_initial_alias()
         return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1
 
     @classmethod
-    def _expr_refs_base_model(cls, expr, base_model):
+    def _expr_refs_base_model(cls, expr: Any, base_model: Any) -> bool:
         if isinstance(expr, Query):
             return expr.model == base_model
         if not hasattr(expr, "get_source_expressions"):
@@ -1786,7 +1833,7 @@ class SQLDeleteCompiler(SQLCompiler):
         )
 
     @cached_property
-    def contains_self_reference_subquery(self):
+    def contains_self_reference_subquery(self) -> bool:
         return any(
             self._expr_refs_base_model(expr, self.query.model)
             for expr in chain(
@@ -1794,7 +1841,7 @@ class SQLDeleteCompiler(SQLCompiler):
             )
         )
 
-    def _as_sql(self, query):
+    def _as_sql(self, query: Query) -> tuple[str, tuple]:
         delete = f"DELETE FROM {self.quote_name_unless_alias(query.base_table)}"
         try:
             where, params = self.compile(query.where)
@@ -1802,7 +1849,7 @@ class SQLDeleteCompiler(SQLCompiler):
             return delete, ()
         return f"{delete} WHERE {where}", tuple(params)
 
-    def as_sql(self):
+    def as_sql(self) -> tuple[str, tuple]:
         """
         Create the SQL for this query. Return the SQL string and list of
         parameters.
@@ -1825,7 +1872,7 @@ class SQLDeleteCompiler(SQLCompiler):
 
 
 class SQLUpdateCompiler(SQLCompiler):
-    def as_sql(self):
+    def as_sql(self) -> tuple[str, tuple]:
         """
         Create the SQL for this query. Return the SQL string and list of
         parameters.
@@ -1888,7 +1935,7 @@ class SQLUpdateCompiler(SQLCompiler):
             result.append(f"WHERE {where}")
         return " ".join(result), tuple(update_params + params)
 
-    def execute_sql(self, result_type):
+    def execute_sql(self, result_type: str) -> int:
         """
         Execute the specified update. Return the number of rows affected by
         the primary update query. The "primary update query" is the first
@@ -1909,7 +1956,7 @@ class SQLUpdateCompiler(SQLCompiler):
                 is_empty = False
         return rows
 
-    def pre_sql_setup(self):
+    def pre_sql_setup(self) -> None:
         """
         If the update depends on results from other tables, munge the "where"
         conditions to match the format required for (portable) SQL updates.
@@ -1965,7 +2012,7 @@ class SQLUpdateCompiler(SQLCompiler):
 
 
 class SQLAggregateCompiler(SQLCompiler):
-    def as_sql(self):
+    def as_sql(self) -> tuple[str, tuple]:
         """
         Create the SQL for this query. Return the SQL string and list of
         parameters.
@@ -1988,7 +2035,9 @@ class SQLAggregateCompiler(SQLCompiler):
         return sql, params
 
 
-def cursor_iter(
+def cursor_iter(
+    cursor: Any, sentinel: Any, col_count: int | None, itersize: int
+) -> Generator[list, None, None]:
     """
     Yield blocks of rows from a cursor and ensure the cursor is closed when
    done.