plain.models 0.49.2 → 0.51.0 (py3-none-any.whl)
This diff shows the changes between publicly released versions of the package as they appear in their public registries, and is provided for informational purposes only.
- plain/models/CHANGELOG.md +27 -0
- plain/models/README.md +26 -42
- plain/models/__init__.py +2 -0
- plain/models/aggregates.py +42 -19
- plain/models/backends/base/base.py +125 -105
- plain/models/backends/base/client.py +11 -3
- plain/models/backends/base/creation.py +24 -14
- plain/models/backends/base/features.py +10 -4
- plain/models/backends/base/introspection.py +37 -20
- plain/models/backends/base/operations.py +187 -91
- plain/models/backends/base/schema.py +338 -218
- plain/models/backends/base/validation.py +13 -4
- plain/models/backends/ddl_references.py +85 -43
- plain/models/backends/mysql/base.py +29 -26
- plain/models/backends/mysql/client.py +7 -2
- plain/models/backends/mysql/compiler.py +13 -4
- plain/models/backends/mysql/creation.py +5 -2
- plain/models/backends/mysql/features.py +24 -22
- plain/models/backends/mysql/introspection.py +22 -13
- plain/models/backends/mysql/operations.py +107 -40
- plain/models/backends/mysql/schema.py +52 -28
- plain/models/backends/mysql/validation.py +13 -6
- plain/models/backends/postgresql/base.py +41 -34
- plain/models/backends/postgresql/client.py +7 -2
- plain/models/backends/postgresql/creation.py +10 -5
- plain/models/backends/postgresql/introspection.py +15 -8
- plain/models/backends/postgresql/operations.py +110 -43
- plain/models/backends/postgresql/schema.py +88 -49
- plain/models/backends/sqlite3/_functions.py +151 -115
- plain/models/backends/sqlite3/base.py +37 -23
- plain/models/backends/sqlite3/client.py +7 -1
- plain/models/backends/sqlite3/creation.py +9 -5
- plain/models/backends/sqlite3/features.py +5 -3
- plain/models/backends/sqlite3/introspection.py +32 -16
- plain/models/backends/sqlite3/operations.py +126 -43
- plain/models/backends/sqlite3/schema.py +127 -92
- plain/models/backends/utils.py +52 -29
- plain/models/backups/cli.py +8 -6
- plain/models/backups/clients.py +16 -7
- plain/models/backups/core.py +24 -13
- plain/models/base.py +221 -229
- plain/models/cli.py +98 -67
- plain/models/config.py +1 -1
- plain/models/connections.py +23 -7
- plain/models/constraints.py +79 -56
- plain/models/database_url.py +1 -1
- plain/models/db.py +6 -2
- plain/models/deletion.py +80 -56
- plain/models/entrypoints.py +1 -1
- plain/models/enums.py +22 -11
- plain/models/exceptions.py +23 -8
- plain/models/expressions.py +441 -258
- plain/models/fields/__init__.py +272 -217
- plain/models/fields/json.py +123 -57
- plain/models/fields/mixins.py +12 -8
- plain/models/fields/related.py +324 -290
- plain/models/fields/related_descriptors.py +33 -24
- plain/models/fields/related_lookups.py +24 -12
- plain/models/fields/related_managers.py +102 -79
- plain/models/fields/reverse_related.py +66 -63
- plain/models/forms.py +101 -75
- plain/models/functions/comparison.py +71 -18
- plain/models/functions/datetime.py +79 -29
- plain/models/functions/math.py +43 -10
- plain/models/functions/mixins.py +24 -7
- plain/models/functions/text.py +104 -25
- plain/models/functions/window.py +12 -6
- plain/models/indexes.py +57 -32
- plain/models/lookups.py +228 -153
- plain/models/meta.py +505 -0
- plain/models/migrations/autodetector.py +86 -43
- plain/models/migrations/exceptions.py +7 -3
- plain/models/migrations/executor.py +33 -7
- plain/models/migrations/graph.py +79 -50
- plain/models/migrations/loader.py +45 -22
- plain/models/migrations/migration.py +23 -18
- plain/models/migrations/operations/base.py +38 -20
- plain/models/migrations/operations/fields.py +95 -48
- plain/models/migrations/operations/models.py +246 -142
- plain/models/migrations/operations/special.py +82 -25
- plain/models/migrations/optimizer.py +7 -2
- plain/models/migrations/questioner.py +58 -31
- plain/models/migrations/recorder.py +27 -16
- plain/models/migrations/serializer.py +50 -39
- plain/models/migrations/state.py +232 -156
- plain/models/migrations/utils.py +30 -14
- plain/models/migrations/writer.py +17 -14
- plain/models/options.py +189 -518
- plain/models/otel.py +16 -6
- plain/models/preflight.py +42 -17
- plain/models/query.py +400 -251
- plain/models/query_utils.py +109 -69
- plain/models/registry.py +40 -21
- plain/models/sql/compiler.py +190 -127
- plain/models/sql/datastructures.py +38 -25
- plain/models/sql/query.py +320 -225
- plain/models/sql/subqueries.py +36 -25
- plain/models/sql/where.py +54 -29
- plain/models/test/pytest.py +15 -11
- plain/models/test/utils.py +4 -2
- plain/models/transaction.py +20 -7
- plain/models/utils.py +17 -6
- {plain_models-0.49.2.dist-info → plain_models-0.51.0.dist-info}/METADATA +27 -43
- plain_models-0.51.0.dist-info/RECORD +123 -0
- plain_models-0.49.2.dist-info/RECORD +0 -122
- {plain_models-0.49.2.dist-info → plain_models-0.51.0.dist-info}/WHEEL +0 -0
- {plain_models-0.49.2.dist-info → plain_models-0.51.0.dist-info}/entry_points.txt +0 -0
- {plain_models-0.49.2.dist-info → plain_models-0.51.0.dist-info}/licenses/LICENSE +0 -0
plain/models/query.py
CHANGED
@@ -2,11 +2,15 @@
|
|
2
2
|
The main QuerySet implementation. This provides the public API for the ORM.
|
3
3
|
"""
|
4
4
|
|
5
|
+
from __future__ import annotations
|
6
|
+
|
5
7
|
import copy
|
6
8
|
import operator
|
7
9
|
import warnings
|
10
|
+
from collections.abc import Callable, Iterator
|
8
11
|
from functools import cached_property
|
9
12
|
from itertools import chain, islice
|
13
|
+
from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar
|
10
14
|
|
11
15
|
import plain.runtime
|
12
16
|
from plain.exceptions import ValidationError
|
@@ -43,6 +47,14 @@ from plain.models.utils import (
|
|
43
47
|
from plain.utils import timezone
|
44
48
|
from plain.utils.functional import partition
|
45
49
|
|
50
|
+
if TYPE_CHECKING:
|
51
|
+
from datetime import tzinfo
|
52
|
+
|
53
|
+
from plain.models import Model
|
54
|
+
|
55
|
+
# Type variable for QuerySet generic
|
56
|
+
T = TypeVar("T", bound="Model")
|
57
|
+
|
46
58
|
# The maximum number of results to fetch in a get() query.
|
47
59
|
MAX_GET_RESULTS = 21
|
48
60
|
|
@@ -52,7 +64,10 @@ REPR_OUTPUT_SIZE = 20
|
|
52
64
|
|
53
65
|
class BaseIterable:
|
54
66
|
def __init__(
|
55
|
-
self,
|
67
|
+
self,
|
68
|
+
queryset: QuerySet[Any],
|
69
|
+
chunked_fetch: bool = False,
|
70
|
+
chunk_size: int = GET_ITERATOR_CHUNK_SIZE,
|
56
71
|
):
|
57
72
|
self.queryset = queryset
|
58
73
|
self.chunked_fetch = chunked_fetch
|
@@ -62,7 +77,7 @@ class BaseIterable:
|
|
62
77
|
class ModelIterable(BaseIterable):
|
63
78
|
"""Iterable that yields a model instance for each row."""
|
64
79
|
|
65
|
-
def __iter__(self):
|
80
|
+
def __iter__(self) -> Iterator[Model]: # type: ignore[misc]
|
66
81
|
queryset = self.queryset
|
67
82
|
compiler = queryset.sql_query.get_compiler()
|
68
83
|
# Execute the query. This will also fill compiler.select, klass_info,
|
@@ -119,7 +134,7 @@ class RawModelIterable(BaseIterable):
|
|
119
134
|
Iterable that yields a model instance for each row from a raw queryset.
|
120
135
|
"""
|
121
136
|
|
122
|
-
def __iter__(self):
|
137
|
+
def __iter__(self) -> Iterator[Model]: # type: ignore[misc]
|
123
138
|
# Cache some things for performance reasons outside the loop.
|
124
139
|
query = self.queryset.sql_query
|
125
140
|
compiler = db_connection.ops.compiler("SQLCompiler")(query, db_connection)
|
@@ -136,7 +151,10 @@ class RawModelIterable(BaseIterable):
|
|
136
151
|
raise FieldDoesNotExist("Raw query must include the primary key")
|
137
152
|
fields = [self.queryset.model_fields.get(c) for c in self.queryset.columns]
|
138
153
|
converters = compiler.get_converters(
|
139
|
-
[
|
154
|
+
[
|
155
|
+
f.get_col(f.model.model_options.db_table) if f else None
|
156
|
+
for f in fields
|
157
|
+
]
|
140
158
|
)
|
141
159
|
if converters:
|
142
160
|
query_iterator = compiler.apply_converters(query_iterator, converters)
|
@@ -159,7 +177,7 @@ class ValuesIterable(BaseIterable):
|
|
159
177
|
Iterable returned by QuerySet.values() that yields a dict for each row.
|
160
178
|
"""
|
161
179
|
|
162
|
-
def __iter__(self):
|
180
|
+
def __iter__(self) -> Iterator[dict[str, Any]]: # type: ignore[misc]
|
163
181
|
queryset = self.queryset
|
164
182
|
query = queryset.sql_query
|
165
183
|
compiler = query.get_compiler()
|
@@ -183,7 +201,7 @@ class ValuesListIterable(BaseIterable):
|
|
183
201
|
for each row.
|
184
202
|
"""
|
185
203
|
|
186
|
-
def __iter__(self):
|
204
|
+
def __iter__(self) -> Iterator[tuple[Any, ...]]: # type: ignore[misc]
|
187
205
|
queryset = self.queryset
|
188
206
|
query = queryset.sql_query
|
189
207
|
compiler = query.get_compiler()
|
@@ -222,7 +240,7 @@ class NamedValuesListIterable(ValuesListIterable):
|
|
222
240
|
namedtuple for each row.
|
223
241
|
"""
|
224
242
|
|
225
|
-
def __iter__(self):
|
243
|
+
def __iter__(self) -> Iterator[tuple[Any, ...]]: # type: ignore[misc]
|
226
244
|
queryset = self.queryset
|
227
245
|
if queryset._fields:
|
228
246
|
names = queryset._fields
|
@@ -245,7 +263,7 @@ class FlatValuesListIterable(BaseIterable):
|
|
245
263
|
values.
|
246
264
|
"""
|
247
265
|
|
248
|
-
def __iter__(self):
|
266
|
+
def __iter__(self) -> Iterator[Any]: # type: ignore[misc]
|
249
267
|
queryset = self.queryset
|
250
268
|
compiler = queryset.sql_query.get_compiler()
|
251
269
|
for row in compiler.results_iter(
|
@@ -254,25 +272,76 @@ class FlatValuesListIterable(BaseIterable):
|
|
254
272
|
yield row[0]
|
255
273
|
|
256
274
|
|
257
|
-
class QuerySet:
|
258
|
-
"""
|
275
|
+
class QuerySet(Generic[T]):
|
276
|
+
"""
|
277
|
+
Represent a lazy database lookup for a set of objects.
|
259
278
|
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
|
268
|
-
|
269
|
-
|
270
|
-
|
271
|
-
|
272
|
-
|
279
|
+
Usage:
|
280
|
+
MyModel.query.filter(name="test").all()
|
281
|
+
|
282
|
+
Custom QuerySets:
|
283
|
+
from typing import Self
|
284
|
+
|
285
|
+
class TaskQuerySet(QuerySet["Task"]):
|
286
|
+
def active(self) -> Self:
|
287
|
+
return self.filter(is_active=True)
|
288
|
+
|
289
|
+
class Task(Model):
|
290
|
+
is_active = BooleanField(default=True)
|
291
|
+
query = TaskQuerySet()
|
292
|
+
|
293
|
+
Task.query.active().filter(name="test") # Full type inference
|
294
|
+
|
295
|
+
Custom methods should return `Self` to preserve type through method chaining.
|
296
|
+
"""
|
297
|
+
|
298
|
+
# Instance attributes (set in from_model())
|
299
|
+
model: type[T]
|
300
|
+
_query: sql.Query
|
301
|
+
_result_cache: list[T] | None
|
302
|
+
_sticky_filter: bool
|
303
|
+
_for_write: bool
|
304
|
+
_prefetch_related_lookups: tuple[Any, ...]
|
305
|
+
_prefetch_done: bool
|
306
|
+
_known_related_objects: dict[Any, dict[Any, Any]]
|
307
|
+
_iterable_class: type[BaseIterable]
|
308
|
+
_fields: tuple[str, ...] | None
|
309
|
+
_defer_next_filter: bool
|
310
|
+
_deferred_filter: tuple[bool, tuple[Any, ...], dict[str, Any]] | None
|
311
|
+
|
312
|
+
def __init__(self):
|
313
|
+
"""Minimal init for descriptor mode. Use from_model() to create instances."""
|
314
|
+
pass
|
315
|
+
|
316
|
+
@classmethod
|
317
|
+
def from_model(cls, model: type[T], query: sql.Query | None = None) -> Self:
|
318
|
+
"""Create a QuerySet instance bound to a model."""
|
319
|
+
instance = cls()
|
320
|
+
instance.model = model
|
321
|
+
instance._query = query or sql.Query(model)
|
322
|
+
instance._result_cache = None
|
323
|
+
instance._sticky_filter = False
|
324
|
+
instance._for_write = False
|
325
|
+
instance._prefetch_related_lookups = ()
|
326
|
+
instance._prefetch_done = False
|
327
|
+
instance._known_related_objects = {}
|
328
|
+
instance._iterable_class = ModelIterable
|
329
|
+
instance._fields = None
|
330
|
+
instance._defer_next_filter = False
|
331
|
+
instance._deferred_filter = None
|
332
|
+
return instance
|
333
|
+
|
334
|
+
def __get__(self, instance: Any, owner: type[T]) -> Self:
|
335
|
+
"""Descriptor protocol - return a new QuerySet bound to the model."""
|
336
|
+
if instance is not None:
|
337
|
+
raise AttributeError(
|
338
|
+
f"QuerySet is only accessible from the model class, not instances. "
|
339
|
+
f"Use {owner.__name__}.query instead."
|
340
|
+
)
|
341
|
+
return self.from_model(owner)
|
273
342
|
|
274
343
|
@property
|
275
|
-
def sql_query(self):
|
344
|
+
def sql_query(self) -> sql.Query:
|
276
345
|
if self._deferred_filter:
|
277
346
|
negate, args, kwargs = self._deferred_filter
|
278
347
|
self._filter_or_exclude_inplace(negate, args, kwargs)
|
@@ -280,7 +349,7 @@ class QuerySet:
|
|
280
349
|
return self._query
|
281
350
|
|
282
351
|
@sql_query.setter
|
283
|
-
def sql_query(self, value):
|
352
|
+
def sql_query(self, value: sql.Query) -> None:
|
284
353
|
if value.values_select:
|
285
354
|
self._iterable_class = ValuesIterable
|
286
355
|
self._query = value
|
@@ -289,9 +358,9 @@ class QuerySet:
|
|
289
358
|
# PYTHON MAGIC METHODS #
|
290
359
|
########################
|
291
360
|
|
292
|
-
def __deepcopy__(self, memo):
|
361
|
+
def __deepcopy__(self, memo: dict[int, Any]) -> QuerySet[T]:
|
293
362
|
"""Don't populate the QuerySet's cache."""
|
294
|
-
obj = self.__class__()
|
363
|
+
obj = self.__class__.from_model(self.model)
|
295
364
|
for k, v in self.__dict__.items():
|
296
365
|
if k == "_result_cache":
|
297
366
|
obj.__dict__[k] = None
|
@@ -299,12 +368,12 @@ class QuerySet:
|
|
299
368
|
obj.__dict__[k] = copy.deepcopy(v, memo)
|
300
369
|
return obj
|
301
370
|
|
302
|
-
def __getstate__(self):
|
371
|
+
def __getstate__(self) -> dict[str, Any]:
|
303
372
|
# Force the cache to be fully populated.
|
304
373
|
self._fetch_all()
|
305
374
|
return {**self.__dict__, PLAIN_VERSION_PICKLE_KEY: plain.runtime.__version__}
|
306
375
|
|
307
|
-
def __setstate__(self, state):
|
376
|
+
def __setstate__(self, state: dict[str, Any]) -> None:
|
308
377
|
pickled_version = state.get(PLAIN_VERSION_PICKLE_KEY)
|
309
378
|
if pickled_version:
|
310
379
|
if pickled_version != plain.runtime.__version__:
|
@@ -322,17 +391,17 @@ class QuerySet:
|
|
322
391
|
)
|
323
392
|
self.__dict__.update(state)
|
324
393
|
|
325
|
-
def __repr__(self):
|
394
|
+
def __repr__(self) -> str:
|
326
395
|
data = list(self[: REPR_OUTPUT_SIZE + 1])
|
327
396
|
if len(data) > REPR_OUTPUT_SIZE:
|
328
397
|
data[-1] = "...(remaining elements truncated)..."
|
329
398
|
return f"<{self.__class__.__name__} {data!r}>"
|
330
399
|
|
331
|
-
def __len__(self):
|
400
|
+
def __len__(self) -> int:
|
332
401
|
self._fetch_all()
|
333
|
-
return len(self._result_cache)
|
402
|
+
return len(self._result_cache) # type: ignore[arg-type]
|
334
403
|
|
335
|
-
def __iter__(self):
|
404
|
+
def __iter__(self) -> Iterator[T]:
|
336
405
|
"""
|
337
406
|
The queryset iterator protocol uses three nested iterators in the
|
338
407
|
default case:
|
@@ -348,13 +417,13 @@ class QuerySet:
|
|
348
417
|
- Responsible for turning the rows into model objects.
|
349
418
|
"""
|
350
419
|
self._fetch_all()
|
351
|
-
return iter(self._result_cache)
|
420
|
+
return iter(self._result_cache) # type: ignore[arg-type]
|
352
421
|
|
353
|
-
def __bool__(self):
|
422
|
+
def __bool__(self) -> bool:
|
354
423
|
self._fetch_all()
|
355
424
|
return bool(self._result_cache)
|
356
425
|
|
357
|
-
def __getitem__(self, k):
|
426
|
+
def __getitem__(self, k: int | slice) -> T | QuerySet[T]:
|
358
427
|
"""Retrieve an item or slice from the set of results."""
|
359
428
|
if not isinstance(k, int | slice):
|
360
429
|
raise TypeError(
|
@@ -383,17 +452,17 @@ class QuerySet:
|
|
383
452
|
else:
|
384
453
|
stop = None
|
385
454
|
qs.sql_query.set_limits(start, stop)
|
386
|
-
return list(qs)[:: k.step] if k.step else qs
|
455
|
+
return list(qs)[:: k.step] if k.step else qs # type: ignore[return-value]
|
387
456
|
|
388
457
|
qs = self._chain()
|
389
|
-
qs.sql_query.set_limits(k, k + 1)
|
458
|
+
qs.sql_query.set_limits(k, k + 1) # type: ignore[unsupported-operator]
|
390
459
|
qs._fetch_all()
|
391
460
|
return qs._result_cache[0]
|
392
461
|
|
393
|
-
def __class_getitem__(cls, *args, **kwargs):
|
462
|
+
def __class_getitem__(cls, *args: Any, **kwargs: Any) -> type[QuerySet[Any]]:
|
394
463
|
return cls
|
395
464
|
|
396
|
-
def __and__(self, other):
|
465
|
+
def __and__(self, other: QuerySet[T]) -> QuerySet[T]:
|
397
466
|
self._check_operator_queryset(other, "&")
|
398
467
|
self._merge_sanity_check(other)
|
399
468
|
if isinstance(other, EmptyQuerySet):
|
@@ -405,7 +474,7 @@ class QuerySet:
|
|
405
474
|
combined.sql_query.combine(other.sql_query, sql.AND)
|
406
475
|
return combined
|
407
476
|
|
408
|
-
def __or__(self, other):
|
477
|
+
def __or__(self, other: QuerySet[T]) -> QuerySet[T]:
|
409
478
|
self._check_operator_queryset(other, "|")
|
410
479
|
self._merge_sanity_check(other)
|
411
480
|
if isinstance(self, EmptyQuerySet):
|
@@ -415,16 +484,18 @@ class QuerySet:
|
|
415
484
|
query = (
|
416
485
|
self
|
417
486
|
if self.sql_query.can_filter()
|
418
|
-
else self.model.
|
487
|
+
else self.model._model_meta.base_queryset.filter(id__in=self.values("id"))
|
419
488
|
)
|
420
489
|
combined = query._chain()
|
421
490
|
combined._merge_known_related_objects(other)
|
422
491
|
if not other.sql_query.can_filter():
|
423
|
-
other = other.model.
|
492
|
+
other = other.model._model_meta.base_queryset.filter(
|
493
|
+
id__in=other.values("id")
|
494
|
+
)
|
424
495
|
combined.sql_query.combine(other.sql_query, sql.OR)
|
425
496
|
return combined
|
426
497
|
|
427
|
-
def __xor__(self, other):
|
498
|
+
def __xor__(self, other: QuerySet[T]) -> QuerySet[T]:
|
428
499
|
self._check_operator_queryset(other, "^")
|
429
500
|
self._merge_sanity_check(other)
|
430
501
|
if isinstance(self, EmptyQuerySet):
|
@@ -434,12 +505,14 @@ class QuerySet:
|
|
434
505
|
query = (
|
435
506
|
self
|
436
507
|
if self.sql_query.can_filter()
|
437
|
-
else self.model.
|
508
|
+
else self.model._model_meta.base_queryset.filter(id__in=self.values("id"))
|
438
509
|
)
|
439
510
|
combined = query._chain()
|
440
511
|
combined._merge_known_related_objects(other)
|
441
512
|
if not other.sql_query.can_filter():
|
442
|
-
other = other.model.
|
513
|
+
other = other.model._model_meta.base_queryset.filter(
|
514
|
+
id__in=other.values("id")
|
515
|
+
)
|
443
516
|
combined.sql_query.combine(other.sql_query, sql.XOR)
|
444
517
|
return combined
|
445
518
|
|
@@ -447,7 +520,7 @@ class QuerySet:
|
|
447
520
|
# METHODS THAT DO DATABASE QUERIES #
|
448
521
|
####################################
|
449
522
|
|
450
|
-
def _iterator(self, use_chunked_fetch, chunk_size):
|
523
|
+
def _iterator(self, use_chunked_fetch: bool, chunk_size: int | None) -> Iterator[T]:
|
451
524
|
iterable = self._iterable_class(
|
452
525
|
self,
|
453
526
|
chunked_fetch=use_chunked_fetch,
|
@@ -462,7 +535,7 @@ class QuerySet:
|
|
462
535
|
prefetch_related_objects(results, *self._prefetch_related_lookups)
|
463
536
|
yield from results
|
464
537
|
|
465
|
-
def iterator(self, chunk_size=None):
|
538
|
+
def iterator(self, chunk_size: int | None = None) -> Iterator[T]:
|
466
539
|
"""
|
467
540
|
An iterator over the results from applying this QuerySet to the
|
468
541
|
database. chunk_size must be provided for QuerySets that prefetch
|
@@ -481,7 +554,7 @@ class QuerySet:
|
|
481
554
|
)
|
482
555
|
return self._iterator(use_chunked_fetch, chunk_size)
|
483
556
|
|
484
|
-
def aggregate(self, *args, **kwargs):
|
557
|
+
def aggregate(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
|
485
558
|
"""
|
486
559
|
Return a dictionary containing the calculations (aggregation)
|
487
560
|
over the current queryset.
|
@@ -506,7 +579,7 @@ class QuerySet:
|
|
506
579
|
|
507
580
|
return self.sql_query.chain().get_aggregation(kwargs)
|
508
581
|
|
509
|
-
def count(self):
|
582
|
+
def count(self) -> int:
|
510
583
|
"""
|
511
584
|
Perform a SELECT COUNT() and return the number of records as an
|
512
585
|
integer.
|
@@ -519,7 +592,7 @@ class QuerySet:
|
|
519
592
|
|
520
593
|
return self.sql_query.get_count()
|
521
594
|
|
522
|
-
def get(self, *args, **kwargs):
|
595
|
+
def get(self, *args: Any, **kwargs: Any) -> T:
|
523
596
|
"""
|
524
597
|
Perform the query and return a single object matching the given
|
525
598
|
keyword arguments.
|
@@ -546,46 +619,49 @@ class QuerySet:
|
|
546
619
|
return clone._result_cache[0]
|
547
620
|
if not num:
|
548
621
|
raise self.model.DoesNotExist(
|
549
|
-
f"{self.model.
|
622
|
+
f"{self.model.model_options.object_name} matching query does not exist."
|
550
623
|
)
|
551
624
|
raise self.model.MultipleObjectsReturned(
|
552
625
|
"get() returned more than one {} -- it returned {}!".format(
|
553
|
-
self.model.
|
626
|
+
self.model.model_options.object_name,
|
554
627
|
num if not limit or num < limit else "more than %s" % (limit - 1),
|
555
628
|
)
|
556
629
|
)
|
557
630
|
|
558
|
-
def get_or_none(self, *args, **kwargs):
|
631
|
+
def get_or_none(self, *args: Any, **kwargs: Any) -> T | None:
|
559
632
|
"""
|
560
633
|
Perform the query and return a single object matching the given
|
561
634
|
keyword arguments, or None if no object is found.
|
562
635
|
"""
|
563
636
|
try:
|
564
637
|
return self.get(*args, **kwargs)
|
565
|
-
except self.model.DoesNotExist:
|
638
|
+
except self.model.DoesNotExist: # type: ignore[attr-defined]
|
566
639
|
return None
|
567
640
|
|
568
|
-
def create(self, **kwargs):
|
641
|
+
def create(self, **kwargs: Any) -> T:
|
569
642
|
"""
|
570
643
|
Create a new object with the given kwargs, saving it to the database
|
571
644
|
and returning the created object.
|
572
645
|
"""
|
573
|
-
obj = self.model(**kwargs)
|
646
|
+
obj = self.model(**kwargs) # type: ignore[misc]
|
574
647
|
self._for_write = True
|
575
|
-
obj.save(force_insert=True)
|
648
|
+
obj.save(force_insert=True) # type: ignore[attr-defined]
|
576
649
|
return obj
|
577
650
|
|
578
|
-
def _prepare_for_bulk_create(self, objs):
|
579
|
-
id_field = self.model.
|
651
|
+
def _prepare_for_bulk_create(self, objs: list[T]) -> None:
|
652
|
+
id_field = self.model._model_meta.get_field("id")
|
580
653
|
for obj in objs:
|
581
|
-
if obj.id is None:
|
654
|
+
if obj.id is None: # type: ignore[attr-defined]
|
582
655
|
# Populate new primary key values.
|
583
|
-
obj.id = id_field.get_id_value_on_save(obj)
|
584
|
-
obj._prepare_related_fields_for_save(operation_name="bulk_create")
|
656
|
+
obj.id = id_field.get_id_value_on_save(obj) # type: ignore[attr-defined]
|
657
|
+
obj._prepare_related_fields_for_save(operation_name="bulk_create") # type: ignore[attr-defined]
|
585
658
|
|
586
659
|
def _check_bulk_create_options(
|
587
|
-
self,
|
588
|
-
|
660
|
+
self,
|
661
|
+
update_conflicts: bool,
|
662
|
+
update_fields: list[Field] | None,
|
663
|
+
unique_fields: list[Field] | None,
|
664
|
+
) -> OnConflict | None:
|
589
665
|
db_features = db_connection.features
|
590
666
|
if update_conflicts:
|
591
667
|
if not db_features.supports_update_conflicts:
|
@@ -628,12 +704,12 @@ class QuerySet:
|
|
628
704
|
|
629
705
|
def bulk_create(
|
630
706
|
self,
|
631
|
-
objs,
|
632
|
-
batch_size=None,
|
633
|
-
update_conflicts=False,
|
634
|
-
update_fields=None,
|
635
|
-
unique_fields=None,
|
636
|
-
):
|
707
|
+
objs: list[T],
|
708
|
+
batch_size: int | None = None,
|
709
|
+
update_conflicts: bool = False,
|
710
|
+
update_fields: list[str] | None = None,
|
711
|
+
unique_fields: list[str] | None = None,
|
712
|
+
) -> list[T]:
|
637
713
|
"""
|
638
714
|
Insert each of the instances into the database. Do *not* call
|
639
715
|
save() on each of the instances, and do not set the primary key attribute if it is an
|
@@ -657,18 +733,18 @@ class QuerySet:
|
|
657
733
|
|
658
734
|
if not objs:
|
659
735
|
return objs
|
660
|
-
|
736
|
+
meta = self.model._model_meta
|
661
737
|
if unique_fields:
|
662
|
-
unique_fields = [
|
738
|
+
unique_fields = [meta.get_field(name) for name in unique_fields]
|
663
739
|
if update_fields:
|
664
|
-
update_fields = [
|
740
|
+
update_fields = [meta.get_field(name) for name in update_fields]
|
665
741
|
on_conflict = self._check_bulk_create_options(
|
666
742
|
update_conflicts,
|
667
743
|
update_fields,
|
668
744
|
unique_fields,
|
669
745
|
)
|
670
746
|
self._for_write = True
|
671
|
-
fields =
|
747
|
+
fields = meta.concrete_fields
|
672
748
|
objs = list(objs)
|
673
749
|
self._prepare_for_bulk_create(objs)
|
674
750
|
with transaction.atomic(savepoint=False):
|
@@ -682,9 +758,9 @@ class QuerySet:
|
|
682
758
|
update_fields=update_fields,
|
683
759
|
unique_fields=unique_fields,
|
684
760
|
)
|
685
|
-
id_field =
|
761
|
+
id_field = meta.get_field("id")
|
686
762
|
for obj_with_id, results in zip(objs_with_id, returned_columns):
|
687
|
-
for result, field in zip(results,
|
763
|
+
for result, field in zip(results, meta.db_returning_fields):
|
688
764
|
if field != id_field:
|
689
765
|
setattr(obj_with_id, field.attname, result)
|
690
766
|
for obj_with_id in objs_with_id:
|
@@ -705,13 +781,15 @@ class QuerySet:
|
|
705
781
|
):
|
706
782
|
assert len(returned_columns) == len(objs_without_id)
|
707
783
|
for obj_without_id, results in zip(objs_without_id, returned_columns):
|
708
|
-
for result, field in zip(results,
|
784
|
+
for result, field in zip(results, meta.db_returning_fields):
|
709
785
|
setattr(obj_without_id, field.attname, result)
|
710
786
|
obj_without_id._state.adding = False
|
711
787
|
|
712
788
|
return objs
|
713
789
|
|
714
|
-
def bulk_update(
|
790
|
+
def bulk_update(
|
791
|
+
self, objs: list[T], fields: list[str], batch_size: int | None = None
|
792
|
+
) -> int:
|
715
793
|
"""
|
716
794
|
Update the given fields in each of the given objects in the database.
|
717
795
|
"""
|
@@ -719,42 +797,47 @@ class QuerySet:
|
|
719
797
|
raise ValueError("Batch size must be a positive integer.")
|
720
798
|
if not fields:
|
721
799
|
raise ValueError("Field names must be given to bulk_update().")
|
722
|
-
|
723
|
-
if any(obj.id is None for obj in
|
800
|
+
objs_tuple = tuple(objs)
|
801
|
+
if any(obj.id is None for obj in objs_tuple): # type: ignore[attr-defined]
|
724
802
|
raise ValueError("All bulk_update() objects must have a primary key set.")
|
725
|
-
|
726
|
-
if any(not f.concrete or f.many_to_many for f in
|
803
|
+
fields_list = [self.model._model_meta.get_field(name) for name in fields]
|
804
|
+
if any(not f.concrete or f.many_to_many for f in fields_list):
|
727
805
|
raise ValueError("bulk_update() can only be used with concrete fields.")
|
728
|
-
if any(f.primary_key for f in
|
806
|
+
if any(f.primary_key for f in fields_list):
|
729
807
|
raise ValueError("bulk_update() cannot be used with primary key fields.")
|
730
|
-
if not
|
808
|
+
if not objs_tuple:
|
731
809
|
return 0
|
732
|
-
for obj in
|
733
|
-
obj._prepare_related_fields_for_save(
|
734
|
-
operation_name="bulk_update", fields=
|
810
|
+
for obj in objs_tuple:
|
811
|
+
obj._prepare_related_fields_for_save( # type: ignore[attr-defined]
|
812
|
+
operation_name="bulk_update", fields=fields_list
|
735
813
|
)
|
736
814
|
# PK is used twice in the resulting update query, once in the filter
|
737
815
|
# and once in the WHEN. Each field will also have one CAST.
|
738
816
|
self._for_write = True
|
739
|
-
max_batch_size = db_connection.ops.bulk_batch_size(
|
817
|
+
max_batch_size = db_connection.ops.bulk_batch_size(
|
818
|
+
["id", "id"] + fields_list, objs_tuple
|
819
|
+
)
|
740
820
|
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
|
741
821
|
requires_casting = db_connection.features.requires_casted_case_in_updates
|
742
|
-
batches = (
|
822
|
+
batches = (
|
823
|
+
objs_tuple[i : i + batch_size]
|
824
|
+
for i in range(0, len(objs_tuple), batch_size)
|
825
|
+
)
|
743
826
|
updates = []
|
744
827
|
for batch_objs in batches:
|
745
828
|
update_kwargs = {}
|
746
|
-
for field in
|
829
|
+
for field in fields_list:
|
747
830
|
when_statements = []
|
748
831
|
for obj in batch_objs:
|
749
832
|
attr = getattr(obj, field.attname)
|
750
833
|
if not hasattr(attr, "resolve_expression"):
|
751
834
|
attr = Value(attr, output_field=field)
|
752
|
-
when_statements.append(When(id=obj.id, then=attr))
|
835
|
+
when_statements.append(When(id=obj.id, then=attr)) # type: ignore[attr-defined]
|
753
836
|
case_statement = Case(*when_statements, output_field=field)
|
754
837
|
if requires_casting:
|
755
838
|
case_statement = Cast(case_statement, output_field=field)
|
756
839
|
update_kwargs[field.attname] = case_statement
|
757
|
-
updates.append(([obj.id for obj in batch_objs], update_kwargs))
|
840
|
+
updates.append(([obj.id for obj in batch_objs], update_kwargs)) # type: ignore[attr-defined,misc]
|
758
841
|
rows_updated = 0
|
759
842
|
queryset = self._chain()
|
760
843
|
with transaction.atomic(savepoint=False):
|
@@ -762,7 +845,9 @@ class QuerySet:
|
|
762
845
|
rows_updated += queryset.filter(id__in=ids).update(**update_kwargs)
|
763
846
|
return rows_updated
|
764
847
|
|
765
|
-
def get_or_create(
|
848
|
+
def get_or_create(
|
849
|
+
self, defaults: dict[str, Any] | None = None, **kwargs: Any
|
850
|
+
) -> tuple[T, bool]:
|
766
851
|
"""
|
767
852
|
Look up an object with the given kwargs, creating one if necessary.
|
768
853
|
Return a tuple of (object, created), where created is a boolean
|
@@ -773,7 +858,7 @@ class QuerySet:
|
|
773
858
|
self._for_write = True
|
774
859
|
try:
|
775
860
|
return self.get(**kwargs), False
|
776
|
-
except self.model.DoesNotExist:
|
861
|
+
except self.model.DoesNotExist: # type: ignore[attr-defined]
|
777
862
|
params = self._extract_model_params(defaults, **kwargs)
|
778
863
|
# Try to create an object using passed params.
|
779
864
|
try:
|
@@ -790,11 +875,16 @@ class QuerySet:
|
|
790
875
|
# and return an existing object.
|
791
876
|
try:
|
792
877
|
return self.get(**kwargs), False
|
793
|
-
except self.model.DoesNotExist:
|
878
|
+
except self.model.DoesNotExist: # type: ignore[attr-defined]
|
794
879
|
pass
|
795
880
|
raise
|
796
881
|
|
797
|
-
def update_or_create(
|
882
|
+
def update_or_create(
|
883
|
+
self,
|
884
|
+
defaults: dict[str, Any] | None = None,
|
885
|
+
create_defaults: dict[str, Any] | None = None,
|
886
|
+
**kwargs: Any,
|
887
|
+
) -> tuple[T, bool]:
|
798
888
|
"""
|
799
889
|
Look up an object with the given kwargs, updating one with defaults
|
800
890
|
if it exists, otherwise create a new one. Optionally, an object can
|
@@ -820,26 +910,28 @@ class QuerySet:
|
|
820
910
|
setattr(obj, k, v)
|
821
911
|
|
822
912
|
update_fields = set(update_defaults)
|
823
|
-
concrete_field_names = self.model.
|
913
|
+
concrete_field_names = self.model._model_meta._non_pk_concrete_field_names
|
824
914
|
# update_fields does not support non-concrete fields.
|
825
915
|
if concrete_field_names.issuperset(update_fields):
|
826
916
|
# Add fields which are set on pre_save(), e.g. auto_now fields.
|
827
917
|
# This is to maintain backward compatibility as these fields
|
828
918
|
# are not updated unless explicitly specified in the
|
829
919
|
# update_fields list.
|
830
|
-
for field in self.model.
|
920
|
+
for field in self.model._model_meta.local_concrete_fields:
|
831
921
|
if not (
|
832
922
|
field.primary_key or field.__class__.pre_save is Field.pre_save
|
833
923
|
):
|
834
924
|
update_fields.add(field.name)
|
835
925
|
if field.name != field.attname:
|
836
926
|
update_fields.add(field.attname)
|
837
|
-
obj.save(update_fields=update_fields)
|
927
|
+
obj.save(update_fields=update_fields) # type: ignore[attr-defined]
|
838
928
|
else:
|
839
|
-
obj.save()
|
929
|
+
obj.save() # type: ignore[attr-defined]
|
840
930
|
return obj, False
|
841
931
|
|
842
|
-
def _extract_model_params(
|
932
|
+
def _extract_model_params(
|
933
|
+
self, defaults: dict[str, Any] | None, **kwargs: Any
|
934
|
+
) -> dict[str, Any]:
|
843
935
|
"""
|
844
936
|
Prepare `params` for creating a model instance based on the given
|
845
937
|
kwargs; for use by get_or_create().
|
@@ -847,11 +939,11 @@ class QuerySet:
|
|
847
939
|
defaults = defaults or {}
|
848
940
|
params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
|
849
941
|
params.update(defaults)
|
850
|
-
property_names = self.model.
|
942
|
+
property_names = self.model._model_meta._property_names
|
851
943
|
invalid_params = []
|
852
944
|
for param in params:
|
853
945
|
try:
|
854
|
-
self.model.
|
946
|
+
self.model._model_meta.get_field(param)
|
855
947
|
except FieldDoesNotExist:
|
856
948
|
# It's okay to use a model's property if it has a setter.
|
857
949
|
if not (param in property_names and getattr(self.model, param).fset):
|
@@ -859,39 +951,43 @@ class QuerySet:
|
|
859
951
|
if invalid_params:
|
860
952
|
raise FieldError(
|
861
953
|
"Invalid field name(s) for model {}: '{}'.".format(
|
862
|
-
self.model.
|
954
|
+
self.model.model_options.object_name,
|
863
955
|
"', '".join(sorted(invalid_params)),
|
864
956
|
)
|
865
957
|
)
|
866
958
|
return params
|
867
959
|
|
868
|
-
def first(self):
|
960
|
+
def first(self) -> T | None:
|
869
961
|
"""Return the first object of a query or None if no match is found."""
|
870
962
|
for obj in self[:1]:
|
871
963
|
return obj
|
964
|
+
return None
|
872
965
|
|
873
|
-
def last(self):
|
966
|
+
def last(self) -> T | None:
|
874
967
|
"""Return the last object of a query or None if no match is found."""
|
875
968
|
queryset = self.reverse()
|
876
969
|
for obj in queryset[:1]:
|
877
970
|
return obj
|
971
|
+
return None
|
878
972
|
|
879
|
-
def in_bulk(
|
973
|
+
def in_bulk(
|
974
|
+
self, id_list: list[Any] | None = None, *, field_name: str = "id"
|
975
|
+
) -> dict[Any, T]:
|
880
976
|
"""
|
881
977
|
Return a dictionary mapping each of the given IDs to the object with
|
882
978
|
that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
|
883
979
|
"""
|
884
980
|
if self.sql_query.is_sliced:
|
885
981
|
raise TypeError("Cannot use 'limit' or 'offset' with in_bulk().")
|
886
|
-
|
982
|
+
meta = self.model._model_meta
|
887
983
|
unique_fields = [
|
888
984
|
constraint.fields[0]
|
889
|
-
for constraint in
|
985
|
+
for constraint in self.model.model_options.total_unique_constraints
|
890
986
|
if len(constraint.fields) == 1
|
891
987
|
]
|
892
988
|
if (
|
893
989
|
field_name != "id"
|
894
|
-
and not
|
990
|
+
and not meta.get_field(field_name).primary_key
|
895
991
|
and field_name not in unique_fields
|
896
992
|
and self.sql_query.distinct_fields != (field_name,)
|
897
993
|
):
|
@@ -903,21 +999,21 @@ class QuerySet:
|
|
903
999
|
return {}
|
904
1000
|
filter_key = f"{field_name}__in"
|
905
1001
|
batch_size = db_connection.features.max_query_params
|
906
|
-
|
1002
|
+
id_list_tuple = tuple(id_list)
|
907
1003
|
# If the database has a limit on the number of query parameters
|
908
1004
|
# (e.g. SQLite), retrieve objects in batches if necessary.
|
909
|
-
if batch_size and batch_size < len(
|
910
|
-
qs = ()
|
911
|
-
for offset in range(0, len(
|
912
|
-
batch =
|
1005
|
+
if batch_size and batch_size < len(id_list_tuple):
|
1006
|
+
qs: tuple[T, ...] = ()
|
1007
|
+
for offset in range(0, len(id_list_tuple), batch_size):
|
1008
|
+
batch = id_list_tuple[offset : offset + batch_size]
|
913
1009
|
qs += tuple(self.filter(**{filter_key: batch}))
|
914
1010
|
else:
|
915
|
-
qs = self.filter(**{filter_key:
|
1011
|
+
qs = self.filter(**{filter_key: id_list_tuple})
|
916
1012
|
else:
|
917
1013
|
qs = self._chain()
|
918
1014
|
return {getattr(obj, field_name): obj for obj in qs}
|
919
1015
|
|
920
|
-
def delete(self):
|
1016
|
+
def delete(self) -> tuple[int, dict[str, int]]:
|
921
1017
|
"""Delete the records in the current QuerySet."""
|
922
1018
|
self._not_support_combined_queries("delete")
|
923
1019
|
if self.sql_query.is_sliced:
|
@@ -949,7 +1045,7 @@ class QuerySet:
|
|
949
1045
|
self._result_cache = None
|
950
1046
|
return deleted, _rows_count
|
951
1047
|
|
952
|
-
def _raw_delete(self):
|
1048
|
+
def _raw_delete(self) -> int:
|
953
1049
|
"""
|
954
1050
|
Delete objects found from the given queryset in single direct SQL
|
955
1051
|
query. No signals are sent and there is no protection for cascades.
|
@@ -962,7 +1058,7 @@ class QuerySet:
|
|
962
1058
|
return cursor.rowcount
|
963
1059
|
return 0
|
964
1060
|
|
965
|
-
def update(self, **kwargs):
|
1061
|
+
def update(self, **kwargs: Any) -> int:
|
966
1062
|
"""
|
967
1063
|
Update all elements in the current QuerySet, setting all the given
|
968
1064
|
fields to the appropriate values.
|
@@ -1001,7 +1097,7 @@ class QuerySet:
|
|
1001
1097
|
self._result_cache = None
|
1002
1098
|
return rows
|
1003
1099
|
|
1004
|
-
def _update(self, values):
|
1100
|
+
def _update(self, values: list[tuple[Field, Any, Any]]) -> int:
|
1005
1101
|
"""
|
1006
1102
|
A version of update() that accepts field objects instead of field names.
|
1007
1103
|
Used primarily for model saving and not intended for use by general
|
@@ -1017,7 +1113,7 @@ class QuerySet:
|
|
1017
1113
|
self._result_cache = None
|
1018
1114
|
return query.get_compiler().execute_sql(CURSOR)
|
1019
1115
|
|
1020
|
-
def exists(self):
|
1116
|
+
def exists(self) -> bool:
|
1021
1117
|
"""
|
1022
1118
|
Return True if the QuerySet would have any results, False otherwise.
|
1023
1119
|
"""
|
@@ -1025,7 +1121,7 @@ class QuerySet:
|
|
1025
1121
|
return self.sql_query.has_results()
|
1026
1122
|
return bool(self._result_cache)
|
1027
1123
|
|
1028
|
-
def contains(self, obj):
|
1124
|
+
def contains(self, obj: T) -> bool:
|
1029
1125
|
"""
|
1030
1126
|
Return True if the QuerySet contains the provided obj,
|
1031
1127
|
False otherwise.
|
@@ -1040,18 +1136,18 @@ class QuerySet:
|
|
1040
1136
|
return False
|
1041
1137
|
except AttributeError:
|
1042
1138
|
raise TypeError("'obj' must be a model instance.")
|
1043
|
-
if obj.id is None:
|
1139
|
+
if obj.id is None: # type: ignore[attr-defined]
|
1044
1140
|
raise ValueError("QuerySet.contains() cannot be used on unsaved objects.")
|
1045
1141
|
if self._result_cache is not None:
|
1046
1142
|
return obj in self._result_cache
|
1047
|
-
return self.filter(id=obj.id).exists()
|
1143
|
+
return self.filter(id=obj.id).exists() # type: ignore[attr-defined]
|
1048
1144
|
|
1049
|
-
def _prefetch_related_objects(self):
|
1145
|
+
def _prefetch_related_objects(self) -> None:
|
1050
1146
|
# This method can only be called once the result cache has been filled.
|
1051
1147
|
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
|
1052
1148
|
self._prefetch_done = True
|
1053
1149
|
|
1054
|
-
def explain(self, *, format=None, **options):
|
1150
|
+
def explain(self, *, format: str | None = None, **options: Any) -> str:
|
1055
1151
|
"""
|
1056
1152
|
Runs an EXPLAIN on the SQL query this QuerySet would perform, and
|
1057
1153
|
returns the results.
|
@@ -1062,7 +1158,12 @@ class QuerySet:
|
|
1062
1158
|
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
|
1063
1159
|
##################################################
|
1064
1160
|
|
1065
|
-
def raw(
|
1161
|
+
def raw(
|
1162
|
+
self,
|
1163
|
+
raw_query: str,
|
1164
|
+
params: tuple[Any, ...] = (),
|
1165
|
+
translations: dict[str, str] | None = None,
|
1166
|
+
) -> RawQuerySet:
|
1066
1167
|
qs = RawQuerySet(
|
1067
1168
|
raw_query,
|
1068
1169
|
model=self.model,
|
@@ -1072,21 +1173,23 @@ class QuerySet:
|
|
1072
1173
|
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
|
1073
1174
|
return qs
|
1074
1175
|
|
1075
|
-
def _values(self, *fields, **expressions):
|
1176
|
+
def _values(self, *fields: str, **expressions: Any) -> QuerySet[Any]:
|
1076
1177
|
clone = self._chain()
|
1077
1178
|
if expressions:
|
1078
1179
|
clone = clone.annotate(**expressions)
|
1079
|
-
clone._fields = fields
|
1180
|
+
clone._fields = fields # type: ignore[assignment]
|
1080
1181
|
clone.sql_query.set_values(fields)
|
1081
1182
|
return clone
|
1082
1183
|
|
1083
|
-
def values(self, *fields, **expressions):
|
1184
|
+
def values(self, *fields: str, **expressions: Any) -> QuerySet[Any]:
|
1084
1185
|
fields += tuple(expressions)
|
1085
1186
|
clone = self._values(*fields, **expressions)
|
1086
1187
|
clone._iterable_class = ValuesIterable
|
1087
1188
|
return clone
|
1088
1189
|
|
1089
|
-
def values_list(
|
1190
|
+
def values_list(
|
1191
|
+
self, *fields: str, flat: bool = False, named: bool = False
|
1192
|
+
) -> QuerySet[Any]:
|
1090
1193
|
if flat and named:
|
1091
1194
|
raise TypeError("'flat' and 'named' can't be used together.")
|
1092
1195
|
if flat and len(fields) > 1:
|
@@ -1124,7 +1227,7 @@ class QuerySet:
|
|
1124
1227
|
)
|
1125
1228
|
return clone
|
1126
1229
|
|
1127
|
-
def dates(self, field_name, kind, order="ASC"):
|
1230
|
+
def dates(self, field_name: str, kind: str, order: str = "ASC") -> QuerySet[Any]:
|
1128
1231
|
"""
|
1129
1232
|
Return a list of date objects representing all available dates for
|
1130
1233
|
the given field_name, scoped to 'kind'.
|
@@ -1144,7 +1247,13 @@ class QuerySet:
|
|
1144
1247
|
.order_by(("-" if order == "DESC" else "") + "datefield")
|
1145
1248
|
)
|
1146
1249
|
|
1147
|
-
def datetimes(
|
1250
|
+
def datetimes(
|
1251
|
+
self,
|
1252
|
+
field_name: str,
|
1253
|
+
kind: str,
|
1254
|
+
order: str = "ASC",
|
1255
|
+
tzinfo: tzinfo | None = None,
|
1256
|
+
) -> QuerySet[Any]:
|
1148
1257
|
"""
|
1149
1258
|
Return a list of datetime objects representing all available
|
1150
1259
|
datetimes for the given field_name, scoped to 'kind'.
|
@@ -1176,7 +1285,7 @@ class QuerySet:
|
|
1176
1285
|
.order_by(("-" if order == "DESC" else "") + "datetimefield")
|
1177
1286
|
)
|
1178
1287
|
|
1179
|
-
def none(self):
|
1288
|
+
def none(self) -> QuerySet[T]:
|
1180
1289
|
"""Return an empty QuerySet."""
|
1181
1290
|
clone = self._chain()
|
1182
1291
|
clone.sql_query.set_empty()
|
@@ -1186,14 +1295,14 @@ class QuerySet:
|
|
1186
1295
|
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
|
1187
1296
|
##################################################################
|
1188
1297
|
|
1189
|
-
def all(self):
|
1298
|
+
def all(self) -> Self:
|
1190
1299
|
"""
|
1191
1300
|
Return a new QuerySet that is a copy of the current one. This allows a
|
1192
1301
|
QuerySet to proxy for a model queryset in some cases.
|
1193
1302
|
"""
|
1194
1303
|
return self._chain()
|
1195
1304
|
|
1196
|
-
def filter(self, *args, **kwargs):
|
1305
|
+
def filter(self, *args: Any, **kwargs: Any) -> Self:
|
1197
1306
|
"""
|
1198
1307
|
Return a new QuerySet instance with the args ANDed to the existing
|
1199
1308
|
set.
|
@@ -1201,7 +1310,7 @@ class QuerySet:
|
|
1201
1310
|
self._not_support_combined_queries("filter")
|
1202
1311
|
return self._filter_or_exclude(False, args, kwargs)
|
1203
1312
|
|
1204
|
-
def exclude(self, *args, **kwargs):
|
1313
|
+
def exclude(self, *args: Any, **kwargs: Any) -> Self:
|
1205
1314
|
"""
|
1206
1315
|
Return a new QuerySet instance with NOT (args) ANDed to the existing
|
1207
1316
|
set.
|
@@ -1209,7 +1318,9 @@ class QuerySet:
|
|
1209
1318
|
self._not_support_combined_queries("exclude")
|
1210
1319
|
return self._filter_or_exclude(True, args, kwargs)
|
1211
1320
|
|
1212
|
-
def _filter_or_exclude(
|
1321
|
+
def _filter_or_exclude(
|
1322
|
+
self, negate: bool, args: tuple[Any, ...], kwargs: dict[str, Any]
|
1323
|
+
) -> Self:
|
1213
1324
|
if (args or kwargs) and self.sql_query.is_sliced:
|
1214
1325
|
raise TypeError("Cannot filter a query once a slice has been taken.")
|
1215
1326
|
clone = self._chain()
|
@@ -1220,13 +1331,15 @@ class QuerySet:
|
|
1220
1331
|
clone._filter_or_exclude_inplace(negate, args, kwargs)
|
1221
1332
|
return clone
|
1222
1333
|
|
1223
|
-
def _filter_or_exclude_inplace(
|
1334
|
+
def _filter_or_exclude_inplace(
|
1335
|
+
self, negate: bool, args: tuple[Any, ...], kwargs: dict[str, Any]
|
1336
|
+
) -> None:
|
1224
1337
|
if negate:
|
1225
|
-
self._query.add_q(~Q(*args, **kwargs))
|
1338
|
+
self._query.add_q(~Q(*args, **kwargs)) # type: ignore[unsupported-operator]
|
1226
1339
|
else:
|
1227
1340
|
self._query.add_q(Q(*args, **kwargs))
|
1228
1341
|
|
1229
|
-
def complex_filter(self, filter_obj):
|
1342
|
+
def complex_filter(self, filter_obj: Q | dict[str, Any]) -> QuerySet[T]:
|
1230
1343
|
"""
|
1231
1344
|
Return a new QuerySet instance with filter_obj added to the filters.
|
1232
1345
|
|
@@ -1243,7 +1356,9 @@ class QuerySet:
|
|
1243
1356
|
else:
|
1244
1357
|
return self._filter_or_exclude(False, args=(), kwargs=filter_obj)
|
1245
1358
|
|
1246
|
-
def _combinator_query(
|
1359
|
+
def _combinator_query(
|
1360
|
+
self, combinator: str, *other_qs: QuerySet[T], all: bool = False
|
1361
|
+
) -> QuerySet[T]:
|
1247
1362
|
# Clone the query to inherit the select list and everything
|
1248
1363
|
clone = self._chain()
|
1249
1364
|
# Clear limits and ordering so they can be reapplied
|
@@ -1256,7 +1371,7 @@ class QuerySet:
|
|
1256
1371
|
clone.sql_query.combinator_all = all
|
1257
1372
|
return clone
|
1258
1373
|
|
1259
|
-
def union(self, *other_qs, all=False):
|
1374
|
+
def union(self, *other_qs: QuerySet[T], all: bool = False) -> QuerySet[T]:
|
1260
1375
|
# If the query is an EmptyQuerySet, combine all nonempty querysets.
|
1261
1376
|
if isinstance(self, EmptyQuerySet):
|
1262
1377
|
qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)]
|
@@ -1267,7 +1382,7 @@ class QuerySet:
|
|
1267
1382
|
return qs[0]._combinator_query("union", *qs[1:], all=all)
|
1268
1383
|
return self._combinator_query("union", *other_qs, all=all)
|
1269
1384
|
|
1270
|
-
def intersection(self, *other_qs):
|
1385
|
+
def intersection(self, *other_qs: QuerySet[T]) -> QuerySet[T]:
|
1271
1386
|
# If any query is an EmptyQuerySet, return it.
|
1272
1387
|
if isinstance(self, EmptyQuerySet):
|
1273
1388
|
return self
|
@@ -1276,13 +1391,19 @@ class QuerySet:
|
|
1276
1391
|
return other
|
1277
1392
|
return self._combinator_query("intersection", *other_qs)
|
1278
1393
|
|
1279
|
-
def difference(self, *other_qs):
|
1394
|
+
def difference(self, *other_qs: QuerySet[T]) -> QuerySet[T]:
|
1280
1395
|
# If the query is an EmptyQuerySet, return it.
|
1281
1396
|
if isinstance(self, EmptyQuerySet):
|
1282
1397
|
return self
|
1283
1398
|
return self._combinator_query("difference", *other_qs)
|
1284
1399
|
|
1285
|
-
def select_for_update(
|
1400
|
+
def select_for_update(
|
1401
|
+
self,
|
1402
|
+
nowait: bool = False,
|
1403
|
+
skip_locked: bool = False,
|
1404
|
+
of: tuple[str, ...] = (),
|
1405
|
+
no_key: bool = False,
|
1406
|
+
) -> QuerySet[T]:
|
1286
1407
|
"""
|
1287
1408
|
Return a new QuerySet instance that will select objects with a
|
1288
1409
|
FOR UPDATE lock.
|
@@ -1298,7 +1419,7 @@ class QuerySet:
|
|
1298
1419
|
obj.sql_query.select_for_no_key_update = no_key
|
1299
1420
|
return obj
|
1300
1421
|
|
1301
|
-
def select_related(self, *fields):
|
1422
|
+
def select_related(self, *fields: str | None) -> Self:
|
1302
1423
|
"""
|
1303
1424
|
Return a new QuerySet instance that will select related objects.
|
1304
1425
|
|
@@ -1322,7 +1443,7 @@ class QuerySet:
|
|
1322
1443
|
obj.sql_query.select_related = True
|
1323
1444
|
return obj
|
1324
1445
|
|
1325
|
-
def prefetch_related(self, *lookups):
|
1446
|
+
def prefetch_related(self, *lookups: str | Prefetch | None) -> Self:
|
1326
1447
|
"""
|
1327
1448
|
Return a new QuerySet instance that will prefetch the specified
|
1328
1449
|
Many-To-One and Many-To-Many related objects when the QuerySet is
|
@@ -1337,17 +1458,20 @@ class QuerySet:
|
|
1337
1458
|
clone._prefetch_related_lookups = ()
|
1338
1459
|
else:
|
1339
1460
|
for lookup in lookups:
|
1461
|
+
lookup_str: str
|
1340
1462
|
if isinstance(lookup, Prefetch):
|
1341
|
-
|
1342
|
-
|
1343
|
-
|
1463
|
+
lookup_str = lookup.prefetch_to
|
1464
|
+
else:
|
1465
|
+
lookup_str = lookup # type: ignore[assignment]
|
1466
|
+
lookup_str = lookup_str.split(LOOKUP_SEP, 1)[0]
|
1467
|
+
if lookup_str in self.sql_query._filtered_relations:
|
1344
1468
|
raise ValueError(
|
1345
1469
|
"prefetch_related() is not supported with FilteredRelation."
|
1346
1470
|
)
|
1347
1471
|
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
|
1348
1472
|
return clone
|
1349
1473
|
|
1350
|
-
def annotate(self, *args, **kwargs):
|
1474
|
+
def annotate(self, *args: Any, **kwargs: Any) -> Self:
|
1351
1475
|
"""
|
1352
1476
|
Return a query set in which the returned objects have been annotated
|
1353
1477
|
with extra data or aggregations.
|
@@ -1355,14 +1479,16 @@ class QuerySet:
|
|
1355
1479
|
self._not_support_combined_queries("annotate")
|
1356
1480
|
return self._annotate(args, kwargs, select=True)
|
1357
1481
|
|
1358
|
-
def alias(self, *args, **kwargs):
|
1482
|
+
def alias(self, *args: Any, **kwargs: Any) -> Self:
|
1359
1483
|
"""
|
1360
1484
|
Return a query set with added aliases for extra data or aggregations.
|
1361
1485
|
"""
|
1362
1486
|
self._not_support_combined_queries("alias")
|
1363
1487
|
return self._annotate(args, kwargs, select=False)
|
1364
1488
|
|
1365
|
-
def _annotate(
|
1489
|
+
def _annotate(
|
1490
|
+
self, args: tuple[Any, ...], kwargs: dict[str, Any], select: bool = True
|
1491
|
+
) -> Self:
|
1366
1492
|
self._validate_values_are_expressions(
|
1367
1493
|
args + tuple(kwargs.values()), method_name="annotate"
|
1368
1494
|
)
|
@@ -1388,7 +1514,7 @@ class QuerySet:
|
|
1388
1514
|
(field.name, field.attname)
|
1389
1515
|
if hasattr(field, "attname")
|
1390
1516
|
else (field.name,)
|
1391
|
-
for field in self.model.
|
1517
|
+
for field in self.model._model_meta.get_fields()
|
1392
1518
|
)
|
1393
1519
|
)
|
1394
1520
|
|
@@ -1415,7 +1541,7 @@ class QuerySet:
|
|
1415
1541
|
|
1416
1542
|
return clone
|
1417
1543
|
|
1418
|
-
def order_by(self, *field_names):
|
1544
|
+
def order_by(self, *field_names: str) -> Self:
|
1419
1545
|
"""Return a new QuerySet instance with the ordering changed."""
|
1420
1546
|
if self.sql_query.is_sliced:
|
1421
1547
|
raise TypeError("Cannot reorder a query once a slice has been taken.")
|
@@ -1424,7 +1550,7 @@ class QuerySet:
|
|
1424
1550
|
obj.sql_query.add_ordering(*field_names)
|
1425
1551
|
return obj
|
1426
1552
|
|
1427
|
-
def distinct(self, *field_names):
|
1553
|
+
def distinct(self, *field_names: str) -> Self:
|
1428
1554
|
"""
|
1429
1555
|
Return a new QuerySet instance that will select only distinct results.
|
1430
1556
|
"""
|
@@ -1439,13 +1565,13 @@ class QuerySet:
|
|
1439
1565
|
|
1440
1566
|
def extra(
|
1441
1567
|
self,
|
1442
|
-
select=None,
|
1443
|
-
where=None,
|
1444
|
-
params=None,
|
1445
|
-
tables=None,
|
1446
|
-
order_by=None,
|
1447
|
-
select_params=None,
|
1448
|
-
):
|
1568
|
+
select: dict[str, str] | None = None,
|
1569
|
+
where: list[str] | None = None,
|
1570
|
+
params: list[Any] | None = None,
|
1571
|
+
tables: list[str] | None = None,
|
1572
|
+
order_by: list[str] | None = None,
|
1573
|
+
select_params: list[Any] | None = None,
|
1574
|
+
) -> QuerySet[T]:
|
1449
1575
|
"""Add extra SQL fragments to the query."""
|
1450
1576
|
self._not_support_combined_queries("extra")
|
1451
1577
|
if self.sql_query.is_sliced:
|
@@ -1456,7 +1582,7 @@ class QuerySet:
|
|
1456
1582
|
)
|
1457
1583
|
return clone
|
1458
1584
|
|
1459
|
-
def reverse(self):
|
1585
|
+
def reverse(self) -> QuerySet[T]:
|
1460
1586
|
"""Reverse the ordering of the QuerySet."""
|
1461
1587
|
if self.sql_query.is_sliced:
|
1462
1588
|
raise TypeError("Cannot reverse a query once a slice has been taken.")
|
@@ -1464,7 +1590,7 @@ class QuerySet:
|
|
1464
1590
|
clone.sql_query.standard_ordering = not clone.sql_query.standard_ordering
|
1465
1591
|
return clone
|
1466
1592
|
|
1467
|
-
def defer(self, *fields):
|
1593
|
+
def defer(self, *fields: str | None) -> QuerySet[T]:
|
1468
1594
|
"""
|
1469
1595
|
Defer the loading of data for certain fields until they are accessed.
|
1470
1596
|
Add the set of deferred fields to any existing set of deferred fields.
|
@@ -1481,7 +1607,7 @@ class QuerySet:
|
|
1481
1607
|
clone.sql_query.add_deferred_loading(fields)
|
1482
1608
|
return clone
|
1483
1609
|
|
1484
|
-
def only(self, *fields):
|
1610
|
+
def only(self, *fields: str) -> QuerySet[T]:
|
1485
1611
|
"""
|
1486
1612
|
Essentially, the opposite of defer(). Only the fields passed into this
|
1487
1613
|
method and that are not already specified as deferred are loaded
|
@@ -1507,7 +1633,7 @@ class QuerySet:
|
|
1507
1633
|
###################################
|
1508
1634
|
|
1509
1635
|
@property
|
1510
|
-
def ordered(self):
|
1636
|
+
def ordered(self) -> bool:
|
1511
1637
|
"""
|
1512
1638
|
Return True if the QuerySet is ordered -- i.e. has an order_by()
|
1513
1639
|
clause or a default ordering on the model (or is empty).
|
@@ -1518,7 +1644,7 @@ class QuerySet:
|
|
1518
1644
|
return True
|
1519
1645
|
elif (
|
1520
1646
|
self.sql_query.default_ordering
|
1521
|
-
and self.sql_query.
|
1647
|
+
and self.sql_query.get_model_meta().ordering
|
1522
1648
|
and
|
1523
1649
|
# A default ordering doesn't affect GROUP BY queries.
|
1524
1650
|
not self.sql_query.group_by
|
@@ -1533,14 +1659,14 @@ class QuerySet:
|
|
1533
1659
|
|
1534
1660
|
def _insert(
|
1535
1661
|
self,
|
1536
|
-
objs,
|
1537
|
-
fields,
|
1538
|
-
returning_fields=None,
|
1539
|
-
raw=False,
|
1540
|
-
on_conflict=None,
|
1541
|
-
update_fields=None,
|
1542
|
-
unique_fields=None,
|
1543
|
-
):
|
1662
|
+
objs: list[T],
|
1663
|
+
fields: list[Field],
|
1664
|
+
returning_fields: list[Field] | None = None,
|
1665
|
+
raw: bool = False,
|
1666
|
+
on_conflict: OnConflict | None = None,
|
1667
|
+
update_fields: list[Field] | None = None,
|
1668
|
+
unique_fields: list[Field] | None = None,
|
1669
|
+
) -> list[tuple[Any, ...]] | None:
|
1544
1670
|
"""
|
1545
1671
|
Insert a new record for the given model. This provides an interface to
|
1546
1672
|
the InsertQuery class and is how Model.save() is implemented.
|
@@ -1548,7 +1674,7 @@ class QuerySet:
|
|
1548
1674
|
self._for_write = True
|
1549
1675
|
query = sql.InsertQuery(
|
1550
1676
|
self.model,
|
1551
|
-
on_conflict=on_conflict,
|
1677
|
+
on_conflict=on_conflict.value if on_conflict else None, # type: ignore[attr-defined]
|
1552
1678
|
update_fields=update_fields,
|
1553
1679
|
unique_fields=unique_fields,
|
1554
1680
|
)
|
@@ -1557,13 +1683,13 @@ class QuerySet:
|
|
1557
1683
|
|
1558
1684
|
def _batched_insert(
|
1559
1685
|
self,
|
1560
|
-
objs,
|
1561
|
-
fields,
|
1562
|
-
batch_size,
|
1563
|
-
on_conflict=None,
|
1564
|
-
update_fields=None,
|
1565
|
-
unique_fields=None,
|
1566
|
-
):
|
1686
|
+
objs: list[T],
|
1687
|
+
fields: list[Field],
|
1688
|
+
batch_size: int,
|
1689
|
+
on_conflict: OnConflict | None = None,
|
1690
|
+
update_fields: list[Field] | None = None,
|
1691
|
+
unique_fields: list[Field] | None = None,
|
1692
|
+
) -> list[tuple[Any, ...]]:
|
1567
1693
|
"""
|
1568
1694
|
Helper method for bulk_create() to insert objs one batch at a time.
|
1569
1695
|
"""
|
@@ -1578,7 +1704,7 @@ class QuerySet:
|
|
1578
1704
|
self._insert(
|
1579
1705
|
item,
|
1580
1706
|
fields=fields,
|
1581
|
-
returning_fields=self.model.
|
1707
|
+
returning_fields=self.model._model_meta.db_returning_fields,
|
1582
1708
|
)
|
1583
1709
|
)
|
1584
1710
|
else:
|
@@ -1591,7 +1717,7 @@ class QuerySet:
|
|
1591
1717
|
)
|
1592
1718
|
return inserted_rows
|
1593
1719
|
|
1594
|
-
def _chain(self):
|
1720
|
+
def _chain(self) -> Self:
|
1595
1721
|
"""
|
1596
1722
|
Return a copy of the current QuerySet that's ready for another
|
1597
1723
|
operation.
|
@@ -1602,12 +1728,12 @@ class QuerySet:
|
|
1602
1728
|
obj._sticky_filter = False
|
1603
1729
|
return obj
|
1604
1730
|
|
1605
|
-
def _clone(self):
|
1731
|
+
def _clone(self) -> Self:
|
1606
1732
|
"""
|
1607
1733
|
Return a copy of the current QuerySet. A lightweight alternative
|
1608
1734
|
to deepcopy().
|
1609
1735
|
"""
|
1610
|
-
c = self.__class__(
|
1736
|
+
c = self.__class__.from_model(
|
1611
1737
|
model=self.model,
|
1612
1738
|
query=self.sql_query.chain(),
|
1613
1739
|
)
|
```diff
@@ -1619,13 +1745,13 @@ class QuerySet:
         c._fields = self._fields
         return c
 
-    def _fetch_all(self):
+    def _fetch_all(self) -> None:
         if self._result_cache is None:
             self._result_cache = list(self._iterable_class(self))
         if self._prefetch_related_lookups and not self._prefetch_done:
             self._prefetch_related_objects()
 
-    def _next_is_sticky(self):
+    def _next_is_sticky(self) -> QuerySet[T]:
         """
         Indicate that the next filter call and the one following that should
         be treated as a single filter. This is only important when it comes to
```
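`_fetch_all()` is the memoization point for the whole class: the first consumer materializes `_result_cache`, and every later `len()`, `bool()`, or iteration reuses it. A stripped-down, runnable sketch of the idiom with a stand-in data source:

```python
from collections.abc import Iterator


class CachedQuery:
    def __init__(self, source: list[str]) -> None:
        self._source = source  # stand-in for actual SQL execution
        self._result_cache: list[str] | None = None

    def _fetch_all(self) -> None:
        if self._result_cache is None:
            # Runs at most once; afterwards every consumer reads the cache.
            self._result_cache = list(self._source)

    def __iter__(self) -> Iterator[str]:
        self._fetch_all()
        return iter(self._result_cache)  # non-None after _fetch_all()


rows = CachedQuery(["a", "b"])
assert list(rows) == list(rows)  # the second pass never re-reads the source
```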
```diff
@@ -1639,7 +1765,7 @@ class QuerySet:
         self._sticky_filter = True
         return self
 
-    def _merge_sanity_check(self, other):
+    def _merge_sanity_check(self, other: QuerySet[T]) -> None:
         """Check that two QuerySet classes may be merged."""
         if self._fields is not None and (
             set(self.sql_query.values_select) != set(other.sql_query.values_select)
@@ -1651,14 +1777,14 @@ class QuerySet:
                 f"Merging '{self.__class__.__name__}' classes must involve the same values in each case."
             )
 
-    def _merge_known_related_objects(self, other):
+    def _merge_known_related_objects(self, other: QuerySet[T]) -> None:
         """
         Keep track of all known related objects from either QuerySet instance.
         """
         for field, objects in other._known_related_objects.items():
             self._known_related_objects.setdefault(field, {}).update(objects)
 
-    def resolve_expression(self, *args, **kwargs):
+    def resolve_expression(self, *args: Any, **kwargs: Any) -> sql.Query:
         if self._fields and len(self._fields) > 1:
             # values() queryset can only be used as nested queries
             # if they are set up to select only a single field.
@@ -1666,7 +1792,7 @@ class QuerySet:
         query = self.sql_query.resolve_expression(*args, **kwargs)
         return query
 
-    def _has_filters(self):
+    def _has_filters(self) -> bool:
         """
         Check if this QuerySet has any filtering going on. This isn't
         equivalent with checking if all objects are present in results, for
@@ -1675,7 +1801,9 @@ class QuerySet:
         return self.sql_query.has_filters()
 
     @staticmethod
-    def _validate_values_are_expressions(
+    def _validate_values_are_expressions(
+        values: tuple[Any, ...], method_name: str
+    ) -> None:
         invalid_args = sorted(
             str(arg) for arg in values if not hasattr(arg, "resolve_expression")
         )
@@ -1687,19 +1815,19 @@ class QuerySet:
             )
         )
 
-    def _not_support_combined_queries(self, operation_name):
+    def _not_support_combined_queries(self, operation_name: str) -> None:
         if self.sql_query.combinator:
             raise NotSupportedError(
                 f"Calling QuerySet.{operation_name}() after {self.sql_query.combinator}() is not supported."
             )
 
-    def _check_operator_queryset(self, other, operator_):
+    def _check_operator_queryset(self, other: QuerySet[T], operator_: str) -> None:
         if self.sql_query.combinator or other.sql_query.combinator:
             raise TypeError(f"Cannot use {operator_} operator with combined queryset.")
 
 
 class InstanceCheckMeta(type):
-    def __instancecheck__(self, instance):
+    def __instancecheck__(self, instance: object) -> bool:
         return isinstance(instance, QuerySet) and instance.sql_query.is_empty()
 
 
```
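Both guards fire once `sql_query.combinator` is set, i.e. after `union()` and friends, so operations that cannot be expressed on a combined query fail fast. A reduced sketch of the guard with stand-ins for the query object and error class:

```python
class NotSupportedError(Exception):
    """Stand-in for the NotSupportedError plain.models raises."""


class SQLQuerySketch:
    combinator: str | None = None


def not_support_combined_queries(sql_query: SQLQuerySketch, operation_name: str) -> None:
    if sql_query.combinator:
        raise NotSupportedError(
            f"Calling QuerySet.{operation_name}() after {sql_query.combinator}() is not supported."
        )


q = SQLQuerySketch()
q.combinator = "union"
try:
    not_support_combined_queries(q, "filter")
except NotSupportedError:
    pass  # filter() after union() is rejected, as in the guard above
```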
```diff
@@ -1709,7 +1837,7 @@ class EmptyQuerySet(metaclass=InstanceCheckMeta):
     isinstance(qs.none(), EmptyQuerySet) -> True
     """
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any):
         raise TypeError("EmptyQuerySet can't be instantiated")
 
 
@@ -1721,26 +1849,30 @@ class RawQuerySet:
 
     def __init__(
         self,
-        raw_query,
-        model=None,
-        query=None,
-        params=(),
-        translations=None,
+        raw_query: str,
+        model: type[Model] | None = None,
+        query: sql.RawQuery | None = None,
+        params: tuple[Any, ...] = (),
+        translations: dict[str, str] | None = None,
     ):
         self.raw_query = raw_query
         self.model = model
         self.sql_query = query or sql.RawQuery(sql=raw_query, params=params)
         self.params = params
         self.translations = translations or {}
-        self._result_cache = None
-        self._prefetch_related_lookups = ()
+        self._result_cache: list[Model] | None = None
+        self._prefetch_related_lookups: tuple[Any, ...] = ()
         self._prefetch_done = False
 
-    def resolve_model_init_order(
+    def resolve_model_init_order(
+        self,
+    ) -> tuple[list[str], list[int], list[tuple[str, int]]]:
         """Resolve the init field names and value positions."""
         converter = db_connection.introspection.identifier_converter
         model_init_fields = [
-            f
+            f
+            for f in self.model._model_meta.fields
+            if converter(f.column) in self.columns
         ]
         annotation_fields = [
             (column, pos)
```
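The new annotation pins `translations` down as a `dict[str, str]` mapping result-set column names to model attribute names, presumably applied when column names are resolved. A reduced, runnable sketch of what that remapping amounts to, with unknown columns passing through unchanged:

```python
def apply_translations(columns: list[str], translations: dict[str, str]) -> list[str]:
    # Query columns with an entry in `translations` are renamed to the
    # model attribute; everything else passes through unchanged.
    return [translations.get(column, column) for column in columns]


assert apply_translations(
    ["id", "author_name"], {"author_name": "name"}
) == ["id", "name"]
```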
```diff
@@ -1753,7 +1885,7 @@ class RawQuerySet:
         model_init_names = [f.attname for f in model_init_fields]
         return model_init_names, model_init_order, annotation_fields
 
-    def prefetch_related(self, *lookups):
+    def prefetch_related(self, *lookups: str | Prefetch | None) -> RawQuerySet:
         """Same as QuerySet.prefetch_related()"""
         clone = self._clone()
         if lookups == (None,):
@@ -1762,11 +1894,11 @@ class RawQuerySet:
             clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
         return clone
 
-    def _prefetch_related_objects(self):
+    def _prefetch_related_objects(self) -> None:
         prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
         self._prefetch_done = True
 
-    def _clone(self):
+    def _clone(self) -> RawQuerySet:
         """Same as QuerySet._clone()"""
         c = self.__class__(
             self.raw_query,
@@ -1778,35 +1910,35 @@ class RawQuerySet:
         c._prefetch_related_lookups = self._prefetch_related_lookups[:]
         return c
 
-    def _fetch_all(self):
+    def _fetch_all(self) -> None:
         if self._result_cache is None:
             self._result_cache = list(self.iterator())
         if self._prefetch_related_lookups and not self._prefetch_done:
             self._prefetch_related_objects()
 
-    def __len__(self):
+    def __len__(self) -> int:
         self._fetch_all()
-        return len(self._result_cache)
+        return len(self._result_cache)  # type: ignore[arg-type]
 
-    def __bool__(self):
+    def __bool__(self) -> bool:
         self._fetch_all()
         return bool(self._result_cache)
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[Model]:
         self._fetch_all()
-        return iter(self._result_cache)
+        return iter(self._result_cache)  # type: ignore[arg-type]
 
-    def iterator(self):
+    def iterator(self) -> Iterator[Model]:
         yield from RawModelIterable(self)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"<{self.__class__.__name__}: {self.sql_query}>"
 
-    def __getitem__(self, k):
+    def __getitem__(self, k: int | slice) -> Model | list[Model]:
         return list(self)[k]
 
     @cached_property
-    def columns(self):
+    def columns(self) -> list[str]:
         """
         A list of model field names in the order they'll appear in the
         query results.
@@ -1824,18 +1956,23 @@ class RawQuerySet:
         return columns
 
     @cached_property
-    def model_fields(self):
+    def model_fields(self) -> dict[str, Field]:
         """A dict mapping column names to model field names."""
         converter = db_connection.introspection.identifier_converter
         model_fields = {}
-        for field in self.model.
+        for field in self.model._model_meta.fields:
             name, column = field.get_attname_column()
             model_fields[converter(column)] = field
         return model_fields
 
 
 class Prefetch:
-    def __init__(
+    def __init__(
+        self,
+        lookup: str,
+        queryset: QuerySet[Any] | None = None,
+        to_attr: str | None = None,
+    ):
         # `prefetch_through` is the path we traverse to perform the prefetch.
         self.prefetch_through = lookup
         # `prefetch_to` is the path to the attribute that stores the result.
```
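Per the annotations above, `Prefetch` takes a lookup string, an optional `QuerySet`, and an optional `to_attr`. The two paths it tracks differ exactly when `to_attr` is given: the traversal path keeps the lookup, while the storage path swaps in `to_attr` as its last segment. A runnable sketch of that split, based on the comments above:

```python
LOOKUP_SEP = "__"


class PrefetchSketch:
    def __init__(self, lookup: str, to_attr: str | None = None) -> None:
        # `prefetch_through` is traversed; `prefetch_to` is where results land.
        self.prefetch_through = lookup
        self.prefetch_to = lookup
        if to_attr:
            self.prefetch_to = LOOKUP_SEP.join(
                lookup.split(LOOKUP_SEP)[:-1] + [to_attr]
            )


p = PrefetchSketch("author__books", to_attr="published_books")
assert p.prefetch_through == "author__books"
assert p.prefetch_to == "author__published_books"
```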
```diff
@@ -1858,7 +1995,7 @@ class Prefetch:
         self.queryset = queryset
         self.to_attr = to_attr
 
-    def __getstate__(self):
+    def __getstate__(self) -> dict[str, Any]:
         obj_dict = self.__dict__.copy()
         if self.queryset is not None:
             queryset = self.queryset._chain()
@@ -1868,34 +2005,37 @@ class Prefetch:
             obj_dict["queryset"] = queryset
         return obj_dict
 
-    def add_prefix(self, prefix):
+    def add_prefix(self, prefix: str) -> None:
         self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
         self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to
 
-    def get_current_prefetch_to(self, level):
+    def get_current_prefetch_to(self, level: int) -> str:
         return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[: level + 1])
 
-    def get_current_to_attr(self, level):
+    def get_current_to_attr(self, level: int) -> tuple[str, bool]:
         parts = self.prefetch_to.split(LOOKUP_SEP)
         to_attr = parts[level]
         as_attr = self.to_attr and level == len(parts) - 1
         return to_attr, as_attr
 
-    def get_current_queryset(self, level):
+    def get_current_queryset(self, level: int) -> QuerySet[Any] | None:
         if self.get_current_prefetch_to(level) == self.prefetch_to:
             return self.queryset
         return None
 
-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, Prefetch):
             return NotImplemented
         return self.prefetch_to == other.prefetch_to
 
-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash((self.__class__, self.prefetch_to))
 
 
-def normalize_prefetch_lookups(
+def normalize_prefetch_lookups(
+    lookups: tuple[str | Prefetch, ...] | list[str | Prefetch],
+    prefix: str | None = None,
+) -> list[Prefetch]:
     """Normalize lookups into Prefetch objects."""
     ret = []
     for lookup in lookups:
```
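`normalize_prefetch_lookups()` coerces a mixed sequence of strings and `Prefetch` objects into a homogeneous `list[Prefetch]`, re-rooting each lookup under `prefix` when recursing into nested prefetches. A reduced, runnable sketch of that normalization:

```python
LOOKUP_SEP = "__"


class PrefetchSketch:
    def __init__(self, lookup: str) -> None:
        self.prefetch_through = lookup
        self.prefetch_to = lookup

    def add_prefix(self, prefix: str) -> None:
        self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
        self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to


def normalize(
    lookups: list[str | PrefetchSketch], prefix: str | None = None
) -> list[PrefetchSketch]:
    ret = []
    for lookup in lookups:
        if not isinstance(lookup, PrefetchSketch):
            lookup = PrefetchSketch(lookup)  # plain strings get wrapped
        if prefix:
            lookup.add_prefix(prefix)  # nested lookups are re-rooted
        ret.append(lookup)
    return ret


assert [p.prefetch_to for p in normalize(["books"], prefix="author")] == [
    "author__books"
]
```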
```diff
@@ -1907,7 +2047,9 @@ def normalize_prefetch_lookups(lookups, prefix=None):
     return ret
 
 
-def prefetch_related_objects(
+def prefetch_related_objects(
+    model_instances: list[Model], *related_lookups: str | Prefetch
+) -> None:
     """
     Populate prefetched object caches for a list of model instances based on
     the lookups/Prefetch instances given.
@@ -1923,7 +2065,7 @@ def prefetch_related_objects(model_instances, *related_lookups):
     auto_lookups = set()  # we add to this as we go through.
     followed_descriptors = set()  # recursion protection
 
-    all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
+    all_lookups = normalize_prefetch_lookups(reversed(related_lookups))  # type: ignore[arg-type]
     while all_lookups:
         lookup = all_lookups.pop()
         if lookup.prefetch_to in done_queries:
```
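The `type: ignore[arg-type]` exists because `reversed()` returns an iterator while `normalize_prefetch_lookups()` is annotated to accept a tuple or list; the function only iterates its argument, so the call is safe at runtime. A small runnable illustration of that mismatch:

```python
def takes_sequence(items: tuple[str, ...] | list[str]) -> list[str]:
    # Only iteration is needed, so any iterable works at runtime...
    return [item.upper() for item in items]


lookups = ("author", "books")
# ...but reversed() yields a `reversed[str]` iterator, not a tuple or list,
# so a type checker flags the call even though it succeeds:
result = takes_sequence(reversed(lookups))  # type: ignore[arg-type]
assert result == ["BOOKS", "AUTHOR"]
```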
```diff
@@ -2019,7 +2161,8 @@ def prefetch_related_objects(model_instances, *related_lookups):
                 ):
                     done_queries[prefetch_to] = obj_list
                     new_lookups = normalize_prefetch_lookups(
-                        reversed(additional_lookups),
+                        reversed(additional_lookups),  # type: ignore[arg-type]
+                        prefetch_to,
                     )
                     auto_lookups.update(new_lookups)
                     all_lookups.extend(new_lookups)
@@ -2037,7 +2180,7 @@ def prefetch_related_objects(model_instances, *related_lookups):
                 if through_attr in getattr(obj, "_prefetched_objects_cache", ()):
                     # If related objects have been prefetched, use the
                     # cache rather than the object's through_attr.
-                    new_obj = list(obj._prefetched_objects_cache.get(through_attr))
+                    new_obj = list(obj._prefetched_objects_cache.get(through_attr))  # type: ignore[arg-type]
                 else:
                     try:
                         new_obj = getattr(obj, through_attr)
@@ -2055,7 +2198,9 @@ def prefetch_related_objects(model_instances, *related_lookups):
         obj_list = new_obj_list
 
 
-def get_prefetcher(
+def get_prefetcher(
+    instance: Model, through_attr: str, to_attr: str
+) -> tuple[Any, Any, bool, Callable[[Model], bool]]:
     """
     For the attribute 'through_attr' on the given instance, find
     an object that has a get_prefetch_queryset().
@@ -2067,11 +2212,11 @@ def get_prefetcher(instance, through_attr, to_attr):
     the attribute has already been fetched for that instance)
     """
 
-    def has_to_attr_attribute(instance):
+    def has_to_attr_attribute(instance: Model) -> bool:
         return hasattr(instance, to_attr)
 
     prefetcher = None
-    is_fetched = has_to_attr_attribute
+    is_fetched: Callable[[Model], bool] = has_to_attr_attribute
 
     # For singly related objects, we have to avoid getting the attribute
     # from the object, as this will trigger the query. So we first try
```
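Annotating `is_fetched` as `Callable[[Model], bool]` up front is what lets the branches below rebind it to differently named predicates: without the explicit annotation, a checker infers the specific type of the first function and rejects the reassignment. A minimal runnable illustration:

```python
from collections.abc import Callable


def has_attribute(obj: object) -> bool:
    return hasattr(obj, "value")


def in_cache(obj: object) -> bool:
    return "value" in getattr(obj, "__dict__", {})


# The broad annotation makes later rebinding type-check cleanly.
is_fetched: Callable[[object], bool] = has_attribute
is_fetched = in_cache  # fine: both functions match the annotated signature
assert is_fetched(object()) is False
```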
```diff
@@ -2101,20 +2246,22 @@ def get_prefetcher(instance, through_attr, to_attr):
                 getattr(instance.__class__, to_attr, None), cached_property
             ):
 
-                def has_cached_property(instance):
+                def has_cached_property(instance: Model) -> bool:
                     return to_attr in instance.__dict__
 
                 is_fetched = has_cached_property
         else:
 
-            def in_prefetched_cache(instance):
-                return through_attr in instance._prefetched_objects_cache
+            def in_prefetched_cache(instance: Model) -> bool:
+                return through_attr in instance._prefetched_objects_cache  # type: ignore[attr-defined]
 
             is_fetched = in_prefetched_cache
     return prefetcher, rel_obj_descriptor, attr_found, is_fetched
 
 
-def prefetch_one_level(
+def prefetch_one_level(
+    instances: list[Model], prefetcher: Any, lookup: Prefetch, level: int
+) -> tuple[list[Model], list[Prefetch]]:
     """
     Helper function for prefetch_related_objects().
 
@@ -2175,7 +2322,7 @@ def prefetch_one_level(instances, prefetcher, lookup, level):
     # of prefetch_related), so what applies to first object applies to all.
     model = instances[0].__class__
     try:
-        model.
+        model._model_meta.get_field(to_attr)
     except FieldDoesNotExist:
         pass
     else:
```
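The try/except probes whether `to_attr` would shadow a concrete model field: `get_field()` raising `FieldDoesNotExist` is the safe outcome, while a successful lookup means the prefetch result would clobber a real field. A reduced sketch of that probe with a stand-in meta object:

```python
class FieldDoesNotExist(Exception):
    """Stand-in for the FieldDoesNotExist plain.models raises."""


class MetaSketch:
    def __init__(self, field_names: set[str]) -> None:
        self._field_names = field_names

    def get_field(self, name: str) -> str:
        if name not in self._field_names:
            raise FieldDoesNotExist(name)
        return name


def to_attr_shadows_field(meta: MetaSketch, to_attr: str) -> bool:
    try:
        meta.get_field(to_attr)
    except FieldDoesNotExist:
        return False  # safe: to_attr will not clobber a concrete field
    return True


meta = MetaSketch({"name", "author"})
assert to_attr_shadows_field(meta, "author") is True
assert to_attr_shadows_field(meta, "published_books") is False
```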
```diff
@@ -2202,7 +2349,7 @@ def prefetch_one_level(instances, prefetcher, lookup, level):
             # No to_attr has been given for this prefetch operation and the
             # cache_name does not point to a descriptor. Store the value of
             # the field in the object's field cache.
-            obj._state.fields_cache[cache_name] = val
+            obj._state.fields_cache[cache_name] = val  # type: ignore[index]
         else:
             if as_attr:
                 setattr(obj, to_attr, vals)
@@ -2215,7 +2362,7 @@ def prefetch_one_level(instances, prefetcher, lookup, level):
         # We need a QuerySet instance to cache the prefetched values
         if isinstance(queryset, QuerySet):
             # It's already a QuerySet, create a new instance
-            qs = queryset.__class__(
+            qs = queryset.__class__.from_model(queryset.model)
         else:
             # It's a related manager, get its QuerySet
             # The manager's query property returns a properly filtered QuerySet
@@ -2244,7 +2391,7 @@ class RelatedPopulator:
     model instance.
     """
 
-    def __init__(self, klass_info, select):
+    def __init__(self, klass_info: dict[str, Any], select: list[Any]):
         # Pre-compute needed attributes. The attributes are:
         # - model_cls: the possibly deferred model class to instantiate
         # - either:
@@ -2283,7 +2430,7 @@ class RelatedPopulator:
         self.local_setter = klass_info["local_setter"]
         self.remote_setter = klass_info["remote_setter"]
 
-    def populate(self, row, from_obj):
+    def populate(self, row: tuple[Any, ...], from_obj: Model) -> None:
         if self.reorder_for_init:
             obj_data = self.reorder_for_init(row)
         else:
@@ -2299,7 +2446,9 @@ class RelatedPopulator:
         self.remote_setter(obj, from_obj)
 
 
-def get_related_populators(
+def get_related_populators(
+    klass_info: dict[str, Any], select: list[Any]
+) -> list[RelatedPopulator]:
     iterators = []
     related_klass_infos = klass_info.get("related_klass_infos", [])
     for rel_klass_info in related_klass_infos:
```