plain.models 0.49.1__py3-none-any.whl → 0.50.0__py3-none-any.whl
This diff compares the contents of package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the registry.
- plain/models/CHANGELOG.md +23 -0
- plain/models/aggregates.py +42 -19
- plain/models/backends/base/base.py +125 -105
- plain/models/backends/base/client.py +11 -3
- plain/models/backends/base/creation.py +22 -12
- plain/models/backends/base/features.py +10 -4
- plain/models/backends/base/introspection.py +29 -16
- plain/models/backends/base/operations.py +187 -91
- plain/models/backends/base/schema.py +267 -165
- plain/models/backends/base/validation.py +12 -3
- plain/models/backends/ddl_references.py +85 -43
- plain/models/backends/mysql/base.py +29 -26
- plain/models/backends/mysql/client.py +7 -2
- plain/models/backends/mysql/compiler.py +12 -3
- plain/models/backends/mysql/creation.py +5 -2
- plain/models/backends/mysql/features.py +24 -22
- plain/models/backends/mysql/introspection.py +22 -13
- plain/models/backends/mysql/operations.py +106 -39
- plain/models/backends/mysql/schema.py +48 -24
- plain/models/backends/mysql/validation.py +13 -6
- plain/models/backends/postgresql/base.py +41 -34
- plain/models/backends/postgresql/client.py +7 -2
- plain/models/backends/postgresql/creation.py +10 -5
- plain/models/backends/postgresql/introspection.py +15 -8
- plain/models/backends/postgresql/operations.py +109 -42
- plain/models/backends/postgresql/schema.py +85 -46
- plain/models/backends/sqlite3/_functions.py +151 -115
- plain/models/backends/sqlite3/base.py +37 -23
- plain/models/backends/sqlite3/client.py +7 -1
- plain/models/backends/sqlite3/creation.py +9 -5
- plain/models/backends/sqlite3/features.py +5 -3
- plain/models/backends/sqlite3/introspection.py +32 -16
- plain/models/backends/sqlite3/operations.py +125 -42
- plain/models/backends/sqlite3/schema.py +82 -58
- plain/models/backends/utils.py +52 -29
- plain/models/backups/cli.py +8 -6
- plain/models/backups/clients.py +16 -7
- plain/models/backups/core.py +24 -13
- plain/models/base.py +113 -74
- plain/models/cli.py +94 -63
- plain/models/config.py +1 -1
- plain/models/connections.py +23 -7
- plain/models/constraints.py +65 -47
- plain/models/database_url.py +1 -1
- plain/models/db.py +6 -2
- plain/models/deletion.py +66 -43
- plain/models/entrypoints.py +1 -1
- plain/models/enums.py +22 -11
- plain/models/exceptions.py +23 -8
- plain/models/expressions.py +440 -257
- plain/models/fields/__init__.py +253 -202
- plain/models/fields/json.py +120 -54
- plain/models/fields/mixins.py +12 -8
- plain/models/fields/related.py +284 -252
- plain/models/fields/related_descriptors.py +34 -25
- plain/models/fields/related_lookups.py +23 -11
- plain/models/fields/related_managers.py +81 -47
- plain/models/fields/reverse_related.py +58 -55
- plain/models/forms.py +89 -63
- plain/models/functions/comparison.py +71 -18
- plain/models/functions/datetime.py +79 -29
- plain/models/functions/math.py +43 -10
- plain/models/functions/mixins.py +24 -7
- plain/models/functions/text.py +104 -25
- plain/models/functions/window.py +12 -6
- plain/models/indexes.py +52 -28
- plain/models/lookups.py +228 -153
- plain/models/migrations/autodetector.py +86 -43
- plain/models/migrations/exceptions.py +7 -3
- plain/models/migrations/executor.py +33 -7
- plain/models/migrations/graph.py +79 -50
- plain/models/migrations/loader.py +45 -22
- plain/models/migrations/migration.py +23 -18
- plain/models/migrations/operations/base.py +37 -19
- plain/models/migrations/operations/fields.py +89 -42
- plain/models/migrations/operations/models.py +245 -143
- plain/models/migrations/operations/special.py +82 -25
- plain/models/migrations/optimizer.py +7 -2
- plain/models/migrations/questioner.py +58 -31
- plain/models/migrations/recorder.py +18 -11
- plain/models/migrations/serializer.py +50 -39
- plain/models/migrations/state.py +220 -133
- plain/models/migrations/utils.py +29 -13
- plain/models/migrations/writer.py +17 -14
- plain/models/options.py +63 -56
- plain/models/otel.py +16 -6
- plain/models/preflight.py +35 -12
- plain/models/query.py +323 -228
- plain/models/query_utils.py +93 -58
- plain/models/registry.py +34 -16
- plain/models/sql/compiler.py +146 -97
- plain/models/sql/datastructures.py +38 -25
- plain/models/sql/query.py +255 -169
- plain/models/sql/subqueries.py +32 -21
- plain/models/sql/where.py +54 -29
- plain/models/test/pytest.py +15 -11
- plain/models/test/utils.py +4 -2
- plain/models/transaction.py +20 -7
- plain/models/utils.py +13 -5
- {plain_models-0.49.1.dist-info → plain_models-0.50.0.dist-info}/METADATA +1 -1
- plain_models-0.50.0.dist-info/RECORD +122 -0
- plain_models-0.49.1.dist-info/RECORD +0 -122
- {plain_models-0.49.1.dist-info → plain_models-0.50.0.dist-info}/WHEEL +0 -0
- {plain_models-0.49.1.dist-info → plain_models-0.50.0.dist-info}/entry_points.txt +0 -0
- {plain_models-0.49.1.dist-info → plain_models-0.50.0.dist-info}/licenses/LICENSE +0 -0
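
Nearly every file in the list above grows because the headline change in 0.50.0 is threading type annotations through the ORM; the representative diff below (plain/models/query.py) makes QuerySet generic over its model class. A minimal sketch of what that enables for a static type checker (the Article model is a hypothetical example, not part of the package):

from plain.models import Model
from plain.models.query import QuerySet


class Article(Model):
    ...


def newest(qs: QuerySet[Article]) -> Article | None:
    # QuerySet is now Generic[T] and first() is annotated T | None, so a
    # checker infers Article | None here without a cast.
    return qs.order_by("-id").first()
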
plain/models/query.py
CHANGED
@@ -2,11 +2,15 @@
 The main QuerySet implementation. This provides the public API for the ORM.
 """
 
+from __future__ import annotations
+
 import copy
 import operator
 import warnings
+from collections.abc import Callable, Iterator
 from functools import cached_property
 from itertools import chain, islice
+from typing import TYPE_CHECKING, Any, Generic, TypeVar
 
 import plain.runtime
 from plain.exceptions import ValidationError
@@ -43,6 +47,14 @@ from plain.models.utils import (
 from plain.utils import timezone
 from plain.utils.functional import partition
 
+if TYPE_CHECKING:
+    from datetime import tzinfo
+
+    from plain.models import Model
+
+# Type variable for QuerySet generic
+T = TypeVar("T", bound="Model")
+
 # The maximum number of results to fetch in a get() query.
 MAX_GET_RESULTS = 21
 
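
The TYPE_CHECKING block in the hunk above is the standard way to make Model available to annotations without importing it at runtime, where it could create an import cycle. A self-contained sketch of the idiom:

from __future__ import annotations

from typing import TYPE_CHECKING, TypeVar

if TYPE_CHECKING:
    # Evaluated only by the type checker; no runtime import, so no cycle.
    from plain.models import Model

# The string bound is a forward reference the checker resolves lazily.
T = TypeVar("T", bound="Model")


def first_or_none(items: list[T]) -> T | None:
    return items[0] if items else None
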
@@ -52,7 +64,10 @@ REPR_OUTPUT_SIZE = 20
 
 class BaseIterable:
     def __init__(
-        self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
+        self,
+        queryset: QuerySet[Any],
+        chunked_fetch: bool = False,
+        chunk_size: int = GET_ITERATOR_CHUNK_SIZE,
     ):
         self.queryset = queryset
         self.chunked_fetch = chunked_fetch
@@ -62,7 +77,7 @@ class BaseIterable:
 class ModelIterable(BaseIterable):
     """Iterable that yields a model instance for each row."""
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[Model]:  # type: ignore[misc]
         queryset = self.queryset
         compiler = queryset.sql_query.get_compiler()
         # Execute the query. This will also fill compiler.select, klass_info,
@@ -119,7 +134,7 @@ class RawModelIterable(BaseIterable):
     Iterable that yields a model instance for each row from a raw queryset.
     """
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[Model]:  # type: ignore[misc]
         # Cache some things for performance reasons outside the loop.
         query = self.queryset.sql_query
         compiler = db_connection.ops.compiler("SQLCompiler")(query, db_connection)
@@ -159,7 +174,7 @@ class ValuesIterable(BaseIterable):
     Iterable returned by QuerySet.values() that yields a dict for each row.
     """
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[dict[str, Any]]:  # type: ignore[misc]
         queryset = self.queryset
         query = queryset.sql_query
         compiler = query.get_compiler()
@@ -183,7 +198,7 @@ class ValuesListIterable(BaseIterable):
     for each row.
     """
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[tuple[Any, ...]]:  # type: ignore[misc]
         queryset = self.queryset
         query = queryset.sql_query
         compiler = query.get_compiler()
@@ -222,7 +237,7 @@ class NamedValuesListIterable(ValuesListIterable):
     namedtuple for each row.
     """
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[tuple[Any, ...]]:  # type: ignore[misc]
         queryset = self.queryset
         if queryset._fields:
             names = queryset._fields
@@ -245,7 +260,7 @@ class FlatValuesListIterable(BaseIterable):
     values.
     """
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[Any]:  # type: ignore[misc]
         queryset = self.queryset
         compiler = queryset.sql_query.get_compiler()
         for row in compiler.results_iter(
@@ -254,25 +269,29 @@ class FlatValuesListIterable(BaseIterable):
             yield row[0]
 
 
-class QuerySet:
+class QuerySet(Generic[T]):
     """Represent a lazy database lookup for a set of objects."""
 
-    def __init__(self, *, model, query=None):
+    def __init__(self, *, model: type[T], query: sql.Query | None = None):
         self.model = model
         self._query = query or sql.Query(self.model)
-        self._result_cache = None
+        self._result_cache: list[T] | None = None
         self._sticky_filter = False
         self._for_write = False
-        self._prefetch_related_lookups = ()
+        self._prefetch_related_lookups: tuple[Any, ...] = ()
         self._prefetch_done = False
-        self._known_related_objects = {}  # {rel_field: {id: rel_obj}}
-        self._iterable_class = ModelIterable
-        self._fields = None
+        self._known_related_objects: dict[
+            Any, dict[Any, Any]
+        ] = {}  # {rel_field: {id: rel_obj}}
+        self._iterable_class: type[BaseIterable] = ModelIterable
+        self._fields: tuple[str, ...] | None = None
         self._defer_next_filter = False
-        self._deferred_filter = None
+        self._deferred_filter: tuple[bool, tuple[Any, ...], dict[str, Any]] | None = (
+            None
+        )
 
     @property
-    def sql_query(self):
+    def sql_query(self) -> sql.Query:
         if self._deferred_filter:
             negate, args, kwargs = self._deferred_filter
             self._filter_or_exclude_inplace(negate, args, kwargs)
@@ -280,7 +299,7 @@ class QuerySet:
         return self._query
 
     @sql_query.setter
-    def sql_query(self, value):
+    def sql_query(self, value: sql.Query) -> None:
         if value.values_select:
             self._iterable_class = ValuesIterable
         self._query = value
@@ -289,9 +308,9 @@ class QuerySet:
     # PYTHON MAGIC METHODS #
     ########################
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict[int, Any]) -> QuerySet[T]:
         """Don't populate the QuerySet's cache."""
-        obj = self.__class__()
+        obj = self.__class__(model=self.model)
         for k, v in self.__dict__.items():
             if k == "_result_cache":
                 obj.__dict__[k] = None
@@ -299,12 +318,12 @@ class QuerySet:
                 obj.__dict__[k] = copy.deepcopy(v, memo)
         return obj
 
-    def __getstate__(self):
+    def __getstate__(self) -> dict[str, Any]:
         # Force the cache to be fully populated.
         self._fetch_all()
         return {**self.__dict__, PLAIN_VERSION_PICKLE_KEY: plain.runtime.__version__}
 
-    def __setstate__(self, state):
+    def __setstate__(self, state: dict[str, Any]) -> None:
         pickled_version = state.get(PLAIN_VERSION_PICKLE_KEY)
         if pickled_version:
             if pickled_version != plain.runtime.__version__:
@@ -322,17 +341,17 @@ class QuerySet:
             )
         self.__dict__.update(state)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         data = list(self[: REPR_OUTPUT_SIZE + 1])
         if len(data) > REPR_OUTPUT_SIZE:
             data[-1] = "...(remaining elements truncated)..."
         return f"<{self.__class__.__name__} {data!r}>"
 
-    def __len__(self):
+    def __len__(self) -> int:
         self._fetch_all()
-        return len(self._result_cache)
+        return len(self._result_cache)  # type: ignore[arg-type]
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[T]:
         """
         The queryset iterator protocol uses three nested iterators in the
         default case:
@@ -348,13 +367,13 @@ class QuerySet:
           - Responsible for turning the rows into model objects.
         """
         self._fetch_all()
-        return iter(self._result_cache)
+        return iter(self._result_cache)  # type: ignore[arg-type]
 
-    def __bool__(self):
+    def __bool__(self) -> bool:
         self._fetch_all()
         return bool(self._result_cache)
 
-    def __getitem__(self, k):
+    def __getitem__(self, k: int | slice) -> T | QuerySet[T]:
         """Retrieve an item or slice from the set of results."""
         if not isinstance(k, int | slice):
             raise TypeError(
@@ -383,17 +402,17 @@ class QuerySet:
             else:
                 stop = None
             qs.sql_query.set_limits(start, stop)
-            return list(qs)[:: k.step] if k.step else qs
+            return list(qs)[:: k.step] if k.step else qs  # type: ignore[return-value]
 
         qs = self._chain()
-        qs.sql_query.set_limits(k, k + 1)
+        qs.sql_query.set_limits(k, k + 1)  # type: ignore[unsupported-operator]
         qs._fetch_all()
         return qs._result_cache[0]
 
-    def __class_getitem__(cls, *args, **kwargs):
+    def __class_getitem__(cls, *args: Any, **kwargs: Any) -> type[QuerySet[Any]]:
         return cls
 
-    def __and__(self, other):
+    def __and__(self, other: QuerySet[T]) -> QuerySet[T]:
         self._check_operator_queryset(other, "&")
         self._merge_sanity_check(other)
         if isinstance(other, EmptyQuerySet):
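
The __class_getitem__ override in the hunk above keeps subscription a runtime no-op: QuerySet[Article] evaluates to QuerySet itself, while the Generic[T] base carries the parameter for static checkers. The same pattern in isolation:

from __future__ import annotations

from typing import Any, Generic, TypeVar

T = TypeVar("T")


class Container(Generic[T]):
    def __class_getitem__(cls, *args: Any, **kwargs: Any) -> type[Container[Any]]:
        # Return the class unchanged; only static checkers interpret the
        # type parameter, so subscription costs nothing at runtime.
        return cls


assert Container[int] is Container
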
@@ -405,7 +424,7 @@ class QuerySet:
         combined.sql_query.combine(other.sql_query, sql.AND)
         return combined
 
-    def __or__(self, other):
+    def __or__(self, other: QuerySet[T]) -> QuerySet[T]:
         self._check_operator_queryset(other, "|")
         self._merge_sanity_check(other)
         if isinstance(self, EmptyQuerySet):
@@ -415,16 +434,16 @@ class QuerySet:
         query = (
             self
             if self.sql_query.can_filter()
-            else self.model._meta.base_queryset.filter(id__in=self.values("id"))
+            else self.model._meta.base_queryset.filter(id__in=self.values("id"))  # type: ignore[attr-defined]
         )
         combined = query._chain()
         combined._merge_known_related_objects(other)
         if not other.sql_query.can_filter():
-            other = other.model._meta.base_queryset.filter(id__in=other.values("id"))
+            other = other.model._meta.base_queryset.filter(id__in=other.values("id"))  # type: ignore[attr-defined]
         combined.sql_query.combine(other.sql_query, sql.OR)
         return combined
 
-    def __xor__(self, other):
+    def __xor__(self, other: QuerySet[T]) -> QuerySet[T]:
         self._check_operator_queryset(other, "^")
         self._merge_sanity_check(other)
         if isinstance(self, EmptyQuerySet):
@@ -434,12 +453,12 @@ class QuerySet:
         query = (
             self
             if self.sql_query.can_filter()
-            else self.model._meta.base_queryset.filter(id__in=self.values("id"))
+            else self.model._meta.base_queryset.filter(id__in=self.values("id"))  # type: ignore[attr-defined]
         )
         combined = query._chain()
         combined._merge_known_related_objects(other)
         if not other.sql_query.can_filter():
-            other = other.model._meta.base_queryset.filter(id__in=other.values("id"))
+            other = other.model._meta.base_queryset.filter(id__in=other.values("id"))  # type: ignore[attr-defined]
         combined.sql_query.combine(other.sql_query, sql.XOR)
         return combined
 
@@ -447,7 +466,7 @@ class QuerySet:
     # METHODS THAT DO DATABASE QUERIES #
     ####################################
 
-    def _iterator(self, use_chunked_fetch, chunk_size):
+    def _iterator(self, use_chunked_fetch: bool, chunk_size: int | None) -> Iterator[T]:
         iterable = self._iterable_class(
             self,
             chunked_fetch=use_chunked_fetch,
@@ -462,7 +481,7 @@ class QuerySet:
             prefetch_related_objects(results, *self._prefetch_related_lookups)
         yield from results
 
-    def iterator(self, chunk_size=None):
+    def iterator(self, chunk_size: int | None = None) -> Iterator[T]:
         """
         An iterator over the results from applying this QuerySet to the
         database. chunk_size must be provided for QuerySets that prefetch
@@ -481,7 +500,7 @@ class QuerySet:
             )
         return self._iterator(use_chunked_fetch, chunk_size)
 
-    def aggregate(self, *args, **kwargs):
+    def aggregate(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
         """
         Return a dictionary containing the calculations (aggregation)
         over the current queryset.
@@ -506,7 +525,7 @@ class QuerySet:
 
         return self.sql_query.chain().get_aggregation(kwargs)
 
-    def count(self):
+    def count(self) -> int:
         """
         Perform a SELECT COUNT() and return the number of records as an
         integer.
@@ -519,7 +538,7 @@ class QuerySet:
 
         return self.sql_query.get_count()
 
-    def get(self, *args, **kwargs):
+    def get(self, *args: Any, **kwargs: Any) -> T:
         """
         Perform the query and return a single object matching the given
         keyword arguments.
@@ -555,37 +574,40 @@ class QuerySet:
             )
         )
 
-    def get_or_none(self, *args, **kwargs):
+    def get_or_none(self, *args: Any, **kwargs: Any) -> T | None:
         """
         Perform the query and return a single object matching the given
         keyword arguments, or None if no object is found.
         """
         try:
             return self.get(*args, **kwargs)
-        except self.model.DoesNotExist:
+        except self.model.DoesNotExist:  # type: ignore[attr-defined]
             return None
 
-    def create(self, **kwargs):
+    def create(self, **kwargs: Any) -> T:
         """
         Create a new object with the given kwargs, saving it to the database
         and returning the created object.
         """
-        obj = self.model(**kwargs)
+        obj = self.model(**kwargs)  # type: ignore[misc]
         self._for_write = True
-        obj.save(force_insert=True)
+        obj.save(force_insert=True)  # type: ignore[attr-defined]
         return obj
 
-    def _prepare_for_bulk_create(self, objs):
-        id_field = self.model._meta.get_field("id")
+    def _prepare_for_bulk_create(self, objs: list[T]) -> None:
+        id_field = self.model._meta.get_field("id")  # type: ignore[attr-defined]
        for obj in objs:
-            if obj.id is None:
+            if obj.id is None:  # type: ignore[attr-defined]
                 # Populate new primary key values.
-                obj.id = id_field.get_id_value_on_save(obj)
-            obj._prepare_related_fields_for_save(operation_name="bulk_create")
+                obj.id = id_field.get_id_value_on_save(obj)  # type: ignore[attr-defined]
+            obj._prepare_related_fields_for_save(operation_name="bulk_create")  # type: ignore[attr-defined]
 
     def _check_bulk_create_options(
-        self, update_conflicts, update_fields, unique_fields
-    ):
+        self,
+        update_conflicts: bool,
+        update_fields: list[Field] | None,
+        unique_fields: list[Field] | None,
+    ) -> OnConflict | None:
         db_features = db_connection.features
         if update_conflicts:
             if not db_features.supports_update_conflicts:
@@ -628,12 +650,12 @@ class QuerySet:
 
     def bulk_create(
         self,
-        objs,
-        batch_size=None,
-        update_conflicts=False,
-        update_fields=None,
-        unique_fields=None,
-    ):
+        objs: list[T],
+        batch_size: int | None = None,
+        update_conflicts: bool = False,
+        update_fields: list[str] | None = None,
+        unique_fields: list[str] | None = None,
+    ) -> list[T]:
         """
         Insert each of the instances into the database. Do *not* call
         save() on each of the instances, and do not set the primary key attribute if it is an
@@ -711,7 +733,9 @@ class QuerySet:
 
         return objs
 
-    def bulk_update(self, objs, fields, batch_size=None):
+    def bulk_update(
+        self, objs: list[T], fields: list[str], batch_size: int | None = None
+    ) -> int:
         """
         Update the given fields in each of the given objects in the database.
         """
@@ -719,42 +743,47 @@ class QuerySet:
             raise ValueError("Batch size must be a positive integer.")
         if not fields:
             raise ValueError("Field names must be given to bulk_update().")
-        objs = tuple(objs)
-        if any(obj.id is None for obj in objs):
+        objs_tuple = tuple(objs)
+        if any(obj.id is None for obj in objs_tuple):  # type: ignore[attr-defined]
             raise ValueError("All bulk_update() objects must have a primary key set.")
-        fields = [self.model._meta.get_field(name) for name in fields]
-        if any(not f.concrete or f.many_to_many for f in fields):
+        fields_list = [self.model._meta.get_field(name) for name in fields]  # type: ignore[attr-defined]
+        if any(not f.concrete or f.many_to_many for f in fields_list):
             raise ValueError("bulk_update() can only be used with concrete fields.")
-        if any(f.primary_key for f in fields):
+        if any(f.primary_key for f in fields_list):
             raise ValueError("bulk_update() cannot be used with primary key fields.")
-        if not objs:
+        if not objs_tuple:
             return 0
-        for obj in objs:
-            obj._prepare_related_fields_for_save(
-                operation_name="bulk_update", fields=fields
+        for obj in objs_tuple:
+            obj._prepare_related_fields_for_save(  # type: ignore[attr-defined]
+                operation_name="bulk_update", fields=fields_list
             )
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
         self._for_write = True
-        max_batch_size = db_connection.ops.bulk_batch_size(["id", "id"] + fields, objs)
+        max_batch_size = db_connection.ops.bulk_batch_size(
+            ["id", "id"] + fields_list, objs_tuple
+        )
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         requires_casting = db_connection.features.requires_casted_case_in_updates
-        batches = (objs[i : i + batch_size] for i in range(0, len(objs), batch_size))
+        batches = (
+            objs_tuple[i : i + batch_size]
+            for i in range(0, len(objs_tuple), batch_size)
+        )
         updates = []
         for batch_objs in batches:
             update_kwargs = {}
-            for field in fields:
+            for field in fields_list:
                 when_statements = []
                 for obj in batch_objs:
                     attr = getattr(obj, field.attname)
                     if not hasattr(attr, "resolve_expression"):
                         attr = Value(attr, output_field=field)
-                    when_statements.append(When(id=obj.id, then=attr))
+                    when_statements.append(When(id=obj.id, then=attr))  # type: ignore[attr-defined]
                 case_statement = Case(*when_statements, output_field=field)
                 if requires_casting:
                     case_statement = Cast(case_statement, output_field=field)
                 update_kwargs[field.attname] = case_statement
-            updates.append(([obj.id for obj in batch_objs], update_kwargs))
+            updates.append(([obj.id for obj in batch_objs], update_kwargs))  # type: ignore[attr-defined,misc]
         rows_updated = 0
         queryset = self._chain()
         with transaction.atomic(savepoint=False):
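
Several rewrites in bulk_update() above exist only to give each name a single stable type: re-binding objs from an iterable to a tuple, or fields from field names to Field objects, would change a variable's type mid-function, which strict checkers reject. The new objs_tuple and fields_list names sidestep that. A condensed sketch of the idiom (the function and names are hypothetical):

from collections.abc import Iterable


def batched(values: Iterable[int], size: int) -> list[tuple[int, ...]]:
    # Materialize once under a new name; values_tuple is always a tuple,
    # so len() and slicing below type-check cleanly.
    values_tuple = tuple(values)
    return [values_tuple[i : i + size] for i in range(0, len(values_tuple), size)]
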
@@ -762,7 +791,9 @@ class QuerySet:
                 rows_updated += queryset.filter(id__in=ids).update(**update_kwargs)
         return rows_updated
 
-    def get_or_create(self, defaults=None, **kwargs):
+    def get_or_create(
+        self, defaults: dict[str, Any] | None = None, **kwargs: Any
+    ) -> tuple[T, bool]:
         """
         Look up an object with the given kwargs, creating one if necessary.
         Return a tuple of (object, created), where created is a boolean
@@ -773,7 +804,7 @@ class QuerySet:
         self._for_write = True
         try:
             return self.get(**kwargs), False
-        except self.model.DoesNotExist:
+        except self.model.DoesNotExist:  # type: ignore[attr-defined]
             params = self._extract_model_params(defaults, **kwargs)
             # Try to create an object using passed params.
             try:
@@ -790,11 +821,16 @@ class QuerySet:
                 # and return an existing object.
                 try:
                     return self.get(**kwargs), False
-                except self.model.DoesNotExist:
+                except self.model.DoesNotExist:  # type: ignore[attr-defined]
                     pass
                 raise
 
-    def update_or_create(self, defaults=None, create_defaults=None, **kwargs):
+    def update_or_create(
+        self,
+        defaults: dict[str, Any] | None = None,
+        create_defaults: dict[str, Any] | None = None,
+        **kwargs: Any,
+    ) -> tuple[T, bool]:
         """
         Look up an object with the given kwargs, updating one with defaults
         if it exists, otherwise create a new one. Optionally, an object can
@@ -820,26 +856,28 @@ class QuerySet:
             setattr(obj, k, v)
 
         update_fields = set(update_defaults)
-        concrete_field_names = self.model._meta._non_pk_concrete_field_names
+        concrete_field_names = self.model._meta._non_pk_concrete_field_names  # type: ignore[attr-defined]
         # update_fields does not support non-concrete fields.
         if concrete_field_names.issuperset(update_fields):
             # Add fields which are set on pre_save(), e.g. auto_now fields.
             # This is to maintain backward compatibility as these fields
             # are not updated unless explicitly specified in the
             # update_fields list.
-            for field in self.model._meta.local_concrete_fields:
+            for field in self.model._meta.local_concrete_fields:  # type: ignore[attr-defined]
                 if not (
                     field.primary_key or field.__class__.pre_save is Field.pre_save
                 ):
                     update_fields.add(field.name)
                     if field.name != field.attname:
                         update_fields.add(field.attname)
-            obj.save(update_fields=update_fields)
+            obj.save(update_fields=update_fields)  # type: ignore[attr-defined]
         else:
-            obj.save()
+            obj.save()  # type: ignore[attr-defined]
         return obj, False
 
-    def _extract_model_params(self, defaults, **kwargs):
+    def _extract_model_params(
+        self, defaults: dict[str, Any] | None, **kwargs: Any
+    ) -> dict[str, Any]:
         """
         Prepare `params` for creating a model instance based on the given
         kwargs; for use by get_or_create().
@@ -847,11 +885,11 @@ class QuerySet:
         defaults = defaults or {}
         params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
         params.update(defaults)
-        property_names = self.model._meta._property_names
+        property_names = self.model._meta._property_names  # type: ignore[attr-defined]
         invalid_params = []
         for param in params:
             try:
-                self.model._meta.get_field(param)
+                self.model._meta.get_field(param)  # type: ignore[attr-defined]
             except FieldDoesNotExist:
                 # It's okay to use a model's property if it has a setter.
                 if not (param in property_names and getattr(self.model, param).fset):
@@ -859,31 +897,35 @@ class QuerySet:
         if invalid_params:
             raise FieldError(
                 "Invalid field name(s) for model {}: '{}'.".format(
-                    self.model._meta.object_name,
+                    self.model._meta.object_name,  # type: ignore[attr-defined]
                     "', '".join(sorted(invalid_params)),
                 )
             )
         return params
 
-    def first(self):
+    def first(self) -> T | None:
         """Return the first object of a query or None if no match is found."""
         for obj in self[:1]:
             return obj
+        return None
 
-    def last(self):
+    def last(self) -> T | None:
         """Return the last object of a query or None if no match is found."""
         queryset = self.reverse()
         for obj in queryset[:1]:
             return obj
+        return None
 
-    def in_bulk(self, id_list=None, *, field_name="id"):
+    def in_bulk(
+        self, id_list: list[Any] | None = None, *, field_name: str = "id"
+    ) -> dict[Any, T]:
         """
         Return a dictionary mapping each of the given IDs to the object with
         that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
         """
         if self.sql_query.is_sliced:
             raise TypeError("Cannot use 'limit' or 'offset' with in_bulk().")
-        opts = self.model._meta
+        opts = self.model._meta  # type: ignore[attr-defined]
         unique_fields = [
             constraint.fields[0]
             for constraint in opts.total_unique_constraints
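
The return None lines added to first() and last() above matter once the methods declare a T | None return type: the for loop over self[:1] can fall through without returning, and strict checkers flag the implicit None. The same shape in isolation:

def first_of(items: list[int]) -> int | None:
    for item in items[:1]:
        return item
    # Explicit fall-through; an implicit None fails strict return checking.
    return None
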
@@ -903,21 +945,21 @@ class QuerySet:
                 return {}
             filter_key = f"{field_name}__in"
             batch_size = db_connection.features.max_query_params
-            id_list = tuple(id_list)
+            id_list_tuple = tuple(id_list)
             # If the database has a limit on the number of query parameters
             # (e.g. SQLite), retrieve objects in batches if necessary.
-            if batch_size and batch_size < len(id_list):
-                qs = ()
-                for offset in range(0, len(id_list), batch_size):
-                    batch = id_list[offset : offset + batch_size]
+            if batch_size and batch_size < len(id_list_tuple):
+                qs: tuple[T, ...] = ()
+                for offset in range(0, len(id_list_tuple), batch_size):
+                    batch = id_list_tuple[offset : offset + batch_size]
                     qs += tuple(self.filter(**{filter_key: batch}))
             else:
-                qs = self.filter(**{filter_key: id_list})
+                qs = self.filter(**{filter_key: id_list_tuple})
         else:
             qs = self._chain()
         return {getattr(obj, field_name): obj for obj in qs}
 
-    def delete(self):
+    def delete(self) -> tuple[int, dict[str, int]]:
         """Delete the records in the current QuerySet."""
         self._not_support_combined_queries("delete")
         if self.sql_query.is_sliced:
@@ -949,7 +991,7 @@ class QuerySet:
         self._result_cache = None
         return deleted, _rows_count
 
-    def _raw_delete(self):
+    def _raw_delete(self) -> int:
         """
         Delete objects found from the given queryset in single direct SQL
         query. No signals are sent and there is no protection for cascades.
@@ -962,7 +1004,7 @@ class QuerySet:
                 return cursor.rowcount
         return 0
 
-    def update(self, **kwargs):
+    def update(self, **kwargs: Any) -> int:
         """
         Update all elements in the current QuerySet, setting all the given
         fields to the appropriate values.
@@ -1001,7 +1043,7 @@ class QuerySet:
         self._result_cache = None
         return rows
 
-    def _update(self, values):
+    def _update(self, values: list[tuple[Field, Any, Any]]) -> int:
         """
         A version of update() that accepts field objects instead of field names.
         Used primarily for model saving and not intended for use by general
@@ -1017,7 +1059,7 @@ class QuerySet:
         self._result_cache = None
         return query.get_compiler().execute_sql(CURSOR)
 
-    def exists(self):
+    def exists(self) -> bool:
         """
         Return True if the QuerySet would have any results, False otherwise.
         """
@@ -1025,7 +1067,7 @@ class QuerySet:
             return self.sql_query.has_results()
         return bool(self._result_cache)
 
-    def contains(self, obj):
+    def contains(self, obj: T) -> bool:
         """
         Return True if the QuerySet contains the provided obj,
         False otherwise.
@@ -1040,18 +1082,18 @@ class QuerySet:
                 return False
         except AttributeError:
             raise TypeError("'obj' must be a model instance.")
-        if obj.id is None:
+        if obj.id is None:  # type: ignore[attr-defined]
             raise ValueError("QuerySet.contains() cannot be used on unsaved objects.")
         if self._result_cache is not None:
             return obj in self._result_cache
-        return self.filter(id=obj.id).exists()
+        return self.filter(id=obj.id).exists()  # type: ignore[attr-defined]
 
-    def _prefetch_related_objects(self):
+    def _prefetch_related_objects(self) -> None:
         # This method can only be called once the result cache has been filled.
         prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
         self._prefetch_done = True
 
-    def explain(self, *, format=None, **options):
+    def explain(self, *, format: str | None = None, **options: Any) -> str:
         """
         Runs an EXPLAIN on the SQL query this QuerySet would perform, and
         returns the results.
@@ -1062,7 +1104,12 @@ class QuerySet:
     # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
     ##################################################
 
-    def raw(self, raw_query, params=(), translations=None):
+    def raw(
+        self,
+        raw_query: str,
+        params: tuple[Any, ...] = (),
+        translations: dict[str, str] | None = None,
+    ) -> RawQuerySet:
         qs = RawQuerySet(
             raw_query,
             model=self.model,
@@ -1072,21 +1119,23 @@ class QuerySet:
         qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
         return qs
 
-    def _values(self, *fields, **expressions):
+    def _values(self, *fields: str, **expressions: Any) -> QuerySet[Any]:
         clone = self._chain()
         if expressions:
             clone = clone.annotate(**expressions)
-        clone._fields = fields
+        clone._fields = fields  # type: ignore[assignment]
         clone.sql_query.set_values(fields)
         return clone
 
-    def values(self, *fields, **expressions):
+    def values(self, *fields: str, **expressions: Any) -> QuerySet[Any]:
         fields += tuple(expressions)
         clone = self._values(*fields, **expressions)
         clone._iterable_class = ValuesIterable
         return clone
 
-    def values_list(self, *fields, flat=False, named=False):
+    def values_list(
+        self, *fields: str, flat: bool = False, named: bool = False
+    ) -> QuerySet[Any]:
         if flat and named:
             raise TypeError("'flat' and 'named' can't be used together.")
         if flat and len(fields) > 1:
@@ -1124,7 +1173,7 @@ class QuerySet:
         )
         return clone
 
-    def dates(self, field_name, kind, order="ASC"):
+    def dates(self, field_name: str, kind: str, order: str = "ASC") -> QuerySet[Any]:
         """
         Return a list of date objects representing all available dates for
         the given field_name, scoped to 'kind'.
@@ -1144,7 +1193,13 @@ class QuerySet:
             .order_by(("-" if order == "DESC" else "") + "datefield")
         )
 
-    def datetimes(self, field_name, kind, order="ASC", tzinfo=None):
+    def datetimes(
+        self,
+        field_name: str,
+        kind: str,
+        order: str = "ASC",
+        tzinfo: tzinfo | None = None,
+    ) -> QuerySet[Any]:
         """
         Return a list of datetime objects representing all available
         datetimes for the given field_name, scoped to 'kind'.
@@ -1176,7 +1231,7 @@ class QuerySet:
             .order_by(("-" if order == "DESC" else "") + "datetimefield")
         )
 
-    def none(self):
+    def none(self) -> QuerySet[T]:
         """Return an empty QuerySet."""
         clone = self._chain()
         clone.sql_query.set_empty()
@@ -1186,14 +1241,14 @@ class QuerySet:
     # PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
     ##################################################################
 
-    def all(self):
+    def all(self) -> QuerySet[T]:
         """
         Return a new QuerySet that is a copy of the current one. This allows a
         QuerySet to proxy for a model queryset in some cases.
         """
         return self._chain()
 
-    def filter(self, *args, **kwargs):
+    def filter(self, *args: Any, **kwargs: Any) -> QuerySet[T]:
         """
         Return a new QuerySet instance with the args ANDed to the existing
         set.
@@ -1201,7 +1256,7 @@ class QuerySet:
         self._not_support_combined_queries("filter")
         return self._filter_or_exclude(False, args, kwargs)
 
-    def exclude(self, *args, **kwargs):
+    def exclude(self, *args: Any, **kwargs: Any) -> QuerySet[T]:
         """
         Return a new QuerySet instance with NOT (args) ANDed to the existing
         set.
@@ -1209,7 +1264,9 @@ class QuerySet:
         self._not_support_combined_queries("exclude")
         return self._filter_or_exclude(True, args, kwargs)
 
-    def _filter_or_exclude(self, negate, args, kwargs):
+    def _filter_or_exclude(
+        self, negate: bool, args: tuple[Any, ...], kwargs: dict[str, Any]
+    ) -> QuerySet[T]:
         if (args or kwargs) and self.sql_query.is_sliced:
             raise TypeError("Cannot filter a query once a slice has been taken.")
         clone = self._chain()
@@ -1220,13 +1277,15 @@ class QuerySet:
         clone._filter_or_exclude_inplace(negate, args, kwargs)
         return clone
 
-    def _filter_or_exclude_inplace(self, negate, args, kwargs):
+    def _filter_or_exclude_inplace(
+        self, negate: bool, args: tuple[Any, ...], kwargs: dict[str, Any]
+    ) -> None:
         if negate:
-            self._query.add_q(~Q(*args, **kwargs))
+            self._query.add_q(~Q(*args, **kwargs))  # type: ignore[unsupported-operator]
         else:
             self._query.add_q(Q(*args, **kwargs))
 
-    def complex_filter(self, filter_obj):
+    def complex_filter(self, filter_obj: Q | dict[str, Any]) -> QuerySet[T]:
         """
         Return a new QuerySet instance with filter_obj added to the filters.
 
@@ -1243,7 +1302,9 @@ class QuerySet:
         else:
             return self._filter_or_exclude(False, args=(), kwargs=filter_obj)
 
-    def _combinator_query(self, combinator, *other_qs, all=False):
+    def _combinator_query(
+        self, combinator: str, *other_qs: QuerySet[T], all: bool = False
+    ) -> QuerySet[T]:
         # Clone the query to inherit the select list and everything
         clone = self._chain()
         # Clear limits and ordering so they can be reapplied
@@ -1256,7 +1317,7 @@ class QuerySet:
         clone.sql_query.combinator_all = all
         return clone
 
-    def union(self, *other_qs, all=False):
+    def union(self, *other_qs: QuerySet[T], all: bool = False) -> QuerySet[T]:
         # If the query is an EmptyQuerySet, combine all nonempty querysets.
         if isinstance(self, EmptyQuerySet):
             qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)]
@@ -1267,7 +1328,7 @@ class QuerySet:
             return qs[0]._combinator_query("union", *qs[1:], all=all)
         return self._combinator_query("union", *other_qs, all=all)
 
-    def intersection(self, *other_qs):
+    def intersection(self, *other_qs: QuerySet[T]) -> QuerySet[T]:
         # If any query is an EmptyQuerySet, return it.
         if isinstance(self, EmptyQuerySet):
             return self
@@ -1276,13 +1337,19 @@ class QuerySet:
             return other
         return self._combinator_query("intersection", *other_qs)
 
-    def difference(self, *other_qs):
+    def difference(self, *other_qs: QuerySet[T]) -> QuerySet[T]:
         # If the query is an EmptyQuerySet, return it.
         if isinstance(self, EmptyQuerySet):
             return self
         return self._combinator_query("difference", *other_qs)
 
-    def select_for_update(self, nowait=False, skip_locked=False, of=(), no_key=False):
+    def select_for_update(
+        self,
+        nowait: bool = False,
+        skip_locked: bool = False,
+        of: tuple[str, ...] = (),
+        no_key: bool = False,
+    ) -> QuerySet[T]:
         """
         Return a new QuerySet instance that will select objects with a
         FOR UPDATE lock.
@@ -1298,7 +1365,7 @@ class QuerySet:
         obj.sql_query.select_for_no_key_update = no_key
         return obj
 
-    def select_related(self, *fields):
+    def select_related(self, *fields: str | None) -> QuerySet[T]:
         """
         Return a new QuerySet instance that will select related objects.
 
@@ -1322,7 +1389,7 @@ class QuerySet:
         obj.sql_query.select_related = True
         return obj
 
-    def prefetch_related(self, *lookups):
+    def prefetch_related(self, *lookups: str | Prefetch | None) -> QuerySet[T]:
         """
         Return a new QuerySet instance that will prefetch the specified
         Many-To-One and Many-To-Many related objects when the QuerySet is
@@ -1337,17 +1404,20 @@ class QuerySet:
             clone._prefetch_related_lookups = ()
         else:
             for lookup in lookups:
+                lookup_str: str
                 if isinstance(lookup, Prefetch):
-                    lookup = lookup.prefetch_to
-                lookup = lookup.split(LOOKUP_SEP, 1)[0]
-                if lookup in self.sql_query._filtered_relations:
+                    lookup_str = lookup.prefetch_to
+                else:
+                    lookup_str = lookup  # type: ignore[assignment]
+                lookup_str = lookup_str.split(LOOKUP_SEP, 1)[0]
+                if lookup_str in self.sql_query._filtered_relations:
                     raise ValueError(
                         "prefetch_related() is not supported with FilteredRelation."
                     )
             clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
         return clone
 
-    def annotate(self, *args, **kwargs):
+    def annotate(self, *args: Any, **kwargs: Any) -> QuerySet[T]:
         """
         Return a query set in which the returned objects have been annotated
         with extra data or aggregations.
@@ -1355,14 +1425,16 @@ class QuerySet:
         self._not_support_combined_queries("annotate")
         return self._annotate(args, kwargs, select=True)
 
-    def alias(self, *args, **kwargs):
+    def alias(self, *args: Any, **kwargs: Any) -> QuerySet[T]:
         """
         Return a query set with added aliases for extra data or aggregations.
         """
         self._not_support_combined_queries("alias")
         return self._annotate(args, kwargs, select=False)
 
-    def _annotate(self, args, kwargs, select=True):
+    def _annotate(
+        self, args: tuple[Any, ...], kwargs: dict[str, Any], select: bool = True
+    ) -> QuerySet[T]:
         self._validate_values_are_expressions(
             args + tuple(kwargs.values()), method_name="annotate"
         )
@@ -1388,7 +1460,7 @@ class QuerySet:
                 (field.name, field.attname)
                 if hasattr(field, "attname")
                 else (field.name,)
-                for field in self.model._meta.get_fields()
+                for field in self.model._meta.get_fields()  # type: ignore[attr-defined]
             )
         )
 
@@ -1415,7 +1487,7 @@ class QuerySet:
 
         return clone
 
-    def order_by(self, *field_names):
+    def order_by(self, *field_names: str) -> QuerySet[T]:
         """Return a new QuerySet instance with the ordering changed."""
         if self.sql_query.is_sliced:
             raise TypeError("Cannot reorder a query once a slice has been taken.")
@@ -1424,7 +1496,7 @@ class QuerySet:
         obj.sql_query.add_ordering(*field_names)
         return obj
 
-    def distinct(self, *field_names):
+    def distinct(self, *field_names: str) -> QuerySet[T]:
         """
         Return a new QuerySet instance that will select only distinct results.
         """
@@ -1439,13 +1511,13 @@ class QuerySet:
 
     def extra(
         self,
-        select=None,
-        where=None,
-        params=None,
-        tables=None,
-        order_by=None,
-        select_params=None,
-    ):
+        select: dict[str, str] | None = None,
+        where: list[str] | None = None,
+        params: list[Any] | None = None,
+        tables: list[str] | None = None,
+        order_by: list[str] | None = None,
+        select_params: list[Any] | None = None,
+    ) -> QuerySet[T]:
         """Add extra SQL fragments to the query."""
         self._not_support_combined_queries("extra")
         if self.sql_query.is_sliced:
@@ -1456,7 +1528,7 @@ class QuerySet:
         )
         return clone
 
-    def reverse(self):
+    def reverse(self) -> QuerySet[T]:
         """Reverse the ordering of the QuerySet."""
         if self.sql_query.is_sliced:
             raise TypeError("Cannot reverse a query once a slice has been taken.")
@@ -1464,7 +1536,7 @@ class QuerySet:
         clone.sql_query.standard_ordering = not clone.sql_query.standard_ordering
         return clone
 
-    def defer(self, *fields):
+    def defer(self, *fields: str | None) -> QuerySet[T]:
         """
         Defer the loading of data for certain fields until they are accessed.
         Add the set of deferred fields to any existing set of deferred fields.
@@ -1481,7 +1553,7 @@ class QuerySet:
         clone.sql_query.add_deferred_loading(fields)
         return clone
 
-    def only(self, *fields):
+    def only(self, *fields: str) -> QuerySet[T]:
         """
         Essentially, the opposite of defer(). Only the fields passed into this
         method and that are not already specified as deferred are loaded
@@ -1507,7 +1579,7 @@ class QuerySet:
     ###################################
 
     @property
-    def ordered(self):
+    def ordered(self) -> bool:
         """
         Return True if the QuerySet is ordered -- i.e. has an order_by()
         clause or a default ordering on the model (or is empty).
@@ -1533,14 +1605,14 @@ class QuerySet:
 
     def _insert(
         self,
-        objs,
-        fields,
-        returning_fields=None,
-        raw=False,
-        on_conflict=None,
-        update_fields=None,
-        unique_fields=None,
-    ):
+        objs: list[T],
+        fields: list[Field],
+        returning_fields: list[Field] | None = None,
+        raw: bool = False,
+        on_conflict: OnConflict | None = None,
+        update_fields: list[Field] | None = None,
+        unique_fields: list[Field] | None = None,
+    ) -> list[tuple[Any, ...]] | None:
         """
         Insert a new record for the given model. This provides an interface to
         the InsertQuery class and is how Model.save() is implemented.
@@ -1548,7 +1620,7 @@ class QuerySet:
         self._for_write = True
         query = sql.InsertQuery(
             self.model,
-            on_conflict=on_conflict,
+            on_conflict=on_conflict.value if on_conflict else None,  # type: ignore[attr-defined]
             update_fields=update_fields,
             unique_fields=unique_fields,
         )
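
In _insert() above, the OnConflict enum member is unwrapped to its raw value exactly where it is handed to sql.InsertQuery, keeping the enum confined to the typed layers above it. A minimal sketch of that boundary conversion (the member names and values here are assumptions, not taken from the package):

from enum import Enum


class OnConflict(Enum):
    # Illustrative members only.
    IGNORE = "ignore"
    UPDATE = "update"


def insert_options(on_conflict: OnConflict | None) -> dict[str, str | None]:
    # Unwrap the enum at the lowest layer; callers keep the typed member.
    return {"on_conflict": on_conflict.value if on_conflict else None}
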
@@ -1557,13 +1629,13 @@ class QuerySet:
|
|
1557
1629
|
|
1558
1630
|
def _batched_insert(
|
1559
1631
|
self,
|
1560
|
-
objs,
|
1561
|
-
fields,
|
1562
|
-
batch_size,
|
1563
|
-
on_conflict=None,
|
1564
|
-
update_fields=None,
|
1565
|
-
unique_fields=None,
|
1566
|
-
):
|
1632
|
+
objs: list[T],
|
1633
|
+
fields: list[Field],
|
1634
|
+
batch_size: int,
|
1635
|
+
on_conflict: OnConflict | None = None,
|
1636
|
+
update_fields: list[Field] | None = None,
|
1637
|
+
unique_fields: list[Field] | None = None,
|
1638
|
+
) -> list[tuple[Any, ...]]:
|
1567
1639
|
"""
|
1568
1640
|
Helper method for bulk_create() to insert objs one batch at a time.
|
1569
1641
|
"""
|
@@ -1578,7 +1650,7 @@ class QuerySet:
|
|
1578
1650
|
self._insert(
|
1579
1651
|
item,
|
1580
1652
|
fields=fields,
|
1581
|
-
returning_fields=self.model._meta.db_returning_fields,
|
1653
|
+
returning_fields=self.model._meta.db_returning_fields, # type: ignore[attr-defined]
|
1582
1654
|
)
|
1583
1655
|
)
|
1584
1656
|
else:
|
@@ -1591,7 +1663,7 @@ class QuerySet:
|
|
1591
1663
|
)
|
1592
1664
|
return inserted_rows
|
1593
1665
|
|
1594
|
-
def _chain(self):
|
1666
|
+
def _chain(self) -> QuerySet[T]:
|
1595
1667
|
"""
|
1596
1668
|
Return a copy of the current QuerySet that's ready for another
|
1597
1669
|
operation.
|
@@ -1602,7 +1674,7 @@ class QuerySet:
|
|
1602
1674
|
obj._sticky_filter = False
|
1603
1675
|
return obj
|
1604
1676
|
|
1605
|
-
def _clone(self):
|
1677
|
+
def _clone(self) -> QuerySet[T]:
|
1606
1678
|
"""
|
1607
1679
|
Return a copy of the current QuerySet. A lightweight alternative
|
1608
1680
|
to deepcopy().
|
@@ -1619,13 +1691,13 @@ class QuerySet:
|
|
1619
1691
|
c._fields = self._fields
|
1620
1692
|
return c
|
1621
1693
|
|
1622
|
-
def _fetch_all(self):
|
1694
|
+
def _fetch_all(self) -> None:
|
1623
1695
|
if self._result_cache is None:
|
1624
1696
|
self._result_cache = list(self._iterable_class(self))
|
1625
1697
|
if self._prefetch_related_lookups and not self._prefetch_done:
|
1626
1698
|
self._prefetch_related_objects()
|
1627
1699
|
|
1628
|
-
def _next_is_sticky(self):
|
1700
|
+
def _next_is_sticky(self) -> QuerySet[T]:
|
1629
1701
|
"""
|
1630
1702
|
Indicate that the next filter call and the one following that should
|
1631
1703
|
be treated as a single filter. This is only important when it comes to
|
@@ -1639,7 +1711,7 @@ class QuerySet:
|
|
1639
1711
|
self._sticky_filter = True
|
1640
1712
|
return self
|
1641
1713
|
|
1642
|
-
def _merge_sanity_check(self, other):
|
1714
|
+
def _merge_sanity_check(self, other: QuerySet[T]) -> None:
|
1643
1715
|
"""Check that two QuerySet classes may be merged."""
|
1644
1716
|
if self._fields is not None and (
|
1645
1717
|
set(self.sql_query.values_select) != set(other.sql_query.values_select)
|
@@ -1651,14 +1723,14 @@ class QuerySet:
|
|
1651
1723
|
f"Merging '{self.__class__.__name__}' classes must involve the same values in each case."
|
1652
1724
|
)
|
1653
1725
|
|
1654
|
-
def _merge_known_related_objects(self, other):
|
1726
|
+
def _merge_known_related_objects(self, other: QuerySet[T]) -> None:
|
1655
1727
|
"""
|
1656
1728
|
Keep track of all known related objects from either QuerySet instance.
|
1657
1729
|
"""
|
1658
1730
|
for field, objects in other._known_related_objects.items():
|
1659
1731
|
self._known_related_objects.setdefault(field, {}).update(objects)
|
1660
1732
|
|
1661
|
-
def resolve_expression(self, *args, **kwargs):
|
1733
|
+
def resolve_expression(self, *args: Any, **kwargs: Any) -> sql.Query:
|
1662
1734
|
if self._fields and len(self._fields) > 1:
|
1663
1735
|
# values() queryset can only be used as nested queries
|
1664
1736
|
# if they are set up to select only a single field.
|
@@ -1666,7 +1738,7 @@ class QuerySet:
|
|
1666
1738
|
query = self.sql_query.resolve_expression(*args, **kwargs)
|
1667
1739
|
return query
|
1668
1740
|
|
1669
|
-
def _has_filters(self):
|
1741
|
+
def _has_filters(self) -> bool:
|
1670
1742
|
"""
|
1671
1743
|
Check if this QuerySet has any filtering going on. This isn't
|
1672
1744
|
equivalent with checking if all objects are present in results, for
|
@@ -1675,7 +1747,9 @@ class QuerySet:
|
|
1675
1747
|
return self.sql_query.has_filters()
|
1676
1748
|
|
1677
1749
|
@staticmethod
|
1678
|
-
def _validate_values_are_expressions(
|
1750
|
+
def _validate_values_are_expressions(
|
1751
|
+
values: tuple[Any, ...], method_name: str
|
1752
|
+
) -> None:
|
1679
1753
|
invalid_args = sorted(
|
1680
1754
|
str(arg) for arg in values if not hasattr(arg, "resolve_expression")
|
1681
1755
|
)
|
@@ -1687,19 +1761,19 @@ class QuerySet:
|
|
1687
1761
|
)
|
1688
1762
|
)
|
1689
1763
|
|
1690
|
-
def _not_support_combined_queries(self, operation_name):
|
1764
|
+
def _not_support_combined_queries(self, operation_name: str) -> None:
|
1691
1765
|
if self.sql_query.combinator:
|
1692
1766
|
raise NotSupportedError(
|
1693
1767
|
f"Calling QuerySet.{operation_name}() after {self.sql_query.combinator}() is not supported."
|
1694
1768
|
)
|
1695
1769
|
|
1696
|
-
def _check_operator_queryset(self, other, operator_):
|
1770
|
+
def _check_operator_queryset(self, other: QuerySet[T], operator_: str) -> None:
|
1697
1771
|
if self.sql_query.combinator or other.sql_query.combinator:
|
1698
1772
|
raise TypeError(f"Cannot use {operator_} operator with combined queryset.")
|
1699
1773
|
|
1700
1774
|
|
1701
1775
|
class InstanceCheckMeta(type):
|
1702
|
-
def __instancecheck__(self, instance):
|
1776
|
+
def __instancecheck__(self, instance: object) -> bool:
|
1703
1777
|
return isinstance(instance, QuerySet) and instance.sql_query.is_empty()
|
1704
1778
|
|
1705
1779
|
|
@@ -1709,7 +1783,7 @@ class EmptyQuerySet(metaclass=InstanceCheckMeta):
|
|
1709
1783
|
isinstance(qs.none(), EmptyQuerySet) -> True
|
1710
1784
|
"""
|
1711
1785
|
|
1712
|
-
def __init__(self, *args, **kwargs):
|
1786
|
+
def __init__(self, *args: Any, **kwargs: Any):
|
1713
1787
|
raise TypeError("EmptyQuerySet can't be instantiated")
|
1714
1788
|
|
1715
1789
|
|
@@ -1721,26 +1795,30 @@ class RawQuerySet:

     def __init__(
         self,
-        raw_query,
-        model=None,
-        query=None,
-        params=(),
-        translations=None,
+        raw_query: str,
+        model: type[Model] | None = None,
+        query: sql.RawQuery | None = None,
+        params: tuple[Any, ...] = (),
+        translations: dict[str, str] | None = None,
     ):
         self.raw_query = raw_query
         self.model = model
         self.sql_query = query or sql.RawQuery(sql=raw_query, params=params)
         self.params = params
         self.translations = translations or {}
-        self._result_cache = None
-        self._prefetch_related_lookups = ()
+        self._result_cache: list[Model] | None = None
+        self._prefetch_related_lookups: tuple[Any, ...] = ()
         self._prefetch_done = False

-    def resolve_model_init_order(self):
+    def resolve_model_init_order(
+        self,
+    ) -> tuple[list[str], list[int], list[tuple[str, int]]]:
         """Resolve the init field names and value positions."""
         converter = db_connection.introspection.identifier_converter
         model_init_fields = [
-            f for f in self.model._meta.fields if converter(f.column) in self.columns
+            f
+            for f in self.model._meta.fields  # type: ignore[attr-defined]
+            if converter(f.column) in self.columns
         ]
         annotation_fields = [
             (column, pos)
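The annotated constructor doubles as documentation for building a `RawQuerySet` by hand (normally one is produced for you from a model's raw-SQL entry point). A hedged sketch; `User` and the table are hypothetical, and `translations` maps result-set column names to model field names:

```python
raw_qs = RawQuerySet(
    "SELECT id, full_name FROM users WHERE id = %s",
    model=User,
    params=(1,),
    translations={"full_name": "name"},  # column "full_name" -> field "name"
)
```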
@@ -1753,7 +1831,7 @@ class RawQuerySet:
         model_init_names = [f.attname for f in model_init_fields]
         return model_init_names, model_init_order, annotation_fields

-    def prefetch_related(self, *lookups):
+    def prefetch_related(self, *lookups: str | Prefetch | None) -> RawQuerySet:
         """Same as QuerySet.prefetch_related()"""
         clone = self._clone()
         if lookups == (None,):
@@ -1762,11 +1840,11 @@ class RawQuerySet:
         clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
         return clone

-    def _prefetch_related_objects(self):
+    def _prefetch_related_objects(self) -> None:
         prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
         self._prefetch_done = True

-    def _clone(self):
+    def _clone(self) -> RawQuerySet:
         """Same as QuerySet._clone()"""
         c = self.__class__(
             self.raw_query,
@@ -1778,35 +1856,35 @@ class RawQuerySet:
         c._prefetch_related_lookups = self._prefetch_related_lookups[:]
         return c

-    def _fetch_all(self):
+    def _fetch_all(self) -> None:
         if self._result_cache is None:
             self._result_cache = list(self.iterator())
         if self._prefetch_related_lookups and not self._prefetch_done:
             self._prefetch_related_objects()

-    def __len__(self):
+    def __len__(self) -> int:
         self._fetch_all()
-        return len(self._result_cache)
+        return len(self._result_cache)  # type: ignore[arg-type]

-    def __bool__(self):
+    def __bool__(self) -> bool:
         self._fetch_all()
         return bool(self._result_cache)

-    def __iter__(self):
+    def __iter__(self) -> Iterator[Model]:
         self._fetch_all()
-        return iter(self._result_cache)
+        return iter(self._result_cache)  # type: ignore[arg-type]

-    def iterator(self):
+    def iterator(self) -> Iterator[Model]:
         yield from RawModelIterable(self)

-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"<{self.__class__.__name__}: {self.sql_query}>"

-    def __getitem__(self, k):
+    def __getitem__(self, k: int | slice) -> Model | list[Model]:
         return list(self)[k]

     @cached_property
-    def columns(self):
+    def columns(self) -> list[str]:
         """
         A list of model field names in the order they'll appear in the
         query results.
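Together these dunder methods give `RawQuerySet` its lazy, list-like contract: nothing hits the database until `_fetch_all()` populates `_result_cache`, after which length, truthiness, iteration, and indexing are all served from the cached list. Continuing the hypothetical `raw_qs` from above:

```python
len(raw_qs)        # runs the query once via _fetch_all(), then len() of the cache
bool(raw_qs)       # True if any rows came back; reuses the same cache
first = raw_qs[0]  # __getitem__ materializes list(self) and indexes it

for user in raw_qs:  # __iter__ also serves from the cached results
    ...
```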
@@ -1824,18 +1902,23 @@ class RawQuerySet:
         return columns

     @cached_property
-    def model_fields(self):
+    def model_fields(self) -> dict[str, Field]:
         """A dict mapping column names to model field names."""
         converter = db_connection.introspection.identifier_converter
         model_fields = {}
-        for field in self.model._meta.fields:
+        for field in self.model._meta.fields:  # type: ignore[attr-defined]
             name, column = field.get_attname_column()
             model_fields[converter(column)] = field
         return model_fields


 class Prefetch:
-    def __init__(self, lookup, queryset=None, to_attr=None):
+    def __init__(
+        self,
+        lookup: str,
+        queryset: QuerySet[Any] | None = None,
+        to_attr: str | None = None,
+    ):
         # `prefetch_through` is the path we traverse to perform the prefetch.
         self.prefetch_through = lookup
         # `prefetch_to` is the path to the attribute that stores the result.
@@ -1858,7 +1941,7 @@ class Prefetch:
         self.queryset = queryset
         self.to_attr = to_attr

-    def __getstate__(self):
+    def __getstate__(self) -> dict[str, Any]:
         obj_dict = self.__dict__.copy()
         if self.queryset is not None:
             queryset = self.queryset._chain()
@@ -1868,34 +1951,37 @@ class Prefetch:
         obj_dict["queryset"] = queryset
         return obj_dict

-    def add_prefix(self, prefix):
+    def add_prefix(self, prefix: str) -> None:
         self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
         self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to

-    def get_current_prefetch_to(self, level):
+    def get_current_prefetch_to(self, level: int) -> str:
         return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[: level + 1])

-    def get_current_to_attr(self, level):
+    def get_current_to_attr(self, level: int) -> tuple[str, bool]:
         parts = self.prefetch_to.split(LOOKUP_SEP)
         to_attr = parts[level]
         as_attr = self.to_attr and level == len(parts) - 1
         return to_attr, as_attr

-    def get_current_queryset(self, level):
+    def get_current_queryset(self, level: int) -> QuerySet[Any] | None:
         if self.get_current_prefetch_to(level) == self.prefetch_to:
             return self.queryset
         return None

-    def __eq__(self, other):
+    def __eq__(self, other: object) -> bool:
         if not isinstance(other, Prefetch):
             return NotImplemented
         return self.prefetch_to == other.prefetch_to

-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash((self.__class__, self.prefetch_to))


-def normalize_prefetch_lookups(lookups, prefix=None):
+def normalize_prefetch_lookups(
+    lookups: tuple[str | Prefetch, ...] | list[str | Prefetch],
+    prefix: str | None = None,
+) -> list[Prefetch]:
     """Normalize lookups into Prefetch objects."""
     ret = []
     for lookup in lookups:
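The annotated `Prefetch` signature spells out its three roles: a lookup path to traverse, an optional replacement queryset, and an optional `to_attr` destination. A hedged usage sketch; `Book` and the `.query` manager access are assumptions, and `LOOKUP_SEP` is taken to be the conventional `"__"`:

```python
# Swap in a custom related queryset and store the result on a plain attribute:
Prefetch("books", queryset=Book.query.order_by("title"), to_attr="sorted_books")

# Level-based traversal used internally by prefetch_related_objects():
p = Prefetch("author__books")
p.get_current_prefetch_to(0)  # "author"
p.get_current_prefetch_to(1)  # "author__books"

# __eq__ and __hash__ key off prefetch_to, so equivalent prefetches deduplicate:
Prefetch("author__books") == p  # True
```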
@@ -1907,7 +1993,9 @@ def normalize_prefetch_lookups(lookups, prefix=None):
     return ret


-def prefetch_related_objects(model_instances, *related_lookups):
+def prefetch_related_objects(
+    model_instances: list[Model], *related_lookups: str | Prefetch
+) -> None:
     """
     Populate prefetched object caches for a list of model instances based on
     the lookups/Prefetch instances given.
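`prefetch_related_objects()` is the functional entry point behind `prefetch_related()`: it takes instances that have already been fetched and fills their prefetch caches in place. A hedged sketch, again assuming hypothetical `Author`/`Book` models with a `.query` manager:

```python
authors = list(Author.query.all())

# Accepts a mix of string lookups and Prefetch objects; returns None and
# mutates the instances' caches in place.
prefetch_related_objects(
    authors,
    "publisher",
    Prefetch("books", queryset=Book.query.order_by("title")),
)
# Accessing the prefetched relations afterwards issues no additional queries.
```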
@@ -1923,7 +2011,7 @@ def prefetch_related_objects(model_instances, *related_lookups):
     auto_lookups = set()  # we add to this as we go through.
     followed_descriptors = set()  # recursion protection

-    all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
+    all_lookups = normalize_prefetch_lookups(reversed(related_lookups))  # type: ignore[arg-type]
     while all_lookups:
         lookup = all_lookups.pop()
         if lookup.prefetch_to in done_queries:
@@ -2019,7 +2107,8 @@ def prefetch_related_objects(model_instances, *related_lookups):
                 ):
                     done_queries[prefetch_to] = obj_list
                     new_lookups = normalize_prefetch_lookups(
-                        reversed(additional_lookups), prefetch_to
+                        reversed(additional_lookups),  # type: ignore[arg-type]
+                        prefetch_to,
                     )
                     auto_lookups.update(new_lookups)
                     all_lookups.extend(new_lookups)
@@ -2037,7 +2126,7 @@ def prefetch_related_objects(model_instances, *related_lookups):
             if through_attr in getattr(obj, "_prefetched_objects_cache", ()):
                 # If related objects have been prefetched, use the
                 # cache rather than the object's through_attr.
-                new_obj = list(obj._prefetched_objects_cache.get(through_attr))
+                new_obj = list(obj._prefetched_objects_cache.get(through_attr))  # type: ignore[arg-type]
             else:
                 try:
                     new_obj = getattr(obj, through_attr)
@@ -2055,7 +2144,9 @@ def prefetch_related_objects(model_instances, *related_lookups):
             obj_list = new_obj_list


-def get_prefetcher(instance, through_attr, to_attr):
+def get_prefetcher(
+    instance: Model, through_attr: str, to_attr: str
+) -> tuple[Any, Any, bool, Callable[[Model], bool]]:
     """
     For the attribute 'through_attr' on the given instance, find
     an object that has a get_prefetch_queryset().
@@ -2067,11 +2158,11 @@ def get_prefetcher(instance, through_attr, to_attr):
     the attribute has already been fetched for that instance)
     """

-    def has_to_attr_attribute(instance):
+    def has_to_attr_attribute(instance: Model) -> bool:
         return hasattr(instance, to_attr)

     prefetcher = None
-    is_fetched = has_to_attr_attribute
+    is_fetched: Callable[[Model], bool] = has_to_attr_attribute

     # For singly related objects, we have to avoid getting the attribute
     # from the object, as this will trigger the query. So we first try
@@ -2101,20 +2192,22 @@ def get_prefetcher(instance, through_attr, to_attr):
                     getattr(instance.__class__, to_attr, None), cached_property
                 ):

-                    def has_cached_property(instance):
+                    def has_cached_property(instance: Model) -> bool:
                         return to_attr in instance.__dict__

                     is_fetched = has_cached_property
                 else:

-                    def in_prefetched_cache(instance):
-                        return through_attr in instance._prefetched_objects_cache
+                    def in_prefetched_cache(instance: Model) -> bool:
+                        return through_attr in instance._prefetched_objects_cache  # type: ignore[attr-defined]

                     is_fetched = in_prefetched_cache
     return prefetcher, rel_obj_descriptor, attr_found, is_fetched


-def prefetch_one_level(instances, prefetcher, lookup, level):
+def prefetch_one_level(
+    instances: list[Model], prefetcher: Any, lookup: Prefetch, level: int
+) -> tuple[list[Model], list[Prefetch]]:
     """
     Helper function for prefetch_related_objects().

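The new `Callable[[Model], bool]` annotation captures `get_prefetcher()`'s contract: alongside the prefetcher and descriptor it hands back an `is_fetched` predicate, so callers can cheaply test per instance whether the relation is already loaded without touching it (which could trigger a query). Schematically, with `obj` a hypothetical model instance:

```python
prefetcher, rel_obj_descriptor, attr_found, is_fetched = get_prefetcher(
    obj, "books", "books"
)
if not is_fetched(obj):
    ...  # safe to prefetch; reading obj.books directly here might query
```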
@@ -2202,7 +2295,7 @@ def prefetch_one_level(instances, prefetcher, lookup, level):
             # No to_attr has been given for this prefetch operation and the
             # cache_name does not point to a descriptor. Store the value of
             # the field in the object's field cache.
-            obj._state.fields_cache[cache_name] = val
+            obj._state.fields_cache[cache_name] = val  # type: ignore[index]
         else:
             if as_attr:
                 setattr(obj, to_attr, vals)
@@ -2244,7 +2337,7 @@ class RelatedPopulator:
     model instance.
     """

-    def __init__(self, klass_info, select):
+    def __init__(self, klass_info: dict[str, Any], select: list[Any]):
         # Pre-compute needed attributes. The attributes are:
         #  - model_cls: the possibly deferred model class to instantiate
         #  - either:
@@ -2283,7 +2376,7 @@ class RelatedPopulator:
         self.local_setter = klass_info["local_setter"]
         self.remote_setter = klass_info["remote_setter"]

-    def populate(self, row, from_obj):
+    def populate(self, row: tuple[Any, ...], from_obj: Model) -> None:
         if self.reorder_for_init:
             obj_data = self.reorder_for_init(row)
         else:
@@ -2299,7 +2392,9 @@ class RelatedPopulator:
             self.remote_setter(obj, from_obj)


-def get_related_populators(klass_info, select):
+def get_related_populators(
+    klass_info: dict[str, Any], select: list[Any]
+) -> list[RelatedPopulator]:
     iterators = []
     related_klass_infos = klass_info.get("related_klass_infos", [])
     for rel_klass_info in related_klass_infos: