plain.models 0.50.0__py3-none-any.whl → 0.51.0__py3-none-any.whl
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- plain/models/CHANGELOG.md +14 -0
- plain/models/README.md +26 -42
- plain/models/__init__.py +2 -0
- plain/models/backends/base/creation.py +2 -2
- plain/models/backends/base/introspection.py +8 -4
- plain/models/backends/base/schema.py +89 -71
- plain/models/backends/base/validation.py +1 -1
- plain/models/backends/mysql/compiler.py +1 -1
- plain/models/backends/mysql/operations.py +1 -1
- plain/models/backends/mysql/schema.py +4 -4
- plain/models/backends/postgresql/operations.py +1 -1
- plain/models/backends/postgresql/schema.py +3 -3
- plain/models/backends/sqlite3/operations.py +1 -1
- plain/models/backends/sqlite3/schema.py +61 -50
- plain/models/base.py +116 -163
- plain/models/cli.py +4 -4
- plain/models/constraints.py +14 -9
- plain/models/deletion.py +15 -14
- plain/models/expressions.py +1 -1
- plain/models/fields/__init__.py +20 -16
- plain/models/fields/json.py +3 -3
- plain/models/fields/related.py +73 -71
- plain/models/fields/related_descriptors.py +2 -2
- plain/models/fields/related_lookups.py +1 -1
- plain/models/fields/related_managers.py +21 -32
- plain/models/fields/reverse_related.py +8 -8
- plain/models/forms.py +12 -12
- plain/models/indexes.py +5 -4
- plain/models/meta.py +505 -0
- plain/models/migrations/operations/base.py +1 -1
- plain/models/migrations/operations/fields.py +6 -6
- plain/models/migrations/operations/models.py +18 -16
- plain/models/migrations/recorder.py +9 -5
- plain/models/migrations/state.py +35 -46
- plain/models/migrations/utils.py +1 -1
- plain/models/options.py +182 -518
- plain/models/preflight.py +7 -5
- plain/models/query.py +119 -65
- plain/models/query_utils.py +18 -13
- plain/models/registry.py +6 -5
- plain/models/sql/compiler.py +51 -37
- plain/models/sql/query.py +77 -68
- plain/models/sql/subqueries.py +4 -4
- plain/models/utils.py +4 -1
- {plain_models-0.50.0.dist-info → plain_models-0.51.0.dist-info}/METADATA +27 -43
- {plain_models-0.50.0.dist-info → plain_models-0.51.0.dist-info}/RECORD +49 -48
- {plain_models-0.50.0.dist-info → plain_models-0.51.0.dist-info}/WHEEL +0 -0
- {plain_models-0.50.0.dist-info → plain_models-0.51.0.dist-info}/entry_points.txt +0 -0
- {plain_models-0.50.0.dist-info → plain_models-0.51.0.dist-info}/licenses/LICENSE +0 -0
plain/models/preflight.py
CHANGED
@@ -30,7 +30,9 @@ class CheckAllModels(PreflightCheck):
         errors = []
         models = models_registry.get_models()
         for model in models:
-            db_table_models[model.
+            db_table_models[model.model_options.db_table].append(
+                model.model_options.label
+            )
             if not inspect.ismethod(model.preflight):
                 errors.append(
                     PreflightResult(
@@ -41,10 +43,10 @@ class CheckAllModels(PreflightCheck):
                 )
             else:
                 errors.extend(model.preflight())
-            for model_index in model.
-                indexes[model_index.name].append(model.
-            for model_constraint in model.
-                constraints[model_constraint.name].append(model.
+            for model_index in model.model_options.indexes:
+                indexes[model_index.name].append(model.model_options.label)
+            for model_constraint in model.model_options.constraints:
+                constraints[model_constraint.name].append(model.model_options.label)
         for db_table, model_labels in db_table_models.items():
             if len(model_labels) != 1:
                 model_labels_str = ", ".join(model_labels)
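The preflight change is a straight rename: table names, labels, indexes, and constraints are now read from `model.model_options`. A minimal sketch of the duplicate-table grouping this check performs (the function name, `defaultdict` setup, and return shape are illustrative assumptions, not the module's actual code):

```python
from collections import defaultdict


def find_duplicate_db_tables(models):
    """Group model labels by table name and keep only clashing tables."""
    db_table_models = defaultdict(list)
    for model in models:
        # 0.51.0: both values come from model.model_options
        db_table_models[model.model_options.db_table].append(model.model_options.label)
    return {
        db_table: labels
        for db_table, labels in db_table_models.items()
        if len(labels) != 1
    }
```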
plain/models/query.py
CHANGED
@@ -10,7 +10,7 @@ import warnings
 from collections.abc import Callable, Iterator
 from functools import cached_property
 from itertools import chain, islice
-from typing import TYPE_CHECKING, Any, Generic, TypeVar
+from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar
 
 import plain.runtime
 from plain.exceptions import ValidationError
@@ -151,7 +151,10 @@ class RawModelIterable(BaseIterable):
                 raise FieldDoesNotExist("Raw query must include the primary key")
             fields = [self.queryset.model_fields.get(c) for c in self.queryset.columns]
             converters = compiler.get_converters(
-                [
+                [
+                    f.get_col(f.model.model_options.db_table) if f else None
+                    for f in fields
+                ]
             )
             if converters:
                 query_iterator = compiler.apply_converters(query_iterator, converters)
@@ -270,25 +273,72 @@ class FlatValuesListIterable(BaseIterable):
 
 
 class QuerySet(Generic[T]):
-    """
+    """
+    Represent a lazy database lookup for a set of objects.
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    Usage:
+        MyModel.query.filter(name="test").all()
+
+    Custom QuerySets:
+        from typing import Self
+
+        class TaskQuerySet(QuerySet["Task"]):
+            def active(self) -> Self:
+                return self.filter(is_active=True)
+
+        class Task(Model):
+            is_active = BooleanField(default=True)
+            query = TaskQuerySet()
+
+        Task.query.active().filter(name="test")  # Full type inference
+
+    Custom methods should return `Self` to preserve type through method chaining.
+    """
+
+    # Instance attributes (set in from_model())
+    model: type[T]
+    _query: sql.Query
+    _result_cache: list[T] | None
+    _sticky_filter: bool
+    _for_write: bool
+    _prefetch_related_lookups: tuple[Any, ...]
+    _prefetch_done: bool
+    _known_related_objects: dict[Any, dict[Any, Any]]
+    _iterable_class: type[BaseIterable]
+    _fields: tuple[str, ...] | None
+    _defer_next_filter: bool
+    _deferred_filter: tuple[bool, tuple[Any, ...], dict[str, Any]] | None
+
+    def __init__(self):
+        """Minimal init for descriptor mode. Use from_model() to create instances."""
+        pass
+
+    @classmethod
+    def from_model(cls, model: type[T], query: sql.Query | None = None) -> Self:
+        """Create a QuerySet instance bound to a model."""
+        instance = cls()
+        instance.model = model
+        instance._query = query or sql.Query(model)
+        instance._result_cache = None
+        instance._sticky_filter = False
+        instance._for_write = False
+        instance._prefetch_related_lookups = ()
+        instance._prefetch_done = False
+        instance._known_related_objects = {}
+        instance._iterable_class = ModelIterable
+        instance._fields = None
+        instance._defer_next_filter = False
+        instance._deferred_filter = None
+        return instance
+
+    def __get__(self, instance: Any, owner: type[T]) -> Self:
+        """Descriptor protocol - return a new QuerySet bound to the model."""
+        if instance is not None:
+            raise AttributeError(
+                f"QuerySet is only accessible from the model class, not instances. "
+                f"Use {owner.__name__}.query instead."
+            )
+        return self.from_model(owner)
 
     @property
     def sql_query(self) -> sql.Query:
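The new docstring and the `from_model()`/`__get__` pair replace the old constructor path: the class-level `query` attribute is now a QuerySet acting as a descriptor. A sketch of how that is meant to be used, mirroring the docstring above (the `from plain import models` import path and the field names are assumptions for illustration):

```python
from typing import Self

from plain import models  # import path assumed for illustration


class TaskQuerySet(models.QuerySet["Task"]):
    def active(self) -> Self:
        # Returning Self keeps the TaskQuerySet type through chaining.
        return self.filter(is_active=True)


@models.register_model
class Task(models.Model):
    is_active = models.BooleanField(default=True)

    # Acts as a descriptor: Task.query calls __get__, which returns
    # TaskQuerySet.from_model(Task); instance access raises AttributeError.
    query = TaskQuerySet()


active_tasks = Task.query.active().filter(id__gte=1)
```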
@@ -310,7 +360,7 @@ class QuerySet(Generic[T]):
 
     def __deepcopy__(self, memo: dict[int, Any]) -> QuerySet[T]:
         """Don't populate the QuerySet's cache."""
-        obj = self.__class__(
+        obj = self.__class__.from_model(self.model)
         for k, v in self.__dict__.items():
             if k == "_result_cache":
                 obj.__dict__[k] = None
@@ -434,12 +484,14 @@ class QuerySet(Generic[T]):
         query = (
             self
             if self.sql_query.can_filter()
-            else self.model.
+            else self.model._model_meta.base_queryset.filter(id__in=self.values("id"))
         )
         combined = query._chain()
         combined._merge_known_related_objects(other)
         if not other.sql_query.can_filter():
-            other = other.model.
+            other = other.model._model_meta.base_queryset.filter(
+                id__in=other.values("id")
+            )
         combined.sql_query.combine(other.sql_query, sql.OR)
         return combined
 
@@ -453,12 +505,14 @@ class QuerySet(Generic[T]):
         query = (
             self
             if self.sql_query.can_filter()
-            else self.model.
+            else self.model._model_meta.base_queryset.filter(id__in=self.values("id"))
         )
         combined = query._chain()
         combined._merge_known_related_objects(other)
         if not other.sql_query.can_filter():
-            other = other.model.
+            other = other.model._model_meta.base_queryset.filter(
+                id__in=other.values("id")
+            )
         combined.sql_query.combine(other.sql_query, sql.XOR)
         return combined
 
@@ -565,11 +619,11 @@ class QuerySet(Generic[T]):
             return clone._result_cache[0]
         if not num:
             raise self.model.DoesNotExist(
-                f"{self.model.
+                f"{self.model.model_options.object_name} matching query does not exist."
             )
         raise self.model.MultipleObjectsReturned(
             "get() returned more than one {} -- it returned {}!".format(
-                self.model.
+                self.model.model_options.object_name,
                 num if not limit or num < limit else "more than %s" % (limit - 1),
             )
         )
@@ -595,7 +649,7 @@ class QuerySet(Generic[T]):
         return obj
 
     def _prepare_for_bulk_create(self, objs: list[T]) -> None:
-        id_field = self.model.
+        id_field = self.model._model_meta.get_field("id")
         for obj in objs:
             if obj.id is None:  # type: ignore[attr-defined]
                 # Populate new primary key values.
@@ -679,18 +733,18 @@ class QuerySet(Generic[T]):
 
         if not objs:
             return objs
-
+        meta = self.model._model_meta
         if unique_fields:
-            unique_fields = [
+            unique_fields = [meta.get_field(name) for name in unique_fields]
         if update_fields:
-            update_fields = [
+            update_fields = [meta.get_field(name) for name in update_fields]
         on_conflict = self._check_bulk_create_options(
             update_conflicts,
             update_fields,
             unique_fields,
         )
         self._for_write = True
-        fields =
+        fields = meta.concrete_fields
         objs = list(objs)
         self._prepare_for_bulk_create(objs)
         with transaction.atomic(savepoint=False):
@@ -704,9 +758,9 @@ class QuerySet(Generic[T]):
                     update_fields=update_fields,
                     unique_fields=unique_fields,
                 )
-                id_field =
+                id_field = meta.get_field("id")
                 for obj_with_id, results in zip(objs_with_id, returned_columns):
-                    for result, field in zip(results,
+                    for result, field in zip(results, meta.db_returning_fields):
                         if field != id_field:
                             setattr(obj_with_id, field.attname, result)
                 for obj_with_id in objs_with_id:
@@ -727,7 +781,7 @@ class QuerySet(Generic[T]):
                 ):
                     assert len(returned_columns) == len(objs_without_id)
                 for obj_without_id, results in zip(objs_without_id, returned_columns):
-                    for result, field in zip(results,
+                    for result, field in zip(results, meta.db_returning_fields):
                         setattr(obj_without_id, field.attname, result)
                     obj_without_id._state.adding = False
 
@@ -746,7 +800,7 @@ class QuerySet(Generic[T]):
         objs_tuple = tuple(objs)
         if any(obj.id is None for obj in objs_tuple):  # type: ignore[attr-defined]
             raise ValueError("All bulk_update() objects must have a primary key set.")
-        fields_list = [self.model.
+        fields_list = [self.model._model_meta.get_field(name) for name in fields]
         if any(not f.concrete or f.many_to_many for f in fields_list):
             raise ValueError("bulk_update() can only be used with concrete fields.")
         if any(f.primary_key for f in fields_list):
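In the bulk paths above, `unique_fields`, `update_fields`, and the `bulk_update()` field names are plain strings that get resolved through `self.model._model_meta.get_field(...)`. A hedged usage sketch reusing the hypothetical `Task` model from the earlier example (keyword names follow the hunks above; the full signatures are not shown in this diff):

```python
# Create, tolerating conflicts on the primary key and refreshing is_active.
tasks = [Task(id=i, is_active=False) for i in (1, 2, 3)]
Task.query.bulk_create(
    tasks,
    update_conflicts=True,
    unique_fields=["id"],        # resolved via Task._model_meta.get_field("id")
    update_fields=["is_active"],
)

# Update a concrete, non-primary-key field in bulk.
for task in tasks:
    task.is_active = True
Task.query.bulk_update(tasks, ["is_active"])
```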
@@ -856,14 +910,14 @@ class QuerySet(Generic[T]):
                     setattr(obj, k, v)
 
                 update_fields = set(update_defaults)
-                concrete_field_names = self.model.
+                concrete_field_names = self.model._model_meta._non_pk_concrete_field_names
                 # update_fields does not support non-concrete fields.
                 if concrete_field_names.issuperset(update_fields):
                     # Add fields which are set on pre_save(), e.g. auto_now fields.
                     # This is to maintain backward compatibility as these fields
                     # are not updated unless explicitly specified in the
                     # update_fields list.
-                    for field in self.model.
+                    for field in self.model._model_meta.local_concrete_fields:
                         if not (
                             field.primary_key or field.__class__.pre_save is Field.pre_save
                         ):
@@ -885,11 +939,11 @@ class QuerySet(Generic[T]):
         defaults = defaults or {}
         params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
         params.update(defaults)
-        property_names = self.model.
+        property_names = self.model._model_meta._property_names
         invalid_params = []
         for param in params:
             try:
-                self.model.
+                self.model._model_meta.get_field(param)
             except FieldDoesNotExist:
                 # It's okay to use a model's property if it has a setter.
                 if not (param in property_names and getattr(self.model, param).fset):
@@ -897,7 +951,7 @@ class QuerySet(Generic[T]):
         if invalid_params:
             raise FieldError(
                 "Invalid field name(s) for model {}: '{}'.".format(
-                    self.model.
+                    self.model.model_options.object_name,
                     "', '".join(sorted(invalid_params)),
                 )
             )
@@ -925,15 +979,15 @@ class QuerySet(Generic[T]):
         """
         if self.sql_query.is_sliced:
             raise TypeError("Cannot use 'limit' or 'offset' with in_bulk().")
-
+        meta = self.model._model_meta
         unique_fields = [
             constraint.fields[0]
-            for constraint in
+            for constraint in self.model.model_options.total_unique_constraints
             if len(constraint.fields) == 1
         ]
         if (
             field_name != "id"
-            and not
+            and not meta.get_field(field_name).primary_key
             and field_name not in unique_fields
             and self.sql_query.distinct_fields != (field_name,)
         ):
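The `in_bulk()` hunk spells out the constraint: the keying field must be `"id"`, a primary key, or covered by a single-field constraint in `model_options.total_unique_constraints`. A hedged usage sketch (the `field_name` keyword and a unique `name` field are assumptions, mirroring the local variable in the hunk):

```python
# Key results by primary key (the default case shown in the hunk).
tasks_by_id = Task.query.in_bulk([1, 2, 3])
# -> {1: <Task>, 2: <Task>, 3: <Task>}

# Keying by another field only works if it is unique, i.e. it appears in a
# single-field constraint in Task.model_options.total_unique_constraints:
# tasks_by_name = Task.query.in_bulk(["a", "b"], field_name="name")
```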
@@ -1241,14 +1295,14 @@ class QuerySet(Generic[T]):
     # PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
     ##################################################################
 
-    def all(self) ->
+    def all(self) -> Self:
         """
         Return a new QuerySet that is a copy of the current one. This allows a
         QuerySet to proxy for a model queryset in some cases.
         """
         return self._chain()
 
-    def filter(self, *args: Any, **kwargs: Any) ->
+    def filter(self, *args: Any, **kwargs: Any) -> Self:
         """
         Return a new QuerySet instance with the args ANDed to the existing
         set.
@@ -1256,7 +1310,7 @@ class QuerySet(Generic[T]):
         self._not_support_combined_queries("filter")
         return self._filter_or_exclude(False, args, kwargs)
 
-    def exclude(self, *args: Any, **kwargs: Any) ->
+    def exclude(self, *args: Any, **kwargs: Any) -> Self:
         """
         Return a new QuerySet instance with NOT (args) ANDed to the existing
         set.
@@ -1266,7 +1320,7 @@ class QuerySet(Generic[T]):
 
     def _filter_or_exclude(
         self, negate: bool, args: tuple[Any, ...], kwargs: dict[str, Any]
-    ) ->
+    ) -> Self:
         if (args or kwargs) and self.sql_query.is_sliced:
             raise TypeError("Cannot filter a query once a slice has been taken.")
         clone = self._chain()
@@ -1365,7 +1419,7 @@ class QuerySet(Generic[T]):
         obj.sql_query.select_for_no_key_update = no_key
         return obj
 
-    def select_related(self, *fields: str | None) ->
+    def select_related(self, *fields: str | None) -> Self:
         """
         Return a new QuerySet instance that will select related objects.
 
@@ -1389,7 +1443,7 @@ class QuerySet(Generic[T]):
         obj.sql_query.select_related = True
         return obj
 
-    def prefetch_related(self, *lookups: str | Prefetch | None) ->
+    def prefetch_related(self, *lookups: str | Prefetch | None) -> Self:
         """
         Return a new QuerySet instance that will prefetch the specified
         Many-To-One and Many-To-Many related objects when the QuerySet is
@@ -1417,7 +1471,7 @@ class QuerySet(Generic[T]):
         clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
         return clone
 
-    def annotate(self, *args: Any, **kwargs: Any) ->
+    def annotate(self, *args: Any, **kwargs: Any) -> Self:
         """
         Return a query set in which the returned objects have been annotated
         with extra data or aggregations.
@@ -1425,7 +1479,7 @@ class QuerySet(Generic[T]):
         self._not_support_combined_queries("annotate")
         return self._annotate(args, kwargs, select=True)
 
-    def alias(self, *args: Any, **kwargs: Any) ->
+    def alias(self, *args: Any, **kwargs: Any) -> Self:
         """
         Return a query set with added aliases for extra data or aggregations.
         """
@@ -1434,7 +1488,7 @@ class QuerySet(Generic[T]):
 
     def _annotate(
         self, args: tuple[Any, ...], kwargs: dict[str, Any], select: bool = True
-    ) ->
+    ) -> Self:
         self._validate_values_are_expressions(
             args + tuple(kwargs.values()), method_name="annotate"
         )
@@ -1460,7 +1514,7 @@ class QuerySet(Generic[T]):
                 (field.name, field.attname)
                 if hasattr(field, "attname")
                 else (field.name,)
-                for field in self.model.
+                for field in self.model._model_meta.get_fields()
             )
         )
 
@@ -1487,7 +1541,7 @@ class QuerySet(Generic[T]):
 
         return clone
 
-    def order_by(self, *field_names: str) ->
+    def order_by(self, *field_names: str) -> Self:
         """Return a new QuerySet instance with the ordering changed."""
         if self.sql_query.is_sliced:
             raise TypeError("Cannot reorder a query once a slice has been taken.")
@@ -1496,7 +1550,7 @@ class QuerySet(Generic[T]):
         obj.sql_query.add_ordering(*field_names)
         return obj
 
-    def distinct(self, *field_names: str) ->
+    def distinct(self, *field_names: str) -> Self:
         """
         Return a new QuerySet instance that will select only distinct results.
         """
@@ -1590,7 +1644,7 @@ class QuerySet(Generic[T]):
             return True
         elif (
             self.sql_query.default_ordering
-            and self.sql_query.
+            and self.sql_query.get_model_meta().ordering
             and
             # A default ordering doesn't affect GROUP BY queries.
             not self.sql_query.group_by
@@ -1650,7 +1704,7 @@ class QuerySet(Generic[T]):
                     self._insert(
                         item,
                         fields=fields,
-                        returning_fields=self.model.
+                        returning_fields=self.model._model_meta.db_returning_fields,
                     )
                 )
             else:
@@ -1663,7 +1717,7 @@ class QuerySet(Generic[T]):
                 )
         return inserted_rows
 
-    def _chain(self) ->
+    def _chain(self) -> Self:
        """
         Return a copy of the current QuerySet that's ready for another
         operation.
@@ -1674,12 +1728,12 @@ class QuerySet(Generic[T]):
         obj._sticky_filter = False
         return obj
 
-    def _clone(self) ->
+    def _clone(self) -> Self:
         """
         Return a copy of the current QuerySet. A lightweight alternative
         to deepcopy().
         """
-        c = self.__class__(
+        c = self.__class__.from_model(
             model=self.model,
             query=self.sql_query.chain(),
         )
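All of the chaining methods above now return `Self` instead of a concrete `QuerySet[T]`, which is what lets a subclass keep its own type through a chain. A small type-checking sketch, reusing the hypothetical `TaskQuerySet` from earlier (the `reveal_type` comment describes expected checker output, not runtime behavior):

```python
qs = Task.query.active().filter(is_active=True).order_by("id")

# Because filter()/order_by()/exclude() are annotated "-> Self", a type
# checker infers TaskQuerySet here, so custom methods stay available mid-chain:
qs = qs.active()
# reveal_type(qs)  # expected: TaskQuerySet, not QuerySet[Task]
```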
@@ -1817,7 +1871,7 @@ class RawQuerySet:
         converter = db_connection.introspection.identifier_converter
         model_init_fields = [
             f
-            for f in self.model.
+            for f in self.model._model_meta.fields
             if converter(f.column) in self.columns
         ]
         annotation_fields = [
@@ -1906,7 +1960,7 @@ class RawQuerySet:
         """A dict mapping column names to model field names."""
         converter = db_connection.introspection.identifier_converter
         model_fields = {}
-        for field in self.model.
+        for field in self.model._model_meta.fields:
             name, column = field.get_attname_column()
             model_fields[converter(column)] = field
         return model_fields
@@ -2268,7 +2322,7 @@ def prefetch_one_level(
     # of prefetch_related), so what applies to first object applies to all.
     model = instances[0].__class__
     try:
-        model.
+        model._model_meta.get_field(to_attr)
     except FieldDoesNotExist:
         pass
     else:
@@ -2308,7 +2362,7 @@ def prefetch_one_level(
         # We need a QuerySet instance to cache the prefetched values
         if isinstance(queryset, QuerySet):
            # It's already a QuerySet, create a new instance
-            qs = queryset.__class__(
+            qs = queryset.__class__.from_model(queryset.model)
         else:
             # It's a related manager, get its QuerySet
             # The manager's query property returns a properly filtered QuerySet
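The prefetch hunks only change how the cached QuerySet is built internally (`from_model()` instead of calling the class). From the caller's side the API is unchanged; a hedged sketch, where `Project`, its reverse `tasks` relation, and the `queryset` keyword on `Prefetch` are assumptions for illustration:

```python
# String lookups and Prefetch objects both route through prefetch_one_level().
projects = Project.query.prefetch_related("tasks")

# A Prefetch with a custom queryset would cache TaskQuerySet results per project.
projects = Project.query.prefetch_related(
    Prefetch("tasks", queryset=Task.query.active())
)
```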
plain/models/query_utils.py
CHANGED
@@ -22,16 +22,19 @@ from plain.utils import tree
 
 if TYPE_CHECKING:
     from plain.models.backends.base.base import BaseDatabaseWrapper
+    from plain.models.base import Model
+    from plain.models.fields import Field
+    from plain.models.meta import Meta
     from plain.models.sql.compiler import SQLCompiler
 
 logger = logging.getLogger("plain.models")
 
 # PathInfo is used when converting lookups (fk__somecol). The contents
-# describe the relation in Model terms (
-# sides of the relation. The join_field is the field backing the relation.
+# describe the relation in Model terms (Meta and Fields for both
+# sides of the relation). The join_field is the field backing the relation.
 PathInfo = namedtuple(
     "PathInfo",
-    "
+    "from_meta to_meta target_fields join_field m2m direct filtered_relation",
 )
 
 
@@ -360,7 +363,7 @@ def select_related_descend(
         and field not in select_mask
     ):
         raise FieldError(
-            f"Field {field.model.
+            f"Field {field.model.model_options.object_name}.{field.name} cannot be both "
             "deferred and traversed using select_related at the same time."
         )
     return True
@@ -381,16 +384,18 @@ def refs_expression(
     return None, ()
 
 
-def check_rel_lookup_compatibility(
+def check_rel_lookup_compatibility(
+    model: type[Model], target_meta: Meta, field: Field
+) -> bool:
     """
-    Check that
+    Check that model is compatible with target_meta. Compatibility
     is OK if:
-    1) model and
-    2) model is parent of
+    1) model and meta.model match (where proxy inheritance is removed)
+    2) model is parent of meta's model or the other way around
     """
 
-    def check(
-        return model ==
+    def check(meta: Meta) -> bool:
+        return model == meta.model
 
     # If the field is a primary key, then doing a query against the field's
     # model is ok, too. Consider the case:
@@ -398,11 +403,11 @@ check_rel_lookup_compatibility(model: type, target_opts: Any, field: Any) ->
     # place = OneToOneField(Place, primary_key=True):
     # Restaurant.query.filter(id__in=Restaurant.query.all()).
     # If we didn't have the primary key check, then id__in (== place__in) would
-    # give Place's
+    # give Place's meta as the target meta, but Restaurant isn't compatible
     # with that. This logic applies only to primary keys, as when doing __in=qs,
     # we are going to turn this into __in=qs.values('id') later on.
-    return check(
-    getattr(field, "primary_key", False) and check(field.model.
+    return check(target_meta) or (
+        getattr(field, "primary_key", False) and check(field.model._model_meta)
     )
 
 
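The renamed `PathInfo` fields spell out that both ends of a relation are now described by `Meta` objects. A hedged sketch of the tuple's shape (the placeholder values and comments are illustrative; real entries carry actual `Meta` and `Field` instances built by related fields):

```python
from collections import namedtuple

# Same field list as the definition in query_utils.py (0.51.0).
PathInfo = namedtuple(
    "PathInfo",
    "from_meta to_meta target_fields join_field m2m direct filtered_relation",
)

path = PathInfo(
    from_meta=None,          # Meta of the model the lookup starts from
    to_meta=None,            # Meta of the model being joined to
    target_fields=(),        # fields targeted on the "to" side of the join
    join_field=None,         # the Field backing the relation
    m2m=False,               # True when the hop crosses a many-to-many
    direct=True,             # forward traversal (False for reverse relations)
    filtered_relation=None,  # optional extra filtering applied to the join
)
```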
plain/models/registry.py
CHANGED
@@ -107,7 +107,7 @@ class ModelsRegistry:
         # Since this method is called when models are imported, it cannot
         # perform imports because of the risk of import loops. It mustn't
         # call get_package_config().
-        model_name = model.
+        model_name = model.model_options.model_name
         app_models = self.all_models[package_label]
         if model_name in app_models:
             if (
@@ -156,7 +156,7 @@ class ModelsRegistry:
         # This particularly prevents that an empty value is cached while cloning.
         for package_models in self.all_models.values():
             for model in package_models.values():
-                model.
+                model._model_meta._expire_cache()
 
     def lazy_model_operation(
         self, function: Callable[..., None], *model_keys: tuple[str, str]
@@ -205,7 +205,7 @@ class ModelsRegistry:
         Take a newly-prepared model and pass it to each function waiting for
         it. This is called at the very end of Models.register_model().
         """
-        key = model.
+        key = model.model_options.package_label, model.model_options.model_name
         for function in self._pending_operations.pop(key, []):
             function(model)
 
@@ -215,7 +215,8 @@ models_registry = ModelsRegistry()
 
 # Decorator to register a model (using the internal registry for the correct state).
 def register_model(model_class: M) -> M:
-    model_class.
-    model_class.
+    model_class._model_meta.models_registry.register_model(
+        model_class.model_options.package_label,
+        model_class,  # type: ignore[arg-type]
     )
     return model_class
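Taken together, `register_model` now pulls both the registry and the package label from attributes on the class itself. A hedged usage sketch (the `from plain import models` import path and the `CharField` field are assumptions for illustration):

```python
from plain import models  # import path assumed for illustration


@models.register_model
class Article(models.Model):
    title = models.CharField(max_length=200)


# The decorator expands to roughly:
#   Article._model_meta.models_registry.register_model(
#       Article.model_options.package_label, Article
#   )
# after which the model shows up in models_registry.get_models().
```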
|