plain.models 0.37.0__py3-none-any.whl → 0.39.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plain/models/CHANGELOG.md +29 -0
- plain/models/README.md +3 -0
- plain/models/__init__.py +2 -2
- plain/models/backends/base/creation.py +1 -1
- plain/models/backends/base/operations.py +1 -3
- plain/models/backends/base/schema.py +4 -8
- plain/models/backends/mysql/base.py +1 -3
- plain/models/backends/mysql/introspection.py +2 -6
- plain/models/backends/mysql/operations.py +2 -4
- plain/models/backends/postgresql/base.py +2 -6
- plain/models/backends/postgresql/introspection.py +2 -6
- plain/models/backends/postgresql/operations.py +1 -3
- plain/models/backends/postgresql/schema.py +2 -10
- plain/models/backends/sqlite3/base.py +2 -6
- plain/models/backends/sqlite3/introspection.py +2 -8
- plain/models/base.py +46 -74
- plain/models/constraints.py +3 -3
- plain/models/deletion.py +9 -9
- plain/models/fields/__init__.py +30 -104
- plain/models/fields/related.py +90 -343
- plain/models/fields/related_descriptors.py +14 -14
- plain/models/fields/related_lookups.py +2 -2
- plain/models/fields/reverse_related.py +6 -14
- plain/models/forms.py +14 -76
- plain/models/lookups.py +2 -2
- plain/models/migrations/autodetector.py +2 -25
- plain/models/migrations/operations/fields.py +0 -6
- plain/models/migrations/state.py +2 -26
- plain/models/migrations/utils.py +4 -14
- plain/models/options.py +4 -12
- plain/models/query.py +56 -54
- plain/models/query_utils.py +3 -5
- plain/models/sql/compiler.py +16 -18
- plain/models/sql/query.py +12 -11
- plain/models/sql/subqueries.py +10 -10
- {plain_models-0.37.0.dist-info → plain_models-0.39.0.dist-info}/METADATA +4 -1
- {plain_models-0.37.0.dist-info → plain_models-0.39.0.dist-info}/RECORD +40 -40
- {plain_models-0.37.0.dist-info → plain_models-0.39.0.dist-info}/WHEEL +0 -0
- {plain_models-0.37.0.dist-info → plain_models-0.39.0.dist-info}/entry_points.txt +0 -0
- {plain_models-0.37.0.dist-info → plain_models-0.39.0.dist-info}/licenses/LICENSE +0 -0
plain/models/query.py
CHANGED
@@ -24,10 +24,10 @@ from plain.models.db import (
 )
 from plain.models.expressions import Case, F, Value, When
 from plain.models.fields import (
-    AutoField,
     DateField,
     DateTimeField,
     Field,
+    PrimaryKeyField,
 )
 from plain.models.functions import Cast, Trunc
 from plain.models.query_utils import FilteredRelation, Q
@@ -82,14 +82,7 @@ class ModelIterable(BaseIterable):
             (
                 field,
                 related_objs,
-                operator.attrgetter(
-                    *[
-                        field.attname
-                        if from_field == "self"
-                        else queryset.model._meta.get_field(from_field).attname
-                        for from_field in field.from_fields
-                    ]
-                ),
+                operator.attrgetter(field.attname),
             )
             for field, related_objs in queryset._known_related_objects.items()
         ]
@@ -135,7 +128,7 @@ class RawModelIterable(BaseIterable):
             annotation_fields,
         ) = self.queryset.resolve_model_init_order()
         model_cls = self.queryset.model
-        if model_cls._meta.pk.attname not in model_init_names:
+        if "id" not in model_init_names:
             raise exceptions.FieldDoesNotExist(
                 "Raw query must include the primary key"
             )
@@ -271,7 +264,7 @@ class QuerySet:
         self._for_write = False
         self._prefetch_related_lookups = ()
         self._prefetch_done = False
-        self._known_related_objects = {}  # {rel_field: {pk: rel_obj}}
+        self._known_related_objects = {}  # {rel_field: {id: rel_obj}}
         self._iterable_class = ModelIterable
         self._fields = None
         self._defer_next_filter = False
@@ -432,12 +425,12 @@ class QuerySet:
         query = (
             self
             if self.query.can_filter()
-            else self.model._base_manager.filter(pk__in=self.values("pk"))
+            else self.model._base_manager.filter(id__in=self.values("id"))
         )
         combined = query._chain()
         combined._merge_known_related_objects(other)
         if not other.query.can_filter():
-            other = other.model._base_manager.filter(pk__in=other.values("pk"))
+            other = other.model._base_manager.filter(id__in=other.values("id"))
         combined.query.combine(other.query, sql.OR)
         return combined

@@ -451,12 +444,12 @@ class QuerySet:
         query = (
             self
             if self.query.can_filter()
-            else self.model._base_manager.filter(pk__in=self.values("pk"))
+            else self.model._base_manager.filter(id__in=self.values("id"))
         )
         combined = query._chain()
         combined._merge_known_related_objects(other)
         if not other.query.can_filter():
-            other = other.model._base_manager.filter(pk__in=other.values("pk"))
+            other = other.model._base_manager.filter(id__in=other.values("id"))
         combined.query.combine(other.query, sql.XOR)
         return combined

@@ -570,6 +563,16 @@ class QuerySet:
             )
         )

+    def get_or_none(self, *args, **kwargs):
+        """
+        Perform the query and return a single object matching the given
+        keyword arguments, or None if no object is found.
+        """
+        try:
+            return self.get(*args, **kwargs)
+        except self.model.DoesNotExist:
+            return None
+
     def create(self, **kwargs):
         """
         Create a new object with the given kwargs, saving it to the database
@@ -581,10 +584,11 @@ class QuerySet:
         return obj

     def _prepare_for_bulk_create(self, objs):
+        id_field = self.model._meta.get_field("id")
         for obj in objs:
-            if obj.pk is None:
-                # Populate new PK values.
-                obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
+            if obj.id is None:
+                # Populate new primary key values.
+                obj.id = id_field.get_id_value_on_save(obj)
             obj._prepare_related_fields_for_save(operation_name="bulk_create")

     def _check_bulk_create_options(
@@ -663,11 +667,7 @@ class QuerySet:
             return objs
         opts = self.model._meta
         if unique_fields:
-            # Primary key is allowed in unique_fields.
-            unique_fields = [
-                self.model._meta.get_field(opts.pk.name if name == "pk" else name)
-                for name in unique_fields
-            ]
+            unique_fields = [self.model._meta.get_field(name) for name in unique_fields]
         if update_fields:
             update_fields = [self.model._meta.get_field(name) for name in update_fields]
         on_conflict = self._check_bulk_create_options(
@@ -680,26 +680,27 @@ class QuerySet:
         objs = list(objs)
         self._prepare_for_bulk_create(objs)
         with transaction.atomic(savepoint=False):
-            objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
-            if objs_with_pk:
+            objs_with_id, objs_without_id = partition(lambda o: o.id is None, objs)
+            if objs_with_id:
                 returned_columns = self._batched_insert(
-                    objs_with_pk,
+                    objs_with_id,
                     fields,
                     batch_size,
                     on_conflict=on_conflict,
                     update_fields=update_fields,
                     unique_fields=unique_fields,
                 )
-                for obj_with_pk, results in zip(objs_with_pk, returned_columns):
+                id_field = opts.get_field("id")
+                for obj_with_id, results in zip(objs_with_id, returned_columns):
                     for result, field in zip(results, opts.db_returning_fields):
-                        if field != opts.pk:
-                            setattr(obj_with_pk, field.attname, result)
-                for obj_with_pk in objs_with_pk:
-                    obj_with_pk._state.adding = False
-            if objs_without_pk:
-                fields = [f for f in fields if not isinstance(f, AutoField)]
+                        if field != id_field:
+                            setattr(obj_with_id, field.attname, result)
+                for obj_with_id in objs_with_id:
+                    obj_with_id._state.adding = False
+            if objs_without_id:
+                fields = [f for f in fields if not isinstance(f, PrimaryKeyField)]
                 returned_columns = self._batched_insert(
-                    objs_without_pk,
+                    objs_without_id,
                     fields,
                     batch_size,
                     on_conflict=on_conflict,
@@ -710,11 +711,11 @@ class QuerySet:
                 db_connection.features.can_return_rows_from_bulk_insert
                 and on_conflict is None
             ):
-                assert len(returned_columns) == len(objs_without_pk)
-                for obj_without_pk, results in zip(objs_without_pk, returned_columns):
+                assert len(returned_columns) == len(objs_without_id)
+                for obj_without_id, results in zip(objs_without_id, returned_columns):
                     for result, field in zip(results, opts.db_returning_fields):
-                        setattr(obj_without_pk, field.attname, result)
-                    obj_without_pk._state.adding = False
+                        setattr(obj_without_id, field.attname, result)
+                    obj_without_id._state.adding = False

         return objs

@@ -727,7 +728,7 @@ class QuerySet:
         if not fields:
             raise ValueError("Field names must be given to bulk_update().")
         objs = tuple(objs)
-        if any(obj.pk is None for obj in objs):
+        if any(obj.id is None for obj in objs):
             raise ValueError("All bulk_update() objects must have a primary key set.")
         fields = [self.model._meta.get_field(name) for name in fields]
         if any(not f.concrete or f.many_to_many for f in fields):
@@ -743,7 +744,7 @@ class QuerySet:
         # PK is used twice in the resulting update query, once in the filter
        # and once in the WHEN. Each field will also have one CAST.
         self._for_write = True
-        max_batch_size = db_connection.ops.bulk_batch_size(["pk", "pk"] + fields, objs)
+        max_batch_size = db_connection.ops.bulk_batch_size(["id", "id"] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         requires_casting = db_connection.features.requires_casted_case_in_updates
         batches = (objs[i : i + batch_size] for i in range(0, len(objs), batch_size))
@@ -756,17 +757,17 @@ class QuerySet:
                     attr = getattr(obj, field.attname)
                     if not hasattr(attr, "resolve_expression"):
                         attr = Value(attr, output_field=field)
-                    when_statements.append(When(pk=obj.pk, then=attr))
+                    when_statements.append(When(id=obj.id, then=attr))
                 case_statement = Case(*when_statements, output_field=field)
                 if requires_casting:
                     case_statement = Cast(case_statement, output_field=field)
                 update_kwargs[field.attname] = case_statement
-            updates.append(([obj.pk for obj in batch_objs], update_kwargs))
+            updates.append(([obj.id for obj in batch_objs], update_kwargs))
         rows_updated = 0
         queryset = self._chain()
         with transaction.atomic(savepoint=False):
-            for pks, update_kwargs in updates:
-                rows_updated += queryset.filter(pk__in=pks).update(**update_kwargs)
+            for ids, update_kwargs in updates:
+                rows_updated += queryset.filter(id__in=ids).update(**update_kwargs)
         return rows_updated

     def get_or_create(self, defaults=None, **kwargs):
@@ -914,7 +915,7 @@ class QuerySet:
             queryset = self
         else:
             self._check_ordering_first_last_queryset_aggregation(method="first")
-            queryset = self.order_by("pk")
+            queryset = self.order_by("id")
         for obj in queryset[:1]:
             return obj

@@ -924,11 +925,11 @@ class QuerySet:
             queryset = self.reverse()
         else:
             self._check_ordering_first_last_queryset_aggregation(method="last")
-            queryset = self.order_by("-pk")
+            queryset = self.order_by("-id")
         for obj in queryset[:1]:
             return obj

-    def in_bulk(self, id_list=None, *, field_name="pk"):
+    def in_bulk(self, id_list=None, *, field_name="id"):
         """
         Return a dictionary mapping each of the given IDs to the object with
         that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
@@ -942,7 +943,7 @@ class QuerySet:
             if len(constraint.fields) == 1
         ]
         if (
-            field_name != "pk"
+            field_name != "id"
             and not opts.get_field(field_name).primary_key
             and field_name not in unique_fields
             and self.query.distinct_fields != (field_name,)
@@ -1096,11 +1097,11 @@ class QuerySet:
                 return False
         except AttributeError:
             raise TypeError("'obj' must be a model instance.")
-        if obj.pk is None:
+        if obj.id is None:
             raise ValueError("QuerySet.contains() cannot be used on unsaved objects.")
         if self._result_cache is not None:
             return obj in self._result_cache
-        return self.filter(pk=obj.pk).exists()
+        return self.filter(id=obj.id).exists()

     def _prefetch_related_objects(self):
         # This method can only be called once the result cache has been filled.
@@ -1764,7 +1765,8 @@ class QuerySet:

     def _check_ordering_first_last_queryset_aggregation(self, method):
         if isinstance(self.query.group_by, tuple) and not any(
-            col.output_field is self.model._meta.pk for col in self.query.group_by
+            col.output_field is self.model._meta.get_field("id")
+            for col in self.query.group_by
         ):
             raise TypeError(
                 f"Cannot use QuerySet.{method}() on an unordered queryset performing "
@@ -2327,7 +2329,7 @@ class RelatedPopulator:
         # we have to reorder the parent data. The reorder_for_init
         # attribute contains a function used to reorder the field data
         # in the order __init__ expects it.
-        # - pk_idx: the index of the primary key field in the reordered
+        # - id_idx: the index of the primary key field in the reordered
         #   model data. Used to check if a related object exists at all.
         # - init_list: the field attnames fetched from the database. For
         #   deferred models this isn't the same as all attnames of the
@@ -2347,7 +2349,7 @@ class RelatedPopulator:
             self.reorder_for_init = None

         self.model_cls = klass_info["model"]
-        self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
+        self.id_idx = self.init_list.index("id")
         self.related_populators = get_related_populators(klass_info, select)
         self.local_setter = klass_info["local_setter"]
         self.remote_setter = klass_info["remote_setter"]
@@ -2357,7 +2359,7 @@ class RelatedPopulator:
             obj_data = self.reorder_for_init(row)
         else:
             obj_data = row[self.cols_start : self.cols_end]
-        if obj_data[self.pk_idx] is None:
+        if obj_data[self.id_idx] is None:
             obj = None
         else:
             obj = self.model_cls.from_db(self.init_list, obj_data)
plain/models/query_utils.py
CHANGED
@@ -313,8 +313,6 @@ def select_related_descend(field, restricted, requested, select_mask, reverse=False):
     """
     if not field.remote_field:
         return False
-    if field.remote_field.parent_link and not reverse:
-        return False
     if restricted:
         if reverse and field.related_query_name() not in requested:
             return False
@@ -363,11 +361,11 @@ def check_rel_lookup_compatibility(model, target_opts, field):
     # model is ok, too. Consider the case:
     # class Restaurant(models.Model):
     #     place = OneToOneField(Place, primary_key=True):
-    # Restaurant.objects.filter(pk__in=Restaurant.objects.all()).
-    # If we didn't have the primary key check, then pk__in (== place__in) would
+    # Restaurant.objects.filter(id__in=Restaurant.objects.all()).
+    # If we didn't have the primary key check, then id__in (== place__in) would
     # give Place's opts as the target opts, but Restaurant isn't compatible
     # with that. This logic applies only to primary keys, as when doing __in=qs,
-    # we are going to turn this into __in=qs.values('pk') later on.
+    # we are going to turn this into __in=qs.values('id') later on.
     return check(target_opts) or (
         getattr(field, "primary_key", False) and check(field.model._meta)
     )
plain/models/sql/compiler.py
CHANGED
@@ -107,12 +107,12 @@ class SQLCompiler:
         #   SomeModel.objects.annotate(Count('somecol')).values('name')
         #   GROUP BY: all cols of the model
         #
-        #   SomeModel.objects.values('name', 'pk')
-        #   .annotate(Count('somecol')).values('pk')
-        #   GROUP BY: name, pk
+        #   SomeModel.objects.values('name', 'id')
+        #   .annotate(Count('somecol')).values('id')
+        #   GROUP BY: name, id
         #
-        #   SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
-        #   GROUP BY: name, pk
+        #   SomeModel.objects.values('name').annotate(Count('somecol')).values('id')
+        #   GROUP BY: name, id
         #
         # In fact, the self.query.group_by is the minimal set to GROUP BY. It
         # can't be ever restricted to a smaller set, but additional columns in
@@ -1000,14 +1000,13 @@ class SQLCompiler:
         ) = self._setup_joins(pieces, opts, alias)

         # If we get to this point and the field is a relation to another model,
-        # append the default ordering for that model unless it is the pk
-        # shortcut or the attribute name of the field that is specified or
+        # append the default ordering for that model unless it is the
+        # attribute name of the field that is specified or
         # there are transforms to process.
         if (
             field.is_relation
             and opts.ordering
             and getattr(field, "attname", None) != pieces[-1]
-            and name != "pk"
             and not getattr(transform_function, "has_transforms", False)
         ):
             # Firstly, avoid infinite loops.
@@ -1654,7 +1653,7 @@ class SQLInsertCompiler(SQLCompiler):
             on_conflict=self.query.on_conflict,
         )
         result = [f"{insert_statement} {qn(opts.db_table)}"]
-        fields = self.query.fields or [opts.pk]
+        fields = self.query.fields or [opts.get_field("id")]
         result.append("({})".format(", ".join(qn(f.column) for f in fields)))

         if self.query.fields:
@@ -1757,7 +1756,7 @@ class SQLInsertCompiler(SQLCompiler):
                     self.connection.ops.last_insert_id(
                         cursor,
                         opts.db_table,
-                        opts.pk.column,
+                        opts.get_field("id").column,
                     ),
                 )
             ]
@@ -1813,15 +1812,15 @@ class SQLDeleteCompiler(SQLCompiler):
         innerq = self.query.clone()
         innerq.__class__ = Query
         innerq.clear_select_clause()
-        pk = self.query.model._meta.pk
-        innerq.select = [pk.get_col(self.query.get_initial_alias())]
+        id_field = self.query.model._meta.get_field("id")
+        innerq.select = [id_field.get_col(self.query.get_initial_alias())]
         outerq = Query(self.query.model)
         if not self.connection.features.update_can_self_select:
             # Force the materialization of the inner query to allow reference
             # to the target table on MySQL.
             sql, params = innerq.get_compiler().as_sql()
             innerq = RawSQL(f"SELECT * FROM ({sql}) subquery", params)
-        outerq.add_filter("pk__in", innerq)
+        outerq.add_filter("id__in", innerq)
         return self._as_sql(outerq)

@@ -1930,12 +1929,11 @@ class SQLUpdateCompiler(SQLCompiler):
         query.clear_ordering(force=True)
         query.extra = {}
         query.select = []
-        meta = query.get_meta()
-        fields = [meta.pk.name]
+        fields = ["id"]
         related_ids_index = []
         for related in self.query.related_updates:
             # If a primary key chain exists to the targeted related update,
-            # then the meta.pk value can be used for it.
+            # then the primary key value can be used for it.
             related_ids_index.append((related, 0))

         query.add_fields(fields)
@@ -1958,11 +1956,11 @@ class SQLUpdateCompiler(SQLCompiler):
                     idents.extend(r[0] for r in rows)
                     for parent, index in related_ids_index:
                         related_ids[parent].extend(r[index] for r in rows)
-                self.query.add_filter("pk__in", idents)
+                self.query.add_filter("id__in", idents)
                 self.query.related_ids = related_ids
         else:
             # The fast path. Filters and updates in one query.
-            self.query.add_filter("pk__in", query)
+            self.query.add_filter("id__in", query)
         self.query.reset_refcounts(refcounts_before)
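
The rewritten comment at the top of this file spells out the GROUP BY rule: the columns named in `values()` calls around `annotate()` form the grouping set, with `id` now standing in for the removed `pk` alias. Restated as a sketch using the comment's own placeholder names (`SomeModel`, `somecol`, and the `Count` import location are assumptions):

```python
from plain.models.aggregates import Count  # assumed import location

# GROUP BY: all columns of the model
SomeModel.objects.annotate(Count("somecol")).values("name")

# GROUP BY: name, id
SomeModel.objects.values("name", "id").annotate(Count("somecol")).values("id")

# GROUP BY: name, id
SomeModel.objects.values("name").annotate(Count("somecol")).values("id")
```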
plain/models/sql/query.py
CHANGED
@@ -438,7 +438,9 @@ class Query(BaseExpression):
             # used.
             if inner_query.default_cols and has_existing_aggregation:
                 inner_query.group_by = (
-                    self.model._meta.pk.get_col(inner_query.get_initial_alias()),
+                    self.model._meta.get_field("id").get_col(
+                        inner_query.get_initial_alias()
+                    ),
                 )
             inner_query.default_cols = False
             if not qualify:
@@ -480,7 +482,9 @@ class Query(BaseExpression):
                 # field selected in the inner query, yet we must use a subquery.
                 # So, make sure at least one field is selected.
                 inner_query.select = (
-                    self.model._meta.pk.get_col(inner_query.get_initial_alias()),
+                    self.model._meta.get_field("id").get_col(
+                        inner_query.get_initial_alias()
+                    ),
                 )
         else:
             outer_query = self
@@ -689,7 +693,7 @@ class Query(BaseExpression):
     def _get_defer_select_mask(self, opts, mask, select_mask=None):
         if select_mask is None:
             select_mask = {}
-        select_mask[opts.pk] = {}
+        select_mask[opts.get_field("id")] = {}
         # All concrete fields that are not part of the defer mask must be
         # loaded. If a relational field is encountered it gets added to the
         # mask for it be considered if `select_related` and the cycle continues
@@ -726,7 +730,7 @@ class Query(BaseExpression):
     def _get_only_select_mask(self, opts, mask, select_mask=None):
         if select_mask is None:
             select_mask = {}
-        select_mask[opts.pk] = {}
+        select_mask[opts.get_field("id")] = {}
         # Only include fields mentioned in the mask.
         for field_name, field_mask in mask.items():
             field = opts.get_field(field_name)
@@ -1567,8 +1571,6 @@ class Query(BaseExpression):
         path, names_with_path = [], []
         for pos, name in enumerate(names):
             cur_names_with_path = (name, [])
-            if name == "pk":
-                name = opts.pk.name

             field = None
             filtered_relation = None
@@ -1917,14 +1919,16 @@ class Query(BaseExpression):
         select_field = col.target
         alias = col.alias
         if alias in can_reuse:
-            pk = select_field.model._meta.pk
+            id_field = select_field.model._meta.get_field("id")
             # Need to add a restriction so that outer query's filters are in effect for
             # the subquery, too.
             query.bump_prefix(self)
             lookup_class = select_field.get_lookup("exact")
             # Note that the query.select[0].alias is different from alias
             # due to bump_prefix above.
-            lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias))
+            lookup = lookup_class(
+                id_field.get_col(query.select[0].alias), id_field.get_col(alias)
+            )
             query.where.add(lookup, AND)
             query.external_aliases[alias] = True

@@ -2257,9 +2261,6 @@ class Query(BaseExpression):
         """
         existing, defer = self.deferred_loading
         field_names = set(field_names)
-        if "pk" in field_names:
-            field_names.remove("pk")
-            field_names.add(self.get_meta().pk.name)

         if defer:
             # Remove any existing deferred names from the current set before
plain/models/sql/subqueries.py
CHANGED
@@ -23,21 +23,21 @@ class DeleteQuery(Query):
             return cursor.rowcount
         return 0

-    def delete_batch(self, pk_list):
+    def delete_batch(self, id_list):
         """
-        Set up and execute delete queries for all the objects in pk_list.
+        Set up and execute delete queries for all the objects in id_list.

         More than one physical query may be executed if there are a
-        lot of values in pk_list.
+        lot of values in id_list.
         """
         # number of objects deleted
         num_deleted = 0
-        field = self.get_meta().pk
-        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
+        field = self.get_meta().get_field("id")
+        for offset in range(0, len(id_list), GET_ITERATOR_CHUNK_SIZE):
             self.clear_where()
             self.add_filter(
                 f"{field.attname}__in",
-                pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE],
+                id_list[offset : offset + GET_ITERATOR_CHUNK_SIZE],
             )
             num_deleted += self.do_query(self.get_meta().db_table, self.where)
         return num_deleted
@@ -66,12 +66,12 @@ class UpdateQuery(Query):
         obj.related_updates = self.related_updates.copy()
         return obj

-    def update_batch(self, pk_list, values):
+    def update_batch(self, id_list, values):
         self.add_update_values(values)
-        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
+        for offset in range(0, len(id_list), GET_ITERATOR_CHUNK_SIZE):
             self.clear_where()
             self.add_filter(
-                "pk__in", pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]
+                "id__in", id_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]
             )
             self.get_compiler().execute_sql(NO_RESULTS)
@@ -132,7 +132,7 @@ class UpdateQuery(Query):
             query = UpdateQuery(model)
             query.values = values
             if self.related_ids is not None:
-                query.add_filter("pk__in", self.related_ids[model])
+                query.add_filter("id__in", self.related_ids[model])
             result.append(query)
         return result
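
`delete_batch()` and `update_batch()` keep their chunking strategy and only rename `pk_list`/`pk__in` to `id_list`/`id__in`: ids are processed in slices of `GET_ITERATOR_CHUNK_SIZE` so no single query gets an unbounded `IN (...)` list. A standalone sketch of that slicing pattern (the chunk size of 100 is a stand-in for illustration, not the library's actual constant):

```python
# Standalone illustration of the batching loop used by delete_batch()/update_batch():
# one "WHERE id IN (...)" query is issued per chunk of ids.
GET_ITERATOR_CHUNK_SIZE = 100  # stand-in value for illustration only


def delete_in_batches(ids: list[int]) -> int:
    deleted = 0
    for offset in range(0, len(ids), GET_ITERATOR_CHUNK_SIZE):
        chunk = ids[offset : offset + GET_ITERATOR_CHUNK_SIZE]
        # The real code adds a filter equivalent to add_filter("id__in", chunk)
        # and then executes one DELETE for this chunk.
        deleted += len(chunk)  # placeholder for the row count the DELETE reports
    return deleted


print(delete_in_batches(list(range(250))))  # processed as chunks of 100, 100, and 50
```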
{plain_models-0.37.0.dist-info → plain_models-0.39.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: plain.models
-Version: 0.37.0
+Version: 0.39.0
 Summary: Database models for Plain.
 Author-email: Dave Gaeddert <dave.gaeddert@dropseed.dev>
 License-File: LICENSE
@@ -30,6 +30,9 @@ class User(models.Model):
         return self.email
 ```

+Every model automatically includes an `id` field which serves as the primary
+key. The name `id` is reserved and can't be used for other fields.
+
 Create, update, and delete instances of your models:

 ```python
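
The three lines added to the packaged README document the implicit primary key. A minimal sketch of what that looks like in practice (the `from plain import models` import and the field type are assumed to mirror the README's own example and should be treated as illustrative):

```python
from plain import models  # import path assumed to match the README example


class Note(models.Model):
    # No primary key is declared: every model automatically gets an `id` field
    # (apparently a PrimaryKeyField in this release, per the diff above), and
    # the name `id` is reserved, so it can't be redefined as another field.
    created_at = models.DateTimeField()


# After an instance is saved, the database-assigned key is available as `.id`,
# e.g. note = Note.objects.create(...); note.id
```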