plain.models 0.33.1__py3-none-any.whl → 0.34.1__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (56)
  1. plain/models/CHANGELOG.md +27 -0
  2. plain/models/README.md +8 -10
  3. plain/models/__init__.py +2 -6
  4. plain/models/backends/base/base.py +10 -18
  5. plain/models/backends/base/creation.py +3 -4
  6. plain/models/backends/base/introspection.py +2 -3
  7. plain/models/backends/base/schema.py +3 -9
  8. plain/models/backends/mysql/validation.py +1 -1
  9. plain/models/backends/postgresql/base.py +15 -23
  10. plain/models/backends/postgresql/schema.py +0 -2
  11. plain/models/backends/sqlite3/base.py +1 -1
  12. plain/models/backends/sqlite3/creation.py +2 -2
  13. plain/models/backends/sqlite3/features.py +1 -1
  14. plain/models/backends/sqlite3/schema.py +1 -1
  15. plain/models/backends/utils.py +2 -6
  16. plain/models/backups/core.py +15 -22
  17. plain/models/base.py +179 -225
  18. plain/models/cli.py +25 -62
  19. plain/models/connections.py +48 -165
  20. plain/models/constraints.py +10 -10
  21. plain/models/db.py +7 -15
  22. plain/models/default_settings.py +13 -20
  23. plain/models/deletion.py +14 -16
  24. plain/models/expressions.py +7 -10
  25. plain/models/fields/__init__.py +56 -76
  26. plain/models/fields/json.py +9 -12
  27. plain/models/fields/related.py +5 -17
  28. plain/models/fields/related_descriptors.py +43 -95
  29. plain/models/forms.py +2 -4
  30. plain/models/indexes.py +2 -3
  31. plain/models/lookups.py +0 -7
  32. plain/models/manager.py +1 -14
  33. plain/models/migrations/executor.py +0 -16
  34. plain/models/migrations/loader.py +1 -1
  35. plain/models/migrations/migration.py +1 -1
  36. plain/models/migrations/operations/base.py +4 -11
  37. plain/models/migrations/operations/fields.py +4 -4
  38. plain/models/migrations/operations/models.py +10 -10
  39. plain/models/migrations/operations/special.py +6 -14
  40. plain/models/migrations/recorder.py +1 -1
  41. plain/models/options.py +4 -7
  42. plain/models/preflight.py +25 -44
  43. plain/models/query.py +47 -102
  44. plain/models/query_utils.py +4 -4
  45. plain/models/sql/compiler.py +7 -11
  46. plain/models/sql/query.py +32 -42
  47. plain/models/sql/subqueries.py +6 -8
  48. plain/models/sql/where.py +1 -1
  49. plain/models/test/pytest.py +21 -32
  50. plain/models/test/utils.py +7 -143
  51. plain/models/transaction.py +66 -164
  52. {plain_models-0.33.1.dist-info → plain_models-0.34.1.dist-info}/METADATA +9 -11
  53. {plain_models-0.33.1.dist-info → plain_models-0.34.1.dist-info}/RECORD +56 -55
  54. {plain_models-0.33.1.dist-info → plain_models-0.34.1.dist-info}/WHEEL +0 -0
  55. {plain_models-0.33.1.dist-info → plain_models-0.34.1.dist-info}/entry_points.txt +0 -0
  56. {plain_models-0.33.1.dist-info → plain_models-0.34.1.dist-info}/licenses/LICENSE +0 -0
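The theme running through the hunks shown below is the removal of the multi-database plumbing: `connections[alias]`, the `router`, and the `using=...`/`.using()` APIs give way to a single module-level `db_connection`, so querysets, compilers, and transactions no longer carry a database alias. As a rough sketch of the calling pattern after the change — the function, model, and field names here are placeholders, not part of this package; only the call shapes come from the diff:

    from plain.models import transaction
    from plain.models.query import QuerySet


    def rename_all(model_cls, old, new):
        # "model_cls" is any installed plain.models Model class with a "name"
        # field (an assumption for illustration). Note there is no using= on
        # QuerySet.__init__ and no using= on transaction.atomic() anymore.
        qs = QuerySet(model=model_cls).filter(name=old)
        with transaction.atomic():
            return qs.update(name=new)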
plain/models/query.py CHANGED
@@ -20,8 +20,7 @@ from plain.models.db import (
     PLAIN_VERSION_PICKLE_KEY,
     IntegrityError,
     NotSupportedError,
-    connections,
-    router,
+    db_connection,
 )
 from plain.models.expressions import Case, F, Value, When
 from plain.models.fields import (
@@ -61,8 +60,7 @@ class ModelIterable(BaseIterable):

     def __iter__(self):
         queryset = self.queryset
-        db = queryset.db
-        compiler = queryset.query.get_compiler(using=db)
+        compiler = queryset.query.get_compiler()
         # Execute the query. This will also fill compiler.select, klass_info,
         # and annotations.
         results = compiler.execute_sql(
@@ -79,7 +77,7 @@ class ModelIterable(BaseIterable):
         init_list = [
             f[0].target.attname for f in select[model_fields_start:model_fields_end]
         ]
-        related_populators = get_related_populators(klass_info, select, db)
+        related_populators = get_related_populators(klass_info, select)
         known_related_objects = [
             (
                 field,
@@ -96,9 +94,7 @@ class ModelIterable(BaseIterable):
             for field, related_objs in queryset._known_related_objects.items()
         ]
         for row in compiler.results_iter(results):
-            obj = model_cls.from_db(
-                db, init_list, row[model_fields_start:model_fields_end]
-            )
+            obj = model_cls.from_db(init_list, row[model_fields_start:model_fields_end])
             for rel_populator in related_populators:
                 rel_populator.populate(row, obj)
             if annotation_col_map:
@@ -128,10 +124,8 @@ class RawModelIterable(BaseIterable):

     def __iter__(self):
         # Cache some things for performance reasons outside the loop.
-        db = self.queryset.db
         query = self.queryset.query
-        connection = connections[db]
-        compiler = connection.ops.compiler("SQLCompiler")(query, connection, db)
+        compiler = db_connection.ops.compiler("SQLCompiler")(query, db_connection)
         query_iterator = iter(query)

         try:
@@ -154,7 +148,7 @@ class RawModelIterable(BaseIterable):
             for values in query_iterator:
                 # Associate fields to values
                 model_init_values = [values[pos] for pos in model_init_pos]
-                instance = model_cls.from_db(db, model_init_names, model_init_values)
+                instance = model_cls.from_db(model_init_names, model_init_values)
                 if annotation_fields:
                     for column, pos in annotation_fields:
                         setattr(instance, column, values[pos])
@@ -173,7 +167,7 @@ class ValuesIterable(BaseIterable):
     def __iter__(self):
         queryset = self.queryset
         query = queryset.query
-        compiler = query.get_compiler(queryset.db)
+        compiler = query.get_compiler()

         # extra(select=...) cols are always at the start of the row.
         names = [
@@ -197,7 +191,7 @@ class ValuesListIterable(BaseIterable):
     def __iter__(self):
         queryset = self.queryset
         query = queryset.query
-        compiler = query.get_compiler(queryset.db)
+        compiler = query.get_compiler()

         if queryset._fields:
             # extra(select=...) cols are always at the start of the row.
@@ -258,7 +252,7 @@ class FlatValuesListIterable(BaseIterable):

     def __iter__(self):
         queryset = self.queryset
-        compiler = queryset.query.get_compiler(queryset.db)
+        compiler = queryset.query.get_compiler()
         for row in compiler.results_iter(
             chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
         ):
@@ -268,9 +262,8 @@ class FlatValuesListIterable(BaseIterable):

 class QuerySet:
     """Represent a lazy database lookup for a set of objects."""
-    def __init__(self, model=None, query=None, using=None, hints=None):
+    def __init__(self, model=None, query=None, hints=None):
         self.model = model
-        self._db = using
         self._hints = hints or {}
         self._query = query or sql.Query(self.model)
         self._result_cache = None
@@ -500,7 +493,7 @@ class QuerySet:
             )
         elif chunk_size <= 0:
             raise ValueError("Chunk size must be strictly positive.")
-        use_chunked_fetch = not connections[self.db].settings_dict.get(
+        use_chunked_fetch = not db_connection.settings_dict.get(
             "DISABLE_SERVER_SIDE_CURSORS"
         )
         return self._iterator(use_chunked_fetch, chunk_size)
@@ -528,7 +521,7 @@ class QuerySet:
                 raise TypeError("Complex aggregates require an alias")
             kwargs[arg.default_alias] = arg

-        return self.query.chain().get_aggregation(self.db, kwargs)
+        return self.query.chain().get_aggregation(kwargs)

     def count(self):
         """
@@ -541,7 +534,7 @@ class QuerySet:
         if self._result_cache is not None:
             return len(self._result_cache)

-        return self.query.get_count(using=self.db)
+        return self.query.get_count()

     def get(self, *args, **kwargs):
         """
@@ -559,7 +552,7 @@ class QuerySet:
         limit = None
         if (
             not clone.query.select_for_update
-            or connections[clone.db].features.supports_select_for_update_with_limit
+            or db_connection.features.supports_select_for_update_with_limit
         ):
             limit = MAX_GET_RESULTS
         clone.query.set_limits(high=limit)
@@ -584,7 +577,7 @@ class QuerySet:
         """
         obj = self.model(**kwargs)
         self._for_write = True
-        obj.save(force_insert=True, using=self.db)
+        obj.save(force_insert=True)
         return obj

     def _prepare_for_bulk_create(self, objs):
@@ -597,7 +590,7 @@ class QuerySet:
     def _check_bulk_create_options(
         self, update_conflicts, update_fields, unique_fields
     ):
-        db_features = connections[self.db].features
+        db_features = db_connection.features
         if update_conflicts:
             if not db_features.supports_update_conflicts:
                 raise NotSupportedError(
@@ -686,7 +679,7 @@ class QuerySet:
         fields = opts.concrete_fields
         objs = list(objs)
         self._prepare_for_bulk_create(objs)
-        with transaction.atomic(using=self.db, savepoint=False):
+        with transaction.atomic(savepoint=False):
             objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
             if objs_with_pk:
                 returned_columns = self._batched_insert(
@@ -703,7 +696,6 @@ class QuerySet:
                        setattr(obj_with_pk, field.attname, result)
                 for obj_with_pk in objs_with_pk:
                     obj_with_pk._state.adding = False
-                    obj_with_pk._state.db = self.db
             if objs_without_pk:
                 fields = [f for f in fields if not isinstance(f, AutoField)]
                 returned_columns = self._batched_insert(
@@ -714,9 +706,8 @@ class QuerySet:
                     update_fields=update_fields,
                     unique_fields=unique_fields,
                 )
-                connection = connections[self.db]
                 if (
-                    connection.features.can_return_rows_from_bulk_insert
+                    db_connection.features.can_return_rows_from_bulk_insert
                     and on_conflict is None
                 ):
                     assert len(returned_columns) == len(objs_without_pk)
@@ -724,7 +715,6 @@ class QuerySet:
                    for result, field in zip(results, opts.db_returning_fields):
                        setattr(obj_without_pk, field.attname, result)
                    obj_without_pk._state.adding = False
-                   obj_without_pk._state.db = self.db

         return objs

@@ -753,10 +743,9 @@ class QuerySet:
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
         self._for_write = True
-        connection = connections[self.db]
-        max_batch_size = connection.ops.bulk_batch_size(["pk", "pk"] + fields, objs)
+        max_batch_size = db_connection.ops.bulk_batch_size(["pk", "pk"] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
-        requires_casting = connection.features.requires_casted_case_in_updates
+        requires_casting = db_connection.features.requires_casted_case_in_updates
         batches = (objs[i : i + batch_size] for i in range(0, len(objs), batch_size))
         updates = []
         for batch_objs in batches:
@@ -774,8 +763,8 @@ class QuerySet:
                 update_kwargs[field.attname] = case_statement
             updates.append(([obj.pk for obj in batch_objs], update_kwargs))
         rows_updated = 0
-        queryset = self.using(self.db)
-        with transaction.atomic(using=self.db, savepoint=False):
+        queryset = self._chain()
+        with transaction.atomic(savepoint=False):
             for pks, update_kwargs in updates:
                 rows_updated += queryset.filter(pk__in=pks).update(**update_kwargs)
         return rows_updated
@@ -795,7 +784,7 @@ class QuerySet:
         params = self._extract_model_params(defaults, **kwargs)
         # Try to create an object using passed params.
         try:
-            with transaction.atomic(using=self.db):
+            with transaction.atomic():
                 params = dict(resolve_callables(params))
                 return self.create(**params), True
         except (IntegrityError, ValidationError):
@@ -826,7 +815,7 @@ class QuerySet:
         else:
             update_defaults = defaults or {}
         self._for_write = True
-        with transaction.atomic(using=self.db):
+        with transaction.atomic():
             # Lock the row so that a concurrent update is blocked until
             # update_or_create() has performed its save.
             obj, created = self.select_for_update().get_or_create(
@@ -852,9 +841,9 @@ class QuerySet:
                        update_fields.add(field.name)
                        if field.name != field.attname:
                            update_fields.add(field.attname)
-                obj.save(using=self.db, update_fields=update_fields)
+                obj.save(update_fields=update_fields)
             else:
-                obj.save(using=self.db)
+                obj.save()
         return obj, False

     def _extract_model_params(self, defaults, **kwargs):
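On the calling side, the get_or_create/update_or_create path now locks and saves against the implicit connection: `transaction.atomic()` takes no alias and the final write is `obj.save(update_fields=...)` rather than `obj.save(using=...)`. A hedged sketch of an upsert after the change — the model and field names are placeholders, only the call shape comes from this diff:

    from plain.models.query import QuerySet


    def upsert_setting(model_cls, key, value):
        # "model_cls" is assumed to be an installed model with "key"/"value"
        # fields. Internally this runs in transaction.atomic() (no alias) and
        # finishes with obj.save(update_fields=...) per the hunks above.
        obj, created = QuerySet(model=model_cls).update_or_create(
            key=key,
            defaults={"value": value},
        )
        return obj, created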
@@ -965,7 +954,7 @@ class QuerySet:
            if not id_list:
                return {}
            filter_key = f"{field_name}__in"
-           batch_size = connections[self.db].features.max_query_params
+           batch_size = db_connection.features.max_query_params
            id_list = tuple(id_list)
            # If the database has a limit on the number of query parameters
            # (e.g. SQLite), retrieve objects in batches if necessary.
@@ -1004,7 +993,7 @@ class QuerySet:

         from plain.models.deletion import Collector

-        collector = Collector(using=del_query.db, origin=self)
+        collector = Collector(origin=self)
         collector.collect(del_query)
         deleted, _rows_count = collector.delete()

@@ -1014,14 +1003,14 @@ class QuerySet:

     delete.queryset_only = True

-    def _raw_delete(self, using):
+    def _raw_delete(self):
         """
         Delete objects found from the given queryset in single direct SQL
         query. No signals are sent and there is no protection for cascades.
         """
         query = self.query.clone()
         query.__class__ = sql.DeleteQuery
-        cursor = query.get_compiler(using).execute_sql(CURSOR)
+        cursor = query.get_compiler().execute_sql(CURSOR)
         if cursor:
             with cursor:
                 return cursor.rowcount
@@ -1061,8 +1050,8 @@ class QuerySet:

         # Clear any annotations so that they won't be present in subqueries.
         query.annotations = {}
-        with transaction.mark_for_rollback_on_error(using=self.db):
-            rows = query.get_compiler(self.db).execute_sql(CURSOR)
+        with transaction.mark_for_rollback_on_error():
+            rows = query.get_compiler().execute_sql(CURSOR)
         self._result_cache = None
         return rows

@@ -1080,7 +1069,7 @@ class QuerySet:
         # Clear any annotations so that they won't be present in subqueries.
         query.annotations = {}
         self._result_cache = None
-        return query.get_compiler(self.db).execute_sql(CURSOR)
+        return query.get_compiler().execute_sql(CURSOR)

     _update.queryset_only = False

@@ -1089,7 +1078,7 @@ class QuerySet:
         Return True if the QuerySet would have any results, False otherwise.
         """
         if self._result_cache is None:
-            return self.query.has_results(using=self.db)
+            return self.query.has_results()
         return bool(self._result_cache)

     def contains(self, obj):
@@ -1123,21 +1112,18 @@ class QuerySet:
         Runs an EXPLAIN on the SQL query this QuerySet would perform, and
         returns the results.
         """
-        return self.query.explain(using=self.db, format=format, **options)
+        return self.query.explain(format=format, **options)

     ##################################################
     # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
     ##################################################

-    def raw(self, raw_query, params=(), translations=None, using=None):
-        if using is None:
-            using = self.db
+    def raw(self, raw_query, params=(), translations=None):
         qs = RawQuerySet(
             raw_query,
             model=self.model,
             params=params,
             translations=translations,
-            using=using,
         )
         qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
         return qs
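`QuerySet.raw()` loses its `using` parameter, and the `RawQuerySet` it builds is constructed without one. A sketch of the trimmed call — the model, table, and column names are placeholders, only the signature comes from this diff:

    from plain.models.query import QuerySet


    def fetch_by_name(model_cls, table, name):
        # Placeholder model/table; iteration compiles and runs against the
        # single db_connection.
        qs = QuerySet(model=model_cls).raw(
            f"SELECT * FROM {table} WHERE name = %s",  # assumes a "name" column
            params=[name],
        )
        return list(qs)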
@@ -1570,12 +1556,6 @@ class QuerySet:
         clone.query.add_immediate_loading(fields)
         return clone

-    def using(self, alias):
-        """Select which database this QuerySet should execute against."""
-        clone = self._chain()
-        clone._db = alias
-        return clone
-
     ###################################
     # PUBLIC INTROSPECTION ATTRIBUTES #
     ###################################
@@ -1601,13 +1581,6 @@ class QuerySet:
         else:
             return False

-    @property
-    def db(self):
-        """Return the database used if this query is executed now."""
-        if self._for_write:
-            return self._db or router.db_for_write(self.model, **self._hints)
-        return self._db or router.db_for_read(self.model, **self._hints)
-
     ###################
     # PRIVATE METHODS #
     ###################
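With `QuerySet.db` and `.using()` removed, there is no per-queryset alias left to inspect or switch. Where calling code previously looked up `connections[queryset.db]`, the pattern used throughout this diff is to read the module-level connection directly; a small sketch (the function name is mine, the attributes are verbatim from the hunks above):

    from plain.models.db import db_connection


    def supports_limited_select_for_update() -> bool:
        # Replaces the old connections[queryset.db].features lookup.
        return db_connection.features.supports_select_for_update_with_limit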
@@ -1618,7 +1591,6 @@ class QuerySet:
         fields,
         returning_fields=None,
         raw=False,
-        using=None,
         on_conflict=None,
         update_fields=None,
         unique_fields=None,
@@ -1628,8 +1600,6 @@ class QuerySet:
         the InsertQuery class and is how Model.save() is implemented.
         """
         self._for_write = True
-        if using is None:
-            using = self.db
         query = sql.InsertQuery(
             self.model,
             on_conflict=on_conflict,
@@ -1637,7 +1607,7 @@ class QuerySet:
             unique_fields=unique_fields,
         )
         query.insert_values(fields, objs, raw=raw)
-        return query.get_compiler(using=using).execute_sql(returning_fields)
+        return query.get_compiler().execute_sql(returning_fields)

     _insert.queryset_only = False

@@ -1653,19 +1623,17 @@ class QuerySet:
         """
         Helper method for bulk_create() to insert objs one batch at a time.
         """
-        connection = connections[self.db]
-        ops = connection.ops
+        ops = db_connection.ops
         max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         inserted_rows = []
-        bulk_return = connection.features.can_return_rows_from_bulk_insert
+        bulk_return = db_connection.features.can_return_rows_from_bulk_insert
         for item in [objs[i : i + batch_size] for i in range(0, len(objs), batch_size)]:
             if bulk_return and on_conflict is None:
                 inserted_rows.extend(
                     self._insert(
                         item,
                         fields=fields,
-                        using=self.db,
                         returning_fields=self.model._meta.db_returning_fields,
                     )
                 )
@@ -1673,7 +1641,6 @@ class QuerySet:
                 self._insert(
                     item,
                     fields=fields,
-                    using=self.db,
                     on_conflict=on_conflict,
                     update_fields=update_fields,
                     unique_fields=unique_fields,
@@ -1699,7 +1666,6 @@ class QuerySet:
         c = self.__class__(
             model=self.model,
             query=self.query.chain(),
-            using=self._db,
             hints=self._hints,
         )
         c._sticky_filter = self._sticky_filter
@@ -1754,7 +1720,6 @@ class QuerySet:
             # if they are set up to select only a single field.
             raise TypeError("Cannot use multi-field values as a filter value.")
         query = self.query.resolve_expression(*args, **kwargs)
-        query._db = self._db
         return query

     resolve_expression.queryset_only = True
@@ -1835,14 +1800,12 @@ class RawQuerySet:
         query=None,
         params=(),
         translations=None,
-        using=None,
         hints=None,
     ):
         self.raw_query = raw_query
         self.model = model
-        self._db = using
         self._hints = hints or {}
-        self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
+        self.query = query or sql.RawQuery(sql=raw_query, params=params)
         self.params = params
         self.translations = translations or {}
         self._result_cache = None
@@ -1851,7 +1814,7 @@ class RawQuerySet:

     def resolve_model_init_order(self):
         """Resolve the init field names and value positions."""
-        converter = connections[self.db].introspection.identifier_converter
+        converter = db_connection.introspection.identifier_converter
         model_init_fields = [
             f for f in self.model._meta.fields if converter(f.column) in self.columns
         ]
@@ -1887,7 +1850,6 @@ class RawQuerySet:
             query=self.query,
             params=self.params,
             translations=self.translations,
-            using=self._db,
             hints=self._hints,
         )
         c._prefetch_related_lookups = self._prefetch_related_lookups[:]
@@ -1920,22 +1882,6 @@ class RawQuerySet:
     def __getitem__(self, k):
         return list(self)[k]

-    @property
-    def db(self):
-        """Return the database used if this query is executed now."""
-        return self._db or router.db_for_read(self.model, **self._hints)
-
-    def using(self, alias):
-        """Select the database this RawQuerySet should execute against."""
-        return RawQuerySet(
-            self.raw_query,
-            model=self.model,
-            query=self.query.chain(using=alias),
-            params=self.params,
-            translations=self.translations,
-            using=alias,
-        )
-
     @cached_property
     def columns(self):
         """
@@ -1957,7 +1903,7 @@ class RawQuerySet:
     @cached_property
     def model_fields(self):
         """A dict mapping column names to model field names."""
-        converter = connections[self.db].introspection.identifier_converter
+        converter = db_connection.introspection.identifier_converter
         model_fields = {}
         for field in self.model._meta.fields:
             name, column = field.get_attname_column()
@@ -2367,8 +2313,7 @@ class RelatedPopulator:
     model instance.
     """

-    def __init__(self, klass_info, select, db):
-        self.db = db
+    def __init__(self, klass_info, select):
         # Pre-compute needed attributes. The attributes are:
         # - model_cls: the possibly deferred model class to instantiate
         # - either:
@@ -2403,7 +2348,7 @@ class RelatedPopulator:

         self.model_cls = klass_info["model"]
         self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
-        self.related_populators = get_related_populators(klass_info, select, self.db)
+        self.related_populators = get_related_populators(klass_info, select)
         self.local_setter = klass_info["local_setter"]
         self.remote_setter = klass_info["remote_setter"]

@@ -2415,7 +2360,7 @@ class RelatedPopulator:
         if obj_data[self.pk_idx] is None:
             obj = None
         else:
-            obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
+            obj = self.model_cls.from_db(self.init_list, obj_data)
         for rel_iter in self.related_populators:
             rel_iter.populate(row, obj)
         self.local_setter(from_obj, obj)
@@ -2423,10 +2368,10 @@ class RelatedPopulator:
             self.remote_setter(obj, from_obj)


-def get_related_populators(klass_info, select, db):
+def get_related_populators(klass_info, select):
     iterators = []
     related_klass_infos = klass_info.get("related_klass_infos", [])
     for rel_klass_info in related_klass_infos:
-        rel_cls = RelatedPopulator(rel_klass_info, select, db)
+        rel_cls = RelatedPopulator(rel_klass_info, select)
         iterators.append(rel_cls)
     return iterators
plain/models/query_utils.py CHANGED
@@ -13,7 +13,7 @@ from collections import namedtuple

 from plain.exceptions import FieldError
 from plain.models.constants import LOOKUP_SEP
-from plain.models.db import DEFAULT_DB_ALIAS, DatabaseError, connections
+from plain.models.db import DatabaseError, db_connection
 from plain.utils import tree

 logger = logging.getLogger("plain.models")
@@ -111,7 +111,7 @@ class Q(tree.Node):
         else:
             yield child

-    def check(self, against, using=DEFAULT_DB_ALIAS):
+    def check(self, against):
         """
         Do a database query to check if the expressions of the Q instance
         matches against the expressions.
@@ -130,11 +130,11 @@ class Q(tree.Node):
             query.add_annotation(value, name, select=False)
         query.add_annotation(Value(1), "_check")
         # This will raise a FieldError if a field is missing in "against".
-        if connections[using].features.supports_comparing_boolean_expr:
+        if db_connection.features.supports_comparing_boolean_expr:
             query.add_q(Q(Coalesce(self, True, output_field=BooleanField())))
         else:
             query.add_q(self)
-        compiler = query.get_compiler(using=using)
+        compiler = query.get_compiler()
         try:
             return compiler.execute_sql(SINGLE) is not None
         except DatabaseError as e:
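`Q.check()` follows the same pattern: it builds a throwaway query and compiles it with `query.get_compiler()`, so callers drop the `using` argument. A hedged sketch — the field name and values are illustrative, and the dict-shaped `against` argument is assumed from the surrounding code:

    from plain.models.query_utils import Q

    # 0.33.x: Q(price__gte=0).check(against, using="default")
    # 0.34.x: the single connection is implied.
    is_valid = Q(price__gte=0).check({"price": 10})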
plain/models/sql/compiler.py CHANGED
@@ -42,10 +42,9 @@ class SQLCompiler:
         re.MULTILINE | re.DOTALL,
     )

-    def __init__(self, query, connection, using, elide_empty=True):
+    def __init__(self, query, connection, elide_empty=True):
         self.query = query
         self.connection = connection
-        self.using = using
         # Some queries, e.g. coalesced aggregation, need to be executed even if
         # they would return an empty result set.
         self.elide_empty = elide_empty
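A compiler now carries just the query and its connection; no alias is passed in. Downstream, compilers are obtained either from the query itself (which presumably binds `db_connection` internally, as the later `query.get_compiler(elide_empty=...)` hunks show) or built explicitly via `db_connection.ops`, as RawModelIterable does earlier in this diff. A brief sketch, assuming `query` is an existing `plain.models.sql.Query` instance:

    from plain.models.db import db_connection


    def compile_query(query):
        # "query" is assumed to be a plain.models.sql.Query; the connection
        # is implied by get_compiler().
        compiler = query.get_compiler(elide_empty=True)
        return compiler.as_sql()


    def raw_compiler(query):
        # Explicit construction, mirroring RawModelIterable in this diff.
        return db_connection.ops.compiler("SQLCompiler")(query, db_connection)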
@@ -63,7 +62,7 @@ class SQLCompiler:
         return (
             f"<{self.__class__.__qualname__} "
             f"model={self.query.model.__qualname__} "
-            f"connection={self.connection!r} using={self.using!r}>"
+            f"connection={self.connection!r}>"
         )

     def setup_query(self, with_col_aliases=False):
@@ -532,7 +531,7 @@ class SQLCompiler:
     def get_combinator_sql(self, combinator, all):
         features = self.connection.features
         compilers = [
-            query.get_compiler(self.using, self.connection, self.elide_empty)
+            query.get_compiler(elide_empty=self.elide_empty)
             for query in self.query.combined_queries
         ]
         if not features.supports_slicing_ordering_in_compound:
@@ -660,9 +659,7 @@ class SQLCompiler:
                 {expr: Ref(alias, expr) for expr, alias in replacements.items()}
             )
         )
-        inner_query_compiler = inner_query.get_compiler(
-            self.using, connection=self.connection, elide_empty=self.elide_empty
-        )
+        inner_query_compiler = inner_query.get_compiler(elide_empty=self.elide_empty)
         inner_sql, inner_params = inner_query_compiler.as_sql(
             # The limits must be applied to the outer query to avoid pruning
             # results too eagerly.
@@ -1822,7 +1819,7 @@ class SQLDeleteCompiler(SQLCompiler):
         if not self.connection.features.update_can_self_select:
             # Force the materialization of the inner query to allow reference
             # to the target table on MySQL.
-            sql, params = innerq.get_compiler(connection=self.connection).as_sql()
+            sql, params = innerq.get_compiler().as_sql()
             innerq = RawSQL(f"SELECT * FROM ({sql}) subquery", params)
         outerq.add_filter("pk__in", innerq)
         return self._as_sql(outerq)
@@ -1907,7 +1904,7 @@ class SQLUpdateCompiler(SQLCompiler):
            if cursor:
                cursor.close()
        for query in self.query.get_related_updates():
-           aux_rows = query.get_compiler(self.using).execute_sql(result_type)
+           aux_rows = query.get_compiler().execute_sql(result_type)
            if is_empty and aux_rows:
                rows = aux_rows
                is_empty = False
@@ -1957,7 +1954,7 @@ class SQLUpdateCompiler(SQLCompiler):
         # selecting from the updating table (e.g. MySQL).
         idents = []
         related_ids = collections.defaultdict(list)
-        for rows in query.get_compiler(self.using).execute_sql(MULTI):
+        for rows in query.get_compiler().execute_sql(MULTI):
             idents.extend(r[0] for r in rows)
             for parent, index in related_ids_index:
                 related_ids[parent].extend(r[index] for r in rows)
@@ -1986,7 +1983,6 @@ class SQLAggregateCompiler(SQLCompiler):
         params = tuple(params)

         inner_query_sql, inner_query_params = self.query.inner_query.get_compiler(
-            self.using,
             elide_empty=self.elide_empty,
         ).as_sql(with_col_aliases=True)
         sql = f"SELECT {sql} FROM ({inner_query_sql}) subquery"