sqliter-py 0.12.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. sqliter/constants.py +4 -3
  2. sqliter/exceptions.py +13 -0
  3. sqliter/model/model.py +42 -3
  4. sqliter/orm/__init__.py +16 -0
  5. sqliter/orm/fields.py +412 -0
  6. sqliter/orm/foreign_key.py +8 -0
  7. sqliter/orm/model.py +243 -0
  8. sqliter/orm/query.py +221 -0
  9. sqliter/orm/registry.py +169 -0
  10. sqliter/query/query.py +573 -51
  11. sqliter/sqliter.py +141 -47
  12. sqliter/tui/__init__.py +62 -0
  13. sqliter/tui/__main__.py +6 -0
  14. sqliter/tui/app.py +179 -0
  15. sqliter/tui/demos/__init__.py +96 -0
  16. sqliter/tui/demos/base.py +114 -0
  17. sqliter/tui/demos/caching.py +283 -0
  18. sqliter/tui/demos/connection.py +150 -0
  19. sqliter/tui/demos/constraints.py +211 -0
  20. sqliter/tui/demos/crud.py +154 -0
  21. sqliter/tui/demos/errors.py +231 -0
  22. sqliter/tui/demos/field_selection.py +150 -0
  23. sqliter/tui/demos/filters.py +389 -0
  24. sqliter/tui/demos/models.py +248 -0
  25. sqliter/tui/demos/ordering.py +156 -0
  26. sqliter/tui/demos/orm.py +460 -0
  27. sqliter/tui/demos/results.py +241 -0
  28. sqliter/tui/demos/string_filters.py +210 -0
  29. sqliter/tui/demos/timestamps.py +126 -0
  30. sqliter/tui/demos/transactions.py +177 -0
  31. sqliter/tui/runner.py +116 -0
  32. sqliter/tui/styles/app.tcss +130 -0
  33. sqliter/tui/widgets/__init__.py +7 -0
  34. sqliter/tui/widgets/code_display.py +81 -0
  35. sqliter/tui/widgets/demo_list.py +65 -0
  36. sqliter/tui/widgets/output_display.py +92 -0
  37. {sqliter_py-0.12.0.dist-info → sqliter_py-0.16.0.dist-info}/METADATA +23 -7
  38. sqliter_py-0.16.0.dist-info/RECORD +47 -0
  39. {sqliter_py-0.12.0.dist-info → sqliter_py-0.16.0.dist-info}/WHEEL +2 -2
  40. sqliter_py-0.16.0.dist-info/entry_points.txt +3 -0
  41. sqliter_py-0.12.0.dist-info/RECORD +0 -15
sqliter/query/query.py CHANGED
@@ -11,8 +11,10 @@ from __future__ import annotations
11
11
 
12
12
  import hashlib
13
13
  import json
14
+ import re
14
15
  import sqlite3
15
16
  import warnings
17
+ from dataclasses import dataclass
16
18
  from typing import (
17
19
  TYPE_CHECKING,
18
20
  Any,
@@ -26,13 +28,14 @@ from typing import (
26
28
  overload,
27
29
  )
28
30
 
29
- from typing_extensions import LiteralString, Self
31
+ from typing_extensions import Self
30
32
 
31
33
  from sqliter.constants import OPERATOR_MAPPING
32
34
  from sqliter.exceptions import (
33
35
  InvalidFilterError,
34
36
  InvalidOffsetError,
35
37
  InvalidOrderError,
38
+ InvalidRelationshipError,
36
39
  RecordDeletionError,
37
40
  RecordFetchError,
38
41
  )
@@ -52,6 +55,33 @@ FilterValue = Union[
52
55
  ]
53
56
 
54
57
 
58
+ @dataclass
59
+ class JoinInfo:
60
+ """Metadata for a JOIN clause.
61
+
62
+ Attributes:
63
+ alias: Table alias for the JOIN (e.g., "t1", "t2").
64
+ table_name: Actual table name in the database.
65
+ model_class: The model class for the joined table.
66
+ fk_field: FK field name on the parent model.
67
+ parent_alias: Alias of the parent table in the JOIN chain.
68
+ fk_column: The FK column name (e.g., "author_id").
69
+ join_type: Type of JOIN ("LEFT" or "INNER").
70
+ path: Full relationship path (e.g., "post__author").
71
+ is_nullable: Whether the FK is nullable.
72
+ """
73
+
74
+ alias: str
75
+ table_name: str
76
+ model_class: type[BaseDBModel]
77
+ fk_field: str
78
+ parent_alias: str
79
+ fk_column: str
80
+ join_type: str
81
+ path: str
82
+ is_nullable: bool
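For orientation, a minimal sketch of the entries this dataclass is meant to hold. The Post and Author classes, table names and nullability below are hypothetical stand-ins rather than models shipped with the package; only the JoinInfo import comes from the module this diff modifies.

    from sqliter.query.query import JoinInfo

    class Post: ...      # stand-in for a BaseDBModel subclass
    class Author: ...    # stand-in for a BaseDBModel subclass

    # What a select_related("post__author") chain would roughly record:
    joins = [
        JoinInfo(alias="t1", table_name="posts", model_class=Post,
                 fk_field="post", parent_alias="t0", fk_column="post_id",
                 join_type="INNER", path="post", is_nullable=False),
        JoinInfo(alias="t2", table_name="authors", model_class=Author,
                 fk_field="author", parent_alias="t1", fk_column="author_id",
                 join_type="LEFT", path="post__author", is_nullable=True),
    ]
    # "t0" is always the main table; each path segment gets its own alias,
    # and a nullable FK becomes a LEFT JOIN so unmatched rows survive.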
83
+
84
+
55
85
  class QueryBuilder(Generic[T]):
56
86
  """Builds and executes database queries for a specific model.
57
87
 
@@ -93,6 +123,9 @@ class QueryBuilder(Generic[T]):
93
123
  self._fields: Optional[list[str]] = fields
94
124
  self._bypass_cache: bool = False
95
125
  self._query_cache_ttl: Optional[int] = None
126
+ # Eager loading support
127
+ self._select_related_paths: list[str] = []
128
+ self._join_info: list[JoinInfo] = []
96
129
 
97
130
  if self._fields:
98
131
  self._validate_fields()
@@ -113,7 +146,7 @@ class QueryBuilder(Generic[T]):
113
146
  )
114
147
  raise ValueError(err_message)
115
148
 
116
- def filter(self, **conditions: str | float | None) -> Self:
149
+ def filter(self, **conditions: FilterValue) -> Self:
117
150
  """Apply filter conditions to the query.
118
151
 
119
152
  This method allows adding one or more filter conditions to the query.
@@ -138,16 +171,70 @@ class QueryBuilder(Generic[T]):
138
171
 
139
172
  for field, value in conditions.items():
140
173
  field_name, operator = self._parse_field_operator(field)
141
- self._validate_field(field_name, valid_fields)
142
174
 
143
- if operator in ["__isnull", "__notnull"]:
144
- self._handle_null(field_name, value, operator)
175
+ # Check for relationship traversal (e.g., author__name)
176
+ if "__" in field_name and operator not in {
177
+ "__isnull",
178
+ "__notnull",
179
+ }:
180
+ # Handle relationship filter traversal
181
+ self._handle_relationship_filter(field_name, value, operator)
145
182
  else:
146
- handler = self._get_operator_handler(operator)
147
- handler(field_name, value, operator)
183
+ # Normal field filter
184
+ self._validate_field(field_name, valid_fields)
185
+ if operator in ["__isnull", "__notnull"]:
186
+ self._handle_null(field_name, value, operator)
187
+ else:
188
+ handler = self._get_operator_handler(operator)
189
+ handler(field_name, value, operator)
148
190
 
149
191
  return self
150
192
 
193
+ def _handle_relationship_filter(
194
+ self, field_name: str, value: FilterValue, operator: str
195
+ ) -> None:
196
+ """Handle filter conditions across relationships.
197
+
198
+ Args:
199
+ field_name: The field name with relationship path
200
+ (e.g., "author__name").
201
+ value: The filter value.
202
+ operator: The filter operator.
203
+
204
+ Raises:
205
+ InvalidRelationshipError: If the relationship path is invalid.
206
+ """
207
+ # Split into relationship path and target field
208
+ parts = field_name.split("__")
209
+ relationship_path = "__".join(parts[:-1])
210
+ target_field = parts[-1]
211
+
212
+ # Build JOIN info for the relationship path
213
+ # This validates the path and populates _join_info
214
+ self._validate_and_build_join_info(relationship_path)
215
+
216
+ # Find the join info for this relationship path
217
+ join_info = next(
218
+ j for j in self._join_info if j.path == relationship_path
219
+ )
220
+
221
+ # Validate target field exists on the related model
222
+ if target_field not in join_info.model_class.model_fields:
223
+ error_msg = (
224
+ f"{field_name} - field '{target_field}' not found in "
225
+ f"{join_info.model_class.__name__}"
226
+ )
227
+ raise InvalidFilterError(error_msg)
228
+
229
+ # Apply filter with table alias
230
+ qualified_field = f'{join_info.alias}."{target_field}"'
231
+
232
+ # Use the appropriate handler
233
+ # Note: __isnull/__notnull operators don't reach here due to
234
+ # filter() method check at line 176-179
235
+ handler = self._get_operator_handler(operator)
236
+ handler(qualified_field, value, operator)
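As a usage sketch of the relationship traversal added to filter() above: given hypothetical ORM models where Book declares a foreign key field "author" pointing at Author, and an open sqliter database instance db, a double-underscore path filters on the related table.

    # Equality across the FK; everything before the last segment is the
    # relationship path, the last segment is the field on the related model.
    books = db.select(Book).filter(author__name="Alice").fetch_all()

    # Operators still attach to the final segment:
    a_books = db.select(Book).filter(author__name__istartswith="a").fetch_all()

    # An unknown relationship segment raises InvalidRelationshipError; a
    # known relationship with an unknown target field raises
    # InvalidFilterError, as in _handle_relationship_filter above.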
237
+
151
238
  def fields(self, fields: Optional[list[str]] = None) -> Self:
152
239
  """Specify which fields to select in the query.
153
240
 
@@ -230,6 +317,118 @@ class QueryBuilder(Generic[T]):
230
317
  self._fields = [field, "pk"]
231
318
  return self
232
319
 
320
+ def select_related(self, *paths: str) -> Self:
321
+ """Specify foreign key relationships to eager load via JOIN.
322
+
323
+ This method reduces the N+1 query problem by fetching related objects
324
+ in a single query using JOINs instead of lazy loading.
325
+
326
+ Args:
327
+ *paths: One or more relationship paths to eager load.
328
+ Single level: "author"
329
+ Nested levels: "post__author"
330
+ Multiple: "author", "publisher"
331
+
332
+ Returns:
333
+ The QueryBuilder instance for method chaining.
334
+
335
+ Raises:
336
+ InvalidRelationshipError: If a path contains invalid fields.
337
+
338
+ Examples:
339
+ >>> # Single level eager load
340
+ >>> db.select(Book).select_related("author").fetch_all()
341
+ >>> # Nested eager load
342
+ >>> db.select(Comment).select_related(
343
+ ... "post__author"
344
+ ... ).fetch_all()
345
+ >>> # Multiple paths
346
+ >>> db.select(Book).select_related(
347
+ ... "author", "publisher"
348
+ ... ).fetch_all()
349
+ """
350
+ # Store the paths
351
+ self._select_related_paths.extend(paths)
352
+
353
+ # Validate and build join info for each path
354
+ for path in paths:
355
+ self._validate_and_build_join_info(path)
356
+
357
+ return self
358
+
359
+ def _validate_and_build_join_info(self, path: str) -> None:
360
+ """Validate a relationship path and build JoinInfo entries.
361
+
362
+ Args:
363
+ path: Relationship path (e.g., "author" or "post__author").
364
+
365
+ Raises:
366
+ InvalidRelationshipError: If path contains invalid fields.
367
+ """
368
+ # Split path into segments
369
+ segments = path.split("__")
370
+
371
+ # Start with current model as parent
372
+ current_model: type[BaseDBModel] = self.model_class
373
+ parent_alias = "t0" # Main table alias
374
+
375
+ # Get next available alias number based on existing joins
376
+ next_alias_num = len(self._join_info) + 1
377
+
378
+ # Track progressive path for nested relationships
379
+ progressive_path = []
380
+
381
+ for segment in segments:
382
+ # Check if segment is a valid FK field on current model
383
+ fk_descriptors = getattr(current_model, "fk_descriptors", {})
384
+
385
+ if segment not in fk_descriptors:
386
+ # Not an ORM-style FK - select_related() only supports ORM FKs
387
+ model_name = current_model.__name__
388
+ raise InvalidRelationshipError(path, segment, model_name)
389
+
390
+ # ORM FK descriptor
391
+ fk_descriptor = fk_descriptors[segment]
392
+ to_model = fk_descriptor.to_model
393
+ fk_column = f"{segment}_id"
394
+ is_nullable = fk_descriptor.fk_info.null
395
+
396
+ # Create alias for this join using global counter
397
+ alias = f"t{next_alias_num}"
398
+ next_alias_num += 1
399
+
400
+ # Build progressive path for this level
401
+ progressive_path.append(segment)
402
+ current_path = "__".join(progressive_path)
403
+
404
+ # Check if this path segment already exists to avoid duplicate JOINs
405
+ if any(j.path == current_path for j in self._join_info):
406
+ # Path exists - find existing JoinInfo to continue chain
407
+ existing_join = next(
408
+ j for j in self._join_info if j.path == current_path
409
+ )
410
+ current_model = existing_join.model_class
411
+ parent_alias = existing_join.alias
412
+ continue
413
+
414
+ # Build JoinInfo
415
+ join_info = JoinInfo(
416
+ alias=alias,
417
+ table_name=to_model.get_table_name(),
418
+ model_class=to_model,
419
+ fk_field=segment,
420
+ parent_alias=parent_alias,
421
+ fk_column=fk_column,
422
+ join_type="LEFT" if is_nullable else "INNER",
423
+ path=current_path,
424
+ is_nullable=is_nullable,
425
+ )
426
+ self._join_info.append(join_info)
427
+
428
+ # Move to next level
429
+ current_model = to_model
430
+ parent_alias = alias
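A short sketch of how the validation above behaves with overlapping and invalid paths; Comment, Post and Author are again hypothetical ORM models.

    from sqliter.exceptions import InvalidRelationshipError

    qb = db.select(Comment).select_related("post", "post__author")
    # Expected _join_info (roughly): path "post" -> alias t1 joined to t0
    # via post_id, and path "post__author" -> alias t2 joined to t1 via
    # author_id. The nested path reuses the existing "post" join rather
    # than adding a duplicate alias.

    try:
        db.select(Comment).select_related("pots__author")  # typo in segment
    except InvalidRelationshipError:
        ...  # raised before any SQL is built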
431
+
233
432
  def _get_operator_handler(
234
433
  self, operator: str
235
434
  ) -> Callable[[str, Any, str], None]:
@@ -246,6 +445,7 @@ class QueryBuilder(Generic[T]):
246
445
  "__notnull": self._handle_null,
247
446
  "__in": self._handle_in,
248
447
  "__not_in": self._handle_in,
448
+ "__like": self._handle_like,
249
449
  "__startswith": self._handle_like,
250
450
  "__endswith": self._handle_like,
251
451
  "__contains": self._handle_like,
@@ -285,16 +485,22 @@ class QueryBuilder(Generic[T]):
285
485
  value: The value to compare against.
286
486
  operator: The operator string (usually '__eq').
287
487
 
488
+ Raises:
489
+ TypeError: If the value is a list (lists only valid with __in).
490
+
288
491
  This method adds an equality condition to the filters list, handling
289
492
  NULL values separately.
290
493
  """
494
+ if isinstance(value, list):
495
+ msg = f"{field_name} requires scalar for '{operator}', not list"
496
+ raise TypeError(msg)
291
497
  if value is None:
292
498
  self.filters.append((f"{field_name} IS NULL", None, "__isnull"))
293
499
  else:
294
500
  self.filters.append((field_name, value, operator))
295
501
 
296
502
  def _handle_null(
297
- self, field_name: str, value: Union[str, float, None], operator: str
503
+ self, field_name: str, value: FilterValue, operator: str
298
504
  ) -> None:
299
505
  """Handle IS NULL and IS NOT NULL filter conditions.
300
506
 
@@ -348,7 +554,7 @@ class QueryBuilder(Generic[T]):
348
554
  Args:
349
555
  field_name: The name of the field to filter on.
350
556
  value: The pattern to match against.
351
- operator: The operator string (e.g., '__startswith', '__contains').
557
+ operator: The operator string (e.g., '__like', '__startswith').
352
558
 
353
559
  Raises:
354
560
  TypeError: If the value is not a string.
@@ -359,19 +565,33 @@ class QueryBuilder(Generic[T]):
359
565
  if not isinstance(value, str):
360
566
  err = f"{field_name} requires a string value for '{operator}'"
361
567
  raise TypeError(err)
362
- formatted_value = self._format_string_for_operator(operator, value)
363
- if operator in ["__startswith", "__endswith", "__contains"]:
568
+ if operator == "__like":
569
+ # Raw LIKE - user provides the full pattern with % wildcards
364
570
  self.filters.append(
365
571
  (
366
- f"{field_name} GLOB ?",
367
- [formatted_value],
572
+ f"{field_name} LIKE ?",
573
+ [value],
368
574
  operator,
369
575
  )
370
576
  )
371
- elif operator in ["__istartswith", "__iendswith", "__icontains"]:
577
+ elif operator in [
578
+ "__startswith",
579
+ "__endswith",
580
+ "__contains",
581
+ "__istartswith",
582
+ "__iendswith",
583
+ "__icontains",
584
+ ]:
585
+ formatted_value = self._format_string_for_operator(operator, value)
586
+ sql_operator = OPERATOR_MAPPING[operator]
587
+ field_expr = (
588
+ f"{field_name} COLLATE NOCASE"
589
+ if operator in {"__istartswith", "__iendswith", "__icontains"}
590
+ else field_name
591
+ )
372
592
  self.filters.append(
373
593
  (
374
- f"{field_name} LIKE ?",
594
+ f"{field_expr} {sql_operator} ?",
375
595
  [formatted_value],
376
596
  operator,
377
597
  )
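To illustrate the string handling after this change (User and its name field are hypothetical, and the SQL in the comments is an approximation of what the branches above emit, assuming OPERATOR_MAPPING resolves these operators to LIKE):

    # Raw LIKE: the caller supplies the full pattern, wildcards included.
    db.select(User).filter(name__like="J%n").fetch_all()
    #   WHERE name LIKE ?                    value: "J%n"

    # Convenience operators wrap the value; the case-sensitive forms now
    # compile to LIKE (previously GLOB), and the case-insensitive forms
    # add COLLATE NOCASE on the column.
    db.select(User).filter(name__startswith="Jo").fetch_all()
    #   WHERE name LIKE ?                    value: "Jo%"
    db.select(User).filter(name__icontains="smith").fetch_all()
    #   WHERE name COLLATE NOCASE LIKE ?     value: "%smith%"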
@@ -387,8 +607,14 @@ class QueryBuilder(Generic[T]):
387
607
  value: The value to compare against.
388
608
  operator: The comparison operator string (e.g., '__lt', '__gte').
389
609
 
610
+ Raises:
611
+ TypeError: If the value is a list (lists only valid with __in).
612
+
390
613
  This method adds a comparison condition to the filters list.
391
614
  """
615
+ if isinstance(value, list):
616
+ msg = f"{field_name} requires scalar for '{operator}', not list"
617
+ raise TypeError(msg)
392
618
  sql_operator = OPERATOR_MAPPING[operator]
393
619
  self.filters.append((f"{field_name} {sql_operator} ?", value, operator))
394
620
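A small sketch of the stricter value checks added to the equality and comparison handlers above (Book and its price field are hypothetical):

    # Lists are rejected for scalar operators ...
    try:
        db.select(Book).filter(price__lt=[10, 20]).fetch_all()
    except TypeError:
        ...  # "price requires scalar for '__lt', not list"

    # ... and belong with __in / __not_in instead:
    cheap = db.select(Book).filter(price__in=[10, 20]).fetch_all()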
 
@@ -431,6 +657,54 @@ class QueryBuilder(Generic[T]):
431
657
  # Return the formatted string or the original value if no match
432
658
  return format_map.get(operator, value)
433
659
 
660
+ def _build_join_sql(
661
+ self,
662
+ ) -> tuple[
663
+ str,
664
+ str,
665
+ list[tuple[str, str, type[BaseDBModel]]],
666
+ ]:
667
+ """Build JOIN clauses and aliased column SELECT statements.
668
+
669
+ Returns:
670
+ A tuple containing:
671
+ - join_clause: SQL JOIN clauses
672
+ (e.g., "LEFT JOIN authors AS t1 ON ...")
673
+ - select_clause: SELECT clause with aliased columns
674
+ - column_names: List of (alias, field_name, model_class) tuples
675
+ """
676
+ # Note: Only called when _join_info is not empty (line 840)
677
+ select_parts: list[str] = []
678
+ column_names: list[tuple[str, str, type[BaseDBModel]]] = []
679
+ join_parts: list[str] = []
680
+
681
+ # Main table columns (t0)
682
+ for field in self.model_class.model_fields:
683
+ alias = f"t0__{field}"
684
+ select_parts.append(f't0."{field}" AS "{alias}"')
685
+ column_names.append(("t0", field, self.model_class))
686
+
687
+ # Add JOINed table columns
688
+ for join in self._join_info:
689
+ # Build JOIN clause
690
+ join_clause = (
691
+ f"{join.join_type} JOIN "
692
+ f'"{join.table_name}" AS {join.alias} '
693
+ f'ON {join.parent_alias}."{join.fk_column}" = {join.alias}."pk"'
694
+ )
695
+ join_parts.append(join_clause)
696
+
697
+ # Add columns from joined table
698
+ for field in join.model_class.model_fields:
699
+ alias = f"{join.alias}__{field}"
700
+ select_parts.append(f'{join.alias}."{field}" AS "{alias}"')
701
+ column_names.append((join.alias, field, join.model_class))
702
+
703
+ select_clause = ", ".join(select_parts)
704
+ join_clause = " ".join(join_parts)
705
+
706
+ return join_clause, select_clause, column_names
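Roughly, for a hypothetical Book model with a non-nullable "author" FK to Author, the three values returned above would look like this (field lists abbreviated):

    qb = db.select(Book).select_related("author")
    join_clause, select_clause, column_names = qb._build_join_sql()

    # join_clause:
    #   INNER JOIN "authors" AS t1 ON t0."author_id" = t1."pk"
    # select_clause:
    #   t0."pk" AS "t0__pk", t0."title" AS "t0__title", ...,
    #   t1."pk" AS "t1__pk", t1."name" AS "t1__name", ...
    # column_names:
    #   [("t0", "pk", Book), ("t0", "title", Book), ...,
    #    ("t1", "pk", Author), ("t1", "name", Author), ...]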
707
+
434
708
  def limit(self, limit_value: int) -> Self:
435
709
  """Limit the number of results returned by the query.
436
710
 
@@ -523,12 +797,15 @@ class QueryBuilder(Generic[T]):
523
797
  self._order_by = f'"{order_by_field}" {sort_order}'
524
798
  return self
525
799
 
526
- def _execute_query(
800
+ def _execute_query( # noqa: C901, PLR0912, PLR0915
527
801
  self,
528
802
  *,
529
803
  fetch_one: bool = False,
530
804
  count_only: bool = False,
531
- ) -> list[tuple[Any, ...]] | Optional[tuple[Any, ...]]:
805
+ ) -> tuple[
806
+ list[tuple[Any, ...]] | tuple[Any, ...],
807
+ list[tuple[str, str, type[BaseDBModel]]],
808
+ ]:
532
809
  """Execute the constructed SQL query.
533
810
 
534
811
  Args:
@@ -536,12 +813,101 @@ class QueryBuilder(Generic[T]):
536
813
  count_only: If True, return only the count of results.
537
814
 
538
815
  Returns:
539
- A list of tuples (all results), a single tuple (one result),
540
- or None if no results are found.
816
+ A tuple containing:
817
+ - Query results (list of tuples or single tuple)
818
+ - Column metadata (list of (alias, field_name, model_class) tuples)
819
+ Empty list for non-JOIN queries (backward compatible).
541
820
 
542
821
  Raises:
543
822
  RecordFetchError: If there's an error executing the query.
544
823
  """
824
+ # Check if we need JOINs for eager loading or relationship filters
825
+ # Need JOIN if: we have join_info AND (not count/fields OR filters
826
+ # use joins)
827
+ needs_join_for_filters = False
828
+ if self._join_info and (count_only or self._fields):
829
+ # Parse filter to check if it references joined tables
830
+ values, where_clause = self._parse_filter()
831
+ # Check for table aliases like t1., t2., etc.
832
+ if re.search(r"\bt\d+\.", where_clause):
833
+ needs_join_for_filters = True
834
+
835
+ if self._join_info and (
836
+ not (count_only or self._fields) or needs_join_for_filters
837
+ ):
838
+ # Use JOIN-based query
839
+ join_clause, select_clause, column_names = self._build_join_sql()
840
+
841
+ # For count_only with JOINs, we don't need all the columns
842
+ if count_only and needs_join_for_filters:
843
+ # table_name validated - safe from SQL injection
844
+ sql = (
845
+ f'SELECT COUNT(*) FROM "{self.table_name}" AS t0 ' # noqa: S608
846
+ f"{join_clause}"
847
+ )
848
+ elif self._fields:
849
+ # Build custom field selection with JOINs
850
+ field_list = ", ".join(f't0."{f}"' for f in self._fields)
851
+ # table_name and fields validated - safe from SQL injection
852
+ sql = (
853
+ f"SELECT {field_list} FROM " # noqa: S608
854
+ f'"{self.table_name}" AS t0 {join_clause}'
855
+ )
856
+ # Rebuild column_names to match selected fields only
857
+ column_names = [
858
+ ("t0", field, self.model_class) for field in self._fields
859
+ ]
860
+ else:
861
+ # table_name validated - safe from SQL injection
862
+ sql = (
863
+ f"SELECT {select_clause} FROM " # noqa: S608
864
+ f'"{self.table_name}" AS t0 {join_clause}'
865
+ )
866
+
867
+ # Build WHERE clause with special handling for NULL
868
+ values, where_clause = self._parse_filter()
869
+
870
+ if self.filters:
871
+ sql += f" WHERE {where_clause}"
872
+
873
+ if self._order_by:
874
+ # Qualify ORDER BY column with t0 alias to avoid ambiguity
875
+ # Extract field name and direction from _order_by
876
+ # _order_by format: '"field" ASC' or '"field" DESC'
877
+ match = re.match(r'"([^"]+)"\s+(.*)', self._order_by)
878
+ if match:
879
+ field_name = match.group(1)
880
+ direction = match.group(2)
881
+ sql += f' ORDER BY t0."{field_name}" {direction}'
882
+ elif self._order_by.lower().startswith("rowid"):
883
+ # Fallback for non-quoted patterns such as "rowid DESC"
884
+ sql += f" ORDER BY t0.{self._order_by}"
885
+
886
+ if self._limit is not None:
887
+ sql += " LIMIT ?"
888
+ values.append(self._limit)
889
+
890
+ if self._offset is not None:
891
+ sql += " OFFSET ?"
892
+ values.append(self._offset)
893
+
894
+ # Log the SQL if debug is enabled
895
+ if self.db.debug:
896
+ self.db._log_sql(sql, values) # noqa: SLF001
897
+
898
+ try:
899
+ conn = self.db.connect()
900
+ cursor = conn.cursor()
901
+ cursor.execute(sql, values)
902
+ results = (
903
+ cursor.fetchall() if not fetch_one else cursor.fetchone()
904
+ )
905
+ except sqlite3.Error as exc:
906
+ raise RecordFetchError(self.table_name) from exc
907
+ else:
908
+ return (results, column_names)
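A sketch of the branch above that keeps the JOIN for count() when a filter references a joined table (models and table names hypothetical; SQL approximate):

    n = db.select(Comment).filter(post__title__startswith="A").count()
    # The stored filter references alias t1, so needs_join_for_filters is
    # True and the count query keeps the JOIN:
    #   SELECT COUNT(*) FROM "comments" AS t0
    #   INNER JOIN "posts" AS t1 ON t0."post_id" = t1."pk"
    #   WHERE t1."title" LIKE ?        value: "A%"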
909
+
910
+ # Non-JOIN query path (original behavior)
545
911
  if count_only:
546
912
  fields = "COUNT(*)"
547
913
  elif self._fields:
@@ -553,7 +919,7 @@ class QueryBuilder(Generic[T]):
553
919
  f'"{field}"' for field in self.model_class.model_fields
554
920
  )
555
921
 
556
- sql = f'SELECT {fields} FROM "{self.table_name}"' # noqa: S608 # nosec
922
+ sql = f'SELECT {fields} FROM "{self.table_name}"' # noqa: S608
557
923
 
558
924
  # Build the WHERE clause with special handling for None (NULL in SQL)
559
925
  values, where_clause = self._parse_filter()
@@ -572,20 +938,21 @@ class QueryBuilder(Generic[T]):
572
938
  sql += " OFFSET ?"
573
939
  values.append(self._offset)
574
940
 
575
- # Print the raw SQL and values if debug is enabled
576
941
  # Log the SQL if debug is enabled
577
942
  if self.db.debug:
578
943
  self.db._log_sql(sql, values) # noqa: SLF001
579
944
 
580
945
  try:
581
- with self.db.connect() as conn:
582
- cursor = conn.cursor()
583
- cursor.execute(sql, values)
584
- return cursor.fetchall() if not fetch_one else cursor.fetchone()
946
+ conn = self.db.connect()
947
+ cursor = conn.cursor()
948
+ cursor.execute(sql, values)
949
+ results = cursor.fetchall() if not fetch_one else cursor.fetchone()
585
950
  except sqlite3.Error as exc:
586
951
  raise RecordFetchError(self.table_name) from exc
952
+ else:
953
+ return (results, []) # Empty column_names for backward compat
587
954
 
588
- def _parse_filter(self) -> tuple[list[Any], LiteralString]:
955
+ def _parse_filter(self) -> tuple[list[Any], str]:
589
956
  """Parse the filter conditions into SQL clauses and values.
590
957
 
591
958
  Returns:
@@ -624,13 +991,118 @@ class QueryBuilder(Generic[T]):
624
991
  field: self._deserialize(field, row[idx])
625
992
  for idx, field in enumerate(self._fields)
626
993
  }
627
- return self.model_class.model_validate_partial(data)
994
+ instance = self.model_class.model_validate_partial(data)
995
+ else:
996
+ data = {
997
+ field: self._deserialize(field, row[idx])
998
+ for idx, field in enumerate(self.model_class.model_fields)
999
+ }
1000
+ # For ORM mode, exclude FK descriptor fields from data
1001
+ for fk_field in getattr(self.model_class, "fk_descriptors", {}):
1002
+ data.pop(fk_field, None)
1003
+ instance = self.model_class(**data)
1004
+
1005
+ # Set db_context for ORM lazy loading and reverse relationships
1006
+ if hasattr(instance, "db_context"):
1007
+ instance.db_context = self.db
1008
+ return instance
1009
+
1010
+ def _convert_joined_row_to_model(
1011
+ self,
1012
+ row: tuple[Any, ...],
1013
+ column_names: list[tuple[str, str, type[BaseDBModel]]],
1014
+ ) -> T:
1015
+ """Convert a JOINed database row to model instances with relationships.
1016
+
1017
+ This method parses aliased columns from JOIN queries, creates the main
1018
+ model instance, and populates related objects in the _fk_cache to avoid
1019
+ lazy loading.
628
1020
 
629
- data = {
630
- field: self._deserialize(field, row[idx])
631
- for idx, field in enumerate(self.model_class.model_fields)
1021
+ Args:
1022
+ row: A tuple representing a database row from a JOIN query.
1023
+ column_names: List of (alias, field_name, model_class) tuples
1024
+ describing each column in the result.
1025
+
1026
+ Returns:
1027
+ An instance of the main model class with populated relationships.
1028
+ """
1029
+ # Group columns by table alias
1030
+ tables_data: dict[str, dict[str, Any]] = {}
1031
+ tables_models: dict[str, type[BaseDBModel]] = {}
1032
+
1033
+ for idx, (alias, field_name, model_class) in enumerate(column_names):
1034
+ if alias not in tables_data:
1035
+ tables_data[alias] = {}
1036
+ tables_models[alias] = model_class
1037
+ tables_data[alias][field_name] = row[idx]
1038
+
1039
+ # Build main model (t0)
1040
+ main_data = tables_data["t0"]
1041
+
1042
+ # Deserialize and create main instance
1043
+ main_instance_data = {
1044
+ field: self._deserialize(field, main_data[field])
1045
+ for field in self.model_class.model_fields
1046
+ if field in main_data
632
1047
  }
633
- return self.model_class(**data)
1048
+
1049
+ # For ORM mode, exclude FK descriptor fields from data
1050
+ for fk_field in getattr(self.model_class, "fk_descriptors", {}):
1051
+ main_instance_data.pop(fk_field, None)
1052
+
1053
+ if self._fields:
1054
+ # Partial field selection: use model_validate_partial to
1055
+ # avoid validation errors for missing required fields
1056
+ main_instance = self.model_class.model_validate_partial(
1057
+ main_instance_data
1058
+ )
1059
+ else:
1060
+ main_instance = self.model_class(**main_instance_data)
1061
+ main_instance.db_context = self.db # type: ignore[attr-defined]
1062
+
1063
+ # Process JOINed tables and populate _fk_cache
1064
+ # Track instances per alias for nested cache wiring
1065
+ instances_by_alias: dict[str, BaseDBModel] = {"t0": main_instance}
1066
+
1067
+ for join_info in self._join_info:
1068
+ alias = join_info.alias
1069
+ related_data = tables_data.get(alias)
1070
+ if related_data is None:
1071
+ continue
1072
+
1073
+ # Check if all fields are NULL (LEFT JOIN with no match)
1074
+ if all(v is None for v in related_data.values()):
1075
+ # No related object, skip
1076
+ continue
1077
+
1078
+ # Deserialize related object
1079
+ related_instance_data = {
1080
+ field: self._deserialize(field, related_data[field])
1081
+ for field in join_info.model_class.model_fields
1082
+ if field in related_data
1083
+ }
1084
+
1085
+ # Exclude FK descriptors from related data
1086
+ for fk_field in getattr(
1087
+ join_info.model_class, "fk_descriptors", {}
1088
+ ):
1089
+ related_instance_data.pop(fk_field, None)
1090
+
1091
+ related_instance = join_info.model_class(**related_instance_data)
1092
+ related_instance.db_context = self.db # type: ignore[attr-defined]
1093
+
1094
+ instances_by_alias[alias] = related_instance
1095
+
1096
+ # Attach to parent instance cache (supports nesting)
1097
+ parent_instance = instances_by_alias.get(join_info.parent_alias)
1098
+ if parent_instance is not None:
1099
+ parent_fk_cache = getattr(parent_instance, "_fk_cache", {})
1100
+ parent_fk_cache[join_info.fk_field] = related_instance
1101
+ object.__setattr__(
1102
+ parent_instance, "_fk_cache", parent_fk_cache
1103
+ )
1104
+
1105
+ return main_instance
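The practical effect of the converter above, sketched with hypothetical models: eager-loaded relations arrive pre-attached, so attribute access does not issue a lazy-load query (assuming the ORM FK descriptor consults _fk_cache first, which is what this converter relies on).

    books = db.select(Book).select_related("author").fetch_all()
    for book in books:
        # author was placed in book's _fk_cache by
        # _convert_joined_row_to_model; no extra SELECT runs here.
        print(book.title, book.author.name)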
634
1106
 
635
1107
  def _deserialize(
636
1108
  self, field_name: str, value: SerializableField
@@ -725,6 +1197,7 @@ class QueryBuilder(Generic[T]):
725
1197
  "order_by": self._order_by,
726
1198
  "fields": tuple(sorted(self._fields)) if self._fields else None,
727
1199
  "fetch_one": fetch_one,
1200
+ "select_related": tuple(sorted(self._select_related_paths)),
728
1201
  }
729
1202
 
730
1203
  # Hash the key parts
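A brief sketch of why the eager-load paths are folded into the cache key: two otherwise identical queries must not share a cached result, because only the eager variant returns instances with relationships attached (Book and author are hypothetical).

    plain = db.select(Book).filter(title__startswith="A")
    eager = db.select(Book).filter(title__startswith="A").select_related("author")
    # Same table, filters and ordering, but the "select_related" entry in
    # key_parts differs: () for plain, ("author",) for eager, so each
    # variant hashes to its own cache slot.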
@@ -737,7 +1210,7 @@ class QueryBuilder(Generic[T]):
737
1210
  @overload
738
1211
  def _fetch_result(self, *, fetch_one: Literal[False]) -> list[T]: ...
739
1212
 
740
- def _fetch_result(
1213
+ def _fetch_result( # noqa: C901, PLR0911
741
1214
  self, *, fetch_one: bool = False
742
1215
  ) -> Union[list[T], Optional[T]]:
743
1216
  """Fetch and convert query results to model instances.
@@ -757,10 +1230,12 @@ class QueryBuilder(Generic[T]):
757
1230
  # Cache stores correctly typed data, cast from Any
758
1231
  return cast("Union[list[T], Optional[T]]", cached)
759
1232
 
760
- result = self._execute_query(fetch_one=fetch_one)
1233
+ result, column_names = self._execute_query(fetch_one=fetch_one)
761
1234
 
762
1235
  if not result:
763
1236
  if not self._bypass_cache:
1237
+ # Generate cache key for empty result
1238
+ cache_key = self._make_cache_key(fetch_one=fetch_one)
764
1239
  if fetch_one:
765
1240
  # Cache empty result
766
1241
  self.db._cache_set( # noqa: SLF001
@@ -777,15 +1252,55 @@ class QueryBuilder(Generic[T]):
777
1252
  return []
778
1253
  return None if fetch_one else []
779
1254
 
1255
+ # Convert results based on whether we have JOIN data
1256
+ if column_names:
1257
+ # JOIN-aware converter - needs column_names
1258
+ if fetch_one:
1259
+ # When fetch_one=True, result is a single tuple
1260
+ # Narrow the type from the union
1261
+ single_row: tuple[Any, ...] = (
1262
+ result if isinstance(result, tuple) else result[0]
1263
+ )
1264
+ single_result = self._convert_joined_row_to_model(
1265
+ single_row, column_names
1266
+ )
1267
+ if not self._bypass_cache:
1268
+ cache_key = self._make_cache_key(fetch_one=True)
1269
+ self.db._cache_set( # noqa: SLF001
1270
+ self.table_name,
1271
+ cache_key,
1272
+ single_result,
1273
+ ttl=self._query_cache_ttl,
1274
+ )
1275
+ return single_result
1276
+
1277
+ # When fetch_one=False, result is a list of tuples
1278
+ # Narrow the type from the union
1279
+ row_list: list[tuple[Any, ...]] = (
1280
+ result if isinstance(result, list) else [result]
1281
+ )
1282
+ list_results = [
1283
+ self._convert_joined_row_to_model(row, column_names)
1284
+ for row in row_list
1285
+ ]
1286
+ if not self._bypass_cache:
1287
+ cache_key = self._make_cache_key(fetch_one=False)
1288
+ self.db._cache_set( # noqa: SLF001
1289
+ self.table_name,
1290
+ cache_key,
1291
+ list_results,
1292
+ ttl=self._query_cache_ttl,
1293
+ )
1294
+ return list_results
1295
+
1296
+ # Standard converter
780
1297
  if fetch_one:
781
- # Ensure we pass a tuple, not a list, to _convert_row_to_model
782
- if isinstance(result, list):
783
- result = result[
784
- 0
785
- ] # Get the first (and only) result if it's wrapped in a list.
786
- single_result = self._convert_row_to_model(result)
787
- # Cache single result (unless bypass is enabled)
1298
+ std_single_row: tuple[Any, ...] = (
1299
+ result if isinstance(result, tuple) else result[0]
1300
+ )
1301
+ single_result = self._convert_row_to_model(std_single_row)
788
1302
  if not self._bypass_cache:
1303
+ cache_key = self._make_cache_key(fetch_one=True)
789
1304
  self.db._cache_set( # noqa: SLF001
790
1305
  self.table_name,
791
1306
  cache_key,
@@ -794,9 +1309,12 @@ class QueryBuilder(Generic[T]):
794
1309
  )
795
1310
  return single_result
796
1311
 
797
- list_results = [self._convert_row_to_model(row) for row in result]
798
- # Cache list result (unless bypass is enabled)
1312
+ std_row_list: list[tuple[Any, ...]] = (
1313
+ result if isinstance(result, list) else [result]
1314
+ )
1315
+ list_results = [self._convert_row_to_model(row) for row in std_row_list]
799
1316
  if not self._bypass_cache:
1317
+ cache_key = self._make_cache_key(fetch_one=False)
800
1318
  self.db._cache_set( # noqa: SLF001
801
1319
  self.table_name,
802
1320
  cache_key,
@@ -846,7 +1364,7 @@ class QueryBuilder(Generic[T]):
846
1364
  Returns:
847
1365
  The number of results that match the current query conditions.
848
1366
  """
849
- result = self._execute_query(count_only=True)
1367
+ result, _column_names = self._execute_query(count_only=True)
850
1368
 
851
1369
  return int(result[0][0]) if result else 0
852
1370
 
@@ -867,7 +1385,7 @@ class QueryBuilder(Generic[T]):
867
1385
  Raises:
868
1386
  RecordDeletionError: If there's an error deleting the records.
869
1387
  """
870
- sql = f'DELETE FROM "{self.table_name}"' # noqa: S608 # nosec
1388
+ sql = f'DELETE FROM "{self.table_name}"' # nosec # noqa: S608
871
1389
 
872
1390
  # Build the WHERE clause with special handling for None (NULL in SQL)
873
1391
  values, where_clause = self._parse_filter()
@@ -880,12 +1398,16 @@ class QueryBuilder(Generic[T]):
880
1398
  self.db._log_sql(sql, values) # noqa: SLF001
881
1399
 
882
1400
  try:
883
- with self.db.connect() as conn:
884
- cursor = conn.cursor()
885
- cursor.execute(sql, values)
886
- deleted_count = cursor.rowcount
887
- self.db._maybe_commit() # noqa: SLF001
888
- self.db._cache_invalidate_table(self.table_name) # noqa: SLF001
889
- return deleted_count
1401
+ conn = self.db.connect()
1402
+ cursor = conn.cursor()
1403
+ cursor.execute(sql, values)
1404
+ deleted_count = cursor.rowcount
1405
+ self.db._maybe_commit() # noqa: SLF001
1406
+ self.db._cache_invalidate_table(self.table_name) # noqa: SLF001
890
1407
  except sqlite3.Error as exc:
1408
+ # Rollback implicit transaction if not in user-managed transaction
1409
+ if not self.db._in_transaction and self.db.conn: # noqa: SLF001
1410
+ self.db.conn.rollback()
891
1411
  raise RecordDeletionError(self.table_name) from exc
1412
+ else:
1413
+ return deleted_count
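Finally, a sketch of the rollback behaviour added above: outside a user-managed transaction, a failing DELETE no longer leaves SQLite's implicit transaction open (Book and the filter are hypothetical; the failure itself is only described).

    from sqliter.exceptions import RecordDeletionError

    try:
        deleted = db.select(Book).filter(title__startswith="A").delete()
    except RecordDeletionError:
        # the implicit transaction was rolled back before re-raising, so
        # the connection is clean for the next statement
        ...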