sqliter-py 0.9.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. sqliter/constants.py +4 -3
  2. sqliter/exceptions.py +43 -0
  3. sqliter/model/__init__.py +38 -3
  4. sqliter/model/foreign_key.py +153 -0
  5. sqliter/model/model.py +42 -3
  6. sqliter/model/unique.py +20 -11
  7. sqliter/orm/__init__.py +16 -0
  8. sqliter/orm/fields.py +412 -0
  9. sqliter/orm/foreign_key.py +8 -0
  10. sqliter/orm/model.py +243 -0
  11. sqliter/orm/query.py +221 -0
  12. sqliter/orm/registry.py +169 -0
  13. sqliter/query/query.py +720 -69
  14. sqliter/sqliter.py +533 -76
  15. sqliter/tui/__init__.py +62 -0
  16. sqliter/tui/__main__.py +6 -0
  17. sqliter/tui/app.py +179 -0
  18. sqliter/tui/demos/__init__.py +96 -0
  19. sqliter/tui/demos/base.py +114 -0
  20. sqliter/tui/demos/caching.py +283 -0
  21. sqliter/tui/demos/connection.py +150 -0
  22. sqliter/tui/demos/constraints.py +211 -0
  23. sqliter/tui/demos/crud.py +154 -0
  24. sqliter/tui/demos/errors.py +231 -0
  25. sqliter/tui/demos/field_selection.py +150 -0
  26. sqliter/tui/demos/filters.py +389 -0
  27. sqliter/tui/demos/models.py +248 -0
  28. sqliter/tui/demos/ordering.py +156 -0
  29. sqliter/tui/demos/orm.py +460 -0
  30. sqliter/tui/demos/results.py +241 -0
  31. sqliter/tui/demos/string_filters.py +210 -0
  32. sqliter/tui/demos/timestamps.py +126 -0
  33. sqliter/tui/demos/transactions.py +177 -0
  34. sqliter/tui/runner.py +116 -0
  35. sqliter/tui/styles/app.tcss +130 -0
  36. sqliter/tui/widgets/__init__.py +7 -0
  37. sqliter/tui/widgets/code_display.py +81 -0
  38. sqliter/tui/widgets/demo_list.py +65 -0
  39. sqliter/tui/widgets/output_display.py +92 -0
  40. {sqliter_py-0.9.0.dist-info → sqliter_py-0.16.0.dist-info}/METADATA +27 -11
  41. sqliter_py-0.16.0.dist-info/RECORD +47 -0
  42. {sqliter_py-0.9.0.dist-info → sqliter_py-0.16.0.dist-info}/WHEEL +2 -2
  43. sqliter_py-0.16.0.dist-info/entry_points.txt +3 -0
  44. sqliter_py-0.9.0.dist-info/RECORD +0 -14
sqliter/query/query.py CHANGED
@@ -9,25 +9,33 @@ raw SQL.
9
9
 
10
10
  from __future__ import annotations
11
11
 
12
+ import hashlib
13
+ import json
14
+ import re
12
15
  import sqlite3
13
16
  import warnings
17
+ from dataclasses import dataclass
14
18
  from typing import (
15
19
  TYPE_CHECKING,
16
20
  Any,
17
21
  Callable,
22
+ Generic,
18
23
  Literal,
19
24
  Optional,
25
+ TypeVar,
20
26
  Union,
27
+ cast,
21
28
  overload,
22
29
  )
23
30
 
24
- from typing_extensions import LiteralString, Self
31
+ from typing_extensions import Self
25
32
 
26
33
  from sqliter.constants import OPERATOR_MAPPING
27
34
  from sqliter.exceptions import (
28
35
  InvalidFilterError,
29
36
  InvalidOffsetError,
30
37
  InvalidOrderError,
38
+ InvalidRelationshipError,
31
39
  RecordDeletionError,
32
40
  RecordFetchError,
33
41
  )
@@ -38,13 +46,43 @@ if TYPE_CHECKING: # pragma: no cover
38
46
  from sqliter import SqliterDB
39
47
  from sqliter.model import BaseDBModel, SerializableField
40
48
 
49
+ # TypeVar for generic QueryBuilder
50
+ T = TypeVar("T", bound="BaseDBModel")
51
+
41
52
  # Define a type alias for the possible value types
42
53
  FilterValue = Union[
43
54
  str, int, float, bool, None, list[Union[str, int, float, bool]]
44
55
  ]
45
56
 
46
57
 
47
- class QueryBuilder:
58
+ @dataclass
59
+ class JoinInfo:
60
+ """Metadata for a JOIN clause.
61
+
62
+ Attributes:
63
+ alias: Table alias for the JOIN (e.g., "t1", "t2").
64
+ table_name: Actual table name in the database.
65
+ model_class: The model class for the joined table.
66
+ fk_field: FK field name on the parent model.
67
+ parent_alias: Alias of the parent table in the JOIN chain.
68
+ fk_column: The FK column name (e.g., "author_id").
69
+ join_type: Type of JOIN ("LEFT" or "INNER").
70
+ path: Full relationship path (e.g., "post__author").
71
+ is_nullable: Whether the FK is nullable.
72
+ """
73
+
74
+ alias: str
75
+ table_name: str
76
+ model_class: type[BaseDBModel]
77
+ fk_field: str
78
+ parent_alias: str
79
+ fk_column: str
80
+ join_type: str
81
+ path: str
82
+ is_nullable: bool
83
+
84
+
85
+ class QueryBuilder(Generic[T]):
48
86
  """Builds and executes database queries for a specific model.
49
87
 
50
88
  This class provides methods to construct SQL queries, apply filters,
@@ -52,7 +90,7 @@ class QueryBuilder:
52
90
 
53
91
  Attributes:
54
92
  db (SqliterDB): The database connection object.
55
- model_class (type[BaseDBModel]): The Pydantic model class.
93
+ model_class (type[T]): The Pydantic model class.
56
94
  table_name (str): The name of the database table.
57
95
  filters (list): List of applied filter conditions.
58
96
  _limit (Optional[int]): The LIMIT clause value, if any.
@@ -64,7 +102,7 @@ class QueryBuilder:
64
102
  def __init__(
65
103
  self,
66
104
  db: SqliterDB,
67
- model_class: type[BaseDBModel],
105
+ model_class: type[T],
68
106
  fields: Optional[list[str]] = None,
69
107
  ) -> None:
70
108
  """Initialize a new QueryBuilder instance.
@@ -76,13 +114,18 @@ class QueryBuilder:
76
114
  are selected.
77
115
  """
78
116
  self.db = db
79
- self.model_class = model_class
117
+ self.model_class: type[T] = model_class
80
118
  self.table_name = model_class.get_table_name() # Use model_class method
81
119
  self.filters: list[tuple[str, Any, str]] = []
82
120
  self._limit: Optional[int] = None
83
121
  self._offset: Optional[int] = None
84
122
  self._order_by: Optional[str] = None
85
123
  self._fields: Optional[list[str]] = fields
124
+ self._bypass_cache: bool = False
125
+ self._query_cache_ttl: Optional[int] = None
126
+ # Eager loading support
127
+ self._select_related_paths: list[str] = []
128
+ self._join_info: list[JoinInfo] = []
86
129
 
87
130
  if self._fields:
88
131
  self._validate_fields()
@@ -103,7 +146,7 @@ class QueryBuilder:
103
146
  )
104
147
  raise ValueError(err_message)
105
148
 
106
- def filter(self, **conditions: str | float | None) -> QueryBuilder:
149
+ def filter(self, **conditions: FilterValue) -> Self:
107
150
  """Apply filter conditions to the query.
108
151
 
109
152
  This method allows adding one or more filter conditions to the query.
@@ -128,17 +171,71 @@ class QueryBuilder:
128
171
 
129
172
  for field, value in conditions.items():
130
173
  field_name, operator = self._parse_field_operator(field)
131
- self._validate_field(field_name, valid_fields)
132
174
 
133
- if operator in ["__isnull", "__notnull"]:
134
- self._handle_null(field_name, value, operator)
175
+ # Check for relationship traversal (e.g., author__name)
176
+ if "__" in field_name and operator not in {
177
+ "__isnull",
178
+ "__notnull",
179
+ }:
180
+ # Handle relationship filter traversal
181
+ self._handle_relationship_filter(field_name, value, operator)
135
182
  else:
136
- handler = self._get_operator_handler(operator)
137
- handler(field_name, value, operator)
183
+ # Normal field filter
184
+ self._validate_field(field_name, valid_fields)
185
+ if operator in ["__isnull", "__notnull"]:
186
+ self._handle_null(field_name, value, operator)
187
+ else:
188
+ handler = self._get_operator_handler(operator)
189
+ handler(field_name, value, operator)
138
190
 
139
191
  return self
140
192
 
141
- def fields(self, fields: Optional[list[str]] = None) -> QueryBuilder:
193
+ def _handle_relationship_filter(
194
+ self, field_name: str, value: FilterValue, operator: str
195
+ ) -> None:
196
+ """Handle filter conditions across relationships.
197
+
198
+ Args:
199
+ field_name: The field name with relationship path
200
+ (e.g., "author__name").
201
+ value: The filter value.
202
+ operator: The filter operator.
203
+
204
+ Raises:
205
+ InvalidRelationshipError: If the relationship path is invalid.
206
+ """
207
+ # Split into relationship path and target field
208
+ parts = field_name.split("__")
209
+ relationship_path = "__".join(parts[:-1])
210
+ target_field = parts[-1]
211
+
212
+ # Build JOIN info for the relationship path
213
+ # This validates the path and populates _join_info
214
+ self._validate_and_build_join_info(relationship_path)
215
+
216
+ # Find the join info for this relationship path
217
+ join_info = next(
218
+ j for j in self._join_info if j.path == relationship_path
219
+ )
220
+
221
+ # Validate target field exists on the related model
222
+ if target_field not in join_info.model_class.model_fields:
223
+ error_msg = (
224
+ f"{field_name} - field '{target_field}' not found in "
225
+ f"{join_info.model_class.__name__}"
226
+ )
227
+ raise InvalidFilterError(error_msg)
228
+
229
+ # Apply filter with table alias
230
+ qualified_field = f'{join_info.alias}."{target_field}"'
231
+
232
+ # Use the appropriate handler
233
+ # Note: __isnull/__notnull operators don't reach here due to
234
+ # filter() method check at line 176-179
235
+ handler = self._get_operator_handler(operator)
236
+ handler(qualified_field, value, operator)
237
+
238
+ def fields(self, fields: Optional[list[str]] = None) -> Self:
142
239
  """Specify which fields to select in the query.
143
240
 
144
241
  Args:
@@ -155,7 +252,7 @@ class QueryBuilder:
155
252
  self._validate_fields()
156
253
  return self
157
254
 
158
- def exclude(self, fields: Optional[list[str]] = None) -> QueryBuilder:
255
+ def exclude(self, fields: Optional[list[str]] = None) -> Self:
159
256
  """Specify which fields to exclude from the query results.
160
257
 
161
258
  Args:
@@ -197,7 +294,7 @@ class QueryBuilder:
197
294
 
198
295
  return self
199
296
 
200
- def only(self, field: str) -> QueryBuilder:
297
+ def only(self, field: str) -> Self:
201
298
  """Specify a single field to select in the query.
202
299
 
203
300
  Args:
@@ -220,6 +317,118 @@ class QueryBuilder:
220
317
  self._fields = [field, "pk"]
221
318
  return self
222
319
 
320
+ def select_related(self, *paths: str) -> Self:
321
+ """Specify foreign key relationships to eager load via JOIN.
322
+
323
+ This method reduces the N+1 query problem by fetching related objects
324
+ in a single query using JOINs instead of lazy loading.
325
+
326
+ Args:
327
+ *paths: One or more relationship paths to eager load.
328
+ Single level: "author"
329
+ Nested levels: "post__author"
330
+ Multiple: "author", "publisher"
331
+
332
+ Returns:
333
+ The QueryBuilder instance for method chaining.
334
+
335
+ Raises:
336
+ InvalidRelationshipError: If a path contains invalid fields.
337
+
338
+ Examples:
339
+ >>> # Single level eager load
340
+ >>> db.select(Book).select_related("author").fetch_all()
341
+ >>> # Nested eager load
342
+ >>> db.select(Comment).select_related(
343
+ ... "post__author"
344
+ ... ).fetch_all()
345
+ >>> # Multiple paths
346
+ >>> db.select(Book).select_related(
347
+ ... "author", "publisher"
348
+ ... ).fetch_all()
349
+ """
350
+ # Store the paths
351
+ self._select_related_paths.extend(paths)
352
+
353
+ # Validate and build join info for each path
354
+ for path in paths:
355
+ self._validate_and_build_join_info(path)
356
+
357
+ return self
358
+
359
+ def _validate_and_build_join_info(self, path: str) -> None:
360
+ """Validate a relationship path and build JoinInfo entries.
361
+
362
+ Args:
363
+ path: Relationship path (e.g., "author" or "post__author").
364
+
365
+ Raises:
366
+ InvalidRelationshipError: If path contains invalid fields.
367
+ """
368
+ # Split path into segments
369
+ segments = path.split("__")
370
+
371
+ # Start with current model as parent
372
+ current_model: type[BaseDBModel] = self.model_class
373
+ parent_alias = "t0" # Main table alias
374
+
375
+ # Get next available alias number based on existing joins
376
+ next_alias_num = len(self._join_info) + 1
377
+
378
+ # Track progressive path for nested relationships
379
+ progressive_path = []
380
+
381
+ for segment in segments:
382
+ # Check if segment is a valid FK field on current model
383
+ fk_descriptors = getattr(current_model, "fk_descriptors", {})
384
+
385
+ if segment not in fk_descriptors:
386
+ # Not an ORM-style FK - select_related() only supports ORM FKs
387
+ model_name = current_model.__name__
388
+ raise InvalidRelationshipError(path, segment, model_name)
389
+
390
+ # ORM FK descriptor
391
+ fk_descriptor = fk_descriptors[segment]
392
+ to_model = fk_descriptor.to_model
393
+ fk_column = f"{segment}_id"
394
+ is_nullable = fk_descriptor.fk_info.null
395
+
396
+ # Create alias for this join using global counter
397
+ alias = f"t{next_alias_num}"
398
+ next_alias_num += 1
399
+
400
+ # Build progressive path for this level
401
+ progressive_path.append(segment)
402
+ current_path = "__".join(progressive_path)
403
+
404
+ # Check if this path segment already exists to avoid duplicate JOINs
405
+ if any(j.path == current_path for j in self._join_info):
406
+ # Path exists - find existing JoinInfo to continue chain
407
+ existing_join = next(
408
+ j for j in self._join_info if j.path == current_path
409
+ )
410
+ current_model = existing_join.model_class
411
+ parent_alias = existing_join.alias
412
+ continue
413
+
414
+ # Build JoinInfo
415
+ join_info = JoinInfo(
416
+ alias=alias,
417
+ table_name=to_model.get_table_name(),
418
+ model_class=to_model,
419
+ fk_field=segment,
420
+ parent_alias=parent_alias,
421
+ fk_column=fk_column,
422
+ join_type="LEFT" if is_nullable else "INNER",
423
+ path=current_path,
424
+ is_nullable=is_nullable,
425
+ )
426
+ self._join_info.append(join_info)
427
+
428
+ # Move to next level
429
+ current_model = to_model
430
+ parent_alias = alias
431
+
223
432
  def _get_operator_handler(
224
433
  self, operator: str
225
434
  ) -> Callable[[str, Any, str], None]:
@@ -236,6 +445,7 @@ class QueryBuilder:
236
445
  "__notnull": self._handle_null,
237
446
  "__in": self._handle_in,
238
447
  "__not_in": self._handle_in,
448
+ "__like": self._handle_like,
239
449
  "__startswith": self._handle_like,
240
450
  "__endswith": self._handle_like,
241
451
  "__contains": self._handle_like,
@@ -275,16 +485,22 @@ class QueryBuilder:
275
485
  value: The value to compare against.
276
486
  operator: The operator string (usually '__eq').
277
487
 
488
+ Raises:
489
+ TypeError: If the value is a list (lists only valid with __in).
490
+
278
491
  This method adds an equality condition to the filters list, handling
279
492
  NULL values separately.
280
493
  """
494
+ if isinstance(value, list):
495
+ msg = f"{field_name} requires scalar for '{operator}', not list"
496
+ raise TypeError(msg)
281
497
  if value is None:
282
498
  self.filters.append((f"{field_name} IS NULL", None, "__isnull"))
283
499
  else:
284
500
  self.filters.append((field_name, value, operator))
285
501
 
286
502
  def _handle_null(
287
- self, field_name: str, value: Union[str, float, None], operator: str
503
+ self, field_name: str, value: FilterValue, operator: str
288
504
  ) -> None:
289
505
  """Handle IS NULL and IS NOT NULL filter conditions.
290
506
 
@@ -338,7 +554,7 @@ class QueryBuilder:
338
554
  Args:
339
555
  field_name: The name of the field to filter on.
340
556
  value: The pattern to match against.
341
- operator: The operator string (e.g., '__startswith', '__contains').
557
+ operator: The operator string (e.g., '__like', '__startswith').
342
558
 
343
559
  Raises:
344
560
  TypeError: If the value is not a string.
@@ -349,19 +565,33 @@ class QueryBuilder:
349
565
  if not isinstance(value, str):
350
566
  err = f"{field_name} requires a string value for '{operator}'"
351
567
  raise TypeError(err)
352
- formatted_value = self._format_string_for_operator(operator, value)
353
- if operator in ["__startswith", "__endswith", "__contains"]:
568
+ if operator == "__like":
569
+ # Raw LIKE - user provides the full pattern with % wildcards
354
570
  self.filters.append(
355
571
  (
356
- f"{field_name} GLOB ?",
357
- [formatted_value],
572
+ f"{field_name} LIKE ?",
573
+ [value],
358
574
  operator,
359
575
  )
360
576
  )
361
- elif operator in ["__istartswith", "__iendswith", "__icontains"]:
577
+ elif operator in [
578
+ "__startswith",
579
+ "__endswith",
580
+ "__contains",
581
+ "__istartswith",
582
+ "__iendswith",
583
+ "__icontains",
584
+ ]:
585
+ formatted_value = self._format_string_for_operator(operator, value)
586
+ sql_operator = OPERATOR_MAPPING[operator]
587
+ field_expr = (
588
+ f"{field_name} COLLATE NOCASE"
589
+ if operator in {"__istartswith", "__iendswith", "__icontains"}
590
+ else field_name
591
+ )
362
592
  self.filters.append(
363
593
  (
364
- f"{field_name} LIKE ?",
594
+ f"{field_expr} {sql_operator} ?",
365
595
  [formatted_value],
366
596
  operator,
367
597
  )
@@ -377,8 +607,14 @@ class QueryBuilder:
377
607
  value: The value to compare against.
378
608
  operator: The comparison operator string (e.g., '__lt', '__gte').
379
609
 
610
+ Raises:
611
+ TypeError: If the value is a list (lists only valid with __in).
612
+
380
613
  This method adds a comparison condition to the filters list.
381
614
  """
615
+ if isinstance(value, list):
616
+ msg = f"{field_name} requires scalar for '{operator}', not list"
617
+ raise TypeError(msg)
382
618
  sql_operator = OPERATOR_MAPPING[operator]
383
619
  self.filters.append((f"{field_name} {sql_operator} ?", value, operator))
384
620
 
@@ -421,6 +657,54 @@ class QueryBuilder:
421
657
  # Return the formatted string or the original value if no match
422
658
  return format_map.get(operator, value)
423
659
 
660
+ def _build_join_sql(
661
+ self,
662
+ ) -> tuple[
663
+ str,
664
+ str,
665
+ list[tuple[str, str, type[BaseDBModel]]],
666
+ ]:
667
+ """Build JOIN clauses and aliased column SELECT statements.
668
+
669
+ Returns:
670
+ A tuple containing:
671
+ - join_clause: SQL JOIN clauses
672
+ (e.g., "LEFT JOIN authors AS t1 ON ...")
673
+ - select_clause: SELECT clause with aliased columns
674
+ - column_names: List of (alias, field_name, model_class) tuples
675
+ """
676
+ # Note: Only called when _join_info is not empty (line 840)
677
+ select_parts: list[str] = []
678
+ column_names: list[tuple[str, str, type[BaseDBModel]]] = []
679
+ join_parts: list[str] = []
680
+
681
+ # Main table columns (t0)
682
+ for field in self.model_class.model_fields:
683
+ alias = f"t0__{field}"
684
+ select_parts.append(f't0."{field}" AS "{alias}"')
685
+ column_names.append(("t0", field, self.model_class))
686
+
687
+ # Add JOINed table columns
688
+ for join in self._join_info:
689
+ # Build JOIN clause
690
+ join_clause = (
691
+ f"{join.join_type} JOIN "
692
+ f'"{join.table_name}" AS {join.alias} '
693
+ f'ON {join.parent_alias}."{join.fk_column}" = {join.alias}."pk"'
694
+ )
695
+ join_parts.append(join_clause)
696
+
697
+ # Add columns from joined table
698
+ for field in join.model_class.model_fields:
699
+ alias = f"{join.alias}__{field}"
700
+ select_parts.append(f'{join.alias}."{field}" AS "{alias}"')
701
+ column_names.append((join.alias, field, join.model_class))
702
+
703
+ select_clause = ", ".join(select_parts)
704
+ join_clause = " ".join(join_parts)
705
+
706
+ return join_clause, select_clause, column_names
707
+
424
708
  def limit(self, limit_value: int) -> Self:
425
709
  """Limit the number of results returned by the query.
426
710
 
@@ -513,12 +797,15 @@ class QueryBuilder:
513
797
  self._order_by = f'"{order_by_field}" {sort_order}'
514
798
  return self
515
799
 
516
- def _execute_query(
800
+ def _execute_query( # noqa: C901, PLR0912, PLR0915
517
801
  self,
518
802
  *,
519
803
  fetch_one: bool = False,
520
804
  count_only: bool = False,
521
- ) -> list[tuple[Any, ...]] | Optional[tuple[Any, ...]]:
805
+ ) -> tuple[
806
+ list[tuple[Any, ...]] | tuple[Any, ...],
807
+ list[tuple[str, str, type[BaseDBModel]]],
808
+ ]:
522
809
  """Execute the constructed SQL query.
523
810
 
524
811
  Args:
@@ -526,12 +813,101 @@ class QueryBuilder:
526
813
  count_only: If True, return only the count of results.
527
814
 
528
815
  Returns:
529
- A list of tuples (all results), a single tuple (one result),
530
- or None if no results are found.
816
+ A tuple containing:
817
+ - Query results (list of tuples or single tuple)
818
+ - Column metadata (list of (alias, field_name, model_class) tuples)
819
+ Empty list for non-JOIN queries (backward compatible).
531
820
 
532
821
  Raises:
533
822
  RecordFetchError: If there's an error executing the query.
534
823
  """
824
+ # Check if we need JOINs for eager loading or relationship filters
825
+ # Need JOIN if: we have join_info AND (not count/fields OR filters
826
+ # use joins)
827
+ needs_join_for_filters = False
828
+ if self._join_info and (count_only or self._fields):
829
+ # Parse filter to check if it references joined tables
830
+ values, where_clause = self._parse_filter()
831
+ # Check for table aliases like t1., t2., etc.
832
+ if re.search(r"\bt\d+\.", where_clause):
833
+ needs_join_for_filters = True
834
+
835
+ if self._join_info and (
836
+ not (count_only or self._fields) or needs_join_for_filters
837
+ ):
838
+ # Use JOIN-based query
839
+ join_clause, select_clause, column_names = self._build_join_sql()
840
+
841
+ # For count_only with JOINs, we don't need all the columns
842
+ if count_only and needs_join_for_filters:
843
+ # table_name validated - safe from SQL injection
844
+ sql = (
845
+ f'SELECT COUNT(*) FROM "{self.table_name}" AS t0 ' # noqa: S608
846
+ f"{join_clause}"
847
+ )
848
+ elif self._fields:
849
+ # Build custom field selection with JOINs
850
+ field_list = ", ".join(f't0."{f}"' for f in self._fields)
851
+ # table_name and fields validated - safe from SQL injection
852
+ sql = (
853
+ f"SELECT {field_list} FROM " # noqa: S608
854
+ f'"{self.table_name}" AS t0 {join_clause}'
855
+ )
856
+ # Rebuild column_names to match selected fields only
857
+ column_names = [
858
+ ("t0", field, self.model_class) for field in self._fields
859
+ ]
860
+ else:
861
+ # table_name validated - safe from SQL injection
862
+ sql = (
863
+ f"SELECT {select_clause} FROM " # noqa: S608
864
+ f'"{self.table_name}" AS t0 {join_clause}'
865
+ )
866
+
867
+ # Build WHERE clause with special handling for NULL
868
+ values, where_clause = self._parse_filter()
869
+
870
+ if self.filters:
871
+ sql += f" WHERE {where_clause}"
872
+
873
+ if self._order_by:
874
+ # Qualify ORDER BY column with t0 alias to avoid ambiguity
875
+ # Extract field name and direction from _order_by
876
+ # _order_by format: '"field" ASC' or '"field" DESC'
877
+ match = re.match(r'"([^"]+)"\s+(.*)', self._order_by)
878
+ if match:
879
+ field_name = match.group(1)
880
+ direction = match.group(2)
881
+ sql += f' ORDER BY t0."{field_name}" {direction}'
882
+ elif self._order_by.lower().startswith("rowid"):
883
+ # Fallback for non-quoted patterns such as "rowid DESC"
884
+ sql += f" ORDER BY t0.{self._order_by}"
885
+
886
+ if self._limit is not None:
887
+ sql += " LIMIT ?"
888
+ values.append(self._limit)
889
+
890
+ if self._offset is not None:
891
+ sql += " OFFSET ?"
892
+ values.append(self._offset)
893
+
894
+ # Log the SQL if debug is enabled
895
+ if self.db.debug:
896
+ self.db._log_sql(sql, values) # noqa: SLF001
897
+
898
+ try:
899
+ conn = self.db.connect()
900
+ cursor = conn.cursor()
901
+ cursor.execute(sql, values)
902
+ results = (
903
+ cursor.fetchall() if not fetch_one else cursor.fetchone()
904
+ )
905
+ except sqlite3.Error as exc:
906
+ raise RecordFetchError(self.table_name) from exc
907
+ else:
908
+ return (results, column_names)
909
+
910
+ # Non-JOIN query path (original behavior)
535
911
  if count_only:
536
912
  fields = "COUNT(*)"
537
913
  elif self._fields:
@@ -543,7 +919,7 @@ class QueryBuilder:
543
919
  f'"{field}"' for field in self.model_class.model_fields
544
920
  )
545
921
 
546
- sql = f'SELECT {fields} FROM "{self.table_name}"' # noqa: S608 # nosec
922
+ sql = f'SELECT {fields} FROM "{self.table_name}"' # noqa: S608
547
923
 
548
924
  # Build the WHERE clause with special handling for None (NULL in SQL)
549
925
  values, where_clause = self._parse_filter()
@@ -562,20 +938,21 @@ class QueryBuilder:
562
938
  sql += " OFFSET ?"
563
939
  values.append(self._offset)
564
940
 
565
- # Print the raw SQL and values if debug is enabled
566
941
  # Log the SQL if debug is enabled
567
942
  if self.db.debug:
568
943
  self.db._log_sql(sql, values) # noqa: SLF001
569
944
 
570
945
  try:
571
- with self.db.connect() as conn:
572
- cursor = conn.cursor()
573
- cursor.execute(sql, values)
574
- return cursor.fetchall() if not fetch_one else cursor.fetchone()
946
+ conn = self.db.connect()
947
+ cursor = conn.cursor()
948
+ cursor.execute(sql, values)
949
+ results = cursor.fetchall() if not fetch_one else cursor.fetchone()
575
950
  except sqlite3.Error as exc:
576
951
  raise RecordFetchError(self.table_name) from exc
952
+ else:
953
+ return (results, []) # Empty column_names for backward compat
577
954
 
578
- def _parse_filter(self) -> tuple[list[Any], LiteralString]:
955
+ def _parse_filter(self) -> tuple[list[Any], str]:
579
956
  """Parse the filter conditions into SQL clauses and values.
580
957
 
581
958
  Returns:
@@ -600,7 +977,7 @@ class QueryBuilder:
600
977
  where_clause = " AND ".join(where_clauses)
601
978
  return values, where_clause
602
979
 
603
- def _convert_row_to_model(self, row: tuple[Any, ...]) -> BaseDBModel:
980
+ def _convert_row_to_model(self, row: tuple[Any, ...]) -> T:
604
981
  """Convert a database row to a model instance.
605
982
 
606
983
  Args:
@@ -614,13 +991,118 @@ class QueryBuilder:
614
991
  field: self._deserialize(field, row[idx])
615
992
  for idx, field in enumerate(self._fields)
616
993
  }
617
- return self.model_class.model_validate_partial(data)
994
+ instance = self.model_class.model_validate_partial(data)
995
+ else:
996
+ data = {
997
+ field: self._deserialize(field, row[idx])
998
+ for idx, field in enumerate(self.model_class.model_fields)
999
+ }
1000
+ # For ORM mode, exclude FK descriptor fields from data
1001
+ for fk_field in getattr(self.model_class, "fk_descriptors", {}):
1002
+ data.pop(fk_field, None)
1003
+ instance = self.model_class(**data)
618
1004
 
619
- data = {
620
- field: self._deserialize(field, row[idx])
621
- for idx, field in enumerate(self.model_class.model_fields)
1005
+ # Set db_context for ORM lazy loading and reverse relationships
1006
+ if hasattr(instance, "db_context"):
1007
+ instance.db_context = self.db
1008
+ return instance
1009
+
1010
+ def _convert_joined_row_to_model(
1011
+ self,
1012
+ row: tuple[Any, ...],
1013
+ column_names: list[tuple[str, str, type[BaseDBModel]]],
1014
+ ) -> T:
1015
+ """Convert a JOINed database row to model instances with relationships.
1016
+
1017
+ This method parses aliased columns from JOIN queries, creates the main
1018
+ model instance, and populates related objects in the _fk_cache to avoid
1019
+ lazy loading.
1020
+
1021
+ Args:
1022
+ row: A tuple representing a database row from a JOIN query.
1023
+ column_names: List of (alias, field_name, model_class) tuples
1024
+ describing each column in the result.
1025
+
1026
+ Returns:
1027
+ An instance of the main model class with populated relationships.
1028
+ """
1029
+ # Group columns by table alias
1030
+ tables_data: dict[str, dict[str, Any]] = {}
1031
+ tables_models: dict[str, type[BaseDBModel]] = {}
1032
+
1033
+ for idx, (alias, field_name, model_class) in enumerate(column_names):
1034
+ if alias not in tables_data:
1035
+ tables_data[alias] = {}
1036
+ tables_models[alias] = model_class
1037
+ tables_data[alias][field_name] = row[idx]
1038
+
1039
+ # Build main model (t0)
1040
+ main_data = tables_data["t0"]
1041
+
1042
+ # Deserialize and create main instance
1043
+ main_instance_data = {
1044
+ field: self._deserialize(field, main_data[field])
1045
+ for field in self.model_class.model_fields
1046
+ if field in main_data
622
1047
  }
623
- return self.model_class(**data)
1048
+
1049
+ # For ORM mode, exclude FK descriptor fields from data
1050
+ for fk_field in getattr(self.model_class, "fk_descriptors", {}):
1051
+ main_instance_data.pop(fk_field, None)
1052
+
1053
+ if self._fields:
1054
+ # Partial field selection: use model_validate_partial to
1055
+ # avoid validation errors for missing required fields
1056
+ main_instance = self.model_class.model_validate_partial(
1057
+ main_instance_data
1058
+ )
1059
+ else:
1060
+ main_instance = self.model_class(**main_instance_data)
1061
+ main_instance.db_context = self.db # type: ignore[attr-defined]
1062
+
1063
+ # Process JOINed tables and populate _fk_cache
1064
+ # Track instances per alias for nested cache wiring
1065
+ instances_by_alias: dict[str, BaseDBModel] = {"t0": main_instance}
1066
+
1067
+ for join_info in self._join_info:
1068
+ alias = join_info.alias
1069
+ related_data = tables_data.get(alias)
1070
+ if related_data is None:
1071
+ continue
1072
+
1073
+ # Check if all fields are NULL (LEFT JOIN with no match)
1074
+ if all(v is None for v in related_data.values()):
1075
+ # No related object, skip
1076
+ continue
1077
+
1078
+ # Deserialize related object
1079
+ related_instance_data = {
1080
+ field: self._deserialize(field, related_data[field])
1081
+ for field in join_info.model_class.model_fields
1082
+ if field in related_data
1083
+ }
1084
+
1085
+ # Exclude FK descriptors from related data
1086
+ for fk_field in getattr(
1087
+ join_info.model_class, "fk_descriptors", {}
1088
+ ):
1089
+ related_instance_data.pop(fk_field, None)
1090
+
1091
+ related_instance = join_info.model_class(**related_instance_data)
1092
+ related_instance.db_context = self.db # type: ignore[attr-defined]
1093
+
1094
+ instances_by_alias[alias] = related_instance
1095
+
1096
+ # Attach to parent instance cache (supports nesting)
1097
+ parent_instance = instances_by_alias.get(join_info.parent_alias)
1098
+ if parent_instance is not None:
1099
+ parent_fk_cache = getattr(parent_instance, "_fk_cache", {})
1100
+ parent_fk_cache[join_info.fk_field] = related_instance
1101
+ object.__setattr__(
1102
+ parent_instance, "_fk_cache", parent_fk_cache
1103
+ )
1104
+
1105
+ return main_instance
624
1106
 
625
1107
  def _deserialize(
626
1108
  self, field_name: str, value: SerializableField
@@ -638,19 +1120,99 @@ class QueryBuilder:
638
1120
  field_name, value, return_local_time=self.db.return_local_time
639
1121
  )
640
1122
 
1123
def bypass_cache(self) -> Self:
    """Force this query to skip the cache and hit the database.

    Regardless of the global cache configuration, a query chained
    through this method always executes against SQLite. Useful when
    fresh data is required.

    Returns:
        The QueryBuilder instance for method chaining.

    Example:
        >>> db.select(User).filter(name="Alice").bypass_cache().fetch_one()
    """
    self._bypass_cache = True
    return self
1138
+
1139
def cache_ttl(self, ttl: int) -> Self:
    """Override the global cache TTL for this query only.

    The cached result of this query expires after ``ttl`` seconds,
    taking precedence over the global cache_ttl setting.

    Args:
        ttl: Time-to-live in seconds for the cached result.

    Returns:
        The QueryBuilder instance for method chaining.

    Raises:
        ValueError: If ttl is negative.

    Example:
        >>> db.select(User).cache_ttl(60).fetch_all()
    """
    # Reject negative TTLs up front; zero is allowed (expire immediately).
    if ttl < 0:
        raise ValueError("TTL must be non-negative")
    self._query_cache_ttl = ttl
    return self
1162
+
1163
+ def _make_cache_key(self, *, fetch_one: bool) -> str:
1164
+ """Generate a cache key from the current query state.
1165
+
1166
+ Args:
1167
+ fetch_one: Whether this is a fetch_one or fetch_all query.
1168
+
1169
+ Returns:
1170
+ A SHA256 hash representing the current query state.
1171
+
1172
+ Raises:
1173
+ ValueError: If filters contain incomparable types that prevent
1174
+ cache key generation (e.g., filtering the same field with
1175
+ both string and numeric values).
1176
+ """
1177
+ # Sort filters for consistent cache keys
1178
+ # Note: This requires filter values to be comparable. Avoid filtering
1179
+ # the same field with incompatible types (e.g., name="Alice" and
1180
+ # name=42 in the same query).
1181
+ try:
1182
+ sorted_filters = sorted(self.filters)
1183
+ except TypeError as exc:
1184
+ msg = (
1185
+ "Cannot generate cache key: filters contain incomparable "
1186
+ "types. Avoid filtering the same field with incompatible "
1187
+ "value types (e.g., strings and numbers)."
1188
+ )
1189
+ raise ValueError(msg) from exc
1190
+
1191
+ # Create a deterministic representation of the query
1192
+ key_parts = {
1193
+ "table": self.table_name,
1194
+ "filters": sorted_filters,
1195
+ "limit": self._limit,
1196
+ "offset": self._offset,
1197
+ "order_by": self._order_by,
1198
+ "fields": tuple(sorted(self._fields)) if self._fields else None,
1199
+ "fetch_one": fetch_one,
1200
+ "select_related": tuple(sorted(self._select_related_paths)),
1201
+ }
1202
+
1203
+ # Hash the key parts
1204
+ key_json = json.dumps(key_parts, sort_keys=True, default=str)
1205
+ return hashlib.sha256(key_json.encode()).hexdigest()
1206
+
641
1207
  @overload
642
- def _fetch_result(
643
- self, *, fetch_one: Literal[True]
644
- ) -> Optional[BaseDBModel]: ...
1208
+ def _fetch_result(self, *, fetch_one: Literal[True]) -> Optional[T]: ...
645
1209
 
646
1210
  @overload
647
- def _fetch_result(
648
- self, *, fetch_one: Literal[False]
649
- ) -> list[BaseDBModel]: ...
1211
+ def _fetch_result(self, *, fetch_one: Literal[False]) -> list[T]: ...
650
1212
 
651
- def _fetch_result(
1213
+ def _fetch_result( # noqa: C901, PLR0911
652
1214
  self, *, fetch_one: bool = False
653
- ) -> Union[list[BaseDBModel], Optional[BaseDBModel]]:
1215
+ ) -> Union[list[T], Optional[T]]:
654
1216
  """Fetch and convert query results to model instances.
655
1217
 
656
1218
  Args:
@@ -660,24 +1222,108 @@ class QueryBuilder:
660
1222
  A list of model instances, a single model instance, or None if no
661
1223
  results are found.
662
1224
  """
663
- result = self._execute_query(fetch_one=fetch_one)
1225
+ # Check cache first (unless bypass is enabled)
1226
+ if not self._bypass_cache:
1227
+ cache_key = self._make_cache_key(fetch_one=fetch_one)
1228
+ hit, cached = self.db._cache_get(self.table_name, cache_key) # noqa: SLF001
1229
+ if hit:
1230
+ # Cache stores correctly typed data, cast from Any
1231
+ return cast("Union[list[T], Optional[T]]", cached)
1232
+
1233
+ result, column_names = self._execute_query(fetch_one=fetch_one)
664
1234
 
665
1235
  if not result:
1236
+ if not self._bypass_cache:
1237
+ # Generate cache key for empty result
1238
+ cache_key = self._make_cache_key(fetch_one=fetch_one)
1239
+ if fetch_one:
1240
+ # Cache empty result
1241
+ self.db._cache_set( # noqa: SLF001
1242
+ self.table_name,
1243
+ cache_key,
1244
+ None,
1245
+ ttl=self._query_cache_ttl,
1246
+ )
1247
+ return None
1248
+ # Cache empty list
1249
+ self.db._cache_set( # noqa: SLF001
1250
+ self.table_name, cache_key, [], ttl=self._query_cache_ttl
1251
+ )
1252
+ return []
1253
+ return None if fetch_one else []
1254
+
1255
+ # Convert results based on whether we have JOIN data
1256
+ if column_names:
1257
+ # JOIN-aware converter - needs column_names
666
1258
  if fetch_one:
667
- return None
668
- return []
1259
+ # When fetch_one=True, result is a single tuple
1260
+ # Narrow the type from the union
1261
+ single_row: tuple[Any, ...] = (
1262
+ result if isinstance(result, tuple) else result[0]
1263
+ )
1264
+ single_result = self._convert_joined_row_to_model(
1265
+ single_row, column_names
1266
+ )
1267
+ if not self._bypass_cache:
1268
+ cache_key = self._make_cache_key(fetch_one=True)
1269
+ self.db._cache_set( # noqa: SLF001
1270
+ self.table_name,
1271
+ cache_key,
1272
+ single_result,
1273
+ ttl=self._query_cache_ttl,
1274
+ )
1275
+ return single_result
1276
+
1277
+ # When fetch_one=False, result is a list of tuples
1278
+ # Narrow the type from the union
1279
+ row_list: list[tuple[Any, ...]] = (
1280
+ result if isinstance(result, list) else [result]
1281
+ )
1282
+ list_results = [
1283
+ self._convert_joined_row_to_model(row, column_names)
1284
+ for row in row_list
1285
+ ]
1286
+ if not self._bypass_cache:
1287
+ cache_key = self._make_cache_key(fetch_one=False)
1288
+ self.db._cache_set( # noqa: SLF001
1289
+ self.table_name,
1290
+ cache_key,
1291
+ list_results,
1292
+ ttl=self._query_cache_ttl,
1293
+ )
1294
+ return list_results
669
1295
 
1296
+ # Standard converter
670
1297
  if fetch_one:
671
- # Ensure we pass a tuple, not a list, to _convert_row_to_model
672
- if isinstance(result, list):
673
- result = result[
674
- 0
675
- ] # Get the first (and only) result if it's wrapped in a list.
676
- return self._convert_row_to_model(result)
1298
+ std_single_row: tuple[Any, ...] = (
1299
+ result if isinstance(result, tuple) else result[0]
1300
+ )
1301
+ single_result = self._convert_row_to_model(std_single_row)
1302
+ if not self._bypass_cache:
1303
+ cache_key = self._make_cache_key(fetch_one=True)
1304
+ self.db._cache_set( # noqa: SLF001
1305
+ self.table_name,
1306
+ cache_key,
1307
+ single_result,
1308
+ ttl=self._query_cache_ttl,
1309
+ )
1310
+ return single_result
677
1311
 
678
- return [self._convert_row_to_model(row) for row in result]
1312
+ std_row_list: list[tuple[Any, ...]] = (
1313
+ result if isinstance(result, list) else [result]
1314
+ )
1315
+ list_results = [self._convert_row_to_model(row) for row in std_row_list]
1316
+ if not self._bypass_cache:
1317
+ cache_key = self._make_cache_key(fetch_one=False)
1318
+ self.db._cache_set( # noqa: SLF001
1319
+ self.table_name,
1320
+ cache_key,
1321
+ list_results,
1322
+ ttl=self._query_cache_ttl,
1323
+ )
1324
+ return list_results
679
1325
 
680
def fetch_all(self) -> list[T]:
    """Return every record matching the current query.

    Returns:
        A list of model instances for all matching rows (empty when
        nothing matches).
    """
    return self._fetch_result(fetch_one=False)
687
1333
 
688
def fetch_one(self) -> Optional[T]:
    """Return a single record matching the current query.

    Returns:
        A single model instance, or None when no row matches.
    """
    return self._fetch_result(fetch_one=True)
695
1341
 
696
def fetch_first(self) -> Optional[T]:
    """Return only the first matching record.

    The query is constrained to a single row before execution.

    Returns:
        The first model instance found, or None when nothing matches.
    """
    self._limit = 1
    return self._fetch_result(fetch_one=True)
704
1350
 
705
- def fetch_last(self) -> Optional[BaseDBModel]:
1351
+ def fetch_last(self) -> Optional[T]:
706
1352
  """Fetch the last result of the query.
707
1353
 
708
1354
  Returns:
@@ -718,7 +1364,7 @@ class QueryBuilder:
718
1364
  Returns:
719
1365
  The number of results that match the current query conditions.
720
1366
  """
721
- result = self._execute_query(count_only=True)
1367
+ result, _column_names = self._execute_query(count_only=True)
722
1368
 
723
1369
  return int(result[0][0]) if result else 0
724
1370
 
@@ -739,7 +1385,7 @@ class QueryBuilder:
739
1385
  Raises:
740
1386
  RecordDeletionError: If there's an error deleting the records.
741
1387
  """
742
- sql = f'DELETE FROM "{self.table_name}"' # noqa: S608 # nosec
1388
+ sql = f'DELETE FROM "{self.table_name}"' # nosec # noqa: S608
743
1389
 
744
1390
  # Build the WHERE clause with special handling for None (NULL in SQL)
745
1391
  values, where_clause = self._parse_filter()
@@ -752,11 +1398,16 @@ class QueryBuilder:
752
1398
  self.db._log_sql(sql, values) # noqa: SLF001
753
1399
 
754
1400
  try:
755
- with self.db.connect() as conn:
756
- cursor = conn.cursor()
757
- cursor.execute(sql, values)
758
- deleted_count = cursor.rowcount
759
- self.db._maybe_commit() # noqa: SLF001
760
- return deleted_count
1401
+ conn = self.db.connect()
1402
+ cursor = conn.cursor()
1403
+ cursor.execute(sql, values)
1404
+ deleted_count = cursor.rowcount
1405
+ self.db._maybe_commit() # noqa: SLF001
1406
+ self.db._cache_invalidate_table(self.table_name) # noqa: SLF001
761
1407
  except sqlite3.Error as exc:
1408
+ # Rollback implicit transaction if not in user-managed transaction
1409
+ if not self.db._in_transaction and self.db.conn: # noqa: SLF001
1410
+ self.db.conn.rollback()
762
1411
  raise RecordDeletionError(self.table_name) from exc
1412
+ else:
1413
+ return deleted_count