sqlobjects 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlobjects/__init__.py +38 -0
- sqlobjects/config.py +519 -0
- sqlobjects/database.py +586 -0
- sqlobjects/exceptions.py +538 -0
- sqlobjects/expressions.py +1054 -0
- sqlobjects/fields.py +1866 -0
- sqlobjects/history.py +101 -0
- sqlobjects/metadata.py +1130 -0
- sqlobjects/model.py +1009 -0
- sqlobjects/objects.py +812 -0
- sqlobjects/queries.py +1059 -0
- sqlobjects/relations.py +843 -0
- sqlobjects/session.py +389 -0
- sqlobjects/signals.py +464 -0
- sqlobjects/utils/__init__.py +5 -0
- sqlobjects/utils/naming.py +53 -0
- sqlobjects/utils/pattern.py +644 -0
- sqlobjects/validators.py +294 -0
- sqlobjects-0.1.0.dist-info/METADATA +29 -0
- sqlobjects-0.1.0.dist-info/RECORD +23 -0
- sqlobjects-0.1.0.dist-info/WHEEL +5 -0
- sqlobjects-0.1.0.dist-info/licenses/LICENSE +21 -0
- sqlobjects-0.1.0.dist-info/top_level.txt +1 -0
sqlobjects/queries.py
ADDED
|
@@ -0,0 +1,1059 @@
|
|
|
1
|
+
"""
|
|
2
|
+
SQLObjects Queries Module - Component-based Architecture
|
|
3
|
+
|
|
4
|
+
This module provides the core query building system for SQLObjects, using
|
|
5
|
+
composition pattern for better maintainability and performance.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import gc
import hashlib
from collections.abc import AsyncGenerator
from typing import Any, Generic, Literal, TypeVar, Union

from sqlalchemy import (
    BinaryExpression,
    ClauseElement,
    ColumnElement,
    Table,
    and_,
    asc,
    delete,
    desc,
    exists,
    func,
    literal,
    not_,
    or_,
    select,
    text,
    update,
)
from sqlalchemy.sql import operators

from .exceptions import DoesNotExist, MultipleObjectsReturned
from .expressions import SubqueryExpression
from .session import AsyncSession
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# Export classes for use in other modules
__all__ = ["Q", "QuerySet", "QueryBuilder", "QueryCache", "QueryExecutor", "T"]

# Type variables for generic support
T = TypeVar("T")

# Supported expression types for Q object combinations.
# NOTE(review): the trailing `Any` makes this Union accept any value at
# type-check time; it documents intent rather than constraining callers.
QueryType = Union[
    "Q",
    ColumnElement,
    BinaryExpression,
    ClauseElement,
    Any,  # For FunctionExpression and other SQLObjects expressions
]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Q:
    """Q object for logical combination of SQLAlchemy expressions.

    Focuses on combining SQLAlchemy expressions using logical operators (AND, OR, NOT).
    Supports both single and multiple expressions with automatic AND combination.

    Examples:
        # Single expression
        Q(User.age >= 18)

        # Multiple expressions (AND combination)
        Q(User.age >= 18, User.is_active == True)

        # Logical combinations
        Q(User.name == "John") | Q(User.name == "Jane")
        Q(User.age >= 18) & Q(User.is_active == True)
        ~Q(User.is_deleted == True)

        # Mixed with SQLAlchemy expressions
        Q(User.name == "John") & (User.age > 25)
    """

    def __init__(self, *expressions: Any):
        """Initialize Q object with SQLAlchemy expressions.

        Args:
            *expressions: SQLAlchemy expressions to combine with AND logic
        """
        self.expressions = list(expressions)
        self.connector = "AND"
        self.negated = False
        self.children: list[Q] = []

    def _combine(self, other: QueryType, connector: str) -> "Q":
        """Build a new Q node joining self and *other* under *connector*.

        Q operands become children of the new node; raw SQLAlchemy
        expressions are stored directly on the new node's expression list.
        The Q object must be on the left side when combining with plain
        SQLAlchemy expressions.
        """
        node = Q()
        node.connector = connector
        if isinstance(other, Q):
            node.children = [self, other]
        else:
            node.children = [self]
            node.expressions = [other]
        return node

    def __and__(self, other: QueryType) -> "Q":
        """Combine with another expression using AND logic.

        Args:
            other: Another Q object or SQLAlchemy expression

        Returns:
            New Q object representing the AND combination

        Raises:
            ArgumentError: If SQLAlchemy expression is on left side with Q object
        """
        return self._combine(other, "AND")

    def __or__(self, other: QueryType) -> "Q":
        """Combine with another expression using OR logic.

        Args:
            other: Another Q object or SQLAlchemy expression

        Returns:
            New Q object representing the OR combination
        """
        return self._combine(other, "OR")

    def __invert__(self) -> "Q":
        """Negate this Q object using NOT logic.

        Returns:
            New Q object representing the negated condition
        """
        flipped = Q(*self.expressions)
        flipped.connector = self.connector
        flipped.negated = not self.negated
        flipped.children = self.children.copy()
        return flipped

    def _to_sqlalchemy(self, table: Table) -> Any:
        """Convert Q object to SQLAlchemy condition expression.

        Args:
            table: The table for expression resolution

        Returns:
            SQLAlchemy condition expression
        """
        # Recursively resolve child Q objects first.
        parts = [child._to_sqlalchemy(table) for child in self.children]

        # SQLObjects expressions expose resolve(); raw SQLAlchemy clauses pass through.
        for expr in self.expressions:
            parts.append(expr.resolve(table) if hasattr(expr, "resolve") else expr)

        if not parts:
            # No conditions: an empty Q matches everything.
            combined = literal(True)
        elif len(parts) == 1:
            combined = parts[0]
        elif self.connector == "AND":
            combined = and_(*parts)
        else:  # OR
            combined = or_(*parts)

        return not_(combined) if self.negated else combined
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
class QueryBuilder:
    """Query builder with optimization - handles SQL construction and optimization.

    Accumulates query state (conditions, ordering, limits, joins, locks, ...)
    through copy-on-write ``add_*`` methods — each returns a modified copy so
    builder instances can be shared safely — and materializes a SQLAlchemy
    ``Select`` via :meth:`build`.
    """

    def __init__(self, model_class):
        self.model_class = model_class
        self.conditions: list[Any] = []  # SQLAlchemy expressions, Q objects, etc.
        self.ordering: list[Any] = []  # Strings or SQLAlchemy expressions
        self.limits: int | None = None
        self.offset_value: int | None = None
        self.relationships: set[str] = set()
        self.selected_fields: set[str] = set()
        self.deferred_fields: set[str] = set()
        self.distinct_fields: list[str] = []
        self.annotations: dict[str, Any] = {}
        self.group_clauses: list[Any] = []
        self.having_conditions: list[Any] = []
        self.joins: list[tuple[Table, Any, str]] = []  # (table, condition, join_type)
        self.lock_mode: str | None = None
        self.lock_options: dict[str, bool] = {}
        self.extra_columns: dict[str, str] = {}
        self.extra_where: list[str] = []
        self.extra_params: dict[str, Any] = {}
        self.is_none_query: bool = False
        self.is_reversed: bool = False

    def add_filter(self, *conditions):
        """Return a copy with the given filter conditions appended."""
        new_builder = self.copy()
        new_builder.conditions.extend(conditions)
        return new_builder

    def add_ordering(self, *fields):
        """Return a copy with the given ordering fields appended."""
        new_builder = self.copy()
        new_builder.ordering.extend(fields)
        return new_builder

    def add_limit(self, count: int):
        """Return a copy with LIMIT set to *count*."""
        new_builder = self.copy()
        new_builder.limits = count
        return new_builder

    def add_offset(self, count: int):
        """Return a copy with OFFSET set to *count*."""
        new_builder = self.copy()
        new_builder.offset_value = count
        return new_builder

    def add_relationships(self, *fields):
        """Return a copy with relationship fields added (select_related/prefetch_related)."""
        new_builder = self.copy()
        new_builder.relationships.update(fields)
        return new_builder

    def add_selected_fields(self, *fields):
        """Return a copy with selected fields added (only)."""
        new_builder = self.copy()
        new_builder.selected_fields.update(fields)
        return new_builder

    def add_deferred_fields(self, *fields):
        """Return a copy with deferred fields added (defer)."""
        new_builder = self.copy()
        new_builder.deferred_fields.update(fields)
        return new_builder

    def add_distinct(self, *fields):
        """Return a copy with DISTINCT fields set (replaces prior distinct fields)."""
        new_builder = self.copy()
        new_builder.distinct_fields = list(fields)
        return new_builder

    def add_annotations(self, **kwargs):
        """Return a copy with annotation expressions merged in (alias -> expression)."""
        new_builder = self.copy()
        new_builder.annotations.update(kwargs)
        return new_builder

    def add_group_by(self, *fields):
        """Return a copy with GROUP BY clauses appended."""
        new_builder = self.copy()
        new_builder.group_clauses.extend(fields)
        return new_builder

    def add_having(self, *conditions):
        """Return a copy with HAVING conditions appended."""
        new_builder = self.copy()
        new_builder.having_conditions.extend(conditions)
        return new_builder

    def add_join(self, table: Table, condition: Any, join_type: str = "inner"):
        """Return a copy with a JOIN clause appended ("inner" or "left")."""
        new_builder = self.copy()
        new_builder.joins.append((table, condition, join_type))
        return new_builder

    def add_lock(self, mode: str, **options):
        """Return a copy with row locking configured ("update" or "share")."""
        new_builder = self.copy()
        new_builder.lock_mode = mode
        new_builder.lock_options = options
        return new_builder

    def add_extra(
        self, columns: dict[str, str] | None = None, where: list[str] | None = None, params: dict | None = None
    ):
        """Return a copy with raw SQL fragments merged in.

        Args:
            columns: alias -> raw SQL expression, added as extra SELECT columns
            where: raw SQL WHERE fragments, ANDed into the query
            params: bind parameters shared by all raw fragments
        """
        new_builder = self.copy()
        if columns:
            new_builder.extra_columns.update(columns)
        if where:
            new_builder.extra_where.extend(where)
        if params:
            new_builder.extra_params.update(params)
        return new_builder

    def set_none(self):
        """Return a copy flagged as a none query (matches no rows)."""
        new_builder = self.copy()
        new_builder.is_none_query = True
        return new_builder

    def set_reversed(self):
        """Return a copy whose ordering direction is reversed at build time."""
        new_builder = self.copy()
        new_builder.is_reversed = True
        return new_builder

    def build(self, table):
        """Build a SQLAlchemy ``Select`` for *table* from the accumulated state."""
        # Handle none query: WHERE false guarantees an empty result set.
        if self.is_none_query:
            return select(table).where(literal(False))

        query = select(table)

        # Apply joins
        for join_table, join_condition, join_type in self.joins:
            if join_type == "left":
                query = query.outerjoin(join_table, join_condition)
            else:  # inner join
                query = query.join(join_table, join_condition)

        # Apply conditions (all ANDed together)
        if self.conditions:
            query = query.where(and_(*self.conditions))

        # Apply extra (raw SQL) where clauses, binding shared params if present
        if self.extra_where:
            extra_conditions = []
            for clause in self.extra_where:
                if self.extra_params:
                    extra_conditions.append(text(clause).bindparams(**self.extra_params))
                else:
                    extra_conditions.append(text(clause))
            query = query.where(and_(*extra_conditions))

        # Apply distinct; unknown field names are silently dropped, and an
        # empty resolved list falls back to a plain DISTINCT
        if self.distinct_fields:
            columns = [table.c[field] for field in self.distinct_fields if field in table.c]
            if columns:
                query = query.distinct(*columns)
            else:
                query = query.distinct()

        # Apply annotations as labeled extra columns
        if self.annotations:
            annotation_columns = []
            for alias, expr in self.annotations.items():
                if hasattr(expr, "resolve"):
                    annotation_columns.append(expr.resolve(table).label(alias))
                else:
                    annotation_columns.append(expr.label(alias))
            query = query.add_columns(*annotation_columns)

        # Apply extra raw SQL columns
        if self.extra_columns:
            extra_cols = []
            for alias, sql in self.extra_columns.items():
                if self.extra_params:
                    extra_cols.append(text(sql).bindparams(**self.extra_params).label(alias))
                else:
                    extra_cols.append(text(sql).label(alias))
            query = query.add_columns(*extra_cols)

        # Apply group by: accept column names, SQLObjects expressions, or raw clauses
        if self.group_clauses:
            group_columns = []
            for field in self.group_clauses:
                if isinstance(field, str) and field in table.c:
                    group_columns.append(table.c[field])
                elif hasattr(field, "resolve") and not isinstance(field, str):
                    group_columns.append(field.resolve(table))
                else:
                    group_columns.append(field)
            query = query.group_by(*group_columns)

        # Apply having (all conditions ANDed)
        if self.having_conditions:
            having_exprs = []
            for condition in self.having_conditions:
                if hasattr(condition, "resolve"):
                    having_exprs.append(condition.resolve(table))
                else:
                    having_exprs.append(condition)
            query = query.having(and_(*having_exprs))

        # Apply ordering: "-name" strings mean descending; unknown column
        # names are silently dropped
        if self.ordering:
            order_clauses = []
            for field in self.ordering:
                if isinstance(field, str):
                    if field.startswith("-"):
                        field_name = field[1:]
                        if field_name in table.c:
                            order_clauses.append(desc(table.c[field_name]))
                    else:
                        if field in table.c:
                            order_clauses.append(asc(table.c[field]))
                elif hasattr(field, "resolve"):
                    order_clauses.append(field.resolve(table))
                else:
                    order_clauses.append(field)
            if order_clauses:
                if self.is_reversed:
                    # Flip each clause's direction. desc()/asc() produce
                    # UnaryExpression objects whose `modifier` records the
                    # direction. BUGFIX: the previous check used
                    # `hasattr(clause, "desc") and clause.desc`, but `.desc`
                    # is a bound method on every SQLAlchemy clause and is
                    # therefore always truthy, so ascending clauses were
                    # never flipped and plain columns raised AttributeError
                    # on `.element`.
                    reversed_clauses = []
                    for clause in order_clauses:
                        if getattr(clause, "modifier", None) is operators.desc_op:
                            reversed_clauses.append(asc(clause.element))
                        else:
                            reversed_clauses.append(desc(clause.element if hasattr(clause, "element") else clause))
                    query = query.order_by(*reversed_clauses)
                else:
                    query = query.order_by(*order_clauses)

        # Apply row locking (FOR UPDATE / FOR SHARE)
        if self.lock_mode:
            lock_kwargs = {k: v for k, v in self.lock_options.items() if k in ("nowait", "skip_locked")}
            if self.lock_mode == "update":
                query = query.with_for_update(**lock_kwargs)  # type: ignore[arg-type]
            elif self.lock_mode == "share":
                query = query.with_for_update(read=True, **lock_kwargs)  # type: ignore[arg-type]

        # Apply limit and offset
        if self.limits is not None:
            query = query.limit(self.limits)
        if self.offset_value is not None:
            query = query.offset(self.offset_value)

        return query

    def copy(self):
        """Create a deep-enough copy of this builder (containers copied, contents shared)."""
        new_builder = QueryBuilder(self.model_class)
        new_builder.conditions = self.conditions.copy()
        new_builder.ordering = self.ordering.copy()
        new_builder.limits = self.limits
        new_builder.offset_value = self.offset_value
        new_builder.relationships = self.relationships.copy()
        new_builder.selected_fields = self.selected_fields.copy()
        new_builder.deferred_fields = self.deferred_fields.copy()
        new_builder.distinct_fields = self.distinct_fields.copy()
        new_builder.annotations = self.annotations.copy()
        new_builder.group_clauses = self.group_clauses.copy()
        new_builder.having_conditions = self.having_conditions.copy()
        new_builder.joins = self.joins.copy()
        new_builder.lock_mode = self.lock_mode
        new_builder.lock_options = self.lock_options.copy()
        new_builder.extra_columns = self.extra_columns.copy()
        new_builder.extra_where = self.extra_where.copy()
        new_builder.extra_params = self.extra_params.copy()
        new_builder.is_none_query = self.is_none_query
        new_builder.is_reversed = self.is_reversed
        return new_builder
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
class QueryCache:
|
|
461
|
+
"""Query result caching."""
|
|
462
|
+
|
|
463
|
+
def __init__(self, maxsize: int = 1000):
|
|
464
|
+
self.cache: dict[str, Any] = {}
|
|
465
|
+
self.maxsize = maxsize
|
|
466
|
+
self.hits = 0
|
|
467
|
+
self.misses = 0
|
|
468
|
+
|
|
469
|
+
def get(self, cache_key: str):
|
|
470
|
+
"""Get cached query result."""
|
|
471
|
+
if cache_key in self.cache:
|
|
472
|
+
self.hits += 1
|
|
473
|
+
return self.cache[cache_key]
|
|
474
|
+
self.misses += 1
|
|
475
|
+
return None
|
|
476
|
+
|
|
477
|
+
def set(self, cache_key: str, result: Any):
|
|
478
|
+
"""Cache query result."""
|
|
479
|
+
if len(self.cache) >= self.maxsize:
|
|
480
|
+
oldest_key = next(iter(self.cache))
|
|
481
|
+
del self.cache[oldest_key]
|
|
482
|
+
self.cache[cache_key] = result
|
|
483
|
+
|
|
484
|
+
def clear(self):
|
|
485
|
+
"""Clear all cached results."""
|
|
486
|
+
self.cache.clear()
|
|
487
|
+
self.hits = 0
|
|
488
|
+
self.misses = 0
|
|
489
|
+
|
|
490
|
+
def get_stats(self) -> dict[str, int | float]:
|
|
491
|
+
"""Get cache statistics."""
|
|
492
|
+
total = self.hits + self.misses
|
|
493
|
+
hit_rate = self.hits / total if total > 0 else 0
|
|
494
|
+
return {"hits": self.hits, "misses": self.misses, "hit_rate": hit_rate, "cache_size": len(self.cache)}
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
class QueryExecutor:
    """Query execution with iterator support.

    Wraps an async session (or None for a no-op executor) and executes
    queries built by QueryBuilder, optionally consulting a QueryCache for
    read operations.
    """

    def __init__(self, session=None):
        # session: async session-like object with an `execute` coroutine;
        # when None, all executions return empty/zero defaults.
        self.session = session

    async def execute(
        self, query, query_type: str = "all", cache: QueryCache | None = None, use_cache: bool = True, **kwargs
    ):
        """Unified query execution with caching.

        Args:
            query: SQLAlchemy query object
            query_type: Type of query execution
            cache: Cache instance to use
            use_cache: Whether to use cache for this operation
            **kwargs: Additional parameters for query building
        """
        # Build the actual query based on type
        actual_query = self._build_query_by_type(query, query_type, **kwargs)

        # Handle caching (skip for update/delete operations or when use_cache=False)
        # The cache key is the MD5 of the rendered SQL — used only for lookup,
        # not for security.
        cache_key = None
        if cache and use_cache and query_type in ("all", "count", "exists"):
            cache_key = hashlib.md5(str(actual_query).encode()).hexdigest()
            cached = cache.get(cache_key)
            # NOTE(review): a legitimately-cached None result is
            # indistinguishable from a cache miss here and will re-execute.
            if cached is not None:
                return cached

        # Execute query
        result = await self._execute_query(actual_query, query_type)

        # Cache result (only for read operations and when use_cache=True)
        if cache and use_cache and cache_key:
            cache.set(cache_key, result)

        return result

    async def iterator(self, query, chunk_size: int = 1000):
        """Async iterator for processing large datasets in chunks.

        NOTE(review): uses OFFSET/LIMIT pagination — concurrent inserts or
        deletes between chunks can skip or duplicate rows; confirm this is
        acceptable for callers.
        """
        offset = 0
        processed_chunks = 0

        while True:
            chunk_query = query.offset(offset).limit(chunk_size)
            chunk = await self._execute_query(chunk_query, "all")

            if not chunk:
                break

            for item in chunk:
                yield item

            offset += len(chunk)
            processed_chunks += 1

            # Periodic memory cleanup
            if processed_chunks % 10 == 0:
                gc.collect()

    @staticmethod
    def _build_query_by_type(query, query_type: str, **kwargs):
        """Build query based on execution type.

        Derives a count/exists/update/delete/values/aggregate statement from
        the given SELECT, preserving its WHERE clause where applicable.
        NOTE(review): `query.froms` / `query.table` access depends on the
        SQLAlchemy version (`Select.froms` was deprecated in 1.4 in favor of
        `get_final_froms()`); verify against the pinned version.
        """
        if query_type == "count":
            # SELECT count(*) FROM <first from>, carrying over the WHERE clause.
            return (
                select(func.count())
                .select_from(query.froms[0] if query.froms else query.table)
                .where(query.whereclause)
                if query.whereclause is not None
                else select(func.count()).select_from(query.froms[0] if query.froms else query.table)
            )
        elif query_type == "exists":
            return select(exists(query))
        elif query_type == "update":
            # Rebuild as UPDATE ... SET <values> with the original WHERE clause.
            table = query.froms[0] if query.froms else query.table
            update_query = update(table).values(**kwargs.get("values", {}))
            if query.whereclause is not None:
                update_query = update_query.where(query.whereclause)
            return update_query
        elif query_type == "delete":
            table = query.froms[0] if query.froms else query.table
            delete_query = delete(table)
            if query.whereclause is not None:
                delete_query = delete_query.where(query.whereclause)
            return delete_query
        elif query_type in ("values", "values_list"):
            # Narrow the SELECT to the requested columns; unknown field names
            # are silently dropped. Falls through to the original query when
            # no fields were given.
            fields = kwargs.get("fields", [])
            if fields:
                table = query.froms[0] if query.froms else query.table
                columns = [table.c[field] for field in fields if field in table.c]
                new_query = select(*columns)
                if query.whereclause is not None:
                    new_query = new_query.where(query.whereclause)
                if hasattr(query, "_order_by") and query._order_by:  # noqa
                    new_query = new_query.order_by(*query._order_by)  # noqa
                return new_query
            return query
        elif query_type == "aggregate":
            aggregations = kwargs.get("aggregations", [])
            table = query.froms[0] if query.froms else query.table
            agg_query = select(*aggregations).select_from(table)
            if query.whereclause is not None:
                agg_query = agg_query.where(query.whereclause)
            return agg_query
        else:  # "all"
            return query

    async def _execute_query(self, query, query_type: str):
        """Execute query and return appropriate result.

        Without a session, returns a type-appropriate empty default rather
        than raising, so QuerySets can be constructed unbound.
        """
        if not self.session:
            if query_type == "all":
                return []
            elif query_type in ("count", "update", "delete"):
                return 0
            elif query_type in ("values", "values_list", "aggregate"):
                return []
            else:  # exists
                return False

        result = await self.session.execute(query)

        if query_type == "all":
            return result.fetchall()
        elif query_type in ("count", "exists"):
            # Single scalar; raises if the result is not exactly one row.
            return result.scalar_one()
        elif query_type in ("update", "delete"):
            # Number of rows affected by the DML statement.
            return result.rowcount
        elif query_type in ("values", "values_list", "aggregate"):
            return result.fetchall()
        else:
            return result.fetchall()
|
|
628
|
+
|
|
629
|
+
|
|
630
|
+
class QuerySet(Generic[T]):
|
|
631
|
+
"""
|
|
632
|
+
Refactored QuerySet using composition pattern.
|
|
633
|
+
|
|
634
|
+
This implementation uses independent components to handle different
|
|
635
|
+
aspects of query processing, avoiding MRO issues and improving
|
|
636
|
+
maintainability.
|
|
637
|
+
"""
|
|
638
|
+
|
|
639
|
+
    def __init__(
        self,
        table: Table,
        model_class: type[T],
        db_or_session: str | AsyncSession | None = None,
        default_ordering: bool = True,
        use_cache: bool = True,
    ) -> None:
        """Initialize QuerySet with component composition.

        Args:
            table: SQLAlchemy Table the queries run against
            model_class: model type produced by this QuerySet
            db_or_session: database name or session object used for execution
            default_ordering: whether to seed the builder with the model's
                `_default_ordering`, when configured
            use_cache: whether read queries may be served from the shared cache
        """
        self._table = table
        self._model_class = model_class
        self._db_or_session = db_or_session
        self._default_ordering = default_ordering
        self._use_cache = use_cache

        # Initialize components using composition: builder accumulates query
        # state, cache stores read results, executor runs queries.
        self._builder = QueryBuilder(model_class)
        self._cache = QueryCache()
        self._executor = QueryExecutor(db_or_session)

        # Apply default ordering if needed
        if default_ordering and self._has_default_ordering():
            ordering = getattr(self._model_class, "_default_ordering", [])
            self._builder = self._builder.add_ordering(*ordering)
|
|
663
|
+
|
|
664
|
+
def _has_default_ordering(self) -> bool:
|
|
665
|
+
"""Check if model has default ordering configured."""
|
|
666
|
+
return hasattr(self._model_class, "_default_ordering") and bool(
|
|
667
|
+
getattr(self._model_class, "_default_ordering", [])
|
|
668
|
+
)
|
|
669
|
+
|
|
670
|
+
def _create_new_queryset(self, builder: QueryBuilder | None = None) -> "QuerySet[T]":
|
|
671
|
+
"""Create new QuerySet instance sharing components."""
|
|
672
|
+
new_qs = QuerySet(self._table, self._model_class, self._db_or_session, self._default_ordering, self._use_cache)
|
|
673
|
+
new_qs._builder = builder or self._builder.copy()
|
|
674
|
+
new_qs._cache = self._cache # Shared cache
|
|
675
|
+
new_qs._executor = self._executor # Shared executor
|
|
676
|
+
return new_qs
|
|
677
|
+
|
|
678
|
+
# ========================================
|
|
679
|
+
# Query Building Methods - Return QuerySet
|
|
680
|
+
# ========================================
|
|
681
|
+
|
|
682
|
+
def using(self, db_or_session: str | AsyncSession) -> "QuerySet[T]":
|
|
683
|
+
"""Specify database name or session object."""
|
|
684
|
+
new_qs = QuerySet(self._table, self._model_class, db_or_session, self._default_ordering, self._use_cache)
|
|
685
|
+
new_qs._builder = self._builder.copy()
|
|
686
|
+
new_qs._cache = self._cache
|
|
687
|
+
new_qs._executor = QueryExecutor(db_or_session) # New executor with different session
|
|
688
|
+
return new_qs
|
|
689
|
+
|
|
690
|
+
def skip_default_ordering(self) -> "QuerySet[T]":
|
|
691
|
+
"""Return QuerySet that skips applying default ordering."""
|
|
692
|
+
new_qs = QuerySet(
|
|
693
|
+
self._table, self._model_class, self._db_or_session, default_ordering=False, use_cache=self._use_cache
|
|
694
|
+
)
|
|
695
|
+
new_qs._builder = self._builder.copy()
|
|
696
|
+
new_qs._cache = self._cache
|
|
697
|
+
new_qs._executor = self._executor
|
|
698
|
+
return new_qs
|
|
699
|
+
|
|
700
|
+
def filter(self, *conditions) -> "QuerySet[T]":
|
|
701
|
+
"""Filter QuerySet to include only objects matching conditions."""
|
|
702
|
+
new_builder = self._builder.add_filter(*conditions)
|
|
703
|
+
return self._create_new_queryset(new_builder)
|
|
704
|
+
|
|
705
|
+
def exclude(self, *conditions) -> "QuerySet[T]":
|
|
706
|
+
"""Exclude objects matching conditions from QuerySet."""
|
|
707
|
+
# Convert conditions to negated conditions
|
|
708
|
+
negated_conditions = [not_(cond) for cond in conditions]
|
|
709
|
+
new_builder = self._builder.add_filter(*negated_conditions)
|
|
710
|
+
return self._create_new_queryset(new_builder)
|
|
711
|
+
|
|
712
|
+
def order_by(self, *fields) -> "QuerySet[T]":
|
|
713
|
+
"""Order QuerySet results by specified fields."""
|
|
714
|
+
new_builder = self._builder.add_ordering(*fields)
|
|
715
|
+
return self._create_new_queryset(new_builder)
|
|
716
|
+
|
|
717
|
+
def limit(self, count: int) -> "QuerySet[T]":
|
|
718
|
+
"""Limit number of results returned."""
|
|
719
|
+
new_builder = self._builder.add_limit(count)
|
|
720
|
+
return self._create_new_queryset(new_builder)
|
|
721
|
+
|
|
722
|
+
def offset(self, count: int) -> "QuerySet[T]":
|
|
723
|
+
"""Skip specified number of results from beginning."""
|
|
724
|
+
new_builder = self._builder.add_offset(count)
|
|
725
|
+
return self._create_new_queryset(new_builder)
|
|
726
|
+
|
|
727
|
+
def only(self, *fields) -> "QuerySet[T]":
|
|
728
|
+
"""Load only specified fields from database."""
|
|
729
|
+
new_builder = self._builder.add_selected_fields(*fields)
|
|
730
|
+
return self._create_new_queryset(new_builder)
|
|
731
|
+
|
|
732
|
+
def defer(self, *fields) -> "QuerySet[T]":
|
|
733
|
+
"""Defer loading of specified fields until accessed."""
|
|
734
|
+
new_builder = self._builder.add_deferred_fields(*fields)
|
|
735
|
+
return self._create_new_queryset(new_builder)
|
|
736
|
+
|
|
737
|
+
def select_related(self, *fields) -> "QuerySet[T]":
|
|
738
|
+
"""JOIN preload related objects."""
|
|
739
|
+
new_builder = self._builder.add_relationships(*fields)
|
|
740
|
+
return self._create_new_queryset(new_builder)
|
|
741
|
+
|
|
742
|
+
def prefetch_related(self, *fields) -> "QuerySet[T]":
|
|
743
|
+
"""Separate query preload related objects."""
|
|
744
|
+
new_builder = self._builder.add_relationships(*fields)
|
|
745
|
+
return self._create_new_queryset(new_builder)
|
|
746
|
+
|
|
747
|
+
# Advanced query building
|
|
748
|
+
|
|
749
|
+
def distinct(self, *fields) -> "QuerySet[T]":
|
|
750
|
+
"""Apply DISTINCT clause to eliminate duplicate rows."""
|
|
751
|
+
new_builder = self._builder.add_distinct(*fields)
|
|
752
|
+
return self._create_new_queryset(new_builder)
|
|
753
|
+
|
|
754
|
+
def annotate(self, **kwargs) -> "QuerySet[T]":
|
|
755
|
+
"""Add annotation fields to the queryset."""
|
|
756
|
+
new_builder = self._builder.add_annotations(**kwargs)
|
|
757
|
+
return self._create_new_queryset(new_builder)
|
|
758
|
+
|
|
759
|
+
def group_by(self, *fields) -> "QuerySet[T]":
    """Group result rows by the given fields (SQL GROUP BY)."""
    return self._create_new_queryset(self._builder.add_group_by(*fields))
|
|
763
|
+
|
|
764
|
+
def having(self, *conditions) -> "QuerySet[T]":
    """Filter grouped rows with a HAVING clause."""
    return self._create_new_queryset(self._builder.add_having(*conditions))
|
|
768
|
+
|
|
769
|
+
def join(self, target_table: Table, on_condition: Any, join_type: str = "inner") -> "QuerySet[T]":
    """Manually JOIN another table on the given condition.

    Args:
        target_table: table to join against.
        on_condition: SQL ON clause expression.
        join_type: join flavor understood by the builder ("inner" default).
    """
    return self._create_new_queryset(self._builder.add_join(target_table, on_condition, join_type))
|
|
773
|
+
|
|
774
|
+
def leftjoin(self, target_table: Table, on_condition: Any) -> "QuerySet[T]":
    """LEFT JOIN another table on the given condition."""
    return self._create_new_queryset(self._builder.add_join(target_table, on_condition, "left"))
|
|
778
|
+
|
|
779
|
+
def outerjoin(self, target_table: Table, on_condition: Any) -> "QuerySet[T]":
    """OUTER JOIN another table on the given condition.

    Emits a "left" join, matching SQLAlchemy's convention that
    ``outerjoin`` means LEFT OUTER JOIN (same behavior as leftjoin()).
    """
    return self._create_new_queryset(self._builder.add_join(target_table, on_condition, "left"))
|
|
783
|
+
|
|
784
|
+
def select_for_update(self, nowait: bool = False, skip_locked: bool = False) -> "QuerySet[T]":
    """Lock matched rows with FOR UPDATE.

    Args:
        nowait: fail immediately instead of waiting for locked rows.
        skip_locked: silently skip rows already locked by others.
    """
    # Only truthy flags are forwarded — NOWAIT / SKIP LOCKED are
    # optional SQL modifiers, so absent means "not requested".
    lock_opts = {name: True for name, flag in (("nowait", nowait), ("skip_locked", skip_locked)) if flag}
    return self._create_new_queryset(self._builder.add_lock("update", **lock_opts))
|
|
793
|
+
|
|
794
|
+
def select_for_share(self, nowait: bool = False, skip_locked: bool = False) -> "QuerySet[T]":
    """Lock matched rows with a shared FOR SHARE lock.

    Args:
        nowait: fail immediately instead of waiting for locked rows.
        skip_locked: silently skip rows already locked by others.
    """
    # Mirror select_for_update(): forward only truthy modifiers.
    lock_opts = {name: True for name, flag in (("nowait", nowait), ("skip_locked", skip_locked)) if flag}
    return self._create_new_queryset(self._builder.add_lock("share", **lock_opts))
|
|
803
|
+
|
|
804
|
+
def extra(
    self, columns: dict[str, str] | None = None, where: list[str] | None = None, params: dict | None = None
) -> "QuerySet[T]":
    """Attach raw SQL fragments (extra columns, WHERE text, bind params)."""
    return self._create_new_queryset(self._builder.add_extra(columns, where, params))
|
|
810
|
+
|
|
811
|
+
def none(self) -> "QuerySet[T]":
    """Return a QuerySet guaranteed to match no rows at all."""
    return self._create_new_queryset(self._builder.set_none())
|
|
815
|
+
|
|
816
|
+
def reverse(self) -> "QuerySet[T]":
    """Invert the current ordering of the QuerySet."""
    return self._create_new_queryset(self._builder.set_reversed())
|
|
820
|
+
|
|
821
|
+
def no_cache(self) -> "QuerySet[T]":
    """Return a copy of this QuerySet that bypasses the query cache.

    The clone shares the cache and executor objects so statistics and
    sessions stay consistent; only the ``use_cache`` flag differs.
    """
    clone = QuerySet(self._table, self._model_class, self._db_or_session, self._default_ordering, use_cache=False)
    # Carry over accumulated query state and runtime collaborators.
    clone._builder = self._builder.copy()
    clone._cache = self._cache
    clone._executor = self._executor
    return clone
|
|
828
|
+
|
|
829
|
+
# ========================================
|
|
830
|
+
# Query Execution Methods - Execute queries
|
|
831
|
+
# ========================================
|
|
832
|
+
|
|
833
|
+
async def all(self) -> list[T]:
    """Execute the query and return every matching object."""
    compiled = self._builder.build(self._table)
    rows = await self._executor.execute(compiled, "all", self._cache, self._use_cache)
    # Executor may hand back non-list sentinels; normalize to a list.
    return rows if isinstance(rows, list) else []
|
|
838
|
+
|
|
839
|
+
async def get(self, *conditions) -> T:
    """Return the single object matching *conditions*.

    Raises:
        DoesNotExist: no row matched.
        MultipleObjectsReturned: more than one row matched.
    """
    queryset = self.filter(*conditions) if conditions else self

    # Two rows are enough to tell "exactly one" apart from "none"
    # and "many" without pulling the whole result set.
    matches = await queryset.limit(2).all()
    if not matches:
        raise DoesNotExist(f"{self._model_class.__name__} matching query does not exist")
    if len(matches) > 1:
        raise MultipleObjectsReturned(f"Multiple {self._model_class.__name__} objects returned")
    return matches[0]
|
|
852
|
+
|
|
853
|
+
async def first(self) -> T | None:
    """Return the first matching object, or ``None`` when nothing matches."""
    rows = await self.limit(1).all()
    return rows[0] if rows else None
|
|
857
|
+
|
|
858
|
+
async def count(self) -> int:
    """Return the number of rows matching the current conditions."""
    compiled = self._builder.build(self._table)
    total = await self._executor.execute(compiled, "count", self._cache, self._use_cache)
    # Non-int executor results collapse to zero.
    return total if isinstance(total, int) else 0
|
|
863
|
+
|
|
864
|
+
async def exists(self) -> bool:
    """Return True when at least one row matches the current conditions."""
    compiled = self._builder.build(self._table)
    found = await self._executor.execute(compiled, "exists", self._cache, self._use_cache)
    return bool(found)
|
|
869
|
+
|
|
870
|
+
async def last(self) -> T | None:
    """Return the final object under the current ordering.

    Equivalent to first() evaluated over the inverted ordering.
    """
    return await self.reverse().first()
|
|
874
|
+
|
|
875
|
+
async def earliest(self, *fields) -> T | None:
    """Return the object with the smallest value of *fields* (default ``id``)."""
    keys = fields or ("id",)
    # Strip any leading "-" so the ordering is always ascending here.
    ascending = [name.lstrip("-") for name in keys]
    return await self.order_by(*ascending).first()
|
|
882
|
+
|
|
883
|
+
async def latest(self, *fields) -> T | None:
    """Return the object with the largest value of *fields* (default ``id``)."""
    keys = fields or ("id",)
    # Normalize then prefix "-" so the ordering is always descending here.
    descending = [f"-{name.lstrip('-')}" for name in keys]
    return await self.order_by(*descending).first()
|
|
890
|
+
|
|
891
|
+
async def values(self, *fields) -> list[dict[str, Any]]:
|
|
892
|
+
"""Get dictionaries containing only the specified field values."""
|
|
893
|
+
if not fields:
|
|
894
|
+
fields = tuple(col.name for col in self._table.columns) # noqa
|
|
895
|
+
|
|
896
|
+
query = self._builder.build(self._table)
|
|
897
|
+
result = await self._executor.execute(query, "values", self._cache, self._use_cache, fields=fields)
|
|
898
|
+
|
|
899
|
+
if isinstance(result, list):
|
|
900
|
+
return [dict(zip(fields, row, strict=False)) for row in result]
|
|
901
|
+
return []
|
|
902
|
+
|
|
903
|
+
async def values_list(self, *fields, flat: bool = False) -> list[Any] | list[tuple[Any, ...]]:
    """Return rows as tuples of *fields*, or bare values when ``flat=True``.

    Args:
        *fields: field names to fetch; at least one is required.
        flat: return a flat list of scalars; only valid with one field.

    Raises:
        ValueError: no fields were given.
        TypeError: ``flat=True`` combined with more than one field.
    """
    if not fields:
        raise ValueError("values_list() requires at least one field name")
    if flat and len(fields) > 1:
        # Previously flat=True was silently ignored for multi-field calls
        # (tuples came back anyway), hiding caller bugs; mirror Django's
        # contract and fail loudly instead.
        raise TypeError("flat=True is only valid when values_list() is called with a single field")

    query = self._builder.build(self._table)
    result = await self._executor.execute(query, "values_list", self._cache, self._use_cache, fields=fields)

    if isinstance(result, list):
        if flat:
            return [row[0] for row in result]
        return [tuple(row) for row in result]
    return []
|
|
916
|
+
|
|
917
|
+
async def aggregate(self, **kwargs) -> dict[str, Any]:
|
|
918
|
+
"""Perform aggregation operations on the QuerySet."""
|
|
919
|
+
aggregations = []
|
|
920
|
+
labels = []
|
|
921
|
+
|
|
922
|
+
for alias, expr in kwargs.items():
|
|
923
|
+
if hasattr(expr, "resolve"):
|
|
924
|
+
aggregations.append(expr.resolve(self._table).label(alias))
|
|
925
|
+
else:
|
|
926
|
+
aggregations.append(expr.label(alias))
|
|
927
|
+
labels.append(alias)
|
|
928
|
+
|
|
929
|
+
query = self._builder.build(self._table)
|
|
930
|
+
result = await self._executor.execute(
|
|
931
|
+
query, "aggregate", self._cache, self._use_cache, aggregations=aggregations
|
|
932
|
+
)
|
|
933
|
+
|
|
934
|
+
if isinstance(result, list) and result:
|
|
935
|
+
first_result = result[0]
|
|
936
|
+
return dict(zip(labels, first_result, strict=False))
|
|
937
|
+
return {}
|
|
938
|
+
|
|
939
|
+
async def iterator(self, chunk_size: int = 1000) -> AsyncGenerator[T, None]:
    """Stream results chunk by chunk without materializing the full set.

    Args:
        chunk_size: rows fetched per round trip by the executor.
    """
    compiled = self._builder.build(self._table)
    async for obj in self._executor.iterator(compiled, chunk_size):
        yield obj
|
|
944
|
+
|
|
945
|
+
async def raw(self, sql: str, params: dict | None = None) -> list[T]:
    """Run a raw SQL statement and map each row onto a model instance.

    Columns absent from the model's table are dropped; rows yielding no
    usable columns are skipped. Returns [] when no session is available.
    """
    session = self._executor.session
    if not session:
        return []

    result = await session.execute(text(sql), params or {})

    # Hoist the column metadata out of the per-row loop.
    valid_columns = {col.name for col in self._table.columns}  # noqa
    positional_names = [col.name for col in self._table.columns]  # noqa

    objects = []
    for row in result:
        # SQLAlchemy Row objects expose a mapping; bare tuples fall back
        # to positional pairing with the table's column order.
        if hasattr(row, "_mapping"):
            raw_data = dict(row._mapping)  # noqa
        else:
            raw_data = dict(zip(positional_names, row, strict=False))

        usable = {key: val for key, val in raw_data.items() if key in valid_columns}
        if usable:
            objects.append(self._model_class(**usable))

    return objects
|
|
968
|
+
|
|
969
|
+
# ========================================
|
|
970
|
+
# Data Operations Methods - Modify data
|
|
971
|
+
# ========================================
|
|
972
|
+
|
|
973
|
+
async def create(self, validate: bool = True, **kwargs) -> T:
    """Instantiate a model from **kwargs, optionally validating it.

    Args:
        validate: run the model's ``validate_all`` hook when present.

    NOTE(review): persistence is not implemented yet — the instance is
    returned without being inserted; confirm against the roadmap.
    """
    obj = self._model_class(**kwargs)
    if validate:
        validator = getattr(obj, "validate_all", None)
        if validator:
            validator()
    return obj
|
|
985
|
+
|
|
986
|
+
async def update(self, **values) -> int:
    """Bulk-update all matching rows; returns the affected row count."""
    compiled = self._builder.build(self._table)
    affected = await self._executor.execute(compiled, "update", values=values)
    return affected if isinstance(affected, int) else 0
|
|
991
|
+
|
|
992
|
+
async def delete(self) -> int:
    """Bulk-delete all matching rows; returns the affected row count."""
    compiled = self._builder.build(self._table)
    removed = await self._executor.execute(compiled, "delete")
    return removed if isinstance(removed, int) else 0
|
|
997
|
+
|
|
998
|
+
# ========================================
|
|
999
|
+
# Subquery Methods
|
|
1000
|
+
# ========================================
|
|
1001
|
+
|
|
1002
|
+
def subquery(
    self, name: str | None = None, query_type: Literal["auto", "table", "scalar", "exists"] = "auto"
) -> SubqueryExpression:
    """Convert current QuerySet to subquery expression.

    Args:
        name: optional alias for the subquery.
        query_type: how the subquery is consumed ("auto", "table",
            "scalar", or "exists").

    NOTE(review): this QuerySet's accumulated filters/ordering are NOT
    compiled in — the subquery selects the whole table ("Simplified"
    below); confirm whether builder state should be applied here.
    """
    # Convert to SQLAlchemy query for SubqueryExpression
    # This would need integration with existing SubqueryExpression
    sqlalchemy_query = select(self._table)  # Simplified
    return SubqueryExpression(sqlalchemy_query, name, query_type)
|
|
1010
|
+
|
|
1011
|
+
# ========================================
|
|
1012
|
+
# Utility Methods
|
|
1013
|
+
# ========================================
|
|
1014
|
+
|
|
1015
|
+
def get_instance_cache_stats(self) -> dict[str, Any]:
    """Report statistics from this QuerySet's instance-level cache."""
    instance_cache = self._cache
    return instance_cache.get_stats()
|
|
1018
|
+
|
|
1019
|
+
def clear_instance_cache(self) -> None:
    """Drop every entry from this QuerySet's instance-level cache."""
    instance_cache = self._cache
    instance_cache.clear()
|
|
1022
|
+
|
|
1023
|
+
@classmethod
def get_cache_stats(cls) -> dict[str, Any]:
    """Report class-level query-cache statistics.

    NOTE(review): the shared query cache is not implemented yet, so the
    counters are hard-coded zeros — confirm before relying on them.
    """
    return dict(hits=0, misses=0, hit_rate=0, cache_size=0)
|
|
1027
|
+
|
|
1028
|
+
@classmethod
def clear_query_cache(cls) -> None:
    """Clear the query cache.

    NOTE(review): intentionally a no-op — the shared query cache is not
    implemented yet (get_cache_stats likewise returns zeros); confirm
    before depending on this for invalidation.
    """
    pass
|
|
1032
|
+
|
|
1033
|
+
# ========================================
|
|
1034
|
+
# Magic Methods
|
|
1035
|
+
# ========================================
|
|
1036
|
+
|
|
1037
|
+
def __getitem__(self, key) -> "QuerySet[T]":
    """Support slice and integer indexing via OFFSET/LIMIT.

    Args:
        key: a non-negative int, or a slice with non-negative bounds
            and no step.

    Returns:
        A new QuerySet with the corresponding offset/limit applied.

    Raises:
        ValueError: negative index, negative slice bound, or slice step.
        TypeError: key is neither an int nor a slice.
    """
    if isinstance(key, slice):
        # Negative bounds and steps cannot be expressed as OFFSET/LIMIT;
        # previously they slipped through and produced nonsensical
        # (negative) offset/limit values in the generated query.
        if key.step is not None:
            raise ValueError("Slicing with a step is not supported")
        if (key.start is not None and key.start < 0) or (key.stop is not None and key.stop < 0):
            raise ValueError("Negative indexing is not supported")
        start = key.start or 0
        if key.stop is not None:
            return self.offset(start).limit(key.stop - start)
        return self.offset(start)
    elif isinstance(key, int):
        if key < 0:
            raise ValueError("Negative indexing is not supported")
        return self.offset(key).limit(1)
    else:
        raise TypeError("Invalid key type for indexing")
|
|
1052
|
+
|
|
1053
|
+
def __aiter__(self) -> AsyncGenerator[T, None]:
    """Async iterator support.

    Delegates to iterator() with its default chunk size, so
    ``async for obj in qs`` streams results chunk by chunk.
    """
    return self.iterator()
|
|
1056
|
+
|
|
1057
|
+
def __repr__(self) -> str:
    """Debug-friendly representation naming the target model."""
    model_name = self._model_class.__name__
    return "<QuerySet: {}>".format(model_name)
|