hypern 0.3.11__cp310-cp310-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hypern/__init__.py +24 -0
- hypern/application.py +495 -0
- hypern/args_parser.py +73 -0
- hypern/auth/__init__.py +0 -0
- hypern/auth/authorization.py +2 -0
- hypern/background.py +4 -0
- hypern/caching/__init__.py +6 -0
- hypern/caching/backend.py +31 -0
- hypern/caching/redis_backend.py +201 -0
- hypern/caching/strategies.py +208 -0
- hypern/cli/__init__.py +0 -0
- hypern/cli/commands.py +0 -0
- hypern/config.py +246 -0
- hypern/database/__init__.py +0 -0
- hypern/database/sqlalchemy/__init__.py +4 -0
- hypern/database/sqlalchemy/config.py +66 -0
- hypern/database/sqlalchemy/repository.py +290 -0
- hypern/database/sqlx/__init__.py +36 -0
- hypern/database/sqlx/field.py +246 -0
- hypern/database/sqlx/migrate.py +263 -0
- hypern/database/sqlx/model.py +117 -0
- hypern/database/sqlx/query.py +904 -0
- hypern/datastructures.py +40 -0
- hypern/enum.py +13 -0
- hypern/exceptions/__init__.py +34 -0
- hypern/exceptions/base.py +62 -0
- hypern/exceptions/common.py +12 -0
- hypern/exceptions/errors.py +15 -0
- hypern/exceptions/formatters.py +56 -0
- hypern/exceptions/http.py +76 -0
- hypern/gateway/__init__.py +6 -0
- hypern/gateway/aggregator.py +32 -0
- hypern/gateway/gateway.py +41 -0
- hypern/gateway/proxy.py +60 -0
- hypern/gateway/service.py +52 -0
- hypern/hypern.cpython-310-darwin.so +0 -0
- hypern/hypern.pyi +333 -0
- hypern/i18n/__init__.py +0 -0
- hypern/logging/__init__.py +3 -0
- hypern/logging/logger.py +82 -0
- hypern/middleware/__init__.py +17 -0
- hypern/middleware/base.py +13 -0
- hypern/middleware/cache.py +177 -0
- hypern/middleware/compress.py +78 -0
- hypern/middleware/cors.py +41 -0
- hypern/middleware/i18n.py +1 -0
- hypern/middleware/limit.py +177 -0
- hypern/middleware/security.py +184 -0
- hypern/openapi/__init__.py +5 -0
- hypern/openapi/schemas.py +51 -0
- hypern/openapi/swagger.py +3 -0
- hypern/processpool.py +139 -0
- hypern/py.typed +0 -0
- hypern/reload.py +46 -0
- hypern/response/__init__.py +3 -0
- hypern/response/response.py +142 -0
- hypern/routing/__init__.py +5 -0
- hypern/routing/dispatcher.py +70 -0
- hypern/routing/endpoint.py +30 -0
- hypern/routing/parser.py +98 -0
- hypern/routing/queue.py +175 -0
- hypern/routing/route.py +280 -0
- hypern/scheduler.py +5 -0
- hypern/worker.py +274 -0
- hypern/ws/__init__.py +4 -0
- hypern/ws/channel.py +80 -0
- hypern/ws/heartbeat.py +74 -0
- hypern/ws/room.py +76 -0
- hypern/ws/route.py +26 -0
- hypern-0.3.11.dist-info/METADATA +134 -0
- hypern-0.3.11.dist-info/RECORD +73 -0
- hypern-0.3.11.dist-info/WHEEL +4 -0
- hypern-0.3.11.dist-info/licenses/LICENSE +24 -0
@@ -0,0 +1,904 @@
|
|
1
|
+
from enum import Enum
|
2
|
+
from typing import Any, Dict, List, Tuple, Union
|
3
|
+
from hypern.database.sqlx.field import ForeignKeyField
|
4
|
+
|
5
|
+
|
6
|
+
class JoinType(Enum):
    """SQL JOIN keywords accepted by QuerySet.join()."""

    INNER = "INNER JOIN"
    LEFT = "LEFT JOIN"
    RIGHT = "RIGHT JOIN"
    FULL = "FULL JOIN"
    CROSS = "CROSS JOIN"
|
12
|
+
|
13
|
+
|
14
|
+
class Operator(Enum):
    """SQL comparison/predicate operators used when rendering field lookups."""

    EQ = "="
    GT = ">"
    LT = "<"
    GTE = ">="
    LTE = "<="
    NEQ = "!="
    IN = "IN"
    NOT_IN = "NOT IN"
    LIKE = "LIKE"
    ILIKE = "ILIKE"  # PostgreSQL-style case-insensitive LIKE
    BETWEEN = "BETWEEN"
    IS_NULL = "IS NULL"
    IS_NOT_NULL = "IS NOT NULL"
    REGEXP = "~"  # PostgreSQL-style regex match
    IREGEXP = "~*"  # PostgreSQL-style case-insensitive regex match
|
30
|
+
|
31
|
+
|
32
|
+
class Expression:
    """Class for representing SQL expressions with parameters"""

    def __init__(self, sql: str, params: list):
        # Raw SQL fragment; "?" marks positional parameter slots.
        self.sql = sql
        # Values bound to the "?" slots, in order.
        self.params = params

    def over(self, partition_by=None, order_by=None, frame=None, window_name=None):  # NOSONAR
        """
        Add OVER clause for window functions with support for:
        - Named windows
        - Custom frame definitions
        - Flexible partitioning and ordering

        Mutates ``self.sql`` in place and returns ``self`` for chaining.
        """
        if window_name:
            # Reference a window declared elsewhere (e.g. a WINDOW clause);
            # all other arguments are ignored in this case.
            self.sql = f"{self.sql} OVER {window_name}"
            return self

        parts = ["OVER("]
        clauses = []

        if partition_by:
            if isinstance(partition_by, str):
                partition_by = [partition_by]
            # Handle both raw SQL and Django-style field references
            formatted_fields = []
            for field in partition_by:
                if "__" in field:  # Django-style field reference
                    field = field.replace("__", ".")
                formatted_fields.append(field)
            clauses.append(f"PARTITION BY {', '.join(formatted_fields)}")

        if order_by:
            if isinstance(order_by, str):
                order_by = [order_by]
            # Handle both raw SQL and Django-style ordering
            formatted_order = []
            for field in order_by:
                if isinstance(field, str):
                    # "-name" -> "name DESC"; "+name" -> "name ASC"
                    if field.startswith("-"):
                        field = f"{field[1:]} DESC"
                    elif field.startswith("+"):
                        field = f"{field[1:]} ASC"
                    if "__" in field:  # Django-style field reference
                        field = field.replace("__", ".")
                formatted_order.append(field)
            clauses.append(f"ORDER BY {', '.join(formatted_order)}")

        if frame:
            if isinstance(frame, str):
                # Caller supplied the complete frame clause verbatim.
                clauses.append(frame)
            elif isinstance(frame, (list, tuple)):
                frame_type = "ROWS"  # Default frame type
                # A 3-element sequence may lead with an explicit frame mode.
                if len(frame) == 3 and frame[0].upper() in ("ROWS", "RANGE", "GROUPS"):
                    frame_type = frame[0].upper()
                    frame = frame[1:]
                frame_clause = f"{frame_type} BETWEEN {frame[0]} AND {frame[1]}"
                clauses.append(frame_clause)

        parts.append(" ".join(clauses))
        parts.append(")")

        self.sql = f"{self.sql} {' '.join(parts)}"
        return self
|
96
|
+
|
97
|
+
|
98
|
+
class F:
    """Reference to a database column, usable in arithmetic expressions
    and as the subject of SQL window/aggregate function calls.
    """

    def __init__(self, field: str):
        # Django-style "table__column" is normalised to "table.column".
        self.field = field.replace("__", ".")

    def _binary(self, op: str, other):
        """Build a binary-arithmetic Expression against a column or literal."""
        if isinstance(other, F):
            return Expression(f"{self.field} {op} {other.field}", [])
        return Expression(f"{self.field} {op} ?", [other])

    def __add__(self, other):
        return self._binary("+", other)

    def __sub__(self, other):
        return self._binary("-", other)

    def __mul__(self, other):
        return self._binary("*", other)

    def __truediv__(self, other):
        return self._binary("/", other)

    # Window/aggregate function helpers
    def _call(self, name: str):
        """Wrap this column in a parameterless SQL function call."""
        return Expression(f"{name}({self.field})", [])

    def sum(self):
        """SUM window function"""
        return self._call("SUM")

    def avg(self):
        """AVG window function"""
        return self._call("AVG")

    def count(self):
        """COUNT window function"""
        return self._call("COUNT")

    def max(self):
        """MAX window function"""
        return self._call("MAX")

    def min(self):
        """MIN window function"""
        return self._call("MIN")

    def lag(self, offset=1, default=None):
        """LAG window function"""
        if default is not None:
            return Expression(f"LAG({self.field}, {offset}, ?)", [default])
        return Expression(f"LAG({self.field}, {offset})", [])

    def lead(self, offset=1, default=None):
        """LEAD window function"""
        if default is not None:
            return Expression(f"LEAD({self.field}, {offset}, ?)", [default])
        return Expression(f"LEAD({self.field}, {offset})", [])

    def row_number(self):
        """ROW_NUMBER window function (column-independent)."""
        return Expression("ROW_NUMBER()", [])

    def rank(self):
        """RANK window function (column-independent)."""
        return Expression("RANK()", [])

    def dense_rank(self):
        """DENSE_RANK window function (column-independent)."""
        return Expression("DENSE_RANK()", [])
|
168
|
+
|
169
|
+
|
170
|
+
class Window:
    """Class for defining named windows"""

    def __init__(self, name: str, partition_by=None, order_by=None, frame=None):
        self.name = name  # window name referenced via OVER <name>
        self.partition_by = partition_by  # str or list of field names
        self.order_by = order_by  # str or list; "-f"/"+f" prefixes pick direction
        self.frame = frame  # str (verbatim clause) or (mode?, start, end) sequence

    def to_sql(self):  # NOSONAR
        """Convert window definition to SQL

        NOTE: normalises str inputs to lists in place on self.
        """
        parts = [f"{self.name} AS ("]
        clauses = []

        if self.partition_by:
            if isinstance(self.partition_by, str):
                self.partition_by = [self.partition_by]
            # Django-style "a__b" becomes "a.b".
            formatted_fields = [f.replace("__", ".") for f in self.partition_by]
            clauses.append(f"PARTITION BY {', '.join(formatted_fields)}")

        if self.order_by:
            if isinstance(self.order_by, str):
                self.order_by = [self.order_by]
            formatted_order = []
            for field in self.order_by:
                # Leading "-"/"+" select DESC/ASC ordering.
                if field.startswith("-"):
                    field = f"{field[1:].replace('__', '.')} DESC"
                elif field.startswith("+"):
                    field = f"{field[1:].replace('__', '.')} ASC"
                else:
                    field = field.replace("__", ".")
                formatted_order.append(field)
            clauses.append(f"ORDER BY {', '.join(formatted_order)}")

        if self.frame:
            if isinstance(self.frame, str):
                # Complete frame clause supplied verbatim.
                clauses.append(self.frame)
            elif isinstance(self.frame, (list, tuple)):
                frame_type = "ROWS"
                # Optional leading frame mode in a 3-element sequence.
                if len(self.frame) == 3 and self.frame[0].upper() in ("ROWS", "RANGE", "GROUPS"):
                    frame_type = self.frame[0].upper()
                    self.frame = self.frame[1:]
                frame_clause = f"{frame_type} BETWEEN {self.frame[0]} AND {self.frame[1]}"
                clauses.append(frame_clause)

        parts.append(" ".join(clauses))
        parts.append(")")
        return " ".join(parts)
|
218
|
+
|
219
|
+
|
220
|
+
class Q:
    """Class for complex WHERE conditions with AND/OR operations.

    A Q node keeps a list of ``children`` — condition dicts (from keyword
    lookups), (lookup, value) tuples, or nested Q objects — joined by a
    ``connector`` ("AND" or "OR") and optionally ``negated``.  Q objects
    compose with ``&``, ``|`` and ``~``.
    """

    def __init__(self, *args, **kwargs):
        self.children = list(args)
        self.connector = "AND"
        self.negated = False

        # Keyword lookups become single-key condition dicts, so
        # Q(a=1, b=2) behaves like Q({"a": 1}, {"b": 2}).
        for key, value in kwargs.items():
            self.children.append({key: value})

    def __and__(self, other):
        # Merge directly when the other side is also a plain AND node.
        if getattr(other, "connector", "AND") == "AND" and not other.negated:
            clone = self._clone()
            clone.children.extend(other.children)
            return clone
        q = Q()
        q.connector = "AND"
        q.children = [self, other]
        return q

    def __or__(self, other):
        # Merge directly when the other side is also a plain OR node.
        if getattr(other, "connector", "OR") == "OR" and not other.negated:
            clone = self._clone()
            clone.connector = "OR"
            clone.children.extend(other.children)
            return clone
        q = Q()
        q.connector = "OR"
        q.children = [self, other]
        return q

    def __invert__(self):
        clone = self._clone()
        clone.negated = not self.negated
        return clone

    def _clone(self):
        """Create a copy of the current Q object (children list is shallow-copied)."""
        clone = Q()
        clone.connector = self.connector
        clone.negated = self.negated
        clone.children = self.children[:]
        return clone

    def add(self, child, connector):
        """Add a child node, updating connector if necessary.

        Bug fix: the previous version nested existing children via
        ``Q(*self.children, connector=self.connector)``, but ``__init__``
        treats every keyword argument as a field lookup, so a bogus
        ``{"connector": ...}`` condition was injected.  The nested node's
        connector is now assigned explicitly.
        """
        if connector != self.connector:
            # Connectors differ: wrap the current children in a sub-node
            # that keeps the old connector, then switch to the new one.
            nested = Q(*self.children)
            nested.connector = self.connector
            self.children = [nested]
            self.connector = connector

        if isinstance(child, Q):
            if child.connector == connector and not child.negated:
                # Same connector and not negated: merge children directly.
                self.children.extend(child.children)
            else:
                self.children.append(child)
        else:
            self.children.append(child)

    def _combine(self, other, connector):
        """
        Combine this Q object with another one using the given connector.
        This is an internal method used by __and__ and __or__.
        """
        if not other:
            return self._clone()

        if not self:
            return other._clone() if isinstance(other, Q) else Q(other)

        q = Q()
        q.connector = connector
        q.children = [self, other]
        return q

    def __bool__(self):
        """Return True if this Q object has any children"""
        return bool(self.children)

    def __str__(self):
        """
        Return a string representation of the Q object,
        useful for debugging
        """
        if self.negated:
            return f"NOT ({self._str_inner()})"
        return self._str_inner()

    def _str_inner(self):
        """Helper method for __str__"""
        if not self.children:
            return ""

        children_str = []
        for child in self.children:
            if isinstance(child, Q):
                child_str = str(child)
            elif isinstance(child, dict):
                child_str = " AND ".join(f"{k}={v}" for k, v in child.items())  # NOSONAR
            else:
                child_str = str(child)
            children_str.append(f"({child_str})")

        return f" {self.connector} ".join(children_str)
|
336
|
+
|
337
|
+
|
338
|
+
class QuerySet:
|
339
|
+
def __init__(self, model):
    """Build an empty query over *model* (expected to expose ``Meta.table_name``)."""
    self.model = model
    # Structural pieces of the eventual SELECT statement.
    self.query_parts = {
        "select": ["*"],
        "where": [],
        "order_by": [],
        "limit": None,
        "offset": None,
        "joins": [],
        "group_by": [],
        "having": [],
        "with": [],
        "window": [],
    }
    self.params = []  # positional parameter values, in placeholder order
    self._distinct = False
    self._for_update = False
    self._for_share = False
    self._nowait = False
    self._skip_locked = False
    self._param_counter = 1  # next "$N" placeholder number
    self._selected_related = set()  # FK field names marked by select_related()
|
361
|
+
|
362
|
+
def __get_next_param(self):
    """Return the next positional placeholder name ("$1", "$2", ...)."""
    current = self._param_counter
    self._param_counter = current + 1
    return f"${current}"
|
366
|
+
|
367
|
+
def clone(self) -> "QuerySet":
    """Return an independent copy of this queryset (list values shallow-copied)."""
    copy = QuerySet(self.model)
    copy.query_parts = {key: list(value) if isinstance(value, list) else value for key, value in self.query_parts.items()}
    copy.params = list(self.params)
    copy._distinct = self._distinct
    copy._for_update = self._for_update
    copy._for_share = self._for_share
    copy._nowait = self._nowait
    copy._skip_locked = self._skip_locked
    copy._param_counter = self._param_counter
    copy._selected_related = set(self._selected_related)
    return copy
|
379
|
+
|
380
|
+
def select(self, *fields, distinct: bool = False) -> "QuerySet":
    """Restrict the SELECT list to *fields*, table-qualifying plain names.

    Bug fix: calling ``select()`` with no arguments used to install an
    empty select list, producing invalid SQL; it now keeps the default
    ``"*"`` selection.  A literal ``"*"`` field is never qualified.
    """
    qs = self.clone()
    if fields:
        qs.query_parts["select"] = [x if x == "*" else f"{qs.model.Meta.table_name}.{x}" for x in fields]
    qs._distinct = distinct
    return qs
|
385
|
+
|
386
|
+
def _process_q_object(self, q_obj: Q, params: List = None) -> Tuple[str, List]:
    """Recursively render a Q tree into an SQL fragment plus parameter list.

    Children may be nested Q objects, ``{lookup: value}`` dicts, or
    ``(lookup, value)`` tuples; fragments are joined with the node's
    connector and wrapped in ``NOT (...)`` when the node is negated.
    Collected parameters are appended to *params* (a fresh list when None).
    """
    if params is None:
        params = []

    if not q_obj.children:
        return "", params

    sql_parts = []
    local_params = []

    for child in q_obj.children:
        if isinstance(child, Q):
            # Nested node: render recursively and parenthesise.
            inner_sql, inner_params = self._process_q_object(child)
            sql_parts.append(f"({inner_sql})")
            local_params.extend(inner_params)
        elif isinstance(child, dict):
            for key, value in child.items():
                field_sql, field_params = self._process_where_item(key, value)
                sql_parts.append(field_sql)
                local_params.extend(field_params)
        elif isinstance(child, tuple):
            field_sql, field_params = self._process_where_item(child[0], child[1])
            sql_parts.append(field_sql)
            local_params.extend(field_params)

    joined = f" {q_obj.connector} ".join(sql_parts)
    if q_obj.negated:
        joined = f"NOT ({joined})"

    params.extend(local_params)
    return joined, params
|
417
|
+
|
418
|
+
def _process_where_item(self, key: str, value: Any) -> Tuple[str, List]:
    """Translate one ``field__op=value`` lookup into (sql, params)."""
    tokens = key.split("__")
    field_name = tokens[0]
    lookup = tokens[1] if len(tokens) > 1 else "="

    # Dispatch on the value kind: column reference, raw expression, or literal.
    if isinstance(value, F):
        return self._process_f_value(field_name, lookup, value)
    if isinstance(value, Expression):
        return self._process_expression_value(field_name, lookup, value)
    return self._process_standard_value(field_name, lookup, value)
|
430
|
+
|
431
|
+
def _process_f_value(self, field: str, op: str, value: F) -> Tuple[str, List]:
    """Column-to-column comparison; F references bind no parameters."""
    lhs = f"{self.model.Meta.table_name}.{field}"
    return f"{lhs} {op} {value.field}", []
|
433
|
+
|
434
|
+
def _process_expression_value(self, field: str, op: str, value: Expression) -> Tuple[str, List]:
    """Compare a column against a raw Expression, forwarding its parameters."""
    lhs = f"{self.model.Meta.table_name}.{field}"
    return f"{lhs} {op} {value.sql}", value.params
|
436
|
+
|
437
|
+
def _process_standard_value(self, field: str, op: str, value: Any) -> Tuple[str, List]:
    """Translate a plain-value lookup suffix (gt/lt/contains/...) into (sql, params)."""
    lookup_operators = {
        "gt": Operator.GT.value,
        "lt": Operator.LT.value,
        "gte": Operator.GTE.value,
        "lte": Operator.LTE.value,
        "contains": Operator.LIKE.value,
        "icontains": Operator.ILIKE.value,
        "startswith": Operator.LIKE.value,
        "endswith": Operator.LIKE.value,
        "in": Operator.IN.value,
        "not_in": Operator.NOT_IN.value,
        "isnull": Operator.IS_NULL.value,
        "between": Operator.BETWEEN.value,
        "regex": Operator.REGEXP.value,
        "iregex": Operator.IREGEXP.value,
    }

    if op not in lookup_operators:
        # Unknown suffix: fall back to simple equality.
        placeholder = self.__get_next_param()
        return f"{self.model.Meta.table_name}.{field} = {placeholder}", [value]
    return self._process_op_map_value(field, op, value, lookup_operators)
|
460
|
+
|
461
|
+
def _process_op_map_value(self, field: str, op: str, value: Any, op_map: dict) -> Tuple[str, List]:
    """Render a mapped lookup operator with correctly numbered placeholders.

    Bug fixes versus the previous version:
    - ``in``/``not_in`` emitted the literal text "{param_name}" (the
      f-string prefix was missing) and would have reused one placeholder
      for every element; each element now gets its own fresh "$N".
    - ``between`` reused a single placeholder for both bounds; it now
      allocates two distinct ones.
    - ``isnull`` no longer consumes a placeholder number it never emits.
    """
    column = f"{self.model.Meta.table_name}.{field}"

    if op == "isnull":
        # Truthy value -> IS NULL, falsy -> IS NOT NULL; no parameters bound.
        null_op = Operator.IS_NULL.value if value else Operator.IS_NOT_NULL.value
        return f"{column} {null_op}", []
    if op in ("contains", "icontains"):
        return f"{column} {op_map[op]} {self.__get_next_param()}", [f"%{value}%"]
    if op == "startswith":
        return f"{column} {op_map[op]} {self.__get_next_param()}", [f"{value}%"]
    if op == "endswith":
        return f"{column} {op_map[op]} {self.__get_next_param()}", [f"%{value}"]
    if op == "between":
        low = self.__get_next_param()
        high = self.__get_next_param()
        return f"{column} {op_map[op]} {low} AND {high}", [value[0], value[1]]
    if op in ("in", "not_in"):
        placeholders = ",".join(self.__get_next_param() for _ in value)
        return f"{column} {op_map[op]} ({placeholders})", list(value)
    return f"{column} {op_map[op]} {self.__get_next_param()}", [value]
|
479
|
+
|
480
|
+
def where(self, *args, **kwargs) -> "QuerySet":
    """Append filter conditions: Q objects, Expressions, raw strings, or kwargs lookups."""
    qs = self.clone()

    for condition in args:
        if isinstance(condition, Q):
            sql, sql_params = qs._process_q_object(condition, [])
            if sql:
                qs.query_parts["where"].append(sql)
                qs.params.extend(sql_params)
        elif isinstance(condition, Expression):
            qs.query_parts["where"].append(condition.sql)
            qs.params.extend(condition.params)
        else:
            # Anything else is treated as a raw SQL snippet.
            qs.query_parts["where"].append(str(condition))

    if kwargs:
        # Keyword lookups go through a synthetic Q node.
        sql, sql_params = qs._process_q_object(Q(**kwargs), [])
        if sql:
            qs.query_parts["where"].append(sql)
            qs.params.extend(sql_params)
    return qs
|
504
|
+
|
505
|
+
def annotate(self, **annotations) -> "QuerySet":
    """Add computed columns ``expr AS alias`` to the SELECT list.

    Bug fix: an Expression containing several "?" slots previously had
    every "?" replaced by the same "$N" name (``str.replace`` replaces
    all occurrences); each "?" now receives its own freshly numbered
    placeholder, matching the order of ``expression.params``.
    """
    qs = self.clone()
    select_parts = []

    for alias, expression in annotations.items():
        if isinstance(expression, F):
            select_parts.append(f"{expression.field} AS {alias}")
        elif isinstance(expression, Expression):
            sql = expression.sql
            # Replace one "?" at a time so every placeholder is distinct.
            while "?" in sql:
                sql = sql.replace("?", qs.__get_next_param(), 1)
            select_parts.append(f"({sql}) AS {alias}")
            qs.params.extend(expression.params)
        else:
            # Raw SQL (or anything stringifiable) passes through as-is.
            select_parts.append(f"{expression} AS {alias}")

    qs.query_parts["select"].extend(select_parts)
    return qs
|
520
|
+
|
521
|
+
def values(self, *fields) -> "QuerySet":
    """Alias for :meth:`select` restricted to the given fields."""
    return self.select(*fields)
|
523
|
+
|
524
|
+
def values_list(self, *fields, flat: bool = False) -> "QuerySet":
    """Select only *fields*; ``flat`` is only meaningful for a single field."""
    if flat and len(fields) > 1:
        raise ValueError("'flat' is not valid when values_list is called with more than one field.")
    return self.select(*fields)
|
528
|
+
|
529
|
+
def order_by(self, *fields) -> "QuerySet":
    """Replace the ORDER BY list; a leading '-' means DESC, default is ASC.

    Consistency fix: '-field' entries are now table-qualified like their
    ascending counterparts (and like ``_process_order_by``); previously
    only the ASC branch applied the table prefix.
    """
    qs = self.clone()
    ordering = []

    for field in fields:
        if isinstance(field, F):
            ordering.append(field.field)
        elif isinstance(field, Expression):
            ordering.append(field.sql)
            qs.params.extend(field.params)
        elif field.startswith("-"):
            ordering.append(f"{qs.model.Meta.table_name}.{field[1:]} DESC")
        else:
            ordering.append(f"{qs.model.Meta.table_name}.{field} ASC")

    qs.query_parts["order_by"] = ordering
    return qs
|
546
|
+
|
547
|
+
def select_related(self, *fields) -> "QuerySet":
    """
    Include related objects in the query results.

    Args:
        *fields: Names of foreign key fields to include.
            Names that are not declared ForeignKeyField fields are
            silently ignored.
    """
    qs = self.clone()
    declared = qs.model._fields
    for name in fields:
        if name in declared and isinstance(declared[name], ForeignKeyField):
            qs._selected_related.add(name)
    return qs
|
559
|
+
|
560
|
+
def join(self, table: Any, on: Union[str, Expression], join_type: Union[str, JoinType] = JoinType.INNER) -> "QuerySet":
    """Append a JOIN clause; *table* may be a model class (via Meta) or a table name."""
    qs = self.clone()
    target = table.Meta.table_name if hasattr(table, "Meta") else table
    kind = join_type.value if isinstance(join_type, JoinType) else join_type

    if isinstance(on, Expression):
        qs.query_parts["joins"].append(f"{kind} {target} ON {on.sql}")
        qs.params.extend(on.params)
    else:
        qs.query_parts["joins"].append(f"{kind} {target} ON {on}")
    return qs
|
574
|
+
|
575
|
+
def group_by(self, *fields) -> "QuerySet":
    """Replace the GROUP BY list; plain names are table-qualified."""
    qs = self.clone()
    grouped = []

    for entry in fields:
        if isinstance(entry, F):
            grouped.append(entry.field)
        elif isinstance(entry, Expression):
            grouped.append(entry.sql)
            qs.params.extend(entry.params)
        else:
            grouped.append(f"{qs.model.Meta.table_name}.{entry}")

    qs.query_parts["group_by"] = grouped
    return qs
|
590
|
+
|
591
|
+
def having(self, *conditions) -> "QuerySet":
    """Replace the HAVING conditions; Expressions contribute bound parameters."""
    qs = self.clone()
    rendered = []

    for cond in conditions:
        if isinstance(cond, Expression):
            rendered.append(cond.sql)
            qs.params.extend(cond.params)
        else:
            rendered.append(str(cond))

    qs.query_parts["having"] = rendered
    return qs
|
604
|
+
|
605
|
+
def window(self, alias: str, partition_by: List = None, order_by: List = None) -> "QuerySet":
    """Declare a named window (WINDOW <alias> AS (...)) usable via OVER <alias>."""
    qs = self.clone()
    pieces = [f"{alias} AS ("]

    if partition_by:
        pieces.append(qs._process_partition_by(partition_by, qs))
    if order_by:
        pieces.append(qs._process_order_by(order_by, qs))

    pieces.append(")")
    qs.query_parts["window"].append(" ".join(pieces))
    return qs
|
618
|
+
|
619
|
+
def _process_partition_by(self, partition_by: List, qs: "QuerySet") -> str:
    """Render a PARTITION BY clause; Expression params accumulate on *qs*."""
    columns = []
    for entry in partition_by:
        if isinstance(entry, F):
            columns.append(entry.field)
        elif isinstance(entry, Expression):
            columns.append(entry.sql)
            qs.params.extend(entry.params)
        else:
            columns.append(f"{self.model.Meta.table_name}.{entry}")
    return f"PARTITION BY {', '.join(columns)}"
|
630
|
+
|
631
|
+
def _process_order_by(self, order_by: List, qs: "QuerySet") -> str:
    """Render an ORDER BY clause; '-name' means DESC, default is ASC."""
    columns = []
    for entry in order_by:
        if isinstance(entry, F):
            columns.append(entry.field)
        elif isinstance(entry, Expression):
            columns.append(entry.sql)
            qs.params.extend(entry.params)
        elif entry.startswith("-"):
            columns.append(f"{qs.model.Meta.table_name}.{entry[1:]} DESC")
        else:
            columns.append(f"{qs.model.Meta.table_name}.{entry} ASC")
    return f"ORDER BY {', '.join(columns)}"
|
644
|
+
|
645
|
+
def limit(self, limit: int) -> "QuerySet":
    """Cap the number of rows returned (LIMIT clause)."""
    qs = self.clone()
    qs.query_parts["limit"] = limit
    return qs
|
649
|
+
|
650
|
+
def offset(self, offset: int) -> "QuerySet":
    """Skip the first *offset* rows (OFFSET clause)."""
    qs = self.clone()
    qs.query_parts["offset"] = offset
    return qs
|
654
|
+
|
655
|
+
def for_update(self, nowait: bool = False, skip_locked: bool = False) -> "QuerySet":
    """Request FOR UPDATE row locking, optionally with NOWAIT or SKIP LOCKED."""
    qs = self.clone()
    qs._for_update = True
    qs._nowait = nowait
    qs._skip_locked = skip_locked
    return qs
|
661
|
+
|
662
|
+
def for_share(self, nowait: bool = False, skip_locked: bool = False) -> "QuerySet":
    """Request FOR SHARE row locking, optionally with NOWAIT or SKIP LOCKED."""
    qs = self.clone()
    qs._for_share = True
    qs._nowait = nowait
    qs._skip_locked = skip_locked
    return qs
|
668
|
+
|
669
|
+
def with_recursive(self, name: str, initial_query: str, recursive_query: str) -> "QuerySet":
    """Prepend a recursive CTE: WITH RECURSIVE name AS (initial UNION ALL recursive)."""
    qs = self.clone()
    qs.query_parts["with"].append(f"WITH RECURSIVE {name} AS ({initial_query} UNION ALL {recursive_query})")
    return qs
|
674
|
+
|
675
|
+
def union(self, other_qs: "QuerySet", all: bool = False) -> "QuerySet":
    """Combine with *other_qs* via UNION (or UNION ALL), producing a raw-SQL queryset."""
    left_sql, left_params = self.to_sql()
    right_sql, right_params = other_qs.to_sql()
    keyword = "UNION ALL" if all else "UNION"

    merged = self.clone()
    merged.query_parts["raw_sql"] = f"({left_sql}) {keyword} ({right_sql})"
    merged.params = left_params + right_params
    return merged
|
686
|
+
|
687
|
+
def intersect(self, other_qs: "QuerySet", all: bool = False) -> "QuerySet":
    """Combine with *other_qs* via INTERSECT (or INTERSECT ALL) as raw SQL."""
    left_sql, left_params = self.to_sql()
    right_sql, right_params = other_qs.to_sql()
    keyword = "INTERSECT ALL" if all else "INTERSECT"

    merged = self.clone()
    merged.query_parts["raw_sql"] = f"({left_sql}) {keyword} ({right_sql})"
    merged.params = left_params + right_params
    return merged
|
698
|
+
|
699
|
+
def except_(self, other_qs: "QuerySet", all: bool = False) -> "QuerySet":
    """Combine with *other_qs* via EXCEPT (or EXCEPT ALL) as raw SQL."""
    left_sql, left_params = self.to_sql()
    right_sql, right_params = other_qs.to_sql()
    keyword = "EXCEPT ALL" if all else "EXCEPT"

    merged = self.clone()
    merged.query_parts["raw_sql"] = f"({left_sql}) {keyword} ({right_sql})"
    merged.params = left_params + right_params
    return merged
|
710
|
+
|
711
|
+
def subquery(self, alias: str) -> Expression:
    """Convert this queryset into a subquery expression"""
    sql, params = self.to_sql()
    return Expression(f"({sql}) AS {alias}", params)
|
715
|
+
|
716
|
+
def to_sql(self) -> Tuple[str, List]:
    """Render the queryset as an ``(sql_string, parameter_list)`` pair."""
    # Set-operation results (union/intersect/except) bypass clause assembly.
    if "raw_sql" in self.query_parts:
        return self.query_parts["raw_sql"], self.params

    pieces = []
    self._build_sql_parts(pieces)
    return " ".join(pieces), self.params
|
724
|
+
|
725
|
+
def _build_sql_parts(self, parts):
    """Append each SQL clause to *parts* in standard SELECT-statement order."""
    # Clause order is significant: WITH, SELECT, FROM, JOIN, WHERE,
    # GROUP BY, HAVING, WINDOW, ORDER BY, LIMIT, OFFSET, row locking.
    builders = (
        self._add_with_clause,
        self._add_select_clause,
        self._add_from_clause,
        self._add_joins_clause,
        self._add_where_clause,
        self._add_group_by_clause,
        self._add_having_clause,
        self._add_window_clause,
        self._add_order_by_clause,
        self._add_limit_clause,
        self._add_offset_clause,
        self._add_locking_clauses,
    )
    for build in builders:
        build(parts)
|
738
|
+
|
739
|
+
def _add_with_clause(self, parts):
    """Append any CTE definitions ahead of the SELECT."""
    ctes = self.query_parts["with"]
    if ctes:
        parts.append(" ".join(ctes))
|
742
|
+
|
743
|
+
def _add_select_clause(self, parts):
    """Append the SELECT list, including DISTINCT and select_related columns."""
    columns = list(self.query_parts["select"])
    # select_related: pull in every column of each related table.
    # NOTE(review): `to_model` is used directly as a table name here —
    # presumably it resolves to the related table; confirm against ForeignKeyField.
    for fk_name in self._selected_related:
        related_table = self.model._fields[fk_name].to_model
        columns.append(f"{related_table}.*")

    prefix = "SELECT DISTINCT" if self._distinct else "SELECT"
    parts.append(f"{prefix} {', '.join(columns)}")
|
756
|
+
|
757
|
+
def _add_from_clause(self, parts):
    """Append the FROM clause naming the model's table."""
    parts.append(f"FROM {self.model.Meta.table_name}")
|
759
|
+
|
760
|
+
def _add_joins_clause(self, parts):
    """Append any accumulated JOIN clauses."""
    joins = self.query_parts["joins"]
    if joins:
        parts.extend(joins)
|
763
|
+
|
764
|
+
def _add_where_clause(self, parts):
    """Append WHERE, AND-joining every stored condition in parentheses."""
    conditions = self.query_parts["where"]
    if conditions:
        parts.append("WHERE " + " AND ".join(f"({c})" for c in conditions))
|
767
|
+
|
768
|
+
def _add_group_by_clause(self, parts):
    """Append the GROUP BY clause when grouping fields are present."""
    groups = self.query_parts["group_by"]
    if groups:
        parts.append("GROUP BY " + ", ".join(groups))
|
771
|
+
|
772
|
+
def _add_having_clause(self, parts):
    """Append HAVING, AND-joining the stored conditions."""
    conditions = self.query_parts["having"]
    if conditions:
        parts.append("HAVING " + " AND ".join(conditions))
|
775
|
+
|
776
|
+
def _add_window_clause(self, parts):
|
777
|
+
if self.query_parts["window"]:
|
778
|
+
parts.append("WINDOW " + ", ".join(self.query_parts["window"]))
|
779
|
+
|
780
|
+
def _add_order_by_clause(self, parts):
|
781
|
+
if self.query_parts["order_by"]:
|
782
|
+
parts.append("ORDER BY " + ", ".join(self.query_parts["order_by"]))
|
783
|
+
|
784
|
+
def _add_limit_clause(self, parts):
|
785
|
+
if self.query_parts["limit"] is not None:
|
786
|
+
parts.append(f"LIMIT {self.query_parts['limit']}")
|
787
|
+
|
788
|
+
def _add_offset_clause(self, parts):
|
789
|
+
if self.query_parts["offset"] is not None:
|
790
|
+
parts.append(f"OFFSET {self.query_parts['offset']}")
|
791
|
+
|
792
|
+
def _add_locking_clauses(self, parts):
|
793
|
+
if self._for_update:
|
794
|
+
parts.append("FOR UPDATE")
|
795
|
+
if self._nowait:
|
796
|
+
parts.append("NOWAIT")
|
797
|
+
elif self._skip_locked:
|
798
|
+
parts.append("SKIP LOCKED")
|
799
|
+
elif self._for_share:
|
800
|
+
parts.append("FOR SHARE")
|
801
|
+
if self._nowait:
|
802
|
+
parts.append("NOWAIT")
|
803
|
+
elif self._skip_locked:
|
804
|
+
parts.append("SKIP LOCKED")
|
805
|
+
|
806
|
+
def execute(self) -> List[Tuple]:
    """Render this query to SQL, run it, and return all matching rows."""
    sql, params = self.to_sql()
    session = self.model.get_session()
    return session.fetch_all(sql, params)
|
811
|
+
|
812
|
+
def count(self) -> int:
    """Return the number of rows this query would match.

    Clones the query, replaces the select list with COUNT(*), drops the
    irrelevant ORDER BY, and extracts the scalar from the result set.
    """
    qs = self.clone()
    qs.query_parts["select"] = ["COUNT(*)"]
    qs.query_parts["order_by"] = []  # ordering is meaningless for an aggregate-only query
    sql, params = qs.to_sql()

    rows = self.model.get_session().fetch_all(sql, params)
    # BUG FIX: previously returned the raw row set even though the method
    # is annotated `-> int`. fetch_all yields rows (cf. execute's
    # List[Tuple] annotation), so pull out the single COUNT(*) scalar.
    # NOTE(review): assumes tuple-like rows — confirm against the session
    # driver's actual row type.
    if not rows:
        return 0
    return int(rows[0][0])
|
822
|
+
|
823
|
+
def exists(self) -> bool:
    """Return True if at least one row matches this query.

    Uses `SELECT 1 ... LIMIT 1` so the database can stop at the first hit.
    """
    qs = self.clone()
    qs.query_parts["select"] = ["1"]
    qs.query_parts["order_by"] = []  # ordering cannot affect existence
    qs = qs.limit(1)
    sql, params = qs.to_sql()

    rows = self.model.get_session().fetch_all(sql, params)
    # BUG FIX: previously returned the raw result set even though the
    # method is annotated `-> bool`; coerce explicitly (a non-empty
    # result set means at least one matching row exists).
    return bool(rows)
|
833
|
+
|
834
|
+
def update(self, **kwargs) -> int:
    """Update all records matching the current WHERE conditions.

    Values may be plain Python values (bound as parameters), F() column
    references (rendered inline, no parameter), or Expression objects
    (whose SQL is inlined and whose own params are appended).

    Returns:
        The session's bulk_change result (affected-row count).
    """
    updates = []
    params = []

    for field, value in kwargs.items():
        if isinstance(value, F):
            # Column-to-column assignment; no bound parameter needed.
            updates.append(f"{field} = {value.field}")
        elif isinstance(value, Expression):
            updates.append(f"{field} = {value.sql}")
            params.extend(value.params)
        else:
            # BUG FIX: the placeholder used to be allocated before the
            # isinstance checks, so F/Expression values silently consumed
            # a parameter slot and left gaps in the positional placeholder
            # numbering. Allocate only when a value is actually bound.
            param_name = self.__get_next_param()
            updates.append(f"{field} = {param_name}")
            params.append(value)

    where_sql = " AND ".join(f"({condition})" for condition in self.query_parts["where"])

    sql = f"UPDATE {self.model.Meta.table_name} SET {', '.join(updates)}"
    if where_sql:
        sql += f" WHERE {where_sql}"
    # self.params (collected by earlier filtering calls) precede the SET
    # params, matching the original parameter ordering.
    params = self.params + params
    return self.model.get_session().bulk_change(sql, [params], 1)
|
858
|
+
|
859
|
+
def delete(self) -> int:
    """Delete every record matching the current WHERE conditions."""
    conditions = self.query_parts["where"]

    sql = f"DELETE FROM {self.model.Meta.table_name}"
    if conditions:
        clauses = " AND ".join("({})".format(c) for c in conditions)
        sql += f" WHERE {clauses}"

    return self.model.get_session().bulk_change(sql, [self.params], 1)
|
868
|
+
|
869
|
+
def bulk_create(self, objs: List[Any], batch_size: int = None) -> int | None:
|
870
|
+
"""Insert multiple records in an efficient way"""
|
871
|
+
if not objs:
|
872
|
+
return
|
873
|
+
|
874
|
+
# Get fields from the first object
|
875
|
+
fields = [name for name, f in self.model._fields.items() if not f.auto_increment]
|
876
|
+
placeholders = ",".join([self.__get_next_param() for _ in fields])
|
877
|
+
|
878
|
+
sql = f"INSERT INTO {self.model.Meta.table_name} ({','.join(fields)}) VALUES ({placeholders})"
|
879
|
+
|
880
|
+
values = []
|
881
|
+
for obj in objs:
|
882
|
+
values.append([obj._data[i] for i in fields])
|
883
|
+
|
884
|
+
return self.model.get_session().bulk_change(sql, values, batch_size or len(values))
|
885
|
+
|
886
|
+
def explain(self, analyze: bool = False, verbose: bool = False, costs: bool = False, buffers: bool = False, timing: bool = False) -> Dict:
    """Get the query execution plan.

    Each flag toggles the corresponding EXPLAIN option (PostgreSQL-style
    parenthesized option list). Returns whatever rows the session's
    fetch_all yields for the EXPLAIN statement.
    """
    # Map each keyword flag to its EXPLAIN option name.
    flag_options = [
        (analyze, "ANALYZE"),
        (verbose, "VERBOSE"),
        (costs, "COSTS"),
        (buffers, "BUFFERS"),
        (timing, "TIMING"),
    ]
    options = [name for enabled, name in flag_options if enabled]

    sql, params = self.to_sql()
    # BUG FIX: with every flag False the old code produced
    # "EXPLAIN () <sql>" — an empty option list is a SQL syntax error.
    # Only emit the parenthesized list when at least one option is set.
    if options:
        explain_sql = f"EXPLAIN ({' '.join(options)}) {sql}"
    else:
        explain_sql = f"EXPLAIN {sql}"

    return self.model.get_session().fetch_all(explain_sql, params)
|