sqlobjects 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlobjects/__init__.py +38 -0
- sqlobjects/config.py +519 -0
- sqlobjects/database.py +586 -0
- sqlobjects/exceptions.py +538 -0
- sqlobjects/expressions.py +1054 -0
- sqlobjects/fields.py +1866 -0
- sqlobjects/history.py +101 -0
- sqlobjects/metadata.py +1130 -0
- sqlobjects/model.py +1009 -0
- sqlobjects/objects.py +812 -0
- sqlobjects/queries.py +1059 -0
- sqlobjects/relations.py +843 -0
- sqlobjects/session.py +389 -0
- sqlobjects/signals.py +464 -0
- sqlobjects/utils/__init__.py +5 -0
- sqlobjects/utils/naming.py +53 -0
- sqlobjects/utils/pattern.py +644 -0
- sqlobjects/validators.py +294 -0
- sqlobjects-0.1.0.dist-info/METADATA +29 -0
- sqlobjects-0.1.0.dist-info/RECORD +23 -0
- sqlobjects-0.1.0.dist-info/WHEEL +5 -0
- sqlobjects-0.1.0.dist-info/licenses/LICENSE +21 -0
- sqlobjects-0.1.0.dist-info/top_level.txt +1 -0
sqlobjects/objects.py
ADDED
|
@@ -0,0 +1,812 @@
|
|
|
1
|
+
"""SQLObjects Objects Manager - Core-based Database Operations
|
|
2
|
+
|
|
3
|
+
This module provides Django-style objects manager for database operations
|
|
4
|
+
using SQLAlchemy Core, offering high-performance database access with
|
|
5
|
+
familiar ORM-like interface.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Any, Generic
|
|
9
|
+
|
|
10
|
+
from sqlalchemy import bindparam, delete, insert, select, text, update
|
|
11
|
+
|
|
12
|
+
from .exceptions import DoesNotExist, MultipleObjectsReturned
|
|
13
|
+
from .queries import QuerySet, T
|
|
14
|
+
from .session import AsyncSession, SessionContextManager
|
|
15
|
+
from .signals import Operation, emit_signals
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
__all__ = ["ObjectsDescriptor", "ObjectsManager"]
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class ObjectsDescriptor(Generic[T]):
    """Descriptor exposing a Django-style ``objects`` attribute on model classes.

    Attached automatically to model classes, this descriptor makes
    ``Model.objects`` available for database operations. Each attribute
    access yields a fresh ObjectsManager bound to the owning model class.
    """

    def __init__(self, model_class: type[T]) -> None:
        """Remember the model class this descriptor serves.

        Args:
            model_class: The model class this descriptor is attached to
        """
        self._model_class = model_class

    def __get__(self, obj: Any, owner: type[T]) -> "ObjectsManager[T]":
        """Produce a manager for the model class on attribute access.

        Invoked when the ``objects`` attribute is read on a model class
        or instance.

        Args:
            obj: Instance accessing the attribute (None for class-level access)
            owner: The class that owns this descriptor

        Returns:
            A new ObjectsManager configured for the model class
        """
        manager: "ObjectsManager[T]" = ObjectsManager(self._model_class)
        return manager
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class ObjectsManager(Generic[T]):
    """Object manager providing Django ORM-like interface using SQLAlchemy Core."""

    def __init__(self, model_class: type[T], db_or_session: str | AsyncSession | None = None):
        """Bind the manager to a model class and an optional database/session.

        Args:
            model_class: Model class whose table this manager operates on
            db_or_session: Database name, explicit session, or None for the default
        """
        self._model_class = model_class
        self._table = model_class.get_table()  # type: ignore[reportAttributeAccessIssue]
        self._db_or_session = db_or_session

    # ========================================
    # 1. Internal Helper Methods
    # ========================================

    def _get_session(self, readonly: bool = True) -> AsyncSession:
        """Resolve the session to use: default context, named database, or explicit session.

        Args:
            readonly: Whether a read-only session is sufficient

        Returns:
            An AsyncSession appropriate for this manager's configuration
        """
        if self._db_or_session is None:
            return SessionContextManager.get_session(readonly=readonly)
        elif isinstance(self._db_or_session, str):
            return SessionContextManager.get_session(self._db_or_session, readonly=readonly)
        else:
            return self._db_or_session

    def _validate_field_names(self, **kwargs) -> None:
        """Validate that all field names exist on the model's table.

        Raises:
            AttributeError: If any keyword is not a column of the model's table
        """
        table_fields = set(self._table.columns.keys())
        for field_name in kwargs.keys():
            if field_name not in table_fields:
                raise AttributeError(f"'{self._model_class.__name__}' has no field '{field_name}'")

    # ========================================
    # 2. Session Management Methods
    # ========================================

    def using(self, db_or_session: str | AsyncSession) -> "ObjectsManager[T]":
        """Return a new manager bound to the given database name or session object."""
        return ObjectsManager(self._model_class, db_or_session)

    # ========================================
    # 3. Query Building Methods - return QuerySet
    # ========================================

    def filter(self, *args) -> QuerySet[T]:
        """Filter objects using Q objects or SQLAlchemy expressions.

        Args:
            *args: Q objects or SQLAlchemy expressions for complex conditions

        Returns:
            QuerySet with filter conditions applied
        """
        return QuerySet(self._table, self._model_class, db_or_session=self._db_or_session).filter(*args)

    def defer(self, *fields) -> QuerySet[T]:
        """Defer loading of specified fields until accessed.

        Args:
            *fields: Field names to defer

        Returns:
            QuerySet with deferred fields
        """
        return QuerySet(self._table, self._model_class, db_or_session=self._db_or_session).defer(*fields)

    # ========================================
    # 4. Query Execution Methods - execute and return results
    # ========================================

    # Basic execution methods

    async def all(self) -> list[T]:
        """Get all objects of this model.

        Returns:
            List of all model instances
        """
        return await self.filter().all()

    async def get(self, *args) -> T:
        """Get a single object matching the given conditions.

        Args:
            *args: Q objects or SQLAlchemy expressions for complex conditions

        Returns:
            Single model instance

        Raises:
            DoesNotExist: If no object matches the conditions
            MultipleObjectsReturned: If multiple objects match the conditions
            ValidationError: If field lookup conditions are invalid
            DatabaseError: If database connection or query execution fails
            AttributeError: If specified field names don't exist on the model

        Examples:
            # Basic usage with default session
            user = await User.objects.get(User.username=="john")

            # Using specific database session
            user = await User.objects.using(analytics_session).get(User.username=="john")

            # Complex query with session
            user = await User.objects.using(analytics_session).get(
                Q(User.username=="john", User.email=="john@example.com")
            )
        """
        # LIMIT 2 is enough to distinguish "none" / "one" / "many" cheaply.
        results = await self.filter(*args).limit(2).all()
        if not results:
            raise DoesNotExist(f"{self._model_class.__name__} matching query does not exist")
        if len(results) > 1:
            raise MultipleObjectsReturned(f"Multiple {self._model_class.__name__} objects returned")
        return results[0]

    async def first(self) -> T | None:
        """Get the first object according to the default ordering.

        Returns:
            First model instance or None if no objects exist
        """
        return await self.filter().first()

    async def last(self) -> T | None:
        """Get the last object according to the default ordering.

        Returns:
            Last model instance or None if no objects exist
        """
        return await self.filter().last()

    # Ordering-based execution methods

    async def earliest(self, *fields) -> T | None:
        """Get the earliest object by specified fields.

        Args:
            *fields: Field names to order by

        Returns:
            Earliest model instance or None if no objects exist
        """
        return await self.filter().earliest(*fields)

    async def latest(self, *fields) -> T | None:
        """Get the latest object by specified fields.

        Args:
            *fields: Field names to order by

        Returns:
            Latest model instance or None if no objects exist
        """
        return await self.filter().latest(*fields)

    # Data extraction methods

    async def values(self, *fields) -> list[dict[str, Any]]:
        """Get dictionaries of field values.

        Args:
            *fields: Field names to include

        Returns:
            List of dictionaries with field values
        """
        return await self.filter().values(*fields)

    async def values_list(self, *fields, flat: bool = False) -> list:
        """Get tuples or flat list of field values.

        Args:
            *fields: Field names to include
            flat: If True and single field, return flat list

        Returns:
            List of tuples or flat list of values
        """
        return await self.filter().values_list(*fields, flat=flat)

    async def dates(self, field: str, kind: str, order: str = "ASC") -> list[Any]:
        """Get list of dates for a field.

        Args:
            field: Field name to extract dates from
            kind: Date part to extract ('year', 'month', 'day')
            order: Sort order ('ASC' or 'DESC')

        Returns:
            List of date values
        """
        return await self.filter().dates(field, kind, order)  # type: ignore

    async def datetimes(self, field: str, kind: str, order: str = "ASC") -> list[Any]:
        """Get list of datetimes for a field.

        Args:
            field: Field name to extract datetimes from
            kind: Datetime part to extract ('year', 'month', 'day', 'hour')
            order: Sort order ('ASC' or 'DESC')

        Returns:
            List of datetime values
        """
        return await self.filter().datetimes(field, kind, order)  # type: ignore

    # Advanced execution methods

    async def iterator(self, chunk_size: int = 1000):
        """Async iterator for large datasets.

        Args:
            chunk_size: Number of objects to fetch per chunk

        Yields:
            Model instances one by one
        """
        async for obj in self.filter().iterator(chunk_size):
            yield obj

    async def get_item(self, key) -> T | list[T]:
        """Get item by index or slice.

        Args:
            key: Integer index or slice object

        Returns:
            Single model instance for index, list for slice
        """
        return await self.filter().get_item(key)  # type: ignore

    # ========================================
    # 5. Data Operations Methods - create and modify data
    # ========================================

    # Creation operations

    async def get_or_create(
        self, defaults: dict[str, Any] | None = None, validate: bool = True, **lookup
    ) -> tuple[T, bool]:
        """Get an existing object or create a new one if it doesn't exist.

        Args:
            defaults: Additional values to use when creating a new object
            validate: Whether to validate when creating
            **lookup: Field lookup conditions (only equality supported)

        Returns:
            Tuple of (object, created) where created is True if object was created

        Raises:
            AttributeError: If specified field names don't exist on the model
            ValidationError: If validation fails during creation
            IntegrityError: If database constraints are violated

        Examples:
            user, created = await User.objects.get_or_create(
                username="john",
                defaults={"email": "john@example.com"}
            )
        """
        if not lookup:
            raise ValueError("get_or_create requires at least one lookup field")

        # Fail fast on typo'd field names before touching the database.
        self._validate_field_names(**lookup)
        if defaults:
            self._validate_field_names(**defaults)

        try:
            # Try to get existing object
            conditions = [self._table.c[field] == value for field, value in lookup.items()]
            obj = await self.filter(*conditions).get()
            return obj, False
        except DoesNotExist:
            # Create new object with lookup fields + defaults
            create_data = lookup.copy()
            if defaults:
                # defaults override lookup values if there's conflict
                create_data.update(defaults)

            # Create instance and use save() method to trigger signals
            obj = self._model_class(**create_data)
            await obj.using(self._get_session(readonly=False)).save(validate=validate)  # type: ignore[reportAttributeAccessIssue]
            return obj, True

    async def update_or_create(
        self, defaults: dict[str, Any] | None = None, validate: bool = True, **lookup
    ) -> tuple[T, bool]:
        """Update an existing object or create a new one if it doesn't exist.

        Args:
            defaults: Values to update/set when object exists or is created
            validate: Whether to validate when updating/creating
            **lookup: Field lookup conditions (only equality supported)

        Returns:
            Tuple of (object, created) where created is True if object was created

        Raises:
            AttributeError: If specified field names don't exist on the model
            ValidationError: If validation fails during update/creation
            IntegrityError: If database constraints are violated

        Examples:
            user, created = await User.objects.update_or_create(
                username="john",
                defaults={"last_login": datetime.now()}
            )
        """
        if not lookup:
            raise ValueError("update_or_create requires at least one lookup field")

        # Fail fast on typo'd field names before touching the database.
        self._validate_field_names(**lookup)
        if defaults:
            self._validate_field_names(**defaults)

        try:
            # Try to get existing object
            conditions = [self._table.c[field] == value for field, value in lookup.items()]
            obj = await self.filter(*conditions).get()

            # Update existing object with defaults using save() method
            if defaults:
                for key, value in defaults.items():
                    setattr(obj, key, value)
                await obj.using(self._get_session(readonly=False)).save(validate=validate)  # type: ignore[reportAttributeAccessIssue]

            return obj, False
        except DoesNotExist:
            # Create new object with lookup fields + defaults
            create_data = lookup.copy()
            if defaults:
                # defaults override lookup values if there's conflict
                create_data.update(defaults)

            # Create instance and use save() method to trigger signals
            obj = self._model_class(**create_data)
            await obj.using(self._get_session(readonly=False)).save(validate=validate)  # type: ignore[reportAttributeAccessIssue]
            return obj, True

    async def in_bulk(self, id_list: list[Any] | None = None, field_name: str = "pk") -> dict[Any, T]:
        """Get multiple objects as a dictionary mapping field values to objects.

        Useful for efficiently retrieving multiple objects when you have a list
        of identifiers and want to access them by their field values.

        Args:
            id_list: List of values to match against the specified field
            field_name: Name of the field to use as dictionary keys ('pk' for primary key)

        Returns:
            Dictionary mapping field values to model instances
        """
        if field_name == "pk":
            # Resolve 'pk' to the first primary-key column (single-column PK assumed).
            pk_columns = list(self._table.primary_key.columns)
            actual_field = pk_columns[0].name if pk_columns else "id"
        else:
            actual_field = field_name

        queryset = self.filter()
        if id_list is not None:
            field_column = self._table.c[actual_field]
            queryset = queryset.filter(field_column.in_(id_list))

        objects = await queryset.all()
        return {getattr(obj, actual_field): obj for obj in objects}

    # Update and delete operations

    @emit_signals(Operation.SAVE)
    async def create(self, validate: bool = True, **kwargs) -> T:
        """Create a new object with the given field values.

        Args:
            validate: Whether to execute all validation (both SQLObjects and SQLAlchemy validators)
            **kwargs: Field values for the new object

        Returns:
            Created model instance

        Raises:
            ValidationError: If validation fails during creation
            IntegrityError: If database constraints are violated (unique, foreign key, etc.)
            DatabaseError: If database connection or transaction fails
            TypeError: If invalid field names or values are provided
            AttributeError: If specified field names don't exist on the model
        """
        # NOTE: the previous blanket ``except Exception: raise RuntimeError(...)``
        # was removed — it masked the documented exception types above, making
        # them impossible for callers to catch by type. Errors now propagate.
        obj = self._model_class(**kwargs)
        # Execute database operation directly, don't call obj.save() to avoid
        # duplicate signals (this method is already wrapped by @emit_signals).
        if validate:
            obj.validate_all_fields()  # type: ignore[reportAttributeAccessIssue]

        stmt = insert(self._table).values(**obj._get_all_data())  # noqa # type: ignore[reportAttributeAccessIssue]
        session = self._get_session(readonly=False)
        result = await session.execute(stmt)

        # Propagate server-generated primary key values back onto the instance.
        if result.inserted_primary_key:
            obj._set_primary_key_values(result.inserted_primary_key)  # noqa # type: ignore[reportAttributeAccessIssue]

        # Session auto-commits with auto_commit=True
        return obj

    @emit_signals(Operation.SAVE, is_bulk=True)
    async def bulk_create(self, objects: list[dict[str, Any]]) -> None:
        """Create multiple objects in one INSERT for better performance.

        Note: per-object validation and per-object signals are bypassed;
        only the bulk signal decorator fires.

        Args:
            objects: List of dictionaries containing object data
        """
        if not objects:
            return

        stmt = insert(self._table).values(objects)
        session = self._get_session(readonly=False)
        await session.execute(stmt)

    # Aggregation and statistics

    @emit_signals(Operation.SAVE, is_bulk=True)
    async def bulk_update(
        self, mappings: list[dict[str, Any]], match_fields: list[str] | None = None, batch_size: int = 1000
    ) -> int:
        """Perform true bulk update operations for better performance.

        Args:
            mappings: List of dictionaries containing match fields and update values
            match_fields: Fields to use for matching records (defaults to ["id"])
            batch_size: Number of records to process in each batch

        Returns:
            Total number of affected rows

        Raises:
            ValidationError: If mappings is empty or invalid
            IntegrityError: If database constraints are violated during update
            DatabaseError: If database connection or transaction fails
        """
        if not mappings:
            raise ValueError("Bulk update requires non-empty mappings list")

        if match_fields is None:
            match_fields = ["id"]

        total_affected = 0

        # Process in batches using Core-level update
        for i in range(0, len(mappings), batch_size):
            batch = mappings[i : i + batch_size]

            # Build WHERE conditions using match_fields; bindparam names are
            # prefixed ("match_"/"update_") so a field may appear on both sides.
            where_conditions = []
            for field in match_fields:
                where_conditions.append(self._table.c[field] == bindparam(f"match_{field}"))

            # Create Core-level update statement
            stmt = update(self._table).where(*where_conditions)

            # Add update values (exclude match fields from values).
            # NOTE(review): the update-column set is derived from batch[0] —
            # assumes all mappings in a batch share the same keys.
            update_values = {}
            for key in batch[0].keys():
                if key not in match_fields:
                    update_values[key] = bindparam(f"update_{key}")

            if update_values:
                stmt = stmt.values(**update_values)

            # Prepare parameter mappings for executemany-style execution
            param_mappings = []
            for mapping in batch:
                param_dict = {}
                # Add match field parameters
                for field in match_fields:
                    param_dict[f"match_{field}"] = mapping[field]
                # Add update value parameters
                for key, value in mapping.items():
                    if key not in match_fields:
                        param_dict[f"update_{key}"] = value
                param_mappings.append(param_dict)

            # Execute bulk update directly with session
            session = self._get_session(readonly=False)
            result = await session.execute(stmt, param_mappings)
            total_affected += result.rowcount if result.rowcount is not None else 0

            # Session auto-commits with auto_commit=True

        return total_affected

    @emit_signals(Operation.DELETE, is_bulk=True)
    async def bulk_delete(self, ids: list[Any], id_field: str = "id", batch_size: int = 1000) -> int:
        """Perform true bulk delete operations for better performance.

        Args:
            ids: List of IDs to delete
            id_field: Field name to use for matching (defaults to "id")
            batch_size: Number of records to process in each batch

        Returns:
            Total number of deleted rows

        Raises:
            ValidationError: If ids list is empty
            IntegrityError: If foreign key constraints prevent deletion
            DatabaseError: If database connection or transaction fails
        """
        if not ids:
            raise ValueError("Bulk delete requires non-empty ids list")

        total_affected = 0

        # Process in batches using IN clause to bound statement size
        for i in range(0, len(ids), batch_size):
            batch_ids = ids[i : i + batch_size]

            # Create delete statement with IN clause
            stmt = delete(self._table).where(self._table.c[id_field].in_(batch_ids))
            session = self._get_session(readonly=False)
            result = await session.execute(stmt)
            total_affected += result.rowcount if result.rowcount is not None else 0

        # Session auto-commits with auto_commit=True
        return total_affected

    async def delete_all(self, fast: bool = False) -> int:
        """Delete all records from the table.

        Args:
            fast: Whether to use TRUNCATE for fast deletion
                Note: TRUNCATE doesn't support transaction rollback and doesn't trigger signals
                Use with caution in production environments

        Returns:
            Number of deleted rows (-1 for TRUNCATE as it cannot return accurate count)
        """
        if fast:
            # Use TRUNCATE for maximum performance on large tables.
            # Warning: this bypasses transaction safety and signal triggering.
            # NOTE(review): TRUNCATE TABLE is not supported by all backends
            # (e.g. SQLite) — confirm against the configured database.
            table_name = self._table.name
            session = self._get_session(readonly=False)
            await session.execute(text(f"TRUNCATE TABLE {table_name}"))
            return -1  # TRUNCATE cannot return accurate row count
        else:
            # Use QuerySet.delete() for transaction safety and signal support
            return await self.filter().delete()

    async def update_all(self, **values) -> int:
        """Update all records in the table with the given values.

        Args:
            **values: Field values to update

        Returns:
            Number of updated rows

        Examples:
            affected = await User.objects.update_all(status="migrated")
        """
        return await self.filter().update(**values)

    # QuerySet shortcut methods

    async def count(self) -> int:
        """Count the total number of objects.

        Returns:
            Total number of objects
        """
        return await self.filter().count()

    async def aggregate(self, **kwargs) -> dict[str, Any]:
        """Perform aggregation operations on the queryset.

        Args:
            **kwargs: Aggregation expressions keyed by their result aliases

        Returns:
            Dictionary with aggregation results
        """
        aggregations = []
        labels = []

        for alias, expr in kwargs.items():
            if hasattr(expr, "resolve"):
                # SQLObjects function: bind it to this manager's table first
                aggregations.append(expr.resolve(self._table).label(alias))
            else:
                aggregations.append(expr.label(alias))
            labels.append(alias)

        query = select(*aggregations).select_from(self._table)
        session = self._get_session(readonly=True)
        result = await session.execute(query)
        first_result = result.first()
        return dict(zip(labels, first_result, strict=False)) if first_result else {}

    def distinct(self, *fields) -> QuerySet[T]:
        """Apply DISTINCT clause to eliminate duplicate rows.

        Args:
            *fields: Field names or SQLAlchemy expressions to apply DISTINCT on, if empty applies to all

        Returns:
            QuerySet with DISTINCT applied
        """
        return self.filter().distinct(*fields)

    def exclude(self, *args) -> QuerySet[T]:
        """Exclude objects matching the given conditions.

        Args:
            *args: Q objects or SQLAlchemy expressions for complex conditions

        Returns:
            QuerySet with exclusion conditions applied
        """
        return self.filter().exclude(*args)

    def order_by(self, *fields) -> QuerySet[T]:
        """Order results by the specified fields.

        Args:
            *fields: Field names or SQLAlchemy expressions
                (prefix string fields with '-' for descending order)

        Returns:
            QuerySet with ordering applied
        """
        return self.filter().order_by(*fields)

    def limit(self, count: int) -> QuerySet[T]:
        """Limit the number of results.

        Args:
            count: Maximum number of results to return

        Returns:
            QuerySet with limit applied
        """
        return self.filter().limit(count)

    def offset(self, count: int) -> QuerySet[T]:
        """Skip the specified number of results.

        Args:
            count: Number of results to skip

        Returns:
            QuerySet with offset applied
        """
        return self.filter().offset(count)

    def only(self, *fields) -> QuerySet[T]:
        """Load only the specified fields from the database.

        Args:
            *fields: Field names to load (string field names only)

        Returns:
            QuerySet that loads only the specified fields
        """
        return self.filter().only(*fields)

    def none(self) -> QuerySet[T]:
        """Return an empty queryset that will never match any objects.

        Returns:
            QuerySet that returns no results
        """
        return self.filter().none()

    def reverse(self) -> QuerySet[T]:
        """Reverse the ordering of the queryset.

        Returns:
            QuerySet with reversed ordering
        """
        return self.filter().reverse()

    def select_related(self, *fields) -> QuerySet[T]:
        """JOIN preload related objects.

        Args:
            *fields: Related field names to preload (supports nested paths with '__')

        Returns:
            QuerySet with related objects preloaded

        Examples:
            posts = await Post.objects.select_related('author').all()
            posts = await Post.objects.select_related('author', 'category').all()
            comments = await Comment.objects.select_related('post__author').all()
        """
        return self.filter().select_related(*fields)

    def prefetch_related(self, *fields: str, **queryset_configs: QuerySet) -> QuerySet[T]:
        """Separate query preload related objects with advanced configuration support.

        Args:
            *fields: Simple prefetch field names (supports nested paths with '__')
            **queryset_configs: Advanced prefetch with custom QuerySets for filtering/ordering

        Returns:
            QuerySet with related objects prefetched

        Examples:
            # Simple prefetch
            users = await User.objects.prefetch_related('posts', 'profile').all()

            # Advanced prefetch with filtering and ordering
            users = await User.objects.prefetch_related(
                published_posts=Post.objects.filter(Post.is_published == True)
                .order_by('-created_at')
                .limit(5)
            ).all()
        """
        return self.filter().prefetch_related(*fields, **queryset_configs)
|