sqlobjects 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlobjects/__init__.py +38 -0
- sqlobjects/config.py +519 -0
- sqlobjects/database.py +586 -0
- sqlobjects/exceptions.py +538 -0
- sqlobjects/expressions.py +1054 -0
- sqlobjects/fields.py +1866 -0
- sqlobjects/history.py +101 -0
- sqlobjects/metadata.py +1130 -0
- sqlobjects/model.py +1009 -0
- sqlobjects/objects.py +812 -0
- sqlobjects/queries.py +1059 -0
- sqlobjects/relations.py +843 -0
- sqlobjects/session.py +389 -0
- sqlobjects/signals.py +464 -0
- sqlobjects/utils/__init__.py +5 -0
- sqlobjects/utils/naming.py +53 -0
- sqlobjects/utils/pattern.py +644 -0
- sqlobjects/validators.py +294 -0
- sqlobjects-0.1.0.dist-info/METADATA +29 -0
- sqlobjects-0.1.0.dist-info/RECORD +23 -0
- sqlobjects-0.1.0.dist-info/WHEEL +5 -0
- sqlobjects-0.1.0.dist-info/licenses/LICENSE +21 -0
- sqlobjects-0.1.0.dist-info/top_level.txt +1 -0
sqlobjects/model.py
ADDED
|
@@ -0,0 +1,1009 @@
|
|
|
1
|
+
"""SQLObjects Model Module - Optimized core model implementation"""
|
|
2
|
+
|
|
3
|
+
from functools import lru_cache
|
|
4
|
+
from typing import TYPE_CHECKING, Any, TypeVar
|
|
5
|
+
|
|
6
|
+
from sqlalchemy import Table, and_, delete, insert, select, update
|
|
7
|
+
|
|
8
|
+
from .exceptions import DeferredFieldError, PrimaryKeyError, ValidationError
|
|
9
|
+
from .history import HistoryTrackingMixin
|
|
10
|
+
from .metadata import ModelProcessor
|
|
11
|
+
from .session import AsyncSession, SessionContextManager
|
|
12
|
+
from .signals import Operation, SignalMixin, emit_signals
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING:
|
|
16
|
+
pass
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# Public API of this module. "ObjectModel" is defined later in the file
# (outside this chunk); the rest are the proxies, state manager, and the
# layered mixin hierarchy defined below.
__all__ = [
    "ObjectModel",
    "ModelMixin",
    "DeferredFieldProxy",
    "RelationFieldProxy",
    "StateManager",
    "BaseMixin",
    "SessionMixin",
    "PrimaryKeyMixin",
    "ValidationMixin",
    "DeferredLoadingMixin",
    "DataConversionMixin",
    "FieldCacheMixin",
]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class DeferredFieldProxy:
    """Optimized proxy for deferred fields with caching.

    Returned by ``FieldCacheMixin.__getattribute__`` in place of a deferred
    column's value. Any direct container/arithmetic access raises
    DeferredFieldError so callers are forced to ``await proxy.fetch()``.
    """

    def __init__(self, instance: "DeferredLoadingMixin", field_name: str) -> None:
        self.instance = instance
        self.field_name = field_name
        self._cached_value = None
        self._is_loaded = False

    async def fetch(self) -> Any:
        """Fetch field value, auto-loading it from the database if needed.

        Returns:
            The loaded field value (None if the row has no value).
        """
        if not self._is_loaded:
            await self.instance.load_deferred_field(self.field_name)
            # BUG FIX: load_deferred_fields() stores loaded values on the
            # *public* attribute (setattr(self, field, row[i])), not on an
            # underscore-prefixed one, so read the field name directly.
            # Once the field is marked loaded, __getattribute__ no longer
            # intercepts it, so this returns the real value instead of the
            # always-None result of getattr(instance, f"_{field}", None).
            self._cached_value = getattr(self.instance, self.field_name, None)
            self._is_loaded = True
        return self._cached_value

    def is_loaded(self) -> bool:
        """Whether the underlying field has been loaded on the instance."""
        return self.instance.is_field_loaded(self.field_name)

    def is_deferred(self) -> bool:
        """Whether the underlying field is still deferred on the instance."""
        return self.instance.is_field_deferred(self.field_name)

    def __iter__(self):
        raise DeferredFieldError(
            f"Cannot iterate over deferred field '{self.field_name}' on {self.instance.__class__.__name__}"
        )

    def __len__(self):
        raise DeferredFieldError(
            f"Cannot get length of deferred field '{self.field_name}' on {self.instance.__class__.__name__}"
        )

    def __bool__(self):
        raise DeferredFieldError(
            f"Cannot check boolean value of deferred field '{self.field_name}' on {self.instance.__class__.__name__}"
        )

    def __getitem__(self, key):
        raise DeferredFieldError(
            f"Cannot access items of deferred field '{self.field_name}' on {self.instance.__class__.__name__}"
        )

    def __contains__(self, item):
        raise DeferredFieldError(
            f"Cannot check containment in deferred field '{self.field_name}' on {self.instance.__class__.__name__}"
        )

    def __add__(self, other):
        raise DeferredFieldError(
            f"Cannot perform arithmetic on deferred field '{self.field_name}' on {self.instance.__class__.__name__}"
        )

    def __str__(self):
        return f"<DeferredField: {self.field_name}>"

    def __repr__(self):
        return f"DeferredFieldProxy(field_name='{self.field_name}')"
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class RelationFieldProxy:
    """Optimized proxy for relationship fields with caching.

    Wraps an unloaded relationship so that container-style access fails
    loudly; callers must ``await proxy.fetch()`` to materialize the
    related objects.
    """

    def __init__(self, instance: Any, field_name: str) -> None:
        self.instance = instance
        self.field_name = field_name
        self._cached_objects = None
        self._is_loaded = False

    async def fetch(self) -> Any:
        """Return the related objects, loading them on first access."""
        if self._is_loaded:
            return self._cached_objects
        await self._load_relationship()
        self._cached_objects = self._get_cached_objects()
        self._is_loaded = True
        return self._cached_objects

    def is_loaded(self) -> bool:
        """True when the instance already carries a relationship cache."""
        return hasattr(self.instance, f"_{self.field_name}_cache")

    def is_deferred(self) -> bool:
        """Inverse of :meth:`is_loaded`."""
        return not self.is_loaded()

    async def _load_relationship(self) -> None:
        """Load the relationship via the QuerySet prefetch machinery."""
        owner = self.instance.__class__
        if not hasattr(owner, "_relationships"):
            return

        relationships = getattr(owner, "_relationships", {})
        if self.field_name not in relationships:
            return

        descriptor = relationships[self.field_name]

        from .queries import QuerySet

        queryset = QuerySet(self.instance.get_table(), owner)
        session = self.instance._get_session()  # noqa
        if hasattr(queryset, "_prefetch_relationship") and descriptor.property.resolved_model:
            await queryset._prefetch_relationship(  # noqa # type: ignore
                [self.instance], descriptor, descriptor.property.resolved_model, session
            )

    def _get_cached_objects(self) -> Any:
        """Return the instance-level cache for this relationship, if any."""
        return getattr(self.instance, f"_{self.field_name}_cache", None)

    def _reject(self, action: str):
        """Raise DeferredFieldError for a disallowed direct access."""
        raise DeferredFieldError(
            f"Cannot {action} unloaded relationship '{self.field_name}' "
            f"on {self.instance.__class__.__name__}"
        )

    def __iter__(self):
        self._reject("iterate over")

    def __len__(self):
        self._reject("get length of")

    def __bool__(self):
        self._reject("check boolean value of")

    def __getitem__(self, key):
        self._reject("access items of")

    def __contains__(self, item):
        self._reject("check containment in")

    def __add__(self, other):
        self._reject("perform arithmetic on")

    def __str__(self):
        return f"<RelationField: {self.field_name}>"

    def __repr__(self):
        return f"RelationFieldProxy(field_name='{self.field_name}')"
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
class StateManager:
    """Unified state management for model instances.

    A thin dict wrapper that gives every instance one place to keep
    bookkeeping state (dirty fields, deferred-field sets, bound session,
    proxy cache, ...) without polluting the instance ``__dict__`` with
    many separate attributes.
    """

    def __init__(self) -> None:
        """Create an empty state store."""
        self._state: dict[str, Any] = {}

    def get(self, key: str, default=None):
        """Return the value stored under *key*, or *default* when absent.

        Args:
            key: State key to retrieve
            default: Fallback returned when the key is missing

        Returns:
            The stored value or *default*.
        """
        return self._state.get(key, default)

    def set(self, key: str, value):
        """Store *value* under *key*, replacing any previous value.

        Args:
            key: State key to set
            value: Value to store
        """
        self._state[key] = value
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
class BaseMixin:
    """Base mixin with common functionality and state management."""

    def __init__(self):
        """Initialize state manager if not already present."""
        # Guard so cooperative __init__ chains across the mixin stack
        # create the StateManager exactly once per instance.
        if not hasattr(self, "_state_manager"):
            self._state_manager = StateManager()

    @classmethod
    def get_table(cls) -> Table:
        """Get SQLAlchemy Core Table definition.

        Returns:
            SQLAlchemy Table instance for this model

        Note:
            Placeholder: the bare ``...`` body returns None. Concrete
            models (e.g. ModelMixin later in this file) override this to
            return the real Table.
        """
        ...

    @classmethod
    @lru_cache(maxsize=1)
    def _get_field_names(cls) -> list[str]:
        """Get field names from the table definition (cached).

        NOTE(review): lru_cache on a classmethod keys the cache on ``cls``;
        with maxsize=1 the single entry is evicted whenever a *different*
        model class calls this, so caching is only effective while one
        class is in use. Results stay correct either way — confirm whether
        a per-class cache was intended.

        Returns:
            List of field names from the table columns
        """
        return list(cls.get_table().columns.keys())
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
class SessionMixin(BaseMixin):
    """Session management - Layer 1."""

    def get_session(self) -> AsyncSession:
        """Resolve the session used for database operations.

        A session bound via :meth:`using` takes priority. A bound string
        is treated as a database name and resolved through
        SessionContextManager; a bound AsyncSession is returned as-is.
        With nothing bound, the context-local default session is used.

        Returns:
            AsyncSession instance for database operations
        """
        bound = self._state_manager.get("bound_session")
        if isinstance(bound, str):
            return SessionContextManager.get_session(bound)
        return bound or SessionContextManager.get_session()

    def using(self, db_or_session: str | AsyncSession):
        """Bind this instance to a specific database or session.

        Args:
            db_or_session: Database name or AsyncSession instance

        Returns:
            Self, enabling method chaining.
        """
        self._state_manager.set("bound_session", db_or_session)
        return self
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
class PrimaryKeyMixin(SessionMixin):
    """Primary key operations - Layer 2."""

    @classmethod
    @lru_cache(maxsize=1)
    def _get_primary_key_info(cls) -> dict[str, Any]:
        """Return cached primary-key metadata for this model class.

        Returns:
            Dict with ``columns`` (primary-key Column objects) and
            ``names`` (their column names, in the same order).
        """
        columns = list(cls.get_table().primary_key.columns)
        return {"columns": columns, "names": [c.name for c in columns]}

    def _get_primary_key_values(self) -> dict[str, Any]:
        """Map each primary-key field name to its current value.

        Returns:
            Dict of primary-key name -> value (None when unset).
        """
        info = self._get_primary_key_info()
        return {name: getattr(self, name, None) for name in info["names"]}

    def _has_primary_key_values(self) -> bool:
        """Report whether every primary-key field holds a non-None value.

        Returns:
            True when the full primary key is populated.
        """
        return all(v is not None for v in self._get_primary_key_values().values())

    def _build_pk_conditions(self) -> list:
        """Build SQLAlchemy equality conditions matching this row's key.

        Returns:
            List of ``column == value`` expressions, one per key column.

        Raises:
            PrimaryKeyError: If any primary key value is missing.
        """
        if not self._has_primary_key_values():
            raise PrimaryKeyError("Cannot build conditions without primary key values")

        table = self.get_table()
        pairs = self._get_primary_key_values().items()
        return [table.c[name] == value for name, value in pairs]
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
class ValidationMixin(PrimaryKeyMixin):
    """Validation logic - Layer 3."""

    def validate_field(self, field_name: str) -> None:
        """Run the configured validators for a single field.

        Validators are looked up in the column's
        ``info["_enhanced"]["validators"]`` metadata; when present, the
        validated (possibly coerced) value is written back to the field.

        Args:
            field_name: Name of the field to validate

        Raises:
            ValueError: If the field does not exist on this model.
            ValidationError: If any validator rejects the current value.
        """
        if field_name not in self._get_field_names():
            raise ValueError(f"Field '{field_name}' does not exist")

        descriptor = getattr(self.__class__, field_name, None)
        if not (descriptor and hasattr(descriptor, "column") and descriptor.column is not None):
            return

        validators = descriptor.column.info.get("_enhanced", {}).get("validators", [])
        if not validators:
            return

        current = getattr(self, field_name, None)
        try:
            from .validators import validate_field_value

            validated = validate_field_value(validators, current, field_name)
            setattr(self, field_name, validated)
        except Exception as exc:
            raise ValidationError(str(exc), field=field_name) from exc

    def validate_all_fields(self, fields: list[str] | None = None) -> None:
        """Validate several fields, aggregating failures into one error.

        Args:
            fields: Field names to validate; None validates every field.

        Raises:
            ValidationError: Combined "; "-joined message when any field fails.
        """
        targets = self._get_field_names() if fields is None else fields
        failures = []
        for name in targets:
            try:
                self.validate_field(name)
            except ValidationError as exc:
                failures.append(exc)
        if failures:
            raise ValidationError("; ".join(str(e) for e in failures))
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
class DeferredLoadingMixin(ValidationMixin):
    """Deferred loading functionality - Layer 4.

    Tracks which columns were deferred at query time and lazily fetches
    them by primary key on demand. Works together with
    ``FieldCacheMixin.__getattribute__``, which hands out
    DeferredFieldProxy objects for unloaded deferred fields.
    """

    def __init__(self):
        """Initialize deferred loading state."""
        super().__init__()
        # All bookkeeping lives in the shared StateManager, not in
        # instance attributes, so __getattribute__ interception can skip it.
        self._state_manager.set("deferred_fields", set())
        self._state_manager.set("loaded_deferred_fields", set())
        self._state_manager.set("is_from_db", False)

    @property
    def _deferred_fields(self) -> set[str]:
        # Defensive: fall back to an empty set if state was overwritten
        # with a non-set value.
        result = self._state_manager.get("deferred_fields", set())
        return result if isinstance(result, set) else set()

    @property
    def _loaded_deferred_fields(self) -> set[str]:
        # Same defensive shape-check as _deferred_fields.
        result = self._state_manager.get("loaded_deferred_fields", set())
        return result if isinstance(result, set) else set()

    def get_deferred_fields(self) -> set[str]:
        """Get all deferred fields.

        Returns:
            Copy of the set of deferred field names (safe to mutate).
        """
        return self._deferred_fields.copy()

    def get_loaded_deferred_fields(self) -> set[str]:
        """Get loaded deferred fields.

        Returns:
            Copy of the set of deferred field names already loaded.
        """
        return self._loaded_deferred_fields.copy()

    def is_field_deferred(self, field_name: str) -> bool:
        """Check if field is deferred.

        Args:
            field_name: Name of the field to check

        Returns:
            True if field is deferred
        """
        return field_name in self._deferred_fields

    def is_field_loaded(self, field_name: str) -> bool:
        """Check if deferred field is loaded.

        Args:
            field_name: Name of the field to check

        Returns:
            True if field is not deferred or has been loaded
        """
        # Non-deferred fields are considered "loaded" by definition.
        if field_name not in self._deferred_fields:
            return True
        return field_name in self._loaded_deferred_fields

    def is_from_database(self) -> bool:
        """Check if instance was loaded from database.

        Returns:
            True if instance was loaded from database
        """
        result = self._state_manager.get("is_from_db", False)
        return bool(result)

    async def load_deferred_field(self, field_name: str) -> None:
        """Load a single deferred field.

        Args:
            field_name: Name of the field to load
        """
        await self.load_deferred_fields([field_name])

    async def load_deferred_fields(self, fields: list[str] | None = None) -> None:
        """Load multiple deferred fields efficiently in one SELECT.

        Args:
            fields: List of field names to load, or None for all deferred fields

        Raises:
            PrimaryKeyError: If primary key values are not set
        """
        table = self.get_table()
        pk_columns = list(table.primary_key.columns)
        pk_values = [getattr(self, col.name, None) for col in pk_columns]
        if not all(value is not None for value in pk_values):
            raise PrimaryKeyError("Cannot load deferred fields without primary key")

        if fields is None:
            fields_to_load = self._deferred_fields - self._loaded_deferred_fields
        else:
            # Binary '-' binds tighter than '&', so this is
            # requested & (deferred - already_loaded) — the intended set.
            fields_to_load = set(fields) & self._deferred_fields - self._loaded_deferred_fields

        if not fields_to_load:
            return

        # Drop names that are not real columns (e.g. relationship names).
        valid_fields = [f for f in fields_to_load if f in table.columns]
        if not valid_fields:
            return

        pk_conditions = [table.c[col.name] == getattr(self, col.name) for col in pk_columns]
        columns = [table.c[field] for field in valid_fields]

        stmt = select(*columns).where(and_(*pk_conditions))
        session = self.get_session()
        result = await session.execute(stmt)
        row = result.first()

        if row:
            loaded_fields = self._state_manager.get("loaded_deferred_fields", set())
            if isinstance(loaded_fields, set):
                # Columns in `row` are positionally aligned with valid_fields.
                # Mutating the set in place also updates the StateManager's
                # stored copy (same object), so no set() call is needed.
                for i, field in enumerate(valid_fields):
                    setattr(self, field, row[i])
                    loaded_fields.add(field)
|
|
488
|
+
|
|
489
|
+
|
|
490
|
+
class DataConversionMixin(DeferredLoadingMixin):
    """Data conversion functionality - Layer 5."""

    def to_dict(
        self,
        include: list[str] | None = None,
        exclude: list[str] | None = None,
        include_deferred: bool = False,
        safe_access: bool = True,
    ) -> dict[str, Any]:
        """Convert model instance to dictionary.

        Args:
            include: List of fields to include, or None for all fields
            exclude: List of fields to exclude
            include_deferred: Whether to include deferred fields
            safe_access: Whether to skip unloaded deferred fields safely

        Returns:
            Dictionary representation of the model instance
        """
        all_fields = set(self._get_field_names())

        # Unknown names in `include` are silently ignored (intersection).
        if include is not None:
            fields = set(include) & all_fields
        else:
            fields = all_fields

        if exclude is not None:
            fields = fields - set(exclude)

        if not include_deferred:
            fields = fields - self._deferred_fields

        result = {}
        for field in fields:
            # Even with include_deferred=True, an unloaded deferred field
            # is skipped under safe_access to avoid triggering proxies.
            if safe_access and field in self._deferred_fields and field not in self._loaded_deferred_fields:
                continue
            try:
                result[field] = getattr(self, field)
            except AttributeError:
                if not safe_access:
                    raise
                continue

        return result

    @classmethod
    def from_dict(cls, data: dict[str, Any], validate: bool = True):
        """Create model instance from dictionary with validation.

        Default resolution for missing columns, in priority order:
        1. the field descriptor's default factory (if callable),
        2. the column's scalar server-side default.

        Args:
            data: Dictionary of field values
            validate: Whether to validate fields after creation

        Returns:
            New model instance created from dictionary data
        """
        # Keys not matching a column are dropped, not errors.
        all_fields = set(cls._get_field_names())
        filtered_data = {k: v for k, v in data.items() if k in all_fields}

        table = cls.get_table()
        for col in table.columns:  # noqa
            if col.name not in filtered_data:
                field_attr = getattr(cls, col.name, None)
                if field_attr and hasattr(field_attr, "get_default_factory"):
                    factory = field_attr.get_default_factory()
                    if factory and callable(factory):
                        filtered_data[col.name] = factory()
                        # Factory wins; skip the column-default fallback.
                        continue

                if col.default is not None:
                    if hasattr(col.default, "is_scalar") and col.default.is_scalar:
                        filtered_data[col.name] = getattr(col.default, "arg", None)

        # Split values by whether the field participates in __init__
        # (per its codegen params); non-init fields are set afterwards.
        init_data = {}
        non_init_data = {}

        for field_name, value in filtered_data.items():
            field_attr = getattr(cls, field_name, None)
            if field_attr and hasattr(field_attr, "get_codegen_params"):
                codegen_params = field_attr.get_codegen_params()
                if codegen_params.get("init", True):
                    init_data[field_name] = value
                else:
                    non_init_data[field_name] = value
            else:
                init_data[field_name] = value

        instance = cls(**init_data)  # noqa

        for field_name, value in non_init_data.items():
            setattr(instance, field_name, value)

        if validate:
            instance.validate_all_fields()

        return instance
|
|
588
|
+
|
|
589
|
+
|
|
590
|
+
class FieldCacheMixin(DataConversionMixin):
    """Field caching and attribute access optimization - Layer 6."""

    @classmethod
    def _get_field_cache(cls):
        """Auto-initialize and cache field information.

        The cache is stored directly on the class object (one per model
        class) rather than via lru_cache on the shared mixin.

        NOTE(review): hasattr() sees inherited attributes, so a subclass
        defined after its parent's cache was built will reuse the parent's
        cache until _invalidate_field_cache() — confirm this is intended.

        Returns:
            Dictionary containing categorized field information
        """
        cache_attr = "_cached_field_info"
        if not hasattr(cls, cache_attr):
            setattr(cls, cache_attr, cls._build_field_cache())
        return getattr(cls, cache_attr)

    @classmethod
    def _build_field_cache(cls):
        """Build field cache with error handling.

        Returns:
            Dictionary with field categories: deferred_fields, relationship_fields, regular_fields
        """
        cache = {"deferred_fields": set(), "relationship_fields": set(), "regular_fields": set()}

        # Best-effort: any failure yields a (possibly partial) cache so
        # attribute access degrades to plain lookups instead of crashing.
        try:
            if hasattr(cls, "__table__"):
                table = getattr(cls, "__table__", None)
                if table is not None:
                    for col_name in table.columns.keys():
                        cls._categorize_field(col_name, cache)

            if hasattr(cls, "_relationships"):
                relationships = getattr(cls, "_relationships", {})
                cache["relationship_fields"].update(relationships.keys())
        except Exception:  # noqa
            pass

        return cache

    @classmethod
    def _categorize_field(cls, field_name, cache):
        """Categorize a single field into cache.

        A field is "deferred" when its column metadata carries
        ``info["_performance"]["deferred"] = True``; everything else
        (including fields we cannot inspect) is "regular".

        Args:
            field_name: Name of the field to categorize
            cache: Cache dictionary to update
        """
        try:
            attr = getattr(cls, field_name, None)
            if attr and hasattr(attr, "column") and attr.column is not None:
                if hasattr(attr.column, "info") and attr.column.info is not None:
                    performance_params = attr.column.info.get("_performance", {})
                    if performance_params.get("deferred", False):
                        cache["deferred_fields"].add(field_name)
                    else:
                        cache["regular_fields"].add(field_name)
                else:
                    cache["regular_fields"].add(field_name)
        except (AttributeError, TypeError):
            cache["regular_fields"].add(field_name)

    @classmethod
    def _invalidate_field_cache(cls):
        """Manually invalidate field cache.

        Use this when field definitions change at runtime.
        """
        cache_attr = "_cached_field_info"
        if hasattr(cls, cache_attr):
            delattr(cls, cache_attr)

    def __getattribute__(self, name: str):
        """Optimized attribute access using automatic field cache.

        Provides intelligent attribute access with proxy objects for
        deferred and relationship fields. Skips optimization for
        special attributes and methods to avoid recursion.

        Args:
            name: Attribute name to access

        Returns:
            Attribute value or proxy object
        """
        # Fast path: underscore names and the framework's own methods go
        # straight to normal lookup. This guard is load-bearing — without
        # it the cache/state lookups below would recurse infinitely.
        # ("_get_field_cache" is redundant here: startswith("_") covers it.)
        if name.startswith("_") or name in (
            "get_table",
            "load_deferred_fields",
            "validate_all_fields",
            "save",
            "delete",
            "refresh",
            "to_dict",
            "from_dict",
            "using",
            "is_field_deferred",
            "is_field_loaded",
            "get_deferred_fields",
            "_get_field_cache",
            "get_session",
            "validate_field",
            "load_deferred_field",
            "is_from_database",
        ):
            return super().__getattribute__(name)

        model_class = super().__getattribute__("__class__")
        field_cache = model_class._get_field_cache()  # noqa

        # Deferred column: hand out a cached DeferredFieldProxy only for
        # DB-loaded instances whose field has not been fetched yet.
        deferred_fields = field_cache.get("deferred_fields", set())
        if isinstance(deferred_fields, set) and name in deferred_fields:
            if (
                hasattr(self, "_state_manager")
                and self._state_manager.get("is_from_db", False)
                and name in self._deferred_fields
                and not self.is_field_loaded(name)
            ):
                proxy_cache = self._state_manager.get("proxy_cache", {})
                if isinstance(proxy_cache, dict) and name not in proxy_cache:
                    proxy_cache[name] = DeferredFieldProxy(self, name)
                    self._state_manager.set("proxy_cache", proxy_cache)
                if isinstance(proxy_cache, dict):
                    return proxy_cache[name]

        # Relationship: prefer the prefetched per-instance cache attr;
        # otherwise hand out a cached RelationFieldProxy.
        relationship_fields = field_cache.get("relationship_fields", set())
        if isinstance(relationship_fields, set) and name in relationship_fields:
            cache_name = f"_{name}_cache"
            try:
                if hasattr(self, cache_name):
                    cached_value = super().__getattribute__(cache_name)
                    if cached_value is not None:
                        return cached_value
            except AttributeError:
                pass

            proxy_cache = self._state_manager.get("proxy_cache", {})
            if isinstance(proxy_cache, dict) and name not in proxy_cache:
                proxy_cache[name] = RelationFieldProxy(self, name)
                self._state_manager.set("proxy_cache", proxy_cache)
            if isinstance(proxy_cache, dict):
                return proxy_cache[name]

        # Regular field or no proxy applicable: normal lookup.
        return super().__getattribute__(name)
|
|
732
|
+
|
|
733
|
+
|
|
734
|
+
# Type variable bound to ModelMixin, for typed classmethod/fluent returns.
M = TypeVar("M", bound="ModelMixin")
|
|
736
|
+
|
|
737
|
+
|
|
738
|
+
class ModelMixin(FieldCacheMixin, SignalMixin, HistoryTrackingMixin):
    """Optimized mixin class with linear inheritance and performance improvements.

    Combines field caching, signal handling, and history tracking into a single
    optimized mixin. Provides core CRUD operations with intelligent dirty field
    tracking and efficient database operations.

    Features:
    - Automatic dirty field tracking for optimized updates
    - Signal emission for lifecycle events
    - History tracking for audit trails
    - Deferred loading support
    - Validation integration
    """

    @classmethod
    def get_table(cls):
        """Get SQLAlchemy Core Table definition.

        Returns:
            SQLAlchemy Table instance for this model

        Raises:
            AttributeError: If model has no __table__ attribute
        """
        table = getattr(cls, "__table__", None)
        if table is None:
            raise AttributeError(f"Model {cls.__name__} has no __table__ attribute")
        return table

    def __init__(self, **kwargs):
        """Initialize optimized model instance.

        Args:
            **kwargs: Field values to set on the instance
        """
        super().__init__()
        # Fresh per-instance dirty-field set; __setattr__ adds to it from here on.
        self._state_manager.set("dirty_fields", set())

        # Set history initialization flag before setting values so the
        # initial assignments below are not recorded as history changes.
        if hasattr(self, "_history_initialized"):
            self._history_initialized = False

        # Set field values (each assignment also marks the field dirty
        # via __setattr__).
        for key, value in kwargs.items():
            setattr(self, key, value)

        # Enable history tracking after initialization
        if hasattr(self, "_history_initialized"):
            self._history_initialized = True

    def validate(self) -> None:
        """Model-level validation hook that subclasses can override.

        Override this method to implement custom model-level validation
        logic that goes beyond field-level validation.

        Raises:
            ValidationError: If validation fails
        """
        pass

    def _get_all_data(self) -> dict:
        """Get all field data.

        Returns:
            Dictionary mapping field names to their current values
            (missing attributes default to None)
        """
        return {name: getattr(self, name, None) for name in self._get_field_names()}

    def _get_dirty_data(self) -> dict:
        """Get modified field data.

        Returns:
            Dictionary mapping dirty field names to their current values,
            or all field data if no dirty fields are tracked
        """
        dirty_fields = self._state_manager.get("dirty_fields", set())
        if not dirty_fields:
            # No tracked changes: fall back to writing every field.
            return self._get_all_data()
        return {name: getattr(self, name, None) for name in dirty_fields}

    def _set_primary_key_values(self, pk_values):
        """Set primary key values.

        Args:
            pk_values: Sequence of primary key values to set, in the same
                order as the table's primary key columns. Extra PK columns
                beyond len(pk_values) are left untouched.
        """
        table = self.get_table()
        pk_columns = list(table.primary_key.columns)
        for i, col in enumerate(pk_columns):
            if i < len(pk_values):
                setattr(self, col.name, pk_values[i])

    @emit_signals(Operation.SAVE)
    async def save(self, validate: bool = True):
        """Optimized save operation with better error handling.

        Automatically determines whether to INSERT or UPDATE based on
        primary key presence. Uses dirty field tracking for efficient
        updates that only modify changed fields.

        Args:
            validate: Whether to run validation before saving

        Returns:
            Self for method chaining

        Raises:
            PrimaryKeyError: If save operation fails (any exception raised
                during execute is wrapped into PrimaryKeyError)
            ValidationError: If validation fails and validate=True
        """
        session = self.get_session()
        table = self.get_table()

        # Validation runs outside the try block so ValidationError is not
        # re-wrapped as PrimaryKeyError.
        if validate:
            self.validate_all_fields()

        try:
            if self._has_primary_key_values():
                # UPDATE operation: only write dirty fields (or all fields
                # when nothing is tracked -- see _get_dirty_data).
                pk_conditions = self._build_pk_conditions()
                update_data = self._get_dirty_data()
                if update_data:
                    stmt = update(table).where(and_(*pk_conditions)).values(**update_data)
                    await session.execute(stmt)
            else:
                # INSERT operation; server-generated PK values are copied
                # back onto the instance.
                stmt = insert(table).values(**self._get_all_data())
                result = await session.execute(stmt)
                if result.inserted_primary_key:
                    self._set_primary_key_values(result.inserted_primary_key)
        except Exception as e:
            raise PrimaryKeyError(f"Save operation failed: {e}") from e

        # Clear dirty fields after successful save
        dirty_fields = self._state_manager.get("dirty_fields", set())
        if isinstance(dirty_fields, set):
            dirty_fields.clear()
        return self

    @emit_signals(Operation.DELETE)
    async def delete(self):
        """Delete this model instance from the database.

        Raises:
            PrimaryKeyError: If instance has no primary key values or delete fails
        """
        session = self.get_session()
        table = self.get_table()

        if not self._has_primary_key_values():
            raise PrimaryKeyError("Cannot delete instance without primary key values")

        try:
            pk_conditions = self._build_pk_conditions()
            stmt = delete(table).where(and_(*pk_conditions))
            await session.execute(stmt)
        except Exception as e:
            raise PrimaryKeyError(f"Delete operation failed: {e}") from e

    async def refresh(self, fields: list[str] | None = None, include_deferred: bool = True):
        """Refresh this instance with the latest data from the database.

        Args:
            fields: Specific fields to refresh, or None for all fields
            include_deferred: Whether to include deferred fields in refresh
                (ignored when an explicit ``fields`` list is given)

        Returns:
            Self for method chaining

        Raises:
            ValueError: If instance has no primary key values
        """
        session = self.get_session()
        table = self.get_table()

        if not self._has_primary_key_values():
            raise ValueError("Cannot refresh instance without primary key values")

        pk_conditions = self._build_pk_conditions()

        if fields:
            columns_to_select = [table.c[field] for field in fields]
        else:
            if not include_deferred:
                field_names = [f for f in self._get_field_names() if f not in self._deferred_fields]
                columns_to_select = [table.c[field] for field in field_names]
            else:
                # Select the whole table (all columns, deferred included).
                columns_to_select = [table]

        stmt = select(*columns_to_select).where(and_(*pk_conditions))
        result = await session.execute(stmt)
        fresh_data = result.first()

        if fresh_data:
            loaded_deferred_fields = self._state_manager.get("loaded_deferred_fields", set())
            # NOTE(review): if "loaded_deferred_fields" is ever stored as a
            # non-set, the fresh values are silently NOT applied -- confirm
            # this guard is only defensive.
            if isinstance(loaded_deferred_fields, set):
                if fields:
                    # Explicit field list: row columns are positional in the
                    # same order as the requested fields.
                    for i, field in enumerate(fields):
                        setattr(self, field, fresh_data[i])
                        if field in self._deferred_fields:
                            loaded_deferred_fields.add(field)
                else:
                    # NOTE(review): these setattr calls also mark every
                    # refreshed column dirty via __setattr__ -- presumably
                    # harmless since save() falls back to all fields, but
                    # verify this is intended.
                    for col_name, value in fresh_data._mapping.items():  # noqa
                        setattr(self, col_name, value)
                        if col_name in self._deferred_fields:
                            loaded_deferred_fields.add(col_name)

        return self

    def __setattr__(self, name, value):
        """Track dirty fields when setting attributes.

        Automatically tracks field modifications for optimized UPDATE
        operations. Skips tracking for private (underscore-prefixed)
        attributes and before the state manager exists (i.e. during
        early initialization).

        Args:
            name: Attribute name
            value: Attribute value
        """
        if not name.startswith("_") and hasattr(self, "_state_manager"):
            dirty_fields = self._state_manager.get("dirty_fields", set())
            if isinstance(dirty_fields, set):
                dirty_fields.add(name)
        super().__setattr__(name, value)
|
|
967
|
+
class ObjectModel(ModelMixin, metaclass=ModelProcessor):
    """Base model class with configuration support and common functionality.

    Primary base class for all SQLObjects models: the ModelProcessor
    metaclass performs automatic table generation from field definitions,
    while ModelMixin supplies the runtime behavior.

    Features:
    - Automatic table generation from field definitions
    - Built-in CRUD operations with signal support
    - Query manager (objects) for database operations
    - Validation and history tracking
    - Deferred loading and field caching

    Usage:
        class User(ObjectModel):
            name: Column[str] = str_column(length=100)
            email: Column[str] = str_column(length=255, unique=True)
    """

    __abstract__ = True

    def __init_subclass__(cls, **kwargs):
        """Hook subclass creation: attach the objects manager and validators.

        Concrete (non-abstract) subclasses that do not already define an
        ``objects`` attribute receive an ObjectsDescriptor query manager;
        classes exposing a callable ``_setup_validators`` hook have it
        invoked once here.

        Args:
            **kwargs: Additional keyword arguments passed to the parent hook
        """
        super().__init_subclass__(**kwargs)

        is_concrete = not getattr(cls, "__abstract__", False)
        if is_concrete and not hasattr(cls, "objects"):
            # Imported lazily to avoid a circular import at module load time.
            from .objects import ObjectsDescriptor

            cls.objects = ObjectsDescriptor(cls)

        # Run the optional validator setup hook when the class provides one.
        validator_hook = getattr(cls, "_setup_validators", None)
        if validator_hook and callable(validator_hook):
            validator_hook()