sqlobjects 1.0.2__tar.gz → 1.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sqlobjects-1.0.2/sqlobjects.egg-info → sqlobjects-1.0.4}/PKG-INFO +1 -1
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/pyproject.toml +1 -1
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/mixins.py +54 -13
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/model.py +74 -30
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/objects/bulk.py +10 -13
- {sqlobjects-1.0.2 → sqlobjects-1.0.4/sqlobjects.egg-info}/PKG-INFO +1 -1
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/LICENSE +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/README.md +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/setup.cfg +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/cascade.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/database/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/database/config.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/database/manager.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/exceptions.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/aggregate.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/base.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/function.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/mixins.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/scalar.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/subquery.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/expressions/terminal.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/core.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/functions.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/proxies.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/relations/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/relations/descriptors.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/relations/managers.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/relations/proxies.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/relations/utils.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/shortcuts.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/types/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/types/base.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/types/comparators.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/types/registry.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/fields/utils.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/metadata.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/objects/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/objects/core.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/queries/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/queries/builder.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/queries/executor.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/queryset.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/session.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/signals.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/utils/__init__.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/utils/inspect.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/utils/naming.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/utils/pattern.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects/validators.py +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects.egg-info/SOURCES.txt +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects.egg-info/dependency_links.txt +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects.egg-info/requires.txt +0 -0
- {sqlobjects-1.0.2 → sqlobjects-1.0.4}/sqlobjects.egg-info/top_level.txt +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlobjects
-Version: 1.0.2
+Version: 1.0.4
 Summary: Django-style async ORM library based on SQLAlchemy with chainable queries, Q objects, and relationship loading
 Author-email: XtraVisions <gitadmin@xtravisions.com>, Chen Hao <chenhao@xtravisions.com>
 Maintainer-email: XtraVisions <gitadmin@xtravisions.com>, Chen Hao <chenhao@xtravisions.com>
sqlobjects/mixins.py

@@ -392,19 +392,7 @@ class DataConversionMixin(DeferredLoadingMixin):
         all_fields = set(cls._get_field_names())
         filtered_data = {k: v for k, v in data.items() if k in all_fields}
 
-
-        for col in table.columns:  # noqa
-            if col.name not in filtered_data:
-                field_attr = getattr(cls, col.name, None)
-                if field_attr is not None and hasattr(field_attr, "get_default_factory"):
-                    factory = field_attr.get_default_factory()
-                    if factory and callable(factory):
-                        filtered_data[col.name] = factory()
-                        continue
-
-                if col.default is not None:
-                    if hasattr(col.default, "is_scalar") and col.default.is_scalar:
-                        filtered_data[col.name] = getattr(col.default, "arg", None)
+        # Default values will be handled by __init__ method
 
         init_data = {}
         non_init_data = {}
@@ -427,6 +415,11 @@ class DataConversionMixin(DeferredLoadingMixin):
         instance = cls(**init_data)  # noqa
 
         for field_name, value in non_init_data.items():
+            # Apply default value if value is None
+            if value is None:
+                default_value = instance._get_field_default_value(field_name)  # noqa
+                if default_value is not None:
+                    value = default_value
             setattr(instance, field_name, value)
 
         # Clear dirty fields since this is initial creation from dict
@@ -439,6 +432,46 @@ class DataConversionMixin(DeferredLoadingMixin):
 
         return instance
 
+    def _apply_default_values(self, kwargs: dict):
+        """Apply default values for fields not provided in kwargs.
+
+        Args:
+            kwargs: Dictionary of provided field values (will be modified)
+        """
+        for field_name in self._get_field_names():
+            if field_name not in kwargs or kwargs[field_name] is None:
+                default_value = self._get_field_default_value(field_name)
+                if default_value is not None:
+                    kwargs[field_name] = default_value
+
+    def _get_field_default_value(self, field_name: str):
+        """Get default value for a field.
+
+        Args:
+            field_name: Name of the field
+
+        Returns:
+            Default value or None if no default
+        """
+        field_attr = getattr(self.__class__, field_name, None)
+        if field_attr is None:
+            return None
+
+        # Priority: default_factory > SQLAlchemy default
+        if hasattr(field_attr, "get_default_factory"):
+            factory = field_attr.get_default_factory()
+            if factory and callable(factory):
+                return factory()
+
+        if hasattr(field_attr, "default") and field_attr.default is not None:
+            default_value = field_attr.default
+            if callable(default_value):
+                return default_value()
+            else:
+                return default_value
+
+        return None
+
 
 class FieldCacheMixin(DataConversionMixin):
     """Field caching and attribute access optimization - Layer 6."""
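The three mixins.py hunks above move default handling out of the dict-conversion path and into instance construction, with a field's `default_factory` taking priority over a plain or callable default. A minimal standalone sketch of that resolution order follows; the `Field` stand-in is hypothetical and only models the two hooks `_get_field_default_value` probes for, not the real sqlobjects field class:

```python
from datetime import datetime, timezone


class Field:
    """Hypothetical stand-in exposing the hooks the mixin looks for."""

    def __init__(self, default=None, default_factory=None):
        self.default = default
        self._factory = default_factory

    def get_default_factory(self):
        return self._factory


def resolve_default(field):
    # Priority mirrors the new mixin method: default_factory first, then default.
    factory = field.get_default_factory()
    if factory and callable(factory):
        return factory()
    if field.default is not None:
        return field.default() if callable(field.default) else field.default
    return None


created_at = Field(default_factory=lambda: datetime.now(timezone.utc))
status = Field(default="draft")

print(resolve_default(created_at))  # fresh UTC timestamp from the factory
print(resolve_default(status))      # "draft"
```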
@@ -599,6 +632,13 @@ class FieldCacheMixin(DataConversionMixin):
 
         relationship_fields = field_cache.get("relationship_fields", set())
         if isinstance(relationship_fields, set) and name in relationship_fields:
+            # Check cascade_relationships first (manually assigned values)
+            if hasattr(self, "_state_manager"):
+                cascade_relationships: dict = self._state_manager.get("cascade_relationships", {})  # type: ignore[reportAssignmentType]
+                if name in cascade_relationships:
+                    return cascade_relationships[name]
+
+            # Check preloaded cache
             cache_name = f"_{name}_cache"
             try:
                 if hasattr(self, cache_name):
@@ -608,6 +648,7 @@ class FieldCacheMixin(DataConversionMixin):
             except AttributeError:
                 pass
 
+            # Only create proxy if relationship is not loaded
             proxy_cache = self._state_manager.get("proxy_cache", {})
            if isinstance(proxy_cache, dict) and name not in proxy_cache:
                 proxy_cache[name] = RelationFieldProxy(self, name)
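The two FieldCacheMixin hunks change attribute access for relationship fields so that manually assigned values win, then any preloaded cache, and only then a lazy proxy is created. A rough sketch of that precedence; `_StateManager` and the return values below are simplified placeholders, not the sqlobjects internals:

```python
class _StateManager:
    """Simplified per-instance store; the real one in sqlobjects holds more state."""

    def __init__(self):
        self.data = {}

    def get(self, key, default=None):
        return self.data.get(key, default)


def resolve_relationship(instance, name):
    # 1. Manually assigned related objects pending a cascade save win first.
    cascade = instance._state_manager.get("cascade_relationships", {})
    if name in cascade:
        return cascade[name]
    # 2. Preloaded cache, e.g. filled by eager relationship loading.
    cached = getattr(instance, f"_{name}_cache", None)
    if cached is not None:
        return cached
    # 3. Otherwise hand back a lazy proxy that resolves on demand.
    return f"<lazy proxy for {name!r}>"
```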
sqlobjects/model.py

@@ -56,6 +56,9 @@ class ModelMixin(FieldCacheMixin, SignalMixin):
         if hasattr(self, "_history_initialized"):
             self._history_initialized = False
 
+        # Generate default values for fields not provided in kwargs
+        self._apply_default_values(kwargs)
+
         # Set field values
         for key, value in kwargs.items():
             setattr(self, key, value)
@@ -107,8 +110,36 @@ class ModelMixin(FieldCacheMixin, SignalMixin):
             if i < len(pk_values):
                 setattr(self, col.name, pk_values[i])
 
+    def _get_upsert_statement(self, table, data):
+        """Construct UPSERT statement based on database dialect."""
+        dialect = self.get_session().bind.dialect.name
+
+        pk_columns = list(table.primary_key.columns)
+
+        if dialect == "postgresql":
+            from sqlalchemy.dialects.postgresql import insert
+
+            stmt = insert(table).values(**data)
+            return stmt.on_conflict_do_update(index_elements=pk_columns, set_=data)
+
+        elif dialect == "mysql":
+            from sqlalchemy.dialects.mysql import insert
+
+            stmt = insert(table).values(**data)
+            return stmt.on_duplicate_key_update(**data)
+
+        elif dialect == "sqlite":
+            from sqlalchemy.dialects.sqlite import insert
+
+            stmt = insert(table).values(**data)
+            return stmt.on_conflict_do_update(index_elements=pk_columns, set_=data)
+
+        else:
+            # Return None for unsupported dialects to trigger fallback
+            return None
+
     async def _save_internal(self, validate: bool = True, session=None):
-        """Internal save operation
+        """Internal save operation using UPSERT with fallback to query-then-save.
 
         This method contains the core save logic that can be reused by both
         the public save() method and the cascade executor without triggering
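The new `_get_upsert_statement` builds on SQLAlchemy's dialect-specific `insert` constructs. A self-contained sketch of the same dispatch against a throwaway table (the `users` table is a placeholder, not part of sqlobjects):

```python
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import sqlite as sqlite_dialect

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)


def build_upsert(dialect_name, table, data):
    """Same dispatch as _get_upsert_statement; None means 'fall back to query-then-save'."""
    pk_columns = list(table.primary_key.columns)
    if dialect_name == "postgresql":
        from sqlalchemy.dialects.postgresql import insert
        return insert(table).values(**data).on_conflict_do_update(index_elements=pk_columns, set_=data)
    if dialect_name == "sqlite":
        from sqlalchemy.dialects.sqlite import insert
        return insert(table).values(**data).on_conflict_do_update(index_elements=pk_columns, set_=data)
    if dialect_name == "mysql":
        from sqlalchemy.dialects.mysql import insert
        return insert(table).values(**data).on_duplicate_key_update(**data)
    return None


stmt = build_upsert("sqlite", users, {"id": 1, "name": "Ada"})
# Compile against the SQLite dialect to render the ON CONFLICT clause.
print(stmt.compile(dialect=sqlite_dialect.dialect()))
```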
@@ -132,17 +163,37 @@ class ModelMixin(FieldCacheMixin, SignalMixin):
         if validate:
             self.validate_all_fields()
 
+        data = self._get_all_data()
+
+        # Try UPSERT for supported databases
+        upsert_stmt = self._get_upsert_statement(table, data)
+        if upsert_stmt is not None:
+            try:
+                result = await session.execute(upsert_stmt)
+                if result.inserted_primary_key:
+                    self._set_primary_key_values(result.inserted_primary_key)
+                # Clear dirty fields after successful save
+                dirty_fields = self._state_manager.get("dirty_fields", set())
+                if isinstance(dirty_fields, set):
+                    dirty_fields.clear()
+                return self
+            except Exception as e:
+                raise PrimaryKeyError(f"Upsert operation failed: {e}") from e
+
+        # Fallback: query database to determine INSERT or UPDATE
         try:
-
-
-
+            pk_conditions = self._build_pk_conditions()
+            existing = await session.execute(select(table).where(and_(*pk_conditions)))
+
+            if existing.first():
+                # Record exists, perform UPDATE
                 update_data = self._get_dirty_data()
                 if update_data:
                     stmt = update(table).where(and_(*pk_conditions)).values(**update_data)
                     await session.execute(stmt)
             else:
-                # INSERT
-                stmt = insert(table).values(**
+                # Record does not exist, perform INSERT
+                stmt = insert(table).values(**data)
                 result = await session.execute(stmt)
                 if result.inserted_primary_key:
                     self._set_primary_key_values(result.inserted_primary_key)
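Read together, the two hunks above make `_save_internal` try a dialect-native UPSERT first and only fall back to the older query-then-branch logic when `_get_upsert_statement` returns None. A condensed sketch of that control flow in plain SQLAlchemy Core, with the builder and primary-key conditions passed in as placeholders:

```python
from sqlalchemy import and_, insert, select, update


async def save_with_fallback(session, table, data, build_upsert, pk_conditions):
    """Mirror of the new flow: UPSERT if available, else probe for the row and branch."""
    stmt = build_upsert(table, data)  # dialect-native UPSERT statement, or None
    if stmt is not None:
        return await session.execute(stmt)

    # Fallback: decide between UPDATE and INSERT by checking for an existing row.
    existing = await session.execute(select(table).where(and_(*pk_conditions)))
    if existing.first():
        return await session.execute(
            update(table).where(and_(*pk_conditions)).values(**data)
        )
    return await session.execute(insert(table).values(**data))
```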
@@ -216,8 +267,8 @@ class ModelMixin(FieldCacheMixin, SignalMixin):
         for rel_name, new_related_objects in cascade_relationships.items():
             await self._process_relationship_update(rel_name, new_related_objects, session)
 
-        #
-        self._state_manager.set("cascade_relationships", {})
+        # keep cascade_relationships
+        # self._state_manager.set("cascade_relationships", {})
         self._state_manager.set("needs_cascade_save", False)
 
     async def _process_relationship_update(self, rel_name: str, new_related_objects, session):
@@ -258,32 +309,30 @@ class ModelMixin(FieldCacheMixin, SignalMixin):
 
     async def _fetch_current_related_objects(self, rel_name: str, session) -> list:
         """Fetch current related objects from database."""
-
-
-
-            "profile": ("CascadeProfile", "user_id"),
-        }
+        relationships = getattr(self.__class__, "_relationships", {})
+        if rel_name not in relationships:
+            return []
 
-
+        rel_descriptor = relationships[rel_name]
+        if not hasattr(rel_descriptor.property, "resolved_model") or not rel_descriptor.property.resolved_model:
             return []
 
-
+        related_model = rel_descriptor.property.resolved_model
+        foreign_keys = rel_descriptor.property.foreign_keys
 
-
-        if related_model_name == "CascadePost":
-            from tests.integration.test_cascade_integration import CascadePost as RelatedModel
-        elif related_model_name == "CascadeProfile":
-            from tests.integration.test_cascade_integration import CascadeProfile as RelatedModel
-        else:
+        if not foreign_keys:
             return []
 
-        #
+        # fetch foreign keys
+        fk_field = foreign_keys if isinstance(foreign_keys, str) else foreign_keys[0]
+
+        # get pk
         pk_value = getattr(self, self._get_primary_key_field())
         if pk_value is None:
             return []
 
         current_objects = (
-            await
+            await related_model.objects.using(session).filter(getattr(related_model, fk_field) == pk_value).all()
         )
 
         return current_objects
@@ -428,23 +477,18 @@ class ModelMixin(FieldCacheMixin, SignalMixin):
         from .cascade import OnDelete
 
         relationships = getattr(self.__class__, "_relationships", {})
-
-        for
-            print(f"DEBUG: Checking relationship {rel_name}")
+
+        for _, rel_descriptor in relationships.items():
             if hasattr(rel_descriptor, "property") and hasattr(rel_descriptor.property, "cascade"):
                 cascade_str = rel_descriptor.property.cascade
-                print(f"DEBUG: Cascade string: {cascade_str}")
                 if cascade_str and ("delete" in cascade_str or "all" in cascade_str):
-                    print(f"DEBUG: Found delete cascade relationship: {rel_name}")
                     return True
             if (
                 hasattr(rel_descriptor, "property")
                 and hasattr(rel_descriptor.property, "on_delete")
                 and rel_descriptor.property.on_delete != OnDelete.NO_ACTION
             ):
-                print(f"DEBUG: Found on_delete relationship: {rel_name}")
                 return True
-        print("DEBUG: No on_delete relations found")
         return False
 
     def _get_primary_key_field(self) -> str:
sqlobjects/objects/bulk.py

@@ -323,19 +323,16 @@ class BulkOperationHandler:
         exec_session = session or self.session
 
         if return_columns and self.supports_returning(operation):
-
-
-
-
-
-
-
-
-
-
-                return objects, result.rowcount or 0, True
-            except Exception:  # noqa
-                pass  # Fall through to regular execution
+            stmt_with_returning = stmt.returning(*return_columns)
+            # For INSERT operations, use the data directly as parameters
+            if operation == "insert" and isinstance(parameters, list):
+                result = await exec_session.execute(stmt_with_returning, parameters)
+            elif parameters:
+                result = await exec_session.execute(stmt_with_returning, parameters)
+            else:
+                result = await exec_session.execute(stmt_with_returning)
+            objects = self.create_objects_from_rows(result.fetchall(), return_fields)
+            return objects, result.rowcount or 0, True
 
         # Regular execution without RETURNING
         if parameters is not None and isinstance(parameters, list) and len(parameters) > 1:
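The bulk handler now issues the RETURNING variant directly when `supports_returning()` says the dialect can, instead of wrapping it in a broad try/except that fell through to regular execution. A standalone sketch of executemany-style INSERT ... RETURNING with SQLAlchemy's async API (the `items` table and the aiosqlite URL are placeholders, not sqlobjects code, and assume SQLAlchemy 2.x with a RETURNING-capable SQLite):

```python
import asyncio

from sqlalchemy import Column, Integer, MetaData, String, Table, insert
from sqlalchemy.ext.asyncio import create_async_engine

metadata = MetaData()
items = Table(
    "items",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)


async def main():
    engine = create_async_engine("sqlite+aiosqlite://")  # in-memory placeholder DB
    async with engine.begin() as conn:
        await conn.run_sync(metadata.create_all)
        stmt = insert(items).returning(items.c.id, items.c.name)
        # A list of parameter dicts drives SQLAlchemy's "insertmanyvalues" path,
        # which is what bulk INSERT ... RETURNING relies on.
        result = await conn.execute(stmt, [{"name": "a"}, {"name": "b"}])
        print(result.fetchall())  # e.g. [(1, 'a'), (2, 'b')]
    await engine.dispose()


asyncio.run(main())
```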
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlobjects
-Version: 1.0.2
+Version: 1.0.4
 Summary: Django-style async ORM library based on SQLAlchemy with chainable queries, Q objects, and relationship loading
 Author-email: XtraVisions <gitadmin@xtravisions.com>, Chen Hao <chenhao@xtravisions.com>
 Maintainer-email: XtraVisions <gitadmin@xtravisions.com>, Chen Hao <chenhao@xtravisions.com>