dapper-sqls 0.9.7-py3-none-any.whl → 1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dapper_sqls/__init__.py +4 -2
- dapper_sqls/_types.py +25 -2
- dapper_sqls/async_dapper/async_dapper.py +1 -1
- dapper_sqls/async_dapper/async_executors.py +128 -53
- dapper_sqls/builders/model/model.py +421 -36
- dapper_sqls/builders/model/utils.py +337 -45
- dapper_sqls/builders/query.py +165 -44
- dapper_sqls/builders/stored.py +16 -10
- dapper_sqls/builders/stp.py +6 -2
- dapper_sqls/config.py +41 -32
- dapper_sqls/dapper/dapper.py +1 -1
- dapper_sqls/dapper/executors.py +131 -56
- dapper_sqls/decorators.py +5 -3
- dapper_sqls/http/__init__.py +4 -0
- dapper_sqls/http/aiohttp.py +155 -0
- dapper_sqls/http/decorators.py +123 -0
- dapper_sqls/http/models.py +58 -0
- dapper_sqls/http/request.py +140 -0
- dapper_sqls/models/__init__.py +3 -5
- dapper_sqls/models/base.py +246 -20
- dapper_sqls/models/connection.py +2 -2
- dapper_sqls/models/query_field.py +214 -0
- dapper_sqls/models/result.py +315 -45
- dapper_sqls/sqlite/__init__.py +5 -1
- dapper_sqls/sqlite/async_local_database.py +168 -0
- dapper_sqls/sqlite/decorators.py +69 -0
- dapper_sqls/sqlite/installer.py +97 -0
- dapper_sqls/sqlite/local_database.py +67 -185
- dapper_sqls/sqlite/models.py +51 -1
- dapper_sqls/sqlite/utils.py +9 -0
- dapper_sqls/utils.py +18 -6
- dapper_sqls-1.2.0.dist-info/METADATA +41 -0
- dapper_sqls-1.2.0.dist-info/RECORD +40 -0
- {dapper_sqls-0.9.7.dist-info → dapper_sqls-1.2.0.dist-info}/WHEEL +1 -1
- dapper_sqls-0.9.7.dist-info/METADATA +0 -19
- dapper_sqls-0.9.7.dist-info/RECORD +0 -30
- {dapper_sqls-0.9.7.dist-info → dapper_sqls-1.2.0.dist-info}/top_level.txt +0 -0
@@ -1,24 +1,9 @@
-#
+# coding: utf-8
 import re
-from pydantic import Field, BaseModel
+from pydantic import Field, BaseModel, model_validator
+from typing import Optional, List, Set, Any
+import json
 
-class InformationSchemaTables(BaseModel):
-    TABLE_CATALOG : str = Field(None, description="")
-    TABLE_SCHEMA : str = Field(None, description="")
-    TABLE_NAME : str = Field(None, description="")
-    COLUMN_NAME : str = Field(None, description="")
-    DATA_TYPE : str = Field(None, description="")
-    IS_NULLABLE : str = Field(None, description="")
-
-class InformationSchemaRoutines(BaseModel):
-    SPECIFIC_CATALOG : str = Field(None, description="")
-    SPECIFIC_SCHEMA : str = Field(None, description="")
-    SPECIFIC_NAME : str = Field(None, description="")
-    ORDINAL_POSITION : int = Field(None, description="")
-    PARAMETER_NAME : str = Field(None, description="")
-    DATA_TYPE : str = Field(None, description="")
-    PROCEDURE_DEFINITION : str = Field(None, description="")
-
 type_mapping = {
     'int': 'int',
     'bigint': 'int',
@@ -44,22 +29,318 @@ type_mapping = {
     'rowversion': 'bytes',
 }
 
-
+QUERY_FIELD_TYPES = {
+    'str': 'StringQueryField',
+    'int': 'NumericQueryField',
+    'float': 'NumericQueryField',
+    'bool': 'BoolQueryField',
+    'datetime': 'DateQueryField',
+    'datetime.time': 'DateQueryField',
+    'bytes': 'BytesQueryField',
+}
+
+JOIN_CONDITIONAL_FIELD_TYPES = {
+    'str': 'JoinStringCondition',
+    'int': 'JoinNumericCondition',
+    'float': 'JoinNumericCondition',
+    'bool': 'JoinBooleanCondition',
+    'datetime': 'JoinDateCondition',
+    'datetime.time': 'JoinDateCondition',
+    'bytes': 'JoinBytesCondition',
+}
+
+class Relation(BaseModel):
+    original : bool = True
+    table : str
+    column : str
+
+class SqlTableAuth(BaseModel):
+    table : str = ""
+    column : str = ""
+
+class TableSettings(BaseModel):
+    insert : bool = False
+    update : bool = False
+    delete : bool = False
+    search : bool = True
+
+class ForeignKey(BaseModel):
+    ref_table: str = ""
+    ref_column: str = ""
+    on_delete: str = ""
+    on_update: str = ""
+
+class AutoColumnDescription(BaseModel):
+    nullable: Optional[bool] = None
+    type: Optional[str] = None
+    identity: Optional[bool] = None
+    default: Optional[Any] = None
+    primary_key: Optional[bool] = None
+    unique: Optional[bool] = None
+    foreign_key: Optional[ForeignKey] = None
+
+class ColumnInformation(BaseModel):
+    TABLE_CATALOG : str = Field("", description="")
+    TABLE_SCHEMA : str = Field("", description="")
+    TABLE_NAME : str = Field("", description="")
+    COLUMN_NAME : str = Field("", description="")
+    DATA_TYPE : str = Field("", description="")
+    IS_NULLABLE : str = Field("", description="")
+    IS_IDENTITY : str = Field("", description="")
+    IS_PRIMARY_KEY : str = Field("", description="")
+    IS_UNIQUE : str = Field("", description="")
+    CHARACTER_MAXIMUM_LENGTH : Optional[int] = Field(None, description="")
+    COLUMN_DESCRIPTION : Optional[str] = Field("", description="")
+    AUTO_COLUMN_DESCRIPTION : Optional[AutoColumnDescription] = None
+
+    column_authentication : str = Field("", description="")
+    relation : Optional[Relation] = None
+    available : bool = True
+
+    @model_validator(mode="before")
+    @classmethod
+    def parse_json_fields(cls, data):
+        if isinstance(data, dict):
+            # If the field arrives as a JSON string, convert it to a dict and then to AutoColumnDescription
+            if isinstance(data.get("AUTO_COLUMN_DESCRIPTION"), str):
+                try:
+                    auto_col_dict = json.loads(data["AUTO_COLUMN_DESCRIPTION"])
+                    if not auto_col_dict['foreign_key']:
+                        auto_col_dict['foreign_key'] = None
+                    data["AUTO_COLUMN_DESCRIPTION"] = auto_col_dict
+                    description : AutoColumnDescription = AutoColumnDescription(**auto_col_dict)
+                    if description.foreign_key:
+                        data['relation'] = Relation(table=description.foreign_key.ref_table, column=description.foreign_key.ref_column)
+
+                except json.JSONDecodeError:
+                    data["AUTO_COLUMN_DESCRIPTION"] = None
+
+        return data
+
+class ForeignKey(BaseModel):
+    name: str
+    column: str
+    ref_table: str
+    ref_column: str
+    on_delete: str
+    on_update: str
+
+class UniqueConstraint(BaseModel):
+    name: str
+    columns: List[str]
+
+class PrimaryKey(BaseModel):
+    name: str
+    columns: List[str]
+
+class AutoTableDescription(BaseModel):
+    columns: int
+    identity: List[str]
+    primary_keys: Optional[PrimaryKey]
+    unique_constraints: List[UniqueConstraint]
+    foreign_keys: List[ForeignKey]
+
+class TableInformation(BaseModel):
+    TABLE_CATALOG : str = Field("", description="")
+    TABLE_SCHEMA : str = Field("", description="")
+    TABLE_NAME : str = Field("", description="")
+    AUTO_TABLE_DESCRIPTION : AutoTableDescription = Field(..., description="")
+    TABLE_DESCRIPTION : Optional[str] = Field("", description="")
+    settings : TableSettings = TableSettings()
+    available : bool = True
+
+    @model_validator(mode="before")
+    @classmethod
+    def parse_json_fields(cls, data):
+        if isinstance(data, dict):
+            if isinstance(data.get("AUTO_TABLE_DESCRIPTION"), str):
+                data["AUTO_TABLE_DESCRIPTION"] = json.loads(data["AUTO_TABLE_DESCRIPTION"])
+        return data
+
+class SqlTable(TableInformation):
+    COLUMNS : list[ColumnInformation] = []
+
+class InformationSchemaRoutines(BaseModel):
+    SPECIFIC_CATALOG : str = Field("", description="")
+    SPECIFIC_SCHEMA : str = Field("", description="")
+    SPECIFIC_NAME : str = Field("", description="")
+    ORDINAL_POSITION : int = Field(None, description="")
+    PARAMETER_NAME : str = Field("", description="")
+    DATA_TYPE : str = Field("", description="")
+    PROCEDURE_DEFINITION : str = Field("", description="")
 
-
-
-
-
+def create_database_description(tables: List[TableInformation]) -> str:
+    lines = []
+    for table in tables:
+        auto = table.AUTO_TABLE_DESCRIPTION
+        lines.append(f"\n[Tabela: {table.TABLE_NAME}]")
+
+        # Primary key
+        if auto.primary_keys:
+            pk_cols = ", ".join(auto.primary_keys.columns or [])
+            lines.append(f"- PK: {pk_cols}")
+
+        # Unique constraints
+        if auto.unique_constraints:
+            for uc in auto.unique_constraints:
+                cols = ", ".join(uc.columns or [])
+                lines.append(f"- UNIQUE ({uc.name}): {cols}")
+
+        # Foreign keys
+        if auto.foreign_keys:
+            for fk in auto.foreign_keys:
+                lines.append(
+                    f"- FK: {fk.column} → {fk.ref_table}.{fk.ref_column} "
+                    f"[ON DELETE {fk.on_delete}, ON UPDATE {fk.on_update}]"
+                )
+
+        # Identity columns
+        if auto.identity:
+            identity_cols = ", ".join(auto.identity)
+            lines.append(f"- Identity: {identity_cols}")
+
+    return "\n".join(lines)
+
+def create_table_description(table_info: TableInformation) -> str:
+    parts = []
+
+    # Identity columns
+    identity = table_info.AUTO_TABLE_DESCRIPTION.identity or []
+    if identity:
+        identity_cols = ", ".join(identity)
+        parts.append(f"Identity columns: {identity_cols}")
+
+    # Primary key
+    pk = table_info.AUTO_TABLE_DESCRIPTION.primary_keys
+    if pk:
+        pk_cols = ", ".join(pk.columns or [])
+        parts.append(f"Primary key ({pk.name}: {pk_cols})" if pk_cols else f"Primary key ({pk.name})")
+
+    # Unique constraints
+    unique_list = table_info.AUTO_TABLE_DESCRIPTION.unique_constraints or []
+    for uc in unique_list:
+        cols = ", ".join(uc.columns or [])
+        parts.append(f"Unique constraint ({uc.name}: {cols})" if cols else f"Unique constraint ({uc.name})")
+
+    # Foreign keys
+    # for fk in table_info.AUTO_TABLE_DESCRIPTION.foreign_keys or []:
+    #     ref = f"{fk.ref_table}.{fk.ref_column}"
+    #     action = f"ON DELETE {fk.on_delete}, ON UPDATE {fk.on_update}"
+    #     parts.append(f"Foreign key ({fk.column} → {ref}) [{action}]")
+
+    # Final assembly
+    auto_summary = "; ".join(parts)
+
+    if table_info.TABLE_DESCRIPTION and table_info.TABLE_DESCRIPTION.strip():
+        return f"{table_info.TABLE_DESCRIPTION.strip()} — [{auto_summary}]"
+    else:
+        return f"[{auto_summary}]"
+
+def create_column_description(column_desc: Optional[str], auto_desc: Optional[AutoColumnDescription]) -> str:
+    if auto_desc is None:
+        return column_desc or ""
+
+    parts = []
+
+    # Nullable
+    if auto_desc.nullable is False:
+        parts.append("Required")
+    elif auto_desc.nullable is True:
+        parts.append("Optional")
+
+    # Type
+    if auto_desc.type:
+        parts.append(f"Type: {auto_desc.type}")
+
+    # Identity (Auto Increment)
+    if auto_desc.identity:
+        parts.append("Identity (auto increment)")
+
+    # Default value
+    if auto_desc.default is not None and str(auto_desc.default).lower() != 'null':
+        parts.append(f"Default: {auto_desc.default}")
+
+    # Primary key
+    if auto_desc.primary_key:
+        parts.append("Primary key")
+
+    # Unique
+    if auto_desc.unique:
+        parts.append("Unique")
+
+    # Foreign key
+    # if auto_desc.foreign_key:
+    #     fk = auto_desc.foreign_key
+    #     details = []
+    #     if fk.ref_table and fk.ref_column:
+    #         details.append(f"{fk.ref_table}.{fk.ref_column}")
+    #     if fk.on_delete:
+    #         details.append(f"ON DELETE {fk.on_delete}")
+    #     if fk.on_update:
+    #         details.append(f"ON UPDATE {fk.on_update}")
+    #     fk_info = " | ".join(details) if details else "Foreign key"
+    #     parts.append(f"Foreign key → {fk_info}")
+
+    auto_summary = "; ".join(parts)
+
+    if column_desc:
+        return f"{column_desc.strip()} — [{auto_summary}]"
+    else:
+        return f"[{auto_summary}]"
 
-
-
-
-
-
-
-
-
-
+def create_field(data: ColumnInformation, all_optional=True):
+    sql_data_type = data.DATA_TYPE.lower()
+    python_type = type_mapping.get(sql_data_type, 'str')
+
+    field_args = [
+        f'description="{create_column_description(data.COLUMN_DESCRIPTION, data.AUTO_COLUMN_DESCRIPTION)}"'
+    ]
+
+    # Define the additional QueryField type
+    query_field_type = QUERY_FIELD_TYPES.get(python_type)
+
+    join_conditional_field_type = JOIN_CONDITIONAL_FIELD_TYPES.get(python_type)
+
+    # Combine the types
+    if python_type in ['datetime', 'datetime.time']:
+        annotated_type = f'Union[{python_type}, str]'
+    else:
+        annotated_type = python_type
+
+    if query_field_type:
+        annotated_type = f'Union[{annotated_type}, {query_field_type}, {join_conditional_field_type}]'
+
+    if data.IS_NULLABLE == "YES" or all_optional:
+        annotated_type = f'Optional[{annotated_type}]'
+        field_def = f'Field(None, {", ".join(field_args)})' if field_args else 'Field(None)'
+    else:
+        field_def = f'Field(..., {", ".join(field_args)})' if field_args else 'Field(...)'
+
+    return f'{data.COLUMN_NAME}: {annotated_type} = {field_def}'
+
+def create_arg(
+    data: ColumnInformation,
+    default_type: str = None,
+    default_value: str = "None",
+    with_query_field: bool = False
+):
+    sql_data_type = data.DATA_TYPE.lower()
+    python_type = default_type or type_mapping.get(sql_data_type, 'str')
+
+    # Define the base type
+    if python_type in ['datetime', 'datetime.time']:
+        annotated_type = f'Union[{python_type}, str]'
+    else:
+        annotated_type = python_type
+
+    # Add the QueryField, if requested
+    if with_query_field:
+        query_field_type = QUERY_FIELD_TYPES.get(python_type)
+        join_conditional_field_type = JOIN_CONDITIONAL_FIELD_TYPES.get(python_type)
+        if query_field_type:
+            annotated_type = f'Union[{annotated_type}, {query_field_type}, {join_conditional_field_type}]'
+
+    return f'{data.COLUMN_NAME}: {annotated_type} = {default_value}'
 
 def get_parameters_with_defaults(stored_procedure):
     # Regular expression to capture parameters and their default values
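Taken together, the hunk above gives the model builder a typed view of a column: `ColumnInformation` accepts `AUTO_COLUMN_DESCRIPTION` either as a dict or as the raw JSON string returned by the metadata query, normalizes a falsy `foreign_key` to `None`, and fills in `relation` when a foreign key is present, while `create_field` renders a column as the source line for a generated Pydantic field. A minimal sketch, assuming the definitions are importable from `dapper_sqls.builders.model.utils` as the file list suggests:

```python
# Sketch only: exercises the definitions added in this hunk; the import
# path is inferred from dapper_sqls/builders/model/utils.py in the file list.
from dapper_sqls.builders.model.utils import ColumnInformation, create_field

col = ColumnInformation(
    TABLE_NAME="Users",
    COLUMN_NAME="Id",
    DATA_TYPE="int",
    IS_NULLABLE="NO",
    # A JSON string, as it would arrive from the metadata query: the
    # model_validator json.loads() it and normalizes foreign_key to None.
    AUTO_COLUMN_DESCRIPTION=(
        '{"nullable": false, "type": "int", "identity": true, "default": null,'
        ' "primary_key": true, "unique": false, "foreign_key": null}'
    ),
)

assert col.AUTO_COLUMN_DESCRIPTION.identity is True
assert col.relation is None  # only set when a foreign_key is present

# Emits the field line for the generated model, e.g.:
# Id: Optional[Union[int, NumericQueryField, JoinNumericCondition]] = Field(None,
#     description="[Required; Type: int; Identity (auto increment); Primary key]")
print(create_field(col, all_optional=True))
```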
@@ -100,6 +381,17 @@ def get_parameters_with_defaults(stored_procedure):
 
     return params_with_defaults
 
+def create_queue_update(fields_args_str : str):
+    return f'''def queue_update(self, *, {fields_args_str}):
+        super().queue_update(**locals())
+    '''
+
+def create_set_ignored_fields(fields_args_str : str):
+    return f'''@classmethod
+    def set_ignored_fields(cls, *, {fields_args_str}):
+        super().set_ignored_fields(**locals())
+    '''
+
 def create_params_routine(data : InformationSchemaRoutines, defaults_values : dict[str, str | int | None]):
     sql_data_type = data.DATA_TYPE.lower()
     python_type = type_mapping.get(sql_data_type)
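The two new helpers are plain source emitters: given the keyword-argument signature built from a model's columns, they return method source that is spliced into the generated class. A sketch with a hypothetical two-column signature, assuming the same inferred import path:

```python
# Sketch only; the fields string is a hypothetical two-column signature.
from dapper_sqls.builders.model.utils import create_queue_update

fields = "Id: Optional[int] = None, Name: Optional[str] = None"
print(create_queue_update(fields))
# -> def queue_update(self, *, Id: Optional[int] = None, Name: Optional[str] = None):
#        super().queue_update(**locals())
```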
@@ -118,7 +410,7 @@ def create_params_routine(data : InformationSchemaRoutines, defaults_values : di
 
 def create_content_orm(class_name : str, fields_args_str : str):
 
-    return f'''#
+    return f'''# coding: utf-8
 
 from datetime import datetime
 from typing import overload, Union
@@ -139,10 +431,13 @@ class BaseExecutorORM(BaseExecutor):
     def executor(self):
         return self._executor
 
-    def
+    def count(self, additional_sql : str = "", *, {fields_args_str}):
+        return self.executor.count(self, {class_name}(**get_dict_args(locals(), ['additional_sql'])), additional_sql)
+
+    def fetchone(self, additional_sql : str = "", *, {fields_args_str}) -> Result.FetchoneModel[{class_name}]:
         return self.executor.fetchone(self, {class_name}(**get_dict_args(locals(), ['additional_sql'])), additional_sql)
 
-    def fetchall(self, additional_sql : str = "", select_top : int = None, *, {fields_args_str}) ->
+    def fetchall(self, additional_sql : str = "", select_top : int = None, *, {fields_args_str}) -> Result.FetchallModel[{class_name}]:
         return self.executor.fetchall(self, {class_name}(**get_dict_args(locals(), ['additional_sql', 'select_top'])), additional_sql, select_top)
 
     def delete(self, *, {fields_args_str}) -> Result.Send:
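Because the `{class_name}`/`{fields_args_str}` placeholders make the template hard to read in isolation, here is a quick spot check of what it expands to for a hypothetical `User` model with a single column (import path inferred as above):

```python
# Sketch only: verifies the expansion of the create_content_orm template
# for hypothetical inputs.
from dapper_sqls.builders.model.utils import create_content_orm

src = create_content_orm("User", "Id: Optional[int] = None")
assert 'def count(self, additional_sql : str = "", *, Id: Optional[int] = None):' in src
assert "-> Result.FetchoneModel[User]:" in src
assert "-> Result.FetchallModel[User]:" in src
```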
@@ -212,9 +507,6 @@ class {class_name}ORM(object):
     def __init__(self, dapper : Dapper):
         self._dapper = dapper
 
-    class {class_name}({class_name}):
-        ...
-
     @property
     def dapper(self):
         return self._dapper
@@ -268,7 +560,7 @@ class {class_name}ORM(object):
 
 def create_content_async_orm(class_name : str, fields_args_str : str):
 
-    return f'''#
+    return f'''# coding: utf-8
 
 from datetime import datetime
 from typing import overload, Union
@@ -289,10 +581,13 @@ class AsyncBaseExecutorORM(AsyncBaseExecutor):
     def executor(self):
         return self._executor
 
-    async def
+    async def count(self, additional_sql : str = "", *, {fields_args_str}):
+        return await self.executor.count(self, {class_name}(**get_dict_args(locals(), ['additional_sql'])), additional_sql)
+
+    async def fetchone(self, additional_sql : str = "", *, {fields_args_str}) -> Result.FetchoneModel[{class_name}]:
         return await self.executor.fetchone(self, {class_name}(**get_dict_args(locals(), ['additional_sql'])), additional_sql)
 
-    async def fetchall(self, additional_sql : str = "", select_top : int = None, *, {fields_args_str}) ->
+    async def fetchall(self, additional_sql : str = "", select_top : int = None, *, {fields_args_str}) -> Result.FetchallModel[{class_name}]:
         return await self.executor.fetchall(self, {class_name}(**get_dict_args(locals(), ['additional_sql', 'select_top'])), additional_sql, select_top)
 
     async def delete(self, *, {fields_args_str}) -> Result.Send:
@@ -362,9 +657,6 @@ class Async{class_name}ORM(object):
     def __init__(self, async_dapper : AsyncDapper):
         self._async_dapper = async_dapper
 
-    class {class_name}({class_name}):
-        ...
-
     @property
     def async_dapper(self):
         return self._async_dapper
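The async builder receives the mirror-image change: the generated `AsyncBaseExecutorORM` gains `count`, the `fetchone`/`fetchall` annotations are completed with `Result.FetchoneModel[...]`/`Result.FetchallModel[...]`, and the redundant nested `class {class_name}({class_name})` shim is removed, matching the sync builder. The same spot check works against the async template:

```python
# Sketch only: mirrors the sync spot check using hypothetical inputs;
# the import path is inferred from the file list at the top of this diff.
from dapper_sqls.builders.model.utils import create_content_async_orm

src = create_content_async_orm("User", "Id: Optional[int] = None")
assert 'async def count(self, additional_sql : str = "", *, Id: Optional[int] = None):' in src
assert "return await self.executor.fetchall" in src
```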