dapper-sqls 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,420 @@
1
+ # -*- coding: latin -*-
2
+ import re
3
+ from pydantic import Field, BaseModel
4
+
5
+ class InformationSchemaTables(BaseModel):
6
+ TABLE_CATALOG : str = Field(None, description="")
7
+ TABLE_SCHEMA : str = Field(None, description="")
8
+ TABLE_NAME : str = Field(None, description="")
9
+ COLUMN_NAME : str = Field(None, description="")
10
+ DATA_TYPE : str = Field(None, description="")
11
+ IS_NULLABLE : str = Field(None, description="")
12
+
13
+ class InformationSchemaRoutines(BaseModel):
14
+ SPECIFIC_CATALOG : str = Field(None, description="")
15
+ SPECIFIC_SCHEMA : str = Field(None, description="")
16
+ SPECIFIC_NAME : str = Field(None, description="")
17
+ ORDINAL_POSITION : int = Field(None, description="")
18
+ PARAMETER_NAME : str = Field(None, description="")
19
+ DATA_TYPE : str = Field(None, description="")
20
+ PROCEDURE_DEFINITION : str = Field(None, description="")
21
+
22
+ type_mapping = {
23
+ 'int': 'int',
24
+ 'bigint': 'int',
25
+ 'smallint': 'int',
26
+ 'tinyint': 'int',
27
+ 'bit': 'bool',
28
+ 'decimal': 'float',
29
+ 'numeric': 'float',
30
+ 'money': 'float',
31
+ 'smallmoney': 'float',
32
+ 'float': 'float',
33
+ 'real': 'float',
34
+ 'date': 'datetime',
35
+ 'datetime': 'datetime',
36
+ 'datetime2': 'datetime',
37
+ 'datetimeoffset': 'datetime',
38
+ 'smalldatetime': 'datetime',
39
+ 'time': 'datetime.time',
40
+ 'binary': 'bytes',
41
+ 'varbinary': 'bytes',
42
+ 'image': 'bytes',
43
+ 'timestamp': 'bytes',
44
+ 'rowversion': 'bytes',
45
+ }
46
+
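Any SQL Server type missing from type_mapping (nvarchar, varchar, uniqueidentifier, xml, and so on) deliberately falls through to str in the generator functions below; a one-line illustration of that fallback:

    python_type = type_mapping.get('nvarchar') or 'str'  # unmapped SQL types default to 'str'
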
47
+ def create_field(data : InformationSchemaTables, field = 'Field(None, description="")'):
48
+
49
+ sql_data_type = data.DATA_TYPE.lower()
50
+ python_type = type_mapping.get(sql_data_type)
51
+ if python_type is None:
52
+ python_type = 'str'
53
+
54
+ if field == 'Field(None, description="")':
55
+ # INFORMATION_SCHEMA reports IS_NULLABLE as the string 'YES' or 'NO',
+ # so test it explicitly rather than by truthiness
+ if data.IS_NULLABLE == 'YES':
56
+ if python_type != 'str' and python_type != 'bool':
57
+ return f'{data.COLUMN_NAME} : Union[{python_type}, str, None] = {field}'
58
+ return f'{data.COLUMN_NAME} : Union[{python_type}, None] = {field}'
59
+
60
+ if python_type != 'str' and python_type != 'bool':
61
+ return f'{data.COLUMN_NAME} : Union[{python_type}, str] = {field}'
62
+ return f'{data.COLUMN_NAME} : {python_type} = {field}'
63
+
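A minimal sketch of what create_field emits for a hypothetical nullable decimal column (the row values are invented for illustration):

    row = InformationSchemaTables(COLUMN_NAME='Price', DATA_TYPE='decimal', IS_NULLABLE='YES')
    print(create_field(row))
    # -> Price : Union[float, str, None] = Field(None, description="")
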
64
+ def get_parameters_with_defaults(stored_procedure):
65
+ # Regular expression to capture parameters and their default values
66
+ pattern = r"@(\w+)\s+\w+(?:\(\d+\))?\s*(?:=\s*(NULL|'[^']*'|\"[^\"]*\"|\d+))?"
67
+
68
+ # Dictionary to hold parameters and their default values
69
+ params_with_defaults = {}
70
+
71
+ # Extract the parameter section of the stored procedure
72
+ param_section_match = re.search(r'\(\s*(.*?)\s*\)\s*AS', stored_procedure, re.S | re.I)
73
+ if not param_section_match:
74
+ return params_with_defaults # Return an empty dictionary if no parameters found
75
+
76
+ param_section = param_section_match.group(1)
77
+
78
+ # Find all parameter definitions in the extracted section
79
+ matches = re.findall(pattern, param_section, re.IGNORECASE)
80
+
81
+ for match in matches:
82
+ param_name = match[0] # Parameter name
83
+ default_value = match[1] if match[1] else False  # Default value, or False as a sentinel when absent
84
+
85
+ # Validate the default value to be a string or an integer
86
+ if default_value is not False:
87
+ # Check if it's a string (enclosed in quotes)
88
+ if default_value.startswith(("'", '"')) and default_value.endswith(("'", '"')):
89
+ # Keep the enclosing quotes so the default renders as a string literal in the generated code
90
+ pass
91
+ # Check if it's an integer
92
+ elif default_value.isdigit():
93
+ default_value = int(default_value)
94
+ else:
95
+ # If it's not a valid string or integer, set to None
96
+ default_value = None
97
+
98
+ # Add to dictionary
99
+ params_with_defaults[param_name] = default_value
100
+
101
+ return params_with_defaults
102
+
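A hedged usage sketch of get_parameters_with_defaults; the procedure text below is invented for illustration and is not part of the package:

    proc = '''
    CREATE PROCEDURE dbo.GetUsers (
        @Name VARCHAR(50) = 'guest',
        @Age INT = 18,
        @Active BIT
    ) AS
    BEGIN
        SELECT * FROM Users
    END
    '''
    print(get_parameters_with_defaults(proc))
    # -> {'Name': "'guest'", 'Age': 18, 'Active': False}
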
103
+ def create_params_routine(data : InformationSchemaRoutines, defaults_values : dict[str, str | int | None]):
104
+ sql_data_type = data.DATA_TYPE.lower()
105
+ python_type = type_mapping.get(sql_data_type)
106
+ if python_type is None:
107
+ python_type = 'str'
108
+ name = data.PARAMETER_NAME.replace('@', '')
109
+ default_value = defaults_values.get(name)
110
+ # 'is False' keeps a literal 0 default from being mistaken for the missing-default sentinel
+ if default_value is False:
111
+ if python_type != 'str' and python_type != 'bool':
112
+ return f'{name} : Union[{python_type}, str]'
113
+ return f'{name} : {python_type}'
114
+ else:
115
+ if python_type != 'str' and python_type != 'bool':
116
+ return f'{name} : Union[{python_type}, str] = {default_value}'
117
+ return f'{name} : {python_type} = {default_value}'
118
+
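Combining the two helpers, a hypothetical call that renders a keyword-argument signature for a generated stored-procedure wrapper (parameter rows and defaults are invented):

    defaults = {'Active': False, 'Name': "'guest'", 'Age': 18}
    params = [
        InformationSchemaRoutines(PARAMETER_NAME='@Active', DATA_TYPE='bit'),
        InformationSchemaRoutines(PARAMETER_NAME='@Name', DATA_TYPE='varchar'),
        InformationSchemaRoutines(PARAMETER_NAME='@Age', DATA_TYPE='int'),
    ]
    print(', '.join(create_params_routine(p, defaults) for p in params))
    # -> Active : bool, Name : str = 'guest', Age : Union[int, str] = 18
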
119
+ def create_content_orm(class_name : str, fields_args_str : str):
120
+
121
+ return f'''# -*- coding: latin -*-
122
+
123
+ from datetime import datetime
124
+ from typing import overload, Union
125
+ from dapper_sqls import Dapper
126
+ from dapper_sqls.dapper.dapper import Stored, Query
127
+ from dapper_sqls.dapper.executors import BaseExecutor, StoredUpdate, QueryUpdate
128
+ from dapper_sqls.utils import get_dict_args
129
+ from dapper_sqls.models import ConnectionStringData, Result
130
+ from .model import {class_name}
131
+
132
+
133
+ class BaseExecutorORM(BaseExecutor):
134
+ def __init__(self, executor : Query | Stored , connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
135
+ BaseExecutor.__init__(self, connectionStringData, attempts, wait_timeout, sql_version, api_environment)
136
+ self._executor = executor
137
+
138
+ @property
139
+ def executor(self):
140
+ return self._executor
141
+
142
+ def fetchone(self, additional_sql : str = "", *, {fields_args_str}) -> {class_name}:
143
+ return self.executor.fetchone(self, {class_name}(**get_dict_args(locals(), ['additional_sql'])), additional_sql)
144
+
145
+ def fetchall(self, additional_sql : str = "", select_top : int = None, *, {fields_args_str}) -> list[{class_name}]:
146
+ return self.executor.fetchall(self, {class_name}(**get_dict_args(locals(), ['additional_sql', 'select_top'])), additional_sql, select_top)
147
+
148
+ def delete(self, *, {fields_args_str}) -> Result.Send:
149
+ return self.executor.delete(self, {class_name}(**get_dict_args(locals())))
150
+
151
+ def insert(self, *, {fields_args_str}) -> Result.Insert:
152
+ return self.executor.insert(self, {class_name}(**get_dict_args(locals())))
153
+
154
+ def _exec_(self, *args):
155
+ return self.executor._exec_(self, *args)
156
+
157
+ class QueryUpdate{class_name}ORM(object):
158
+ def __init__(self, executor, model : {class_name}):
159
+ self._set_data = model
160
+ self._executor = executor
161
+
162
+ @property
163
+ def set_data(self):
164
+ return self._set_data
165
+
166
+ @property
167
+ def executor(self):
168
+ return self._executor
169
+
170
+ @overload
171
+ def where(self, query : str = None, *, {fields_args_str}) -> Result.Send:
172
+ pass
173
+
174
+ def where(self, *args, **kwargs) -> Result.Send:
175
+ # accept the query either positionally (per the overload) or as a keyword argument
+ query = kwargs.get('query') or (args[0] if args else None)
176
+ if query:
177
+ return QueryUpdate(self._executor, self.set_data).where(query)
178
+ return QueryUpdate(self._executor, self.set_data).where({class_name}(**kwargs))
179
+
180
+ class Query{class_name}ORM(BaseExecutorORM):
181
+ def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
182
+ BaseExecutorORM.__init__(self, Query, connectionStringData, attempts, wait_timeout, sql_version, api_environment)
183
+
184
+ def update(self, *, {fields_args_str}):
185
+ return QueryUpdate{class_name}ORM(self, {class_name}(**get_dict_args(locals())))
186
+
187
+ class StoredUpdate{class_name}ORM(object):
188
+ def __init__(self, executor, model : {class_name}):
189
+ self._set_data = model
190
+ self._executor = executor
191
+
192
+ @property
193
+ def set_data(self):
194
+ return self._set_data
195
+
196
+ @property
197
+ def executor(self):
198
+ return self._executor
199
+
200
+ def where(self, *, {fields_args_str}) -> Result.Send:
201
+ return StoredUpdate(self._executor, self.set_data).where({class_name}(**get_dict_args(locals())))
202
+
203
+ class Stored{class_name}ORM(BaseExecutorORM):
204
+ def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
205
+ BaseExecutorORM.__init__(self, Stored, connectionStringData, attempts, wait_timeout, sql_version, api_environment)
206
+
207
+ def update(self, {fields_args_str}):
208
+ return StoredUpdate{class_name}ORM(self, {class_name}(**get_dict_args(locals())))
209
+
210
+ class {class_name}ORM(object):
211
+
212
+ def __init__(self, dapper : Dapper):
213
+ self._dapper = dapper
214
+
215
+ class {class_name}({class_name}):
216
+ ...
217
+
218
+ @property
219
+ def dapper(self):
220
+ return self._dapper
221
+
222
+ def query(self, attempts : int = None, wait_timeout : int = None):
223
+ attempts = attempts if attempts else self.dapper.config.default_attempts
224
+ wait_timeout = wait_timeout if wait_timeout else self.dapper.config.default_wait_timeout
225
+ return Query{class_name}ORM(self.dapper.config.connectionStringDataQuery.get(), attempts, wait_timeout, self.dapper.config.sql_version, self.dapper.config.api_environment)
226
+
227
+ def stored(self, attempts : int = None, wait_timeout : int = None):
228
+ attempts = attempts if attempts else self.dapper.config.default_attempts
229
+ wait_timeout = wait_timeout if wait_timeout else self.dapper.config.default_wait_timeout
230
+ return Stored{class_name}ORM(self.dapper.config.connectionStringDataStored.get() , attempts, wait_timeout, self.dapper.config.sql_version, self.dapper.config.api_environment)
231
+
232
+ @overload
233
+ @staticmethod
234
+ def load(dict_data : dict) -> {class_name}:
235
+ pass
236
+
237
+ @overload
238
+ @staticmethod
239
+ def load(list_dict_data : list[dict]) -> list[{class_name}]:
240
+ pass
241
+
242
+ @overload
243
+ @staticmethod
244
+ def load(fetchone : Result.Fetchone) -> {class_name}:
245
+ pass
246
+
247
+ @overload
248
+ @staticmethod
249
+ def load(fetchall : Result.Fetchall) -> list[{class_name}]:
250
+ pass
251
+
252
+ @staticmethod
253
+ def load(*args):
254
+ data = args[0]
255
+ if isinstance(data, dict) or isinstance(data, Result.Fetchone):
256
+ if isinstance(data, Result.Fetchone):
257
+ data = data.dict
258
+ if all(value is None for value in data.values()):
259
+ return {class_name}()
260
+
261
+ return {class_name}(**data)
262
+
263
+ if isinstance(data, Result.Fetchall):
264
+ data = data.list_dict
265
+
266
+ return [{class_name}(**d) for d in data]
267
+ '''
268
+
269
+ def create_content_async_orm(class_name : str, fields_args_str : str):
270
+
271
+ return f'''# -*- coding: latin -*-
272
+
273
+ from datetime import datetime
274
+ from typing import overload, Union
275
+ from dapper_sqls import AsyncDapper
276
+ from dapper_sqls.async_dapper.async_dapper import AsyncStored, AsyncQuery
277
+ from dapper_sqls.async_dapper.async_executors import AsyncBaseExecutor, AsyncQueryUpdate, AsyncStoredUpdate
278
+ from dapper_sqls.utils import get_dict_args
279
+ from dapper_sqls.models import ConnectionStringData, Result
280
+ from .model import {class_name}
281
+
282
+
283
+ class AsyncBaseExecutorORM(AsyncBaseExecutor):
284
+ def __init__(self, executor : AsyncQuery | AsyncStored , connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
285
+ AsyncBaseExecutor.__init__(self, connectionStringData, attempts, wait_timeout, sql_version, api_environment)
286
+ self._executor = executor
287
+
288
+ @property
289
+ def executor(self):
290
+ return self._executor
291
+
292
+ async def fetchone(self, additional_sql : str = "", *, {fields_args_str}) -> {class_name}:
293
+ return await self.executor.fetchone(self, {class_name}(**get_dict_args(locals(), ['additional_sql'])), additional_sql)
294
+
295
+ async def fetchall(self, additional_sql : str = "", select_top : int = None, *, {fields_args_str}) -> list[{class_name}]:
296
+ return await self.executor.fetchall(self, {class_name}(**get_dict_args(locals(), ['additional_sql', 'select_top'])), additional_sql, select_top)
297
+
298
+ async def delete(self, *, {fields_args_str}) -> Result.Send:
299
+ return await self.executor.delete(self, {class_name}(**get_dict_args(locals())))
300
+
301
+ async def insert(self, *, {fields_args_str}) -> Result.Insert:
302
+ return await self.executor.insert(self, {class_name}(**get_dict_args(locals())))
303
+
304
+ async def _exec_(self, *args):
305
+ return await self.executor._exec_(self, *args)
306
+
307
+ class AsyncQueryUpdate{class_name}ORM(object):
308
+ def __init__(self, executor, model : {class_name}):
309
+ self._set_data = model
310
+ self._executor = executor
311
+
312
+ @property
313
+ def set_data(self):
314
+ return self._set_data
315
+
316
+ @property
317
+ def executor(self):
318
+ return self._executor
319
+
320
+ @overload
321
+ async def where(self, query : str = None, *, {fields_args_str}) -> Result.Send:
322
+ pass
323
+
324
+ async def where(self, *args, **kwargs) -> Result.Send:
325
+ # accept the query either positionally (per the overload) or as a keyword argument
+ query = kwargs.get('query') or (args[0] if args else None)
326
+ if query:
327
+ return await AsyncQueryUpdate(self._executor, self.set_data).where(query)
328
+ return await AsyncQueryUpdate(self._executor, self.set_data).where({class_name}(**kwargs))
329
+
330
+ class AsyncQuery{class_name}ORM(AsyncBaseExecutorORM):
331
+ def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
332
+ AsyncBaseExecutorORM.__init__(self, AsyncQuery, connectionStringData, attempts, wait_timeout, sql_version, api_environment)
333
+
334
+ def update(self, *, {fields_args_str}):
335
+ return AsyncQueryUpdate{class_name}ORM(self, {class_name}(**get_dict_args(locals())))
336
+
337
+ class AsyncStoredUpdate{class_name}ORM(object):
338
+ def __init__(self, executor, model : {class_name}):
339
+ self._set_data = model
340
+ self._executor = executor
341
+
342
+ @property
343
+ def set_data(self):
344
+ return self._set_data
345
+
346
+ @property
347
+ def executor(self):
348
+ return self._executor
349
+
350
+ async def where(self, *, {fields_args_str}) -> Result.Send:
351
+ return await AsyncStoredUpdate(self._executor, self.set_data).where({class_name}(**get_dict_args(locals())))
352
+
353
+ class AsyncStored{class_name}ORM(AsyncBaseExecutorORM):
354
+ def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
355
+ AsyncBaseExecutorORM.__init__(self, AsyncStored, connectionStringData, attempts, wait_timeout, sql_version, api_environment)
356
+
357
+ def update(self, {fields_args_str}):
358
+ return AsyncStoredUpdate{class_name}ORM(self, {class_name}(**get_dict_args(locals())))
359
+
360
+ class Async{class_name}ORM(object):
361
+
362
+ def __init__(self, async_dapper : AsyncDapper):
363
+ self._async_dapper = async_dapper
364
+
365
+ class {class_name}({class_name}):
366
+ ...
367
+
368
+ @property
369
+ def async_dapper(self):
370
+ return self._async_dapper
371
+
372
+ async def query(self, attempts : int = None, wait_timeout : int = None):
373
+ attempts = attempts if attempts else self.async_dapper.config.default_attempts
374
+ wait_timeout = wait_timeout if wait_timeout else self.async_dapper.config.default_wait_timeout
375
+ return AsyncQuery{class_name}ORM(self.async_dapper.config.connectionStringDataQuery.get(), attempts, wait_timeout, self.async_dapper.config.sql_version, self.async_dapper.config.api_environment)
376
+
377
+ async def stored(self, attempts : int = None, wait_timeout : int = None):
378
+ attempts = attempts if attempts else self.async_dapper.config.default_attempts
379
+ wait_timeout = wait_timeout if wait_timeout else self.async_dapper.config.default_wait_timeout
380
+ return AsyncStored{class_name}ORM(self.async_dapper.config.connectionStringDataStored.get() , attempts, wait_timeout, self.async_dapper.config.sql_version, self.async_dapper.config.api_environment)
381
+
382
+ @overload
383
+ @staticmethod
384
+ def load(dict_data : dict) -> {class_name}:
385
+ pass
386
+
387
+ @overload
388
+ @staticmethod
389
+ def load(list_dict_data : list[dict]) -> list[{class_name}]:
390
+ pass
391
+
392
+ @overload
393
+ @staticmethod
394
+ def load(fetchone : Result.Fetchone) -> {class_name}:
395
+ pass
396
+
397
+ @overload
398
+ @staticmethod
399
+ def load(fetchall : Result.Fetchall) -> list[{class_name}]:
400
+ pass
401
+
402
+ @staticmethod
403
+ def load(*args):
404
+ data = args[0]
405
+ if isinstance(data, dict) or isinstance(data, Result.Fetchone):
406
+ if isinstance(data, Result.Fetchone):
407
+ data = data.dict
408
+ if all(value is None for value in data.values()):
409
+ return {class_name}()
410
+
411
+ return {class_name}(**data)
412
+
413
+ if isinstance(data, Result.Fetchall):
414
+ data = data.list_dict
415
+
416
+ return [{class_name}(**d) for d in data]
417
+ '''
418
+
419
+
420
+
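Both templates appear to be consumed by the package's code generator; a minimal, hypothetical sketch of stitching them together for a 'User' table (the column rows, output file name, and driver loop are assumptions, not the package's actual generator):

    columns = [
        InformationSchemaTables(COLUMN_NAME='Id', DATA_TYPE='int', IS_NULLABLE='NO'),
        InformationSchemaTables(COLUMN_NAME='Name', DATA_TYPE='varchar', IS_NULLABLE='YES'),
    ]
    fields_args_str = ', '.join(create_field(c, field='None') for c in columns)
    with open('user_orm.py', 'w', encoding='latin-1') as f:
        f.write(create_content_orm('User', fields_args_str))
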
@@ -0,0 +1,80 @@
1
+ # -*- coding: latin -*-
2
+ from typing import Type, Union
3
+ from pydantic import BaseModel
4
+ from datetime import datetime, date
5
+ import json
6
+
7
+ class Value:
8
+ def __init__(self, value : Union[str, int, bytes, float, datetime, date], prefix : str, suffix : str):
9
+ self.value = value
10
+ self.prefix = prefix
11
+ self.suffix = suffix
12
+
13
+ class QueryBuilder(object):
14
+
15
+ @staticmethod
+ def value(value : Union[str, int, bytes, float, datetime, date], prefix = "=", suffix = ""):
16
+ # default=str lets non-JSON-native values such as datetime/date serialize instead of raising
+ json_value = json.dumps({'prefix': prefix, 'value': value, 'suffix': suffix}, default=str)
17
+ return f"#QueryBuilderValue#{json_value}#QueryBuilderValue#"
18
+
19
+ @classmethod
20
+ def _build_where_clause(cls, **kwargs):
21
+ conditions = []
22
+ for field, value in kwargs.items():
23
+ if value is not None:
24
+ if isinstance(value, str):
25
+ if '#QueryBuilderValue#' in value:
26
+ value = value.replace('#QueryBuilderValue#', '')
27
+ value = Value(**json.loads(value.strip()))
28
+ if isinstance(value.value, str):
29
+ conditions.append(f"{field} {value.prefix} '{value.value}' {value.suffix}")
30
+ else:
31
+ conditions.append(f"{field} {value.prefix} {value.value} {value.suffix}")
32
+ else:
33
+ conditions.append(f"{field} = '{value}'")
34
+ else:
35
+ conditions.append(f"{field} = {value}")
36
+ return " AND ".join(conditions)
37
+
38
+ @classmethod
39
+ def update(cls, model: Type[BaseModel], where : Union[str , Type[BaseModel]]):
40
+ update_data = model.model_dump(exclude_none=True)
41
+ if not isinstance(where, str):
42
+ where_data = where.model_dump(exclude_none=True)
43
+ where = cls._build_where_clause(**where_data)
44
+
45
+ set_clause = ", ".join([f"{key} = '{value}'" if isinstance(value, str) else f"{key} = {value}" for key, value in update_data.items()])
46
+ sql_query = f"UPDATE {model.TABLE_NAME} SET {set_clause} WHERE {where}"
47
+ return sql_query
48
+
49
+ @staticmethod
+ def insert(model: Type[BaseModel], name_column_id = 'Id'):
50
+ insert_data = model.model_dump(exclude_none=True)
51
+ columns = ", ".join(insert_data.keys())
52
+ values = ", ".join([f"'{value}'" if isinstance(value, str) else str(value) for value in insert_data.values()])
53
+ sql_query = f"""
54
+ INSERT INTO {model.TABLE_NAME} ({columns})
55
+ OUTPUT INSERTED.{name_column_id} AS Id
56
+ VALUES ({values})
57
+ """
58
+ return sql_query
59
+
60
+ @classmethod
61
+ def select(cls, model: Type[BaseModel], additional_sql : str = "", select_top : int = None):
62
+ top_clause = f"TOP ({select_top}) * " if select_top else "*"
63
+ select_data = model.model_dump(exclude_none=True)
64
+ where_clause = cls._build_where_clause(**select_data)
65
+
66
+ sql_query = f"SELECT {top_clause} FROM {model.TABLE_NAME}"
67
+ if where_clause:
68
+ sql_query += f" WHERE {where_clause}"
69
+ sql_query = f'{sql_query} {additional_sql}'
70
+ return sql_query
71
+
72
+ @classmethod
73
+ def delete(cls, model: Type[BaseModel]):
74
+ delete_data = model.model_dump(exclude_none=True)
75
+ where_clause = cls._build_where_clause(**delete_data)
76
+ if not where_clause:
77
+ raise ValueError("DELETE operation requires at least one condition.")
78
+ sql_query = f"DELETE FROM {model.TABLE_NAME} WHERE {where_clause}"
79
+ return sql_query
80
+
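A hedged usage sketch of QueryBuilder; the User model below is hypothetical, but its shape (a TABLE_NAME class attribute plus Union[..., str, None] fields) mirrors what the builders and the generated models expect:

    from typing import ClassVar, Union
    from pydantic import BaseModel

    class User(BaseModel):
        TABLE_NAME: ClassVar[str] = 'Users'
        Id: Union[int, str, None] = None
        Name: Union[str, None] = None

    print(QueryBuilder.select(User(Name='Ana'), select_top=10))
    # emits roughly: SELECT TOP (10) * FROM Users WHERE Name = 'Ana'
    print(QueryBuilder.update(User(Name='Bob'), where=User(Id=QueryBuilder.value(30, prefix='>'))))
    # emits roughly: UPDATE Users SET Name = 'Bob' WHERE Id > 30
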
@@ -0,0 +1,66 @@
1
+ # -*- coding: latin -*-
2
+ from typing import Type
3
+ from pydantic import BaseModel
4
+
5
+ class StoredBuilder:
6
+ @staticmethod
7
+ def _build_where_clause(**kwargs):
8
+ conditions = []
9
+ parameters = []
10
+ for field, value in kwargs.items():
11
+ if value is not None:
12
+ # strings and non-strings are parameterized identically, so no type branch is needed
+ conditions.append(f"{field} = ?")
+ parameters.append(value)
18
+ return " AND ".join(conditions), tuple(parameters)
19
+
20
+ @classmethod
21
+ def update(cls, model: Type[BaseModel], where: Type[BaseModel]):
22
+ update_data = model.model_dump(exclude_none=True)
23
+ where_data = where.model_dump(exclude_none=True)
24
+ where_clause, where_params = cls._build_where_clause(**where_data)
25
+
26
+ set_clause = ", ".join([f"{key} = ?" for key in update_data.keys()])
27
+ sql_query = f"UPDATE {model.TABLE_NAME} SET {set_clause} WHERE {where_clause}"
28
+
29
+ return sql_query, tuple(update_data.values()) + where_params
30
+
31
+ @classmethod
32
+ def insert(cls, model : Type[BaseModel], name_column_id = 'Id'):
33
+ insert_data = model.model_dump(exclude_none=True)
34
+ columns = ", ".join(insert_data.keys())
35
+ values = ", ".join(["?" for _ in insert_data.values()])
36
+ sql_query = f"""
37
+ INSERT INTO {model.TABLE_NAME} ({columns})
38
+ OUTPUT INSERTED.{name_column_id} AS Id
39
+ VALUES ({values})
40
+ """
41
+ return sql_query, tuple(insert_data.values())
42
+
43
+ @classmethod
44
+ def select(cls, model : Type[BaseModel], additional_sql : str = "", select_top : int = None):
45
+ top_clause = f"TOP ({select_top}) * " if select_top else "*"
46
+ select_data = model.model_dump(exclude_none=True)
47
+ where_clause, parameters = cls._build_where_clause(**select_data)
48
+
49
+ sql_query = f"SELECT {top_clause} FROM {model.TABLE_NAME}"
50
+ if where_clause:
51
+ sql_query += f" WHERE {where_clause}"
52
+ sql_query = f'{sql_query} {additional_sql}'
53
+ return sql_query, parameters
54
+
55
+ @classmethod
56
+ def delete(cls, model : Type[BaseModel]):
57
+ delete_data = model.model_dump(exclude_none=True)
58
+ where_clause, parameters = cls._build_where_clause(**delete_data)
59
+ if not where_clause:
60
+ raise ValueError("DELETE operation requires at least one condition.")
61
+ sql_query = f"DELETE FROM {model.TABLE_NAME} WHERE {where_clause}"
62
+ return sql_query, parameters
63
+
64
+
65
+
66
+
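StoredBuilder mirrors QueryBuilder but emits '?' placeholders plus a parameter tuple, the form DB-API drivers such as pyodbc expect; a hedged sketch reusing the hypothetical User model from the QueryBuilder example above:

    sql, params = StoredBuilder.select(User(Name='Ana'), select_top=5)
    # sql    -> roughly: SELECT TOP (5) * FROM Users WHERE Name = ?
    # params -> ('Ana',)

    sql, params = StoredBuilder.update(User(Name='Bob'), where=User(Id=7))
    # sql    -> UPDATE Users SET Name = ? WHERE Id = ?
    # params -> ('Bob', 7)

    # cursor.execute(sql, params)   # with a pyodbc-style cursor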