dapper-sqls 0.9.7__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
- dapper_sqls/__init__.py +4 -2
- dapper_sqls/_types.py +25 -2
- dapper_sqls/async_dapper/async_dapper.py +1 -1
- dapper_sqls/async_dapper/async_executors.py +128 -53
- dapper_sqls/builders/model/model.py +421 -36
- dapper_sqls/builders/model/utils.py +337 -45
- dapper_sqls/builders/query.py +165 -44
- dapper_sqls/builders/stored.py +16 -10
- dapper_sqls/builders/stp.py +6 -2
- dapper_sqls/config.py +41 -32
- dapper_sqls/dapper/dapper.py +1 -1
- dapper_sqls/dapper/executors.py +131 -56
- dapper_sqls/decorators.py +5 -3
- dapper_sqls/http/__init__.py +4 -0
- dapper_sqls/http/aiohttp.py +155 -0
- dapper_sqls/http/decorators.py +123 -0
- dapper_sqls/http/models.py +58 -0
- dapper_sqls/http/request.py +140 -0
- dapper_sqls/models/__init__.py +3 -5
- dapper_sqls/models/base.py +246 -20
- dapper_sqls/models/connection.py +2 -2
- dapper_sqls/models/query_field.py +214 -0
- dapper_sqls/models/result.py +315 -45
- dapper_sqls/sqlite/__init__.py +5 -1
- dapper_sqls/sqlite/async_local_database.py +168 -0
- dapper_sqls/sqlite/decorators.py +69 -0
- dapper_sqls/sqlite/installer.py +97 -0
- dapper_sqls/sqlite/local_database.py +67 -185
- dapper_sqls/sqlite/models.py +51 -1
- dapper_sqls/sqlite/utils.py +9 -0
- dapper_sqls/utils.py +18 -6
- dapper_sqls-1.2.0.dist-info/METADATA +41 -0
- dapper_sqls-1.2.0.dist-info/RECORD +40 -0
- {dapper_sqls-0.9.7.dist-info → dapper_sqls-1.2.0.dist-info}/WHEEL +1 -1
- dapper_sqls-0.9.7.dist-info/METADATA +0 -19
- dapper_sqls-0.9.7.dist-info/RECORD +0 -30
- {dapper_sqls-0.9.7.dist-info → dapper_sqls-1.2.0.dist-info}/top_level.txt +0 -0
dapper_sqls/dapper/executors.py
CHANGED
@@ -1,12 +1,11 @@
-#
+# coding: utf-8
 import pyodbc
 from time import sleep
 from datetime import datetime
 from typing import overload
 from abc import ABC, abstractmethod
-from ..models import ConnectionStringData, Result, UnavailableServiceException, BaseUpdate
+from ..models import ConnectionStringData, Result, UnavailableServiceException, BaseUpdate, SearchTable, JoinSearchTable
 from .._types import T, ExecType
-from ..config import Config
 from ..builders import QueryBuilder, StoredBuilder
 from ..utils import Utils
 
@@ -14,6 +13,7 @@ class BaseExecutor(ABC, object):
     def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
         self._connectionStringData = connectionStringData
         self._cursor = None
+        self._connection_error : Exception = None
         self._connection = None
         self._wait_timeout = wait_timeout
         self._attempts = attempts
@@ -25,19 +25,19 @@ class BaseExecutor(ABC, object):
         for n in range(self._attempts):
             odbc_version = f'ODBC Driver {self._sql_version} for SQL Server'
             if not self._sql_version:
-
-                if
-
-
-                raise RuntimeError("Nenhuma versão do driver ODBC for SQL Server foi encontrada.")
+                drivers = [d for d in pyodbc.drivers() if 'SQL Server' in d]
+                if not drivers:
+                    raise RuntimeError("Nenhum driver ODBC do SQL Server encontrado. Instale o ODBC Driver 17 ou 18.")
+                odbc_version = drivers[-1]
             try:
                 connection_string = f'DRIVER={{{odbc_version}}};SERVER={cs_data.server};DATABASE={cs_data.database};UID={cs_data.username};PWD={cs_data.password}'
                 self._connection = pyodbc.connect(connection_string)
                 self._cursor = self._connection.cursor()
 
             except Exception as e:
+                self._connection_error = e
                 print(e)
-                print(f'Erro na
+                print(f'Erro na conexção com a base de dados, nova tentativa em {self._wait_timeout}s')
                 sleep(self._wait_timeout)
 
         return self
@@ -81,7 +81,7 @@ class BaseExecutor(ABC, object):
         if isinstance(value, int):
             self._attempts = value
         else:
-            raise ValueError("O
+            raise ValueError("O número de tentativas deve ser um número inteiro.")
 
     @property
     def wait_timeout(self):
@@ -92,7 +92,7 @@ class BaseExecutor(ABC, object):
         if isinstance(value, int):
             self._wait_timeout = value
         else:
-            raise ValueError("O tempo de espera deve ser um
+            raise ValueError("O tempo de espera deve ser um número inteiro.")
 
     @abstractmethod
     def _exec_(self, connection, operation_sql, exec_type):
@@ -120,41 +120,79 @@ class Query(BaseExecutor):
     def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int, sql_version : int | None, api_environment : bool):
         super().__init__(connectionStringData, attempts, wait_timeout, sql_version, api_environment)
 
+    @overload
+    def count(self, query : str) -> Result.Count:
+        pass
+
+    @overload
+    def count(self, model : T, additional_sql : str = "", select_top : int = None) -> Result.Count:
+        pass
+
+    def count(self, *args, **kwargs) -> T | Result.Count:
+        args = Utils.args_query(*args, **kwargs)
+        if args.model:
+            args.query = QueryBuilder.select(args.model, args.additional_sql, args.select_top)
+
+        count_query = f"""
+            SELECT COUNT(*) AS Count FROM (
+                {args.query}
+            ) AS count_subquery
+        """
+
+        result : Result.Fetchone = self._exec_(self._connection, count_query, ExecType.fetchone)
+        if result.success:
+            return Result.Count(count_query, result.dict.get('Count', 0), result.status_code, result.error)
+        return Result.Count(count_query, 0, result.status_code, result.error)
+
     @overload
     def fetchone(self, query : str) -> Result.Fetchone:
         pass
 
     @overload
-    def fetchone(self, model : T, additional_sql : str = "") -> T:
+    def fetchone(self, model : T, additional_sql : str = "") -> Result.FetchoneModel[T]:
         pass
 
-    def fetchone(self, *args, **kwargs) -> T | Result.Fetchone:
+    def fetchone(self, *args, **kwargs) -> Result.FetchoneModel[T] | Result.Fetchone:
         args = Utils.args_query(*args, **kwargs)
         if args.model:
             args.query = QueryBuilder.select(args.model, args.additional_sql, args.select_top)
 
         result = self._exec_(self._connection, args.query, ExecType.fetchone)
         if args.model:
-
+            model_instance = args.model.__class__(**result.dict) if result.success else args.model.__class__()
+            return Result.FetchoneModel(model_instance, result)
         return result
+
+    def fetchone_with_joins(self, main_search: SearchTable, joins: list[JoinSearchTable] = [], additional_sql: str = "", select_top: int = None) -> Result.Fetchone:
+        query = QueryBuilder.select_with_joins(main_search, joins, additional_sql, select_top)
+        result = self._exec_(self._connection, query, ExecType.fetchone)
+        result._organize_joined_tables(joins)
+        return result
 
     @overload
     def fetchall(self, query : str) -> Result.Fetchall:
         pass
 
     @overload
-    def fetchall(self, model : T, additional_sql : str = "", select_top : int = None) ->
+    def fetchall(self, model : T, additional_sql : str = "", select_top : int = None) -> Result.FetchallModel[T]:
        pass
 
-    def fetchall(self, *args, **kwargs) ->
+    def fetchall(self, *args, **kwargs) -> Result.FetchallModel[T] | Result.Fetchall:
         args = Utils.args_query(*args, **kwargs)
         if args.model:
             args.query = QueryBuilder.select(args.model, args.additional_sql, args.select_top)
 
         result = self._exec_(self._connection, args.query, ExecType.fetchall)
         if args.model:
-
+            models = [args.model.__class__(**r) for r in result.list_dict] if result.success else []
+            return Result.FetchallModel(models, result)
         return result
+
+    def fetchall_with_joins(self, main_search: SearchTable, joins: list[JoinSearchTable] = [], additional_sql: str = "", select_top: int = None) -> Result.Fetchall:
+        query = QueryBuilder.select_with_joins(main_search, joins, additional_sql, select_top)
+        result = self._exec_(self._connection, query, ExecType.fetchall)
+        result._organize_joined_tables(joins)
+        return result
 
     def execute(self, query : str) -> Result.Send:
         return self._exec_(self._connection, query, ExecType.send)
@@ -174,21 +212,21 @@ class Query(BaseExecutor):
         query = QueryBuilder.insert(model, name_column_id)
         result : Result.Fetchone = self._exec_(self._connection, query, ExecType.fetchone)
         if result.success:
-            return Result.Insert(result.dict.get('Id', 0), result.status_code, result.
-        return Result.Insert(0, result.status_code, result.
+            return Result.Insert(query, result.dict.get('Id', 0), result.status_code, result.error)
+        return Result.Insert(query, 0, result.status_code, result.error)
 
-    def _exec_(self, connection, query_sql : str, exec_type : ExecType)
+    def _exec_(self, connection, query_sql : str, exec_type : ExecType):
 
         if not self._cursor:
             if self._api_environment:
                 raise UnavailableServiceException()
 
             if exec_type == ExecType.fetchone:
-                return Result.Fetchone(None, None,
+                return Result.Fetchone(query_sql, None, None, self._connection_error)
             elif exec_type == ExecType.fetchall:
-                return Result.Fetchall(None, None,
+                return Result.Fetchall(query_sql, None, None, self._connection_error)
             elif exec_type == ExecType.send:
-                return Result.Send(False,
+                return Result.Send(query_sql, False, self._connection_error)
 
         try:
             # executar
@@ -196,23 +234,23 @@ class Query(BaseExecutor):
 
             # ober resultado se nessesario
             if exec_type == ExecType.fetchone:
-                result = Result.Fetchone(self._cursor, response.fetchone())
+                result = Result.Fetchone(query_sql, self._cursor, response.fetchone())
             elif exec_type == ExecType.fetchall:
-                result = Result.Fetchall(self._cursor, response.fetchall())
+                result = Result.Fetchall(query_sql, self._cursor, response.fetchall())
             elif exec_type == ExecType.send:
-                result = Result.Send(True)
+                result = Result.Send(query_sql, True)
 
             # fazer o commit
             connection.commit()
 
         except Exception as ex:
             if exec_type == ExecType.fetchone:
-                return Result.Fetchone(
+                return Result.Fetchone(query_sql, None, None, ex)
             elif exec_type == ExecType.fetchall:
-                return Result.Fetchall(
+                return Result.Fetchall(query_sql, None, None, ex)
             elif exec_type == ExecType.send:
-                return Result.Send(
-
+                return Result.Send(query_sql, False, str(ex))
+
         # retorna o resultado
         return result
 
@@ -229,6 +267,30 @@ class Stored(BaseExecutor):
     def __init__(self, connectionStringData : ConnectionStringData, attempts : int, wait_timeout : int,sql_version : int | None, api_environment : bool):
         super().__init__(connectionStringData, attempts, wait_timeout, sql_version, api_environment)
 
+    @overload
+    def count(self, query : str) -> Result.Count:
+        pass
+
+    @overload
+    def count(self, model : T, additional_sql : str = "", select_top : int = None) -> Result.Count:
+        pass
+
+    def count(self, *args, **kwargs) -> T | Result.Count:
+        args = Utils.args_query(*args, **kwargs)
+        if args.model:
+            args.query = StoredBuilder.select(args.model, args.additional_sql, args.select_top)
+
+        count_query = f"""
+            SELECT COUNT(*) AS Count FROM (
+                {args.query}
+            ) AS count_subquery
+        """
+
+        result : Result.Fetchone = self._exec_(self._connection, count_query, ExecType.fetchone)
+        if result.success:
+            return Result.Count(count_query, result.dict.get('Count', 0), result.status_code, result.error)
+        return Result.Count(count_query, 0, result.status_code, result.error)
+
     @overload
     def fetchone(self, query : str, params : list | tuple) -> Result.Fetchone:
         pass
@@ -238,17 +300,18 @@ class Stored(BaseExecutor):
         pass
 
     @overload
-    def fetchone(self, model : T, additional_sql : str = "") -> T:
+    def fetchone(self, model : T, additional_sql : str = "") -> Result.FetchoneModel[T]:
         pass
 
-    def fetchone(self, *args, **kwargs) -> T | Result.Fetchone:
+    def fetchone(self, *args, **kwargs) -> Result.FetchoneModel[T] | Result.Fetchone:
         args = Utils.args_stored(*args, **kwargs)
         if args.model:
             args.query, args.params = StoredBuilder.select(args.model, args.additional_sql, args.select_top)
 
         result = self._exec_(self._connection, (args.query, *args.params), ExecType.fetchone)
         if args.model:
-
+            model_instance = args.model.__class__(**result.dict) if result.success else args.model.__class__()
+            return Result.FetchoneModel(model_instance, result)
         return result
 
     @overload
@@ -260,17 +323,18 @@ class Stored(BaseExecutor):
         pass
 
     @overload
-    def fetchall(self, model : T, additional_sql : str = "", select_top : int = None) ->
+    def fetchall(self, model : T, additional_sql : str = "", select_top : int = None) -> Result.FetchallModel[T]:
         pass
 
-    def fetchall(self, *args, **kwargs) ->
+    def fetchall(self, *args, **kwargs) -> Result.FetchallModel[T] | Result.Fetchall:
         args = Utils.args_stored(*args, **kwargs)
         if args.model:
             args.query, args.params = StoredBuilder.select(args.model, args.additional_sql, args.select_top)
 
         result = self._exec_(self._connection, (args.query, *args.params), ExecType.fetchall)
         if args.model:
-
+            models = [args.model.__class__(**r) for r in result.list_dict] if result.success else []
+            return Result.FetchallModel(models, result)
         return result
 
     @overload
@@ -301,10 +365,11 @@ class Stored(BaseExecutor):
         name_column_id = next(iter(insert_data.keys()))
 
         query, params = StoredBuilder.insert(model, name_column_id)
-
+        stored_procedure = (query, *params)
+        result = self._exec_(self._connection, stored_procedure, ExecType.fetchone)
         if result.success:
-            return Result.Insert(result.dict.get('Id', 0), result.status_code, result.
-        return Result.Insert(0, result.status_code, result.
+            return Result.Insert(stored_procedure, result.dict.get('Id', 0), result.status_code, result.error)
+        return Result.Insert(stored_procedure, 0, result.status_code, result.error)
 
     def _exec_(self, connection , stored_procedure : tuple, exec_type : ExecType):
 
@@ -313,29 +378,39 @@ class Stored(BaseExecutor):
                 raise UnavailableServiceException()
 
             if exec_type == ExecType.fetchone:
-                return Result.Fetchone(None, None,
+                return Result.Fetchone(stored_procedure, None, None, self._connection_error)
+            elif exec_type == ExecType.fetchall:
+                return Result.Fetchall(stored_procedure, None, None, self._connection_error)
+            elif exec_type == ExecType.send:
+                return Result.Send(stored_procedure, False, self._connection_error)
+
+        try:
+            # executar
+            response = self._cursor.execute(*stored_procedure)
+
+            # ober resultado se nessesario
+            if exec_type == ExecType.fetchone:
+                result = Result.Fetchone(stored_procedure, self._cursor, response.fetchone())
             elif exec_type == ExecType.fetchall:
-
+                result = Result.Fetchall(stored_procedure, self._cursor, response.fetchall())
             elif exec_type == ExecType.send:
-
-
-
-
-
-            # ober resultado se nessesario
-            if exec_type == ExecType.fetchone:
-                result = Result.Fetchone(self._cursor, response.fetchone())
-            elif exec_type == ExecType.fetchall:
-                result = Result.Fetchall(self._cursor, response.fetchall())
-            elif exec_type == ExecType.send:
-                result = Result.Send(True)
-
-            # fazer o commit
-            connection.commit()
+                result = Result.Send(stored_procedure, True)
+
+            # fazer o commit
+            connection.commit()
 
+        except Exception as ex:
+            if exec_type == ExecType.fetchone:
+                return Result.Fetchone(stored_procedure, None, None, ex)
+            elif exec_type == ExecType.fetchall:
+                return Result.Fetchall(stored_procedure, None, None, ex)
+            elif exec_type == ExecType.send:
+                return Result.Send(stored_procedure, False, ex)
+
         # retorna o resultado
         return result
 
+
 
 
 
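The net effect of this file's changes is a richer synchronous `Query` API: a new `count()` that wraps any query in a `COUNT(*)` subquery, join-aware `fetchone_with_joins` / `fetchall_with_joins` helpers, and typed `Result.FetchoneModel[T]` / `Result.FetchallModel[T]` wrappers instead of bare model instances. A minimal usage sketch follows; only the method signatures come from the diff above, while the keyword form of the `ConnectionStringData` constructor and the need for an established connection beforehand are assumptions.

```python
# Hedged sketch of the 1.2.0 Query executor API shown in this diff.
from dapper_sqls.dapper.executors import Query
from dapper_sqls.models import ConnectionStringData

# Field names mirror cs_data.server/database/username/password used above;
# passing them as keywords is an assumption about this model's constructor.
cs_data = ConnectionStringData(server="localhost", database="AppDb",
                               username="sa", password="secret")

query = Query(cs_data, attempts=3, wait_timeout=5,
              sql_version=None, api_environment=False)

# count() wraps the given SQL (or a model-generated SELECT) in
# "SELECT COUNT(*) AS Count FROM (...) AS count_subquery" and returns Result.Count.
total = query.count("SELECT * FROM Users WHERE Active = 1")

# fetchone(model) now returns Result.FetchoneModel[T] built from the row dict,
# rather than the bare model instance returned in 0.9.7.
```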
dapper_sqls/decorators.py
CHANGED
@@ -1,10 +1,10 @@
-#
+# coding: utf-8
 from functools import wraps
 import asyncio
 from time import perf_counter
 from typing import Callable
 
-def func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True):
+def func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True, default_value = None):
     """
     Synchronous function decorator for validation, error handling, and logging execution time.
     """
@@ -25,6 +25,7 @@ def func_validation(callable_msg_error: Callable = None, use_raise: bool = False
                     raise
                 else:
                     print(f"Unhandled exception in '{func.__name__}': {error_message}")
+                    return default_value
             finally:
                 if use_log:
                     stop = perf_counter()
@@ -33,7 +34,7 @@ def func_validation(callable_msg_error: Callable = None, use_raise: bool = False
         return wrapper
     return decorator
 
-def async_func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True):
+def async_func_validation(callable_msg_error: Callable = None, use_raise: bool = False, use_log: bool = True, default_value = None):
     """
     Asynchronous function decorator for validation, error handling, and logging execution time.
     """
@@ -54,6 +55,7 @@ def async_func_validation(callable_msg_error: Callable = None, use_raise: bool =
                     raise
                 else:
                     print(f"Unhandled exception in async function '{func.__name__}': {error_message}")
+                    return default_value
             finally:
                 if use_log:
                     stop = perf_counter()
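The new `default_value` parameter gives both decorators a fallback return value when the wrapped call fails and `use_raise` is False. A short sketch, where the decorated function is purely illustrative:

```python
from dapper_sqls.decorators import func_validation

@func_validation(use_raise=False, use_log=False, default_value=[])
def load_items() -> list:
    raise RuntimeError("simulated failure")

# The exception is caught, a message is printed, and as of 1.2.0 the
# decorator returns default_value instead of falling through to None.
items = load_items()
print(items)  # -> []
```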
dapper_sqls/http/aiohttp.py
ADDED
@@ -0,0 +1,155 @@
+# coding: utf-8
+
+import asyncio
+from time import perf_counter
+import aiohttp
+from .models import HttpMethod, UnavailableService, DataFetchHttpResult, DataFetchHttp
+from typing import overload
+import re
+from urllib.parse import urlparse
+import jwt
+from datetime import datetime
+
+class AioHttp(object):
+
+    @staticmethod
+    def is_valid_jwt(token: str) -> bool:
+        """Verifica se uma string tem o formato de um JWT"""
+        parts = token.split(".")
+        if len(parts) != 3:
+            return False  # Deve ter exatamente 3 partes
+
+        try:
+            # Apenas decodifica sem verificar a assinatura
+            jwt.decode(token, options={"verify_signature": False})
+            return True  # Decodificação sem erro -> é um JWT válido
+        except jwt.DecodeError:
+            return False  # Não é um JWT válido
+        except jwt.ExpiredSignatureError:
+            return True  # É um JWT, mas está expirado
+        except jwt.InvalidTokenError:
+            return False  # Token inválido
+
+    def get_token_expiration(token: str) -> datetime | None:
+        """Obtém a data de expiração (exp) de um JWT sem precisar validar a assinatura."""
+        try:
+            decoded = jwt.decode(token, options={"verify_signature": False})  # Decodifica sem verificar
+            exp_timestamp = decoded.get("exp")  # Obtém o timestamp de expiração
+            if exp_timestamp:
+                return datetime.utcfromtimestamp(exp_timestamp)  # Converte para datetime
+        except jwt.DecodeError:
+            return None  # Token inválido
+
+        return None  # Token sem expiração definida
+
+    @staticmethod
+    def is_valid_url(url: str) -> bool:
+        parsed_url = urlparse(url)
+
+        # Verifica se o esquema (scheme) é http ou https e se há um domínio válido
+        if parsed_url.scheme not in {"http", "https"} or not parsed_url.netloc:
+            return False
+
+        # Regex para verificar um domínio válido
+        domain_pattern = re.compile(
+            r"^(?:[a-zA-Z0-9-]{1,63}\.)+[a-zA-Z]{2,63}$"
+        )
+
+        return bool(domain_pattern.match(parsed_url.netloc))
+
+    def __init__(self, base_url = "http://127.0.0.1:8000/", raise_error = False):
+        self.base_url = base_url
+        self.headers = {'Content-Type': 'application/json'}
+        self.endpoint_test_connection = 'test-connection'
+        self.raise_error = raise_error
+
+    async def test_connection(self):
+        data = DataFetchHttp("test-connection", self.endpoint_test_connection, HttpMethod.GET )
+        try:
+            res = await self.fetch(data)
+            if res.success:
+                return True
+        except:
+            ...
+
+    @overload
+    async def fetch(self, data: DataFetchHttp, session: aiohttp.ClientSession) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    async def fetch(self, data: DataFetchHttp) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    async def fetch(self, endpoint : str, http_method : HttpMethod, data : dict = {}) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    async def fetch(self, endpoint : str, http_method : HttpMethod) -> DataFetchHttpResult:
+        pass
+
+    async def fetch(self, *args) -> DataFetchHttpResult:
+        if type(args[0]) == str:
+            if len(args) == 3:
+                endpoint, http_method, data = args
+            else:
+                endpoint, http_method = args
+                data = {}
+            data_fetch_http = DataFetchHttp("", endpoint, http_method, data)
+            async with aiohttp.ClientSession() as session:
+                return await self._do_fetch(data_fetch_http, session)
+        else:
+            data_fetch_http = args[0]
+            if len(args) == 1:
+                async with aiohttp.ClientSession() as session:
+                    return await self._do_fetch(data_fetch_http, session)
+            else:
+                session = args[1]
+                return await self._do_fetch(data_fetch_http, session)
+
+    async def _do_fetch(self, data_fetch_http: DataFetchHttp, session: aiohttp.ClientSession) -> DataFetchHttpResult:
+        url = f'{self.base_url}{data_fetch_http.endpoint}'
+
+        try:
+            method = session.get
+            if data_fetch_http.http_method == HttpMethod.POST:
+                method = session.post
+            elif data_fetch_http.http_method == HttpMethod.PUT:
+                method = session.put
+            elif data_fetch_http.http_method == HttpMethod.DELETE:
+                method = session.delete
+
+            start = perf_counter()
+            async with method(url, headers=self.headers, json=data_fetch_http.data) as r:
+                content = await r.json() if r.headers.get("Content-Type") == "application/json" else {"text": await r.text()}
+
+                stop = perf_counter()
+                delay = round(stop - start, 3)
+                if r.status == 200:
+                    return DataFetchHttpResult(name=data_fetch_http.name, success=True, content=content, status_code=r.status, delay=delay)
+                elif r.status == 503:
+                    if self.raise_error:
+                        raise UnavailableService()
+                    else:
+                        return DataFetchHttpResult(name=data_fetch_http.name, success=False, status_code=503, content={'error': 'Database unavailable'}, delay=delay)
+                return DataFetchHttpResult(name=data_fetch_http.name, success=False, content=content, status_code=r.status, delay=delay)
+        except aiohttp.ClientError as e:
+            stop = perf_counter()
+            delay = round(stop - start, 3)
+            return DataFetchHttpResult(
+                name=data_fetch_http.name,
+                success=False,
+                content={"error": str(e)},
+                status_code=0,
+                delay=delay
+            )
+
+    async def fetch_all(self, list_data_fetch_http: list[DataFetchHttp]) -> dict[str, DataFetchHttpResult]:
+        async with aiohttp.ClientSession() as session:
+            tasks = [self.fetch(data, session) for data in list_data_fetch_http]
+            res = await asyncio.gather(*tasks)
+            return {t.name: t for t in res}
+
+
+
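The new `AioHttp` class wraps `aiohttp` with overloaded `fetch` signatures and a `fetch_all` batch helper that shares one `ClientSession` and returns results keyed by request name. A usage sketch, assuming `AioHttp`, `DataFetchHttp` and `HttpMethod` are re-exported from `dapper_sqls.http` (the endpoints and the attribute access on the result objects are illustrative):

```python
import asyncio
from dapper_sqls.http import AioHttp, DataFetchHttp, HttpMethod  # re-export path assumed

async def main():
    client = AioHttp(base_url="http://127.0.0.1:8000/")

    # Single request via the (endpoint, http_method) overload.
    res = await client.fetch("users/1", HttpMethod.GET)
    print(res.status_code, res.content)  # DataFetchHttpResult fields assumed to be attributes

    # fetch_all shares one ClientSession across requests and returns a
    # dict of DataFetchHttpResult keyed by each DataFetchHttp name.
    batch = await client.fetch_all([
        DataFetchHttp("users", "users", HttpMethod.GET),
        DataFetchHttp("orders", "orders", HttpMethod.GET),
    ])
    print(batch["users"].success)

asyncio.run(main())
```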
dapper_sqls/http/decorators.py
ADDED
@@ -0,0 +1,123 @@
+# coding: utf-8
+from functools import wraps
+import asyncio
+from time import perf_counter
+from .models import UnavailableService, InternalServerError
+import http
+from collections.abc import Mapping
+from typing_extensions import Annotated, Doc
+from typing import Any, Dict, Optional
+
+class StarletteHTTPException(Exception):
+    def __init__(self, status_code: int, detail: str | None = None, headers: Mapping[str, str] | None = None) -> None:
+        if detail is None:
+            detail = http.HTTPStatus(status_code).phrase
+        self.status_code = status_code
+        self.detail = detail
+        self.headers = headers
+
+    def __str__(self) -> str:
+        return f"{self.status_code}: {self.detail}"
+
+    def __repr__(self) -> str:
+        class_name = self.__class__.__name__
+        return f"{class_name}(status_code={self.status_code!r}, detail={self.detail!r})"
+
+class HTTPException(StarletteHTTPException):
+    """
+    An HTTP exception you can raise in your own code to show errors to the client.
+
+    This is for client errors, invalid authentication, invalid data, etc. Not for server
+    errors in your code.
+
+    Read more about it in the
+    [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/).
+
+    ## Example
+
+    ```python
+    from fastapi import FastAPI, HTTPException
+
+    app = FastAPI()
+
+    items = {"foo": "The Foo Wrestlers"}
+
+
+    @app.get("/items/{item_id}")
+    async def read_item(item_id: str):
+        if item_id not in items:
+            raise HTTPException(status_code=404, detail="Item not found")
+        return {"item": items[item_id]}
+    ```
+    """
+
+    def __init__(
+        self,
+        status_code: Annotated[
+            int,
+            Doc(
+                """
+                HTTP status code to send to the client.
+                """
+            ),
+        ],
+        detail: Annotated[
+            Any,
+            Doc(
+                """
+                Any data to be sent to the client in the `detail` key of the JSON
+                response.
+                """
+            ),
+        ] = None,
+        headers: Annotated[
+            Optional[Dict[str, str]],
+            Doc(
+                """
+                Any headers to send to the client in the response.
+                """
+            ),
+        ] = None,
+    ) -> None:
+        super().__init__(status_code=status_code, detail=detail, headers=headers)
+
+def _create_error(e : Exception):
+    error_message = str(e)
+    error_type = None
+    error_code = None
+    error_status_code = None
+
+    if hasattr(e , 'message'):
+        error_message = e.message
+    if hasattr(e, 'type'):
+        error_type = e.type
+    if hasattr(e, 'code'):
+        error_code = e.code
+    if hasattr(e, 'status_code'):
+        error_status_code = e.status_code
+
+    return InternalServerError(message=error_message, status_code=error_status_code, type=error_type, code=error_code)
+
+def func_router_validation(use_log = True):
+    def decorator(func):
+        @wraps(func)
+        async def wrapper(*args, **kwargs):
+            if use_log:
+                start = perf_counter()
+            try:
+                return await asyncio.create_task(func(*args, **kwargs))
+            except Exception as e:
+                error = _create_error(e)
+                if error.status_code == 503:
+                    raise HTTPException(status_code=503, detail=UnavailableService().model_dump())
+
+                raise HTTPException(status_code=500, detail=error.model_dump())
+            finally:
+                if use_log:
+                    stop = perf_counter()
+                    execution_time = round(stop - start, 3)
+                    print(f"The function '{func.__name__}' executed in {execution_time} seconds.")
+
+        return wrapper
+
+    return decorator