dapper-sqls 1.1.3-py3-none-any.whl → 1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dapper_sqls/__init__.py +3 -1
- dapper_sqls/_types.py +25 -2
- dapper_sqls/async_dapper/async_executors.py +127 -52
- dapper_sqls/builders/model/model.py +418 -33
- dapper_sqls/builders/model/utils.py +334 -42
- dapper_sqls/builders/query.py +164 -43
- dapper_sqls/builders/stored.py +15 -5
- dapper_sqls/builders/stp.py +6 -2
- dapper_sqls/config.py +40 -31
- dapper_sqls/dapper/executors.py +130 -55
- dapper_sqls/http/__init__.py +4 -0
- dapper_sqls/http/aiohttp.py +155 -0
- dapper_sqls/http/decorators.py +123 -0
- dapper_sqls/http/models.py +58 -0
- dapper_sqls/http/request.py +140 -0
- dapper_sqls/models/__init__.py +3 -5
- dapper_sqls/models/base.py +246 -20
- dapper_sqls/models/connection.py +1 -1
- dapper_sqls/models/query_field.py +214 -0
- dapper_sqls/models/result.py +314 -44
- dapper_sqls/sqlite/__init__.py +1 -0
- dapper_sqls/sqlite/async_local_database.py +69 -5
- dapper_sqls/sqlite/decorators.py +69 -0
- dapper_sqls/sqlite/installer.py +8 -4
- dapper_sqls/sqlite/local_database.py +39 -5
- dapper_sqls/sqlite/models.py +25 -1
- dapper_sqls/sqlite/utils.py +2 -1
- dapper_sqls/utils.py +16 -4
- dapper_sqls-1.2.0.dist-info/METADATA +41 -0
- dapper_sqls-1.2.0.dist-info/RECORD +40 -0
- {dapper_sqls-1.1.3.dist-info → dapper_sqls-1.2.0.dist-info}/WHEEL +1 -1
- dapper_sqls-1.1.3.dist-info/METADATA +0 -10
- dapper_sqls-1.1.3.dist-info/RECORD +0 -33
- {dapper_sqls-1.1.3.dist-info → dapper_sqls-1.2.0.dist-info}/top_level.txt +0 -0
dapper_sqls/http/aiohttp.py
ADDED
@@ -0,0 +1,155 @@
+# coding: utf-8
+
+import asyncio
+from time import perf_counter
+import aiohttp
+from .models import HttpMethod, UnavailableService, DataFetchHttpResult, DataFetchHttp
+from typing import overload
+import re
+from urllib.parse import urlparse
+import jwt
+from datetime import datetime
+
+class AioHttp(object):
+
+    @staticmethod
+    def is_valid_jwt(token: str) -> bool:
+        """Checks whether a string has the format of a JWT."""
+        parts = token.split(".")
+        if len(parts) != 3:
+            return False  # Must have exactly 3 parts
+
+        try:
+            # Decode only, without verifying the signature
+            jwt.decode(token, options={"verify_signature": False})
+            return True  # Decoded without error -> it is a valid JWT
+        except jwt.DecodeError:
+            return False  # Not a valid JWT
+        except jwt.ExpiredSignatureError:
+            return True  # It is a JWT, but it has expired
+        except jwt.InvalidTokenError:
+            return False  # Invalid token
+
+    def get_token_expiration(token: str) -> datetime | None:
+        """Gets the expiration date (exp) of a JWT without validating the signature."""
+        try:
+            decoded = jwt.decode(token, options={"verify_signature": False})  # Decode without verifying
+            exp_timestamp = decoded.get("exp")  # Get the expiration timestamp
+            if exp_timestamp:
+                return datetime.utcfromtimestamp(exp_timestamp)  # Convert to datetime
+        except jwt.DecodeError:
+            return None  # Invalid token
+
+        return None  # Token with no expiration set
+
+    @staticmethod
+    def is_valid_url(url: str) -> bool:
+        parsed_url = urlparse(url)
+
+        # Check that the scheme is http or https and that there is a valid domain
+        if parsed_url.scheme not in {"http", "https"} or not parsed_url.netloc:
+            return False
+
+        # Regex to validate the domain
+        domain_pattern = re.compile(
+            r"^(?:[a-zA-Z0-9-]{1,63}\.)+[a-zA-Z]{2,63}$"
+        )
+
+        return bool(domain_pattern.match(parsed_url.netloc))
+
+    def __init__(self, base_url = "http://127.0.0.1:8000/", raise_error = False):
+        self.base_url = base_url
+        self.headers = {'Content-Type': 'application/json'}
+        self.endpoint_test_connection = 'test-connection'
+        self.raise_error = raise_error
+
+    async def test_connection(self):
+        data = DataFetchHttp("test-connection", self.endpoint_test_connection, HttpMethod.GET)
+        try:
+            res = await self.fetch(data)
+            if res.success:
+                return True
+        except:
+            ...
+
+    @overload
+    async def fetch(self, data: DataFetchHttp, session: aiohttp.ClientSession) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    async def fetch(self, data: DataFetchHttp) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    async def fetch(self, endpoint: str, http_method: HttpMethod, data: dict = {}) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    async def fetch(self, endpoint: str, http_method: HttpMethod) -> DataFetchHttpResult:
+        pass
+
+    async def fetch(self, *args) -> DataFetchHttpResult:
+        if type(args[0]) == str:
+            if len(args) == 3:
+                endpoint, http_method, data = args
+            else:
+                endpoint, http_method = args
+                data = {}
+            data_fetch_http = DataFetchHttp("", endpoint, http_method, data)
+            async with aiohttp.ClientSession() as session:
+                return await self._do_fetch(data_fetch_http, session)
+        else:
+            data_fetch_http = args[0]
+            if len(args) == 1:
+                async with aiohttp.ClientSession() as session:
+                    return await self._do_fetch(data_fetch_http, session)
+            else:
+                session = args[1]
+                return await self._do_fetch(data_fetch_http, session)
+
+    async def _do_fetch(self, data_fetch_http: DataFetchHttp, session: aiohttp.ClientSession) -> DataFetchHttpResult:
+        url = f'{self.base_url}{data_fetch_http.endpoint}'
+
+        try:
+            method = session.get
+            if data_fetch_http.http_method == HttpMethod.POST:
+                method = session.post
+            elif data_fetch_http.http_method == HttpMethod.PUT:
+                method = session.put
+            elif data_fetch_http.http_method == HttpMethod.DELETE:
+                method = session.delete
+
+            start = perf_counter()
+            async with method(url, headers=self.headers, json=data_fetch_http.data) as r:
+                content = await r.json() if r.headers.get("Content-Type") == "application/json" else {"text": await r.text()}
+
+                stop = perf_counter()
+                delay = round(stop - start, 3)
+                if r.status == 200:
+                    return DataFetchHttpResult(name=data_fetch_http.name, success=True, content=content, status_code=r.status, delay=delay)
+                elif r.status == 503:
+                    if self.raise_error:
+                        raise UnavailableService()
+                    else:
+                        return DataFetchHttpResult(name=data_fetch_http.name, success=False, status_code=503, content={'error': 'Database unavailable'}, delay=delay)
+                return DataFetchHttpResult(name=data_fetch_http.name, success=False, content=content, status_code=r.status, delay=delay)
+        except aiohttp.ClientError as e:
+            stop = perf_counter()
+            delay = round(stop - start, 3)
+            return DataFetchHttpResult(
+                name=data_fetch_http.name,
+                success=False,
+                content={"error": str(e)},
+                status_code=0,
+                delay=delay
+            )
+
+    async def fetch_all(self, list_data_fetch_http: list[DataFetchHttp]) -> dict[str, DataFetchHttpResult]:
+        async with aiohttp.ClientSession() as session:
+            tasks = [self.fetch(data, session) for data in list_data_fetch_http]
+            res = await asyncio.gather(*tasks)
+            return {t.name: t for t in res}
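For orientation, a minimal usage sketch of the new async client follows. It assumes `AioHttp` is re-exported from `dapper_sqls.http` via the new `http/__init__.py` (whose contents are not shown in this hunk); the base URL, the `users` endpoint, and the request names are illustrative only.

```python
import asyncio
from dapper_sqls.http import AioHttp                      # assumed re-export; otherwise dapper_sqls.http.aiohttp
from dapper_sqls.http.models import DataFetchHttp, HttpMethod

async def main():
    client = AioHttp(base_url="http://127.0.0.1:8000/")

    # Positional (endpoint, method) overload: opens its own ClientSession for a single request.
    result = await client.fetch("users", HttpMethod.GET)
    print(result.status_code, result.delay, result.content)

    # fetch_all shares one ClientSession and keys the results by DataFetchHttp.name.
    batch = [
        DataFetchHttp("list-users", "users", HttpMethod.GET),
        DataFetchHttp("create-user", "users", HttpMethod.POST, {"name": "Ana"}),
    ]
    results = await client.fetch_all(batch)
    print(results["list-users"].success)

asyncio.run(main())
```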
dapper_sqls/http/decorators.py
ADDED
@@ -0,0 +1,123 @@
+# coding: utf-8
+from functools import wraps
+import asyncio
+from time import perf_counter
+from .models import UnavailableService, InternalServerError
+import http
+from collections.abc import Mapping
+from typing_extensions import Annotated, Doc
+from typing import Any, Dict, Optional
+
+class StarletteHTTPException(Exception):
+    def __init__(self, status_code: int, detail: str | None = None, headers: Mapping[str, str] | None = None) -> None:
+        if detail is None:
+            detail = http.HTTPStatus(status_code).phrase
+        self.status_code = status_code
+        self.detail = detail
+        self.headers = headers
+
+    def __str__(self) -> str:
+        return f"{self.status_code}: {self.detail}"
+
+    def __repr__(self) -> str:
+        class_name = self.__class__.__name__
+        return f"{class_name}(status_code={self.status_code!r}, detail={self.detail!r})"
+
+class HTTPException(StarletteHTTPException):
+    """
+    An HTTP exception you can raise in your own code to show errors to the client.
+
+    This is for client errors, invalid authentication, invalid data, etc. Not for server
+    errors in your code.
+
+    Read more about it in the
+    [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/).
+
+    ## Example
+
+    ```python
+    from fastapi import FastAPI, HTTPException
+
+    app = FastAPI()
+
+    items = {"foo": "The Foo Wrestlers"}
+
+
+    @app.get("/items/{item_id}")
+    async def read_item(item_id: str):
+        if item_id not in items:
+            raise HTTPException(status_code=404, detail="Item not found")
+        return {"item": items[item_id]}
+    ```
+    """
+
+    def __init__(
+        self,
+        status_code: Annotated[
+            int,
+            Doc(
+                """
+                HTTP status code to send to the client.
+                """
+            ),
+        ],
+        detail: Annotated[
+            Any,
+            Doc(
+                """
+                Any data to be sent to the client in the `detail` key of the JSON
+                response.
+                """
+            ),
+        ] = None,
+        headers: Annotated[
+            Optional[Dict[str, str]],
+            Doc(
+                """
+                Any headers to send to the client in the response.
+                """
+            ),
+        ] = None,
+    ) -> None:
+        super().__init__(status_code=status_code, detail=detail, headers=headers)
+
+def _create_error(e: Exception):
+    error_message = str(e)
+    error_type = None
+    error_code = None
+    error_status_code = None
+
+    if hasattr(e, 'message'):
+        error_message = e.message
+    if hasattr(e, 'type'):
+        error_type = e.type
+    if hasattr(e, 'code'):
+        error_code = e.code
+    if hasattr(e, 'status_code'):
+        error_status_code = e.status_code
+
+    return InternalServerError(message=error_message, status_code=error_status_code, type=error_type, code=error_code)
+
+def func_router_validation(use_log = True):
+    def decorator(func):
+        @wraps(func)
+        async def wrapper(*args, **kwargs):
+            if use_log:
+                start = perf_counter()
+            try:
+                return await asyncio.create_task(func(*args, **kwargs))
+            except Exception as e:
+                error = _create_error(e)
+                if error.status_code == 503:
+                    raise HTTPException(status_code=503, detail=UnavailableService().model_dump())
+
+                raise HTTPException(status_code=500, detail=error.model_dump())
+            finally:
+                if use_log:
+                    stop = perf_counter()
+                    execution_time = round(stop - start, 3)
+                    print(f"The function '{func.__name__}' executed in {execution_time} seconds.")
+
+        return wrapper
+
+    return decorator
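A short sketch of how the new decorator might be wired into a FastAPI route. This is assumption-laden: FastAPI itself is not shipped by this package, and the route path and return value are invented for illustration.

```python
from fastapi import FastAPI
from dapper_sqls.http.decorators import func_router_validation

app = FastAPI()

@app.get("/items/{item_id}")
@func_router_validation(use_log=True)   # applied first, so the route registers the wrapped function
async def read_item(item_id: int):
    # Any exception raised here is converted by the wrapper:
    # an error carrying status_code 503 becomes HTTPException(503, UnavailableService payload),
    # everything else becomes HTTPException(500, InternalServerError payload).
    # With use_log=True the execution time is printed in the finally block.
    return {"item_id": item_id}
```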
dapper_sqls/http/models.py
ADDED
@@ -0,0 +1,58 @@
+from enum import Enum
+from pydantic import BaseModel, Field
+from typing import Optional, Literal
+
+class DataFetchHttpResult(BaseModel):
+    name: str = Field(..., description="")
+    success: bool = Field(..., description="")
+    content: dict | list[dict] = Field(..., description="")
+    status_code: int = Field(..., description="")
+    delay: float = Field(..., description="")
+
+class HttpMethod(Enum):
+    GET = "Get"
+    POST = "Post"
+    PUT = "Put"
+    DELETE = "Delete"
+
+class DataFetchHttp:
+    def __init__(self, name: str, endpoint: str, http_method: HttpMethod, data: dict = {}):
+        self.name = name
+        self.endpoint = endpoint
+        self.http_method = http_method
+        self.data = data
+
+class BaseError(BaseModel):
+    """
+    Base class to represent errors in the system.
+    """
+    message: str = Field(..., description="A descriptive error message.")
+    status_code: Optional[int] = Field(
+        None, description="The status code associated with the error (if applicable)."
+    )
+    code: Optional[str] = Field(
+        None, description="The error code (if available)."
+    )
+    type: Optional[str] = Field(
+        None, description="The type of the error (if available)."
+    )
+
+
+class InternalServerError(BaseError):
+    """
+    Represents an internal server error.
+    """
+    message: str = Field(
+        "Internal server error", description="The error message."
+    )
+
+
+class UnavailableService(BaseError):
+    """
+    Represents an unavailable service error.
+    """
+    message: Literal["Database unavailable"] = Field(
+        "Database unavailable", description="The unavailable service message."
+    )
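As a quick illustration of the new models (assuming pydantic v2, whose `model_dump` the decorators module also calls), the request descriptor and error classes can be built and serialized as below; all field values are made up.

```python
from dapper_sqls.http.models import (
    DataFetchHttp, HttpMethod, InternalServerError, UnavailableService,
)

# Named request descriptor consumed by AioHttp.fetch / Request.fetch
req = DataFetchHttp("list-users", "users", HttpMethod.GET)

err = InternalServerError(message="division by zero", type="ZeroDivisionError")
print(err.model_dump())
# {'message': 'division by zero', 'status_code': None, 'code': None, 'type': 'ZeroDivisionError'}

svc = UnavailableService()
print(svc.message)  # "Database unavailable" (fixed by the Literal default)
```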
dapper_sqls/http/request.py
ADDED
@@ -0,0 +1,140 @@
+# coding: utf-8
+from .models import HttpMethod, UnavailableService, DataFetchHttpResult, DataFetchHttp
+from time import perf_counter
+import requests
+from concurrent.futures import ThreadPoolExecutor
+from typing import overload
+from urllib.parse import urlparse
+import jwt
+from datetime import datetime
+import re
+
+class Request(object):
+
+    @staticmethod
+    def is_valid_jwt(token: str) -> bool:
+        """Checks whether a string has the format of a JWT."""
+        parts = token.split(".")
+        if len(parts) != 3:
+            return False  # Must have exactly 3 parts
+
+        try:
+            # Decode only, without verifying the signature
+            jwt.decode(token, options={"verify_signature": False})
+            return True  # Decoded without error -> it is a valid JWT
+        except jwt.DecodeError:
+            return False  # Not a valid JWT
+        except jwt.ExpiredSignatureError:
+            return True  # It is a JWT, but it has expired
+        except jwt.InvalidTokenError:
+            return False  # Invalid token
+
+    def get_token_expiration(token: str) -> datetime | None:
+        """Gets the expiration date (exp) of a JWT without validating the signature."""
+        try:
+            decoded = jwt.decode(token, options={"verify_signature": False})  # Decode without verifying
+            exp_timestamp = decoded.get("exp")  # Get the expiration timestamp
+            if exp_timestamp:
+                return datetime.utcfromtimestamp(exp_timestamp)  # Convert to datetime
+        except jwt.DecodeError:
+            return None  # Invalid token
+
+        return None  # Token with no expiration set
+
+    @staticmethod
+    def is_valid_url(url: str) -> bool:
+        parsed_url = urlparse(url)
+
+        # Check that the scheme is http or https and that there is a valid domain
+        if parsed_url.scheme not in {"http", "https"} or not parsed_url.netloc:
+            return False
+
+        # Regex to validate the domain
+        domain_pattern = re.compile(
+            r"^(?:[a-zA-Z0-9-]{1,63}\.)+[a-zA-Z]{2,63}$"
+        )
+
+        return bool(domain_pattern.match(parsed_url.netloc))
+
+    def __init__(self, base_url = "http://127.0.0.1:8000/", raise_error = False):
+        self.base_url = base_url
+        self.headers = {'Content-Type': 'application/json'}
+        self.endpoint_test_connection = 'test-connection'
+        self.raise_error = raise_error
+
+    def test_connection(self):
+        try:
+            response = requests.get(f"{self.base_url}{self.endpoint_test_connection}")
+            if response.status_code == 200:
+                return True
+        except:
+            ...
+
+    @overload
+    def fetch(self, endpoint: str, http_method: HttpMethod, data: dict = {}) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    def fetch(self, endpoint: str, http_method: HttpMethod) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    def fetch(self, data_fetch_http: DataFetchHttp) -> DataFetchHttpResult:
+        pass
+
+    def fetch(self, *args) -> DataFetchHttpResult:
+        if len(args) > 1:
+            if len(args) == 3:
+                endpoint, http_method, data = args
+            else:
+                endpoint, http_method = args
+                data = {}
+            data_fetch_http = DataFetchHttp("", endpoint, http_method, data)
+        else:
+            data_fetch_http = args[0]
+
+        url = f'{self.base_url}{data_fetch_http.endpoint}'
+
+        try:
+            method = requests.get
+            if data_fetch_http.http_method == HttpMethod.POST:
+                method = requests.post
+            elif data_fetch_http.http_method == HttpMethod.PUT:
+                method = requests.put
+            elif data_fetch_http.http_method == HttpMethod.DELETE:
+                method = requests.delete
+
+            start = perf_counter()
+            res = method(url, json=data_fetch_http.data, headers=self.headers)
+
+            stop = perf_counter()
+            delay = round(stop - start, 3)
+
+            content = res.json() if res.headers.get("Content-Type") == "application/json" else {"text": res.text}
+            if res.status_code == 200:
+                return DataFetchHttpResult(name=data_fetch_http.name, success=True, content=content, status_code=res.status_code, delay=delay)
+            elif res.status_code == 503:
+                if self.raise_error:
+                    raise UnavailableService()
+                else:
+                    return DataFetchHttpResult(name=data_fetch_http.name, success=False, status_code=503, content={'error': 'Database unavailable'}, delay=delay)
+            return DataFetchHttpResult(name=data_fetch_http.name, success=False, content=content, status_code=res.status_code, delay=delay)
+        except requests.exceptions.RequestException as e:
+            stop = perf_counter()
+            delay = round(stop - start, 3)
+            return DataFetchHttpResult(
+                name=data_fetch_http.name,
+                success=False,
+                content={"error": str(e)},
+                status_code=0,
+                delay=delay
+            )
+
+    def fetch_all(self, list_data_fetch_http: list[DataFetchHttp]) -> dict[str, DataFetchHttpResult]:
+
+        with ThreadPoolExecutor() as executor:
+            results = executor.map(self.fetch, list_data_fetch_http)
+
+            all_results = list(results)
+            results_dict = {result.name: result for result in all_results}
+            return results_dict
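And the synchronous counterpart, mirroring the AioHttp sketch above (again assuming the class is importable from `dapper_sqls.http.request`; the `health` endpoint and request names are illustrative).

```python
from dapper_sqls.http.request import Request
from dapper_sqls.http.models import DataFetchHttp, HttpMethod

client = Request(base_url="http://127.0.0.1:8000/")

if client.test_connection():
    res = client.fetch("health", HttpMethod.GET)
    print(res.success, res.status_code, res.delay)

# fetch_all fans the requests out over a ThreadPoolExecutor and keys results by name.
results = client.fetch_all([
    DataFetchHttp("ping-a", "health", HttpMethod.GET),
    DataFetchHttp("ping-b", "health", HttpMethod.GET),
])
print(results["ping-a"].status_code)
```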
dapper_sqls/models/__init__.py
CHANGED
@@ -1,8 +1,6 @@
 from .http import UnavailableServiceException
 from .result import Result
 from .connection import ConnectionStringData
-from .base import TableBaseModel, BaseUpdate
-
-
-
-
+from .base import TableBaseModel, BaseUpdate, SensitiveFields, SearchTable, JoinSearchTable
+from .query_field import (StringQueryField, NumericQueryField, BoolQueryField, DateQueryField, BytesQueryField, QueryFieldBase,
+                          BaseJoinConditionField, JoinNumericCondition, JoinStringCondition, JoinBooleanCondition, JoinDateCondition, JoinBytesCondition)