dapper-sqls 0.9.7-py3-none-any.whl → 1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dapper_sqls/__init__.py +4 -2
- dapper_sqls/_types.py +25 -2
- dapper_sqls/async_dapper/async_dapper.py +1 -1
- dapper_sqls/async_dapper/async_executors.py +128 -53
- dapper_sqls/builders/model/model.py +421 -36
- dapper_sqls/builders/model/utils.py +337 -45
- dapper_sqls/builders/query.py +165 -44
- dapper_sqls/builders/stored.py +16 -10
- dapper_sqls/builders/stp.py +6 -2
- dapper_sqls/config.py +41 -32
- dapper_sqls/dapper/dapper.py +1 -1
- dapper_sqls/dapper/executors.py +131 -56
- dapper_sqls/decorators.py +5 -3
- dapper_sqls/http/__init__.py +4 -0
- dapper_sqls/http/aiohttp.py +155 -0
- dapper_sqls/http/decorators.py +123 -0
- dapper_sqls/http/models.py +58 -0
- dapper_sqls/http/request.py +140 -0
- dapper_sqls/models/__init__.py +3 -5
- dapper_sqls/models/base.py +246 -20
- dapper_sqls/models/connection.py +2 -2
- dapper_sqls/models/query_field.py +214 -0
- dapper_sqls/models/result.py +315 -45
- dapper_sqls/sqlite/__init__.py +5 -1
- dapper_sqls/sqlite/async_local_database.py +168 -0
- dapper_sqls/sqlite/decorators.py +69 -0
- dapper_sqls/sqlite/installer.py +97 -0
- dapper_sqls/sqlite/local_database.py +67 -185
- dapper_sqls/sqlite/models.py +51 -1
- dapper_sqls/sqlite/utils.py +9 -0
- dapper_sqls/utils.py +18 -6
- dapper_sqls-1.2.0.dist-info/METADATA +41 -0
- dapper_sqls-1.2.0.dist-info/RECORD +40 -0
- {dapper_sqls-0.9.7.dist-info → dapper_sqls-1.2.0.dist-info}/WHEEL +1 -1
- dapper_sqls-0.9.7.dist-info/METADATA +0 -19
- dapper_sqls-0.9.7.dist-info/RECORD +0 -30
- {dapper_sqls-0.9.7.dist-info → dapper_sqls-1.2.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,58 @@
+from enum import Enum
+from pydantic import BaseModel, Field
+from typing import Optional, Literal
+
+class DataFetchHttpResult(BaseModel):
+    name : str = Field(..., description="")
+    success : bool = Field(..., description="")
+    content : dict | list[dict] = Field(..., description="")
+    status_code : int = Field(..., description="")
+    delay : float = Field(..., description="")
+
+class HttpMethod(Enum):
+    GET = "Get"
+    POST = "Post"
+    PUT = "Put"
+    DELETE = "Delete"
+
+class DataFetchHttp:
+    def __init__(self, name : str, endpoint : str, http_method : HttpMethod, data : dict = {}):
+        self.name = name
+        self.endpoint = endpoint
+        self.http_method = http_method
+        self.data = data
+
+class BaseError(BaseModel):
+    """
+    Base class to represent errors in the system.
+    """
+    message: str = Field(..., description="A descriptive error message.")
+    status_code: Optional[int] = Field(
+        None, description="The status code associated with the error (if applicable)."
+    )
+    code: Optional[str] = Field(
+        None, description="The error code (if available)."
+    )
+    type: Optional[str] = Field(
+        None, description="The type of the error (if available)."
+    )
+
+
+class InternalServerError(BaseError):
+    """
+    Represents an internal server error.
+    """
+    message: str = Field(
+        "Internal server error", description="The error message."
+    )
+
+
+class UnavailableService(BaseError):
+    """
+    Represents an unavailable service error.
+    """
+    message: Literal["Database unavailable"] = Field(
+        "Database unavailable", description="The unavailable service message."
+    )
+
+
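For orientation, a minimal usage sketch of the new http models; the endpoint name and field values below are illustrative and not taken from the package:

from dapper_sqls.http.models import DataFetchHttp, DataFetchHttpResult, HttpMethod

# A named request description; the name is later used as the key in Request.fetch_all results.
fetch_users = DataFetchHttp("users", "users/list", HttpMethod.POST, {"active": True})

# DataFetchHttpResult is the pydantic model that Request.fetch returns (see request.py below).
example_result = DataFetchHttpResult(
    name=fetch_users.name,
    success=True,
    content=[{"id": 1}],
    status_code=200,
    delay=0.042,
)
print(example_result.model_dump())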
dapper_sqls/http/request.py
ADDED
@@ -0,0 +1,140 @@
+# coding: utf-8
+from .models import HttpMethod, UnavailableService, DataFetchHttpResult, DataFetchHttp
+from time import perf_counter
+import requests
+from concurrent.futures import ThreadPoolExecutor
+from typing import overload
+from urllib.parse import urlparse
+import jwt
+from datetime import datetime
+import re
+
+class Request(object):
+
+    @staticmethod
+    def is_valid_jwt(token: str) -> bool:
+        """Checks whether a string has the format of a JWT"""
+        parts = token.split(".")
+        if len(parts) != 3:
+            return False  # Must have exactly 3 parts
+
+        try:
+            # Only decodes, without verifying the signature
+            jwt.decode(token, options={"verify_signature": False})
+            return True  # Decoded without error -> it is a valid JWT
+        except jwt.DecodeError:
+            return False  # Not a valid JWT
+        except jwt.ExpiredSignatureError:
+            return True  # It is a JWT, but it has expired
+        except jwt.InvalidTokenError:
+            return False  # Invalid token
+
+    def get_token_expiration(token: str) -> datetime | None:
+        """Gets the expiration date (exp) of a JWT without needing to validate the signature."""
+        try:
+            decoded = jwt.decode(token, options={"verify_signature": False})  # Decodes without verifying
+            exp_timestamp = decoded.get("exp")  # Gets the expiration timestamp
+            if exp_timestamp:
+                return datetime.utcfromtimestamp(exp_timestamp)  # Converts to datetime
+        except jwt.DecodeError:
+            return None  # Invalid token
+
+        return None  # Token without a defined expiration
+
+    @staticmethod
+    def is_valid_url(url: str) -> bool:
+        parsed_url = urlparse(url)
+
+        # Checks whether the scheme is http or https and whether there is a valid domain
+        if parsed_url.scheme not in {"http", "https"} or not parsed_url.netloc:
+            return False
+
+        # Regex to validate the domain
+        domain_pattern = re.compile(
+            r"^(?:[a-zA-Z0-9-]{1,63}\.)+[a-zA-Z]{2,63}$"
+        )
+
+        return bool(domain_pattern.match(parsed_url.netloc))
+
+    def __init__(self, base_url = "http://127.0.0.1:8000/", raise_error = False):
+        self.base_url = base_url
+        self.headers = {'Content-Type': 'application/json'}
+        self.endpoint_test_connection = 'test-connection'
+        self.raise_error = raise_error
+
+    def test_connection(self):
+        try:
+            response = requests.get(f"{self.base_url}{self.endpoint_test_connection}")
+            if response.status_code == 200:
+                return True
+        except:
+            ...
+
+    @overload
+    def fetch(self, endpoint : str, http_method : HttpMethod, data : dict = {}) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    def fetch(self, endpoint : str, http_method : HttpMethod) -> DataFetchHttpResult:
+        pass
+
+    @overload
+    def fetch(self, data_fetch_http : DataFetchHttp) -> DataFetchHttpResult:
+        pass
+
+    def fetch(self, *args) -> DataFetchHttpResult:
+        if len(args) > 1:
+            if len(args) == 3:
+                endpoint, http_method, data = args
+            else:
+                endpoint, http_method = args
+                data = {}
+            data_fetch_http = DataFetchHttp("", endpoint, http_method, data)
+        else:
+            data_fetch_http = args[0]
+
+        url = f'{self.base_url}{data_fetch_http.endpoint}'
+
+        try:
+            method = requests.get
+            if data_fetch_http.http_method == HttpMethod.POST:
+                method = requests.post
+            elif data_fetch_http.http_method == HttpMethod.PUT:
+                method = requests.put
+            elif data_fetch_http.http_method == HttpMethod.DELETE:
+                method = requests.delete
+
+            start = perf_counter()
+            res = method(url, json=data_fetch_http.data, headers=self.headers)
+
+            stop = perf_counter()
+            delay = round(stop - start, 3)
+
+            content = res.json() if res.headers.get("Content-Type") == "application/json" else {"text": res.text}
+            if res.status_code == 200:
+                return DataFetchHttpResult(name=data_fetch_http.name, success=True, content=content, status_code=res.status_code, delay=delay)
+            elif res.status_code == 503:
+                if self.raise_error:
+                    raise UnavailableService()
+                else:
+                    return DataFetchHttpResult(name=data_fetch_http.name, success=False, status_code=503, content={'error': 'Database unavailable'}, delay=delay)
+            return DataFetchHttpResult(name=data_fetch_http.name, success=False, content=content, status_code=res.status_code, delay=delay)
+        except requests.exceptions.RequestException as e:
+            stop = perf_counter()
+            delay = round(stop - start, 3)
+            return DataFetchHttpResult(
+                name=data_fetch_http.name,
+                success=False,
+                content={"error": str(e)},
+                status_code=0,
+                delay=delay
+            )
+
+    def fetch_all(self, list_data_fetch_http: list[DataFetchHttp]) -> dict[str, DataFetchHttpResult]:
+
+        with ThreadPoolExecutor() as executor:
+            results = executor.map(self.fetch, list_data_fetch_http)
+
+            all_results = list(results)
+            results_dict = {result.name: result for result in all_results}
+            return results_dict
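A hedged sketch of how the new Request client is used; the endpoints and payloads are hypothetical, and a server must be listening on the default base_url (http://127.0.0.1:8000/) for the calls to succeed:

from dapper_sqls.http.models import DataFetchHttp, HttpMethod
from dapper_sqls.http.request import Request

client = Request()  # defaults to base_url="http://127.0.0.1:8000/", raise_error=False

# Single call through the (endpoint, method, data) overload.
result = client.fetch("users/list", HttpMethod.POST, {"active": True})
print(result.success, result.status_code, result.delay)

# Several named calls run concurrently on a ThreadPoolExecutor;
# the returned dict is keyed by each DataFetchHttp.name.
results = client.fetch_all([
    DataFetchHttp("users", "users/list", HttpMethod.POST, {"active": True}),
    DataFetchHttp("orders", "orders/list", HttpMethod.GET),
])
print(results["orders"].content)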
dapper_sqls/models/__init__.py
CHANGED
@@ -1,8 +1,6 @@
 from .http import UnavailableServiceException
 from .result import Result
 from .connection import ConnectionStringData
-from .base import TableBaseModel, BaseUpdate
-
-
-
-
+from .base import TableBaseModel, BaseUpdate, SensitiveFields, SearchTable, JoinSearchTable
+from .query_field import (StringQueryField, NumericQueryField, BoolQueryField, DateQueryField, BytesQueryField, QueryFieldBase,
+                          BaseJoinConditionField, JoinNumericCondition, JoinStringCondition, JoinBooleanCondition, JoinDateCondition, JoinBytesCondition)
dapper_sqls/models/base.py
CHANGED
@@ -1,35 +1,261 @@
-from pydantic import BaseModel,
-from abc import ABC
+from pydantic import BaseModel, ConfigDict, PrivateAttr, Field, create_model
 from abc import ABC, abstractmethod
-from
+from typing import Set, Any, ClassVar, get_origin, get_args, Union, Optional, Literal, get_type_hints, List
+from ..utils import get_dict_args
+from dataclasses import asdict
+import copy
+import datetime
+
+QUERY_FIELD_TYPES = {
+    'StringQueryField',
+    'NumericQueryField',
+    'BoolQueryField',
+    'DateQueryField',
+    'BytesQueryField',
+    'JoinStringCondition',
+    'JoinNumericCondition',
+    'JoinBooleanCondition',
+    'JoinDateCondition',
+    'JoinBytesCondition',
+}
+
+def convert_datetime_date_to_str(annotation):
+    """Convert datetime/date or their unions with str to just str."""
+    if annotation in (datetime.datetime, datetime.date):
+        return str
+    origin_inner = get_origin(annotation)
+    args_inner = get_args(annotation)
+    if origin_inner is Union:
+        set_args = set(args_inner)
+        if str in set_args and (datetime.datetime in set_args or datetime.date in set_args):
+            return str
+    return annotation
+
+def remove_query_field_types(annotation):
+    """
+    Removes the QueryField types (such as StringQueryField) from a Union, or replaces them directly
+    """
+    origin = get_origin(annotation)
+    args = get_args(annotation)
+
+    def is_query_field(arg):
+        return getattr(arg, '__name__', '') in QUERY_FIELD_TYPES
+
+    if origin is Union:
+        new_args = tuple(arg for arg in args if not is_query_field(arg))
+        if len(new_args) == 1:
+            return new_args[0]
+        return Union[new_args]
+    elif is_query_field(annotation):
+        return str  # fallback to str if one is passed on its own
+    return annotation
+
+def is_optional(annotation):
+    """Check if an annotation is Optional[...] or Union[..., None]."""
+    origin = get_origin(annotation)
+    args = get_args(annotation)
+    return origin is Union and type(None) in args
+
+def remove_optional(annotation):
+    """Remove NoneType from Union[...]"""
+    args = tuple(arg for arg in get_args(annotation) if arg is not type(None))
+    if len(args) == 1:
+        return args[0]
+    return Union[args]
+
+def make_optional(annotation):
+    """Make an annotation optional if not already."""
+    if is_optional(annotation):
+        return annotation
+    return Optional[annotation]
+
+class SensitiveFields(object):
+
+    _sensitive_fields : Set[str] = set()
+
+    @classmethod
+    def set(cls, new_sensitive_filds : Set[str]):
+        cls._sensitive_fields = new_sensitive_filds
+
+    @classmethod
+    def get(cls):
+        return cls._sensitive_fields
+
 
 class TableBaseModel(BaseModel, ABC):
-    class Config(
+    class Config(ConfigDict):
         from_attributes = True
 
-
+    TABLE_NAME: ClassVar[str]
 
-
-
-
+    TABLE_ALIAS: ClassVar[str]
+
+    DESCRIPTION : ClassVar[str]
+
+    IDENTITIES : ClassVar[Set[str]]
+
+    PRIMARY_KEYs : ClassVar[Set[str]]
+
+    OPTIONAL_FIELDS : ClassVar[Set[str]]
+
+    MAX_LENGTH_FIELDS: ClassVar[dict[str, int]] = {}
+
+    _explicit_fields: Set[str] = PrivateAttr(default_factory=set)
+    _pending_updates: dict[str, Any] = PrivateAttr(default_factory=dict)
+    _initial_values: dict[str, Any] = PrivateAttr(default_factory=dict)
+
+
+    def __init__(self, **data):
+        sensitive = SensitiveFields.get()
+        filtered_data = {k: v for k, v in data.items() if k not in sensitive}
+
+        super().__init__(**filtered_data)
+        self._explicit_fields = set(filtered_data.keys())
+        self._initial_values = copy.deepcopy(self.model_dump())
+
+    def _reset_defaults(self):
+        for field_name, model_field in self.model_fields.items():
+            if field_name not in self._explicit_fields:
+                setattr(self, field_name, None)
+
+    def reset_to_initial_values(self):
+        for key, value in self._initial_values.items():
+            setattr(self, key, copy.deepcopy(value))
+        self.clear_updates()
+
+    def equals(self, other: "TableBaseModel") -> bool:
+        return self.model_dump() == other.model_dump()
+
+    def clear_updates(self):
+        self._pending_updates.clear()
 
+    def has_updates(self) -> bool:
+        for key, new_value in self._pending_updates.items():
+            if key in self.model_fields:
+                current_value = getattr(self, key, None)
+
+                if isinstance(current_value, BaseModel) and isinstance(new_value, BaseModel):
+                    if current_value.model_dump() != new_value.model_dump():
+                        return True
+
+                elif hasattr(current_value, "__dataclass_fields__") and hasattr(new_value, "__dataclass_fields__"):
+                    if asdict(current_value) != asdict(new_value):
+                        return True
+
+                elif hasattr(current_value, "__dict__") and hasattr(new_value, "__dict__"):
+                    if vars(current_value) != vars(new_value):
+                        return True
+
+                elif new_value != current_value:
+                    return True
+        return False
+
+    @staticmethod
+    def queue_update(self : 'TableBaseModel', **fields):
+        fields = get_dict_args(fields)
+        for key, value in fields.items():
+            if value != None and key in self.model_fields:
+                self._pending_updates[key] = value
+
+    def apply_updates(self):
+        for key, value in self._pending_updates.items():
+            if key in self.model_fields:
+                setattr(self, key, value)
+        self.clear_updates()
+
+    def alter_model_class(self, remove_fields: tuple[str] = (), mode: Literal['all_optional', 'all_required', 'original'] = 'all_optional', query_field = False):
+        fields = {}
+
+        for field_name, field in self.model_fields.items():
+            if field_name in remove_fields:
+                continue
+
+            ann = convert_datetime_date_to_str(field.annotation)
+            if not query_field:
+                ann = remove_query_field_types(ann)
+
+            max_length = None
+            if mode in ('all_required', 'original'):
+                max_length = self.MAX_LENGTH_FIELDS.get(field_name)
+                if isinstance(max_length, int) and max_length < 1:
+                    max_length = None
+
+            default = field.default
+
+            if mode == 'all_optional':
+                ann = make_optional(ann)
+                default = None
+
+            elif mode == 'all_required':
+                if is_optional(ann):
+                    ann = remove_optional(ann)
+                default = ...
+
+            elif mode == 'original':
+                if field_name in self.OPTIONAL_FIELDS:
+                    ann = make_optional(ann)
+                    default = None
+                else:
+                    if is_optional(ann):
+                        ann = remove_optional(ann)
+                    default = ...
+
+            fields[field_name] = (ann, Field(default=default, description=field.description, max_length=max_length))
+
+        new_model_class = create_model(
+            self.__name__,
+            __config__=ConfigDict(extra='forbid'),
+            **fields
+        )
+        return new_model_class
+
+    @classmethod
+    def get_field_type_names(cls) -> dict[str, set[str]]:
+        result = {}
+        type_hints = get_type_hints(cls, include_extras=True)
+
+        for field_name, hint in type_hints.items():
+            if field_name.startswith('_') or get_origin(hint) is ClassVar:
+                continue
+
+            args = get_args(hint)
+            if not args:
+                args = (hint,)
+
+            types = {
+                t.__name__ if hasattr(t, '__name__') else t._name if hasattr(t, '_name') else str(t)
+                for t in args
+                if t is not type(None)
+            }
+
+            result[field_name] = types
+
+        return result
+
+class SearchTable(BaseModel):
+    model: TableBaseModel
+    include: Optional[List[str]] = Field(default_factory=list)
+
+class JoinSearchTable(SearchTable):
+    join_type: Literal["INNER", "LEFT", "RIGHT", "FULL"] = "LEFT"
+
 class BaseUpdate(ABC):
 
-
-
-
+    def __init__(self, executor , model):
+        self._set_data = model
+        self._executor = executor
 
-
-
-
+    @property
+    def set_data(self):
+        return self._set_data
 
-
-
-
+    @property
+    def executor(self):
+        return self._executor
 
-
-
-
+    @abstractmethod
+    def where(self, *args):
+        pass
 
 
 
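A minimal sketch of the new TableBaseModel machinery, assuming a hypothetical User model; the table metadata and field names below are illustrative, and the generated models from builders/model would normally supply them:

from typing import ClassVar, Optional, Set
from dapper_sqls.models.base import TableBaseModel, SensitiveFields

class User(TableBaseModel):
    TABLE_NAME: ClassVar[str] = "User"
    TABLE_ALIAS: ClassVar[str] = "u"
    DESCRIPTION: ClassVar[str] = "Application users"
    IDENTITIES: ClassVar[Set[str]] = {"Id"}
    PRIMARY_KEYs: ClassVar[Set[str]] = {"Id"}
    OPTIONAL_FIELDS: ClassVar[Set[str]] = {"Email"}

    Id: Optional[int] = None
    Name: Optional[str] = None
    Email: Optional[str] = None

# Fields registered as sensitive are silently dropped from constructor input.
SensitiveFields.set({"PasswordHash"})

user = User(Id=1, Name="Alice", PasswordHash="never stored")

# queue_update is declared as a staticmethod that takes the instance explicitly.
User.queue_update(user, Email="alice@example.com")
print(user.has_updates())   # True: the staged Email differs from the current value
user.apply_updates()
print(user.Email)           # alice@example.com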
dapper_sqls/models/connection.py
CHANGED
@@ -1,4 +1,4 @@
-#
+# coding: utf-8
 
 class ConnectionStringData(object):
     def __init__(self, server: str, database: str, username: str, password: str):
@@ -34,7 +34,7 @@ class ConnectionStringData(object):
     @username.setter
     def username(self, value: str):
         if not isinstance(value, str):
-            raise ValueError("O nome de
+            raise ValueError("O nome de usuário deve ser uma string.")
         self._username = value
 
     @property
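For context, the change above only completes the validation message on the username setter; a brief sketch with hypothetical connection values:

from dapper_sqls.models.connection import ConnectionStringData

conn = ConnectionStringData("my-server", "my-db", "app_user", "secret")
conn.username = "another_user"   # accepted: the value is a str
try:
    conn.username = 123          # rejected: the setter only accepts strings
except ValueError as exc:
    print(exc)                   # O nome de usuário deve ser uma string.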