tp-common 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tp_common/__init__.py +52 -0
- tp_common/base_client/__init__.py +0 -0
- tp_common/base_client/base_client.py +585 -0
- tp_common/base_client/base_exception.py +2 -0
- tp_common/base_client/base_request.py +12 -0
- tp_common/base_client/base_response.py +11 -0
- tp_common/base_client/client_exceptions.py +76 -0
- tp_common/base_client/domain_exceptions.py +63 -0
- tp_common/logging.py +339 -0
- tp_common-0.0.1.dist-info/METADATA +110 -0
- tp_common-0.0.1.dist-info/RECORD +12 -0
- tp_common-0.0.1.dist-info/WHEEL +4 -0
tp_common/__init__.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
# Public API of the tp_common package.
#
# FIX: the wheel RECORD ships these modules as "tp_common/base_client/..."
# (there is no "tp_common/base/" package), so the original
# "tp_common.base.base_client..." import paths could never resolve and the
# package failed at import time. Paths corrected to match the file layout.
from tp_common.base_client.base_client import BaseClient
from tp_common.base_client.base_exception import BaseInfrastructureException
from tp_common.base_client.base_request import BaseRequest
from tp_common.base_client.base_response import BaseResponse
from tp_common.base_client.client_exceptions import (
    BaseClientException,
    ClientConnectionException,
    ClientDNSException,
    ClientProxyException,
    ClientResponseErrorException,
    ClientTimeoutException,
)
from tp_common.base_client.domain_exceptions import (
    AuthorizationException,
    BaseBusinessErrorException,
    BaseNetworkErrorException,
    BaseProxyErrorException,
    BaseServerErrorException,
    ResourceNotFoundException,
    ServerException,
    ServiceUnavailableException,
    TooManyRequestsException,
    ValidationException,
)
from tp_common.logging import Logger, TracingLogger

__all__ = [
    # BaseClient
    "BaseClient",
    "BaseInfrastructureException",
    "BaseRequest",
    "BaseResponse",
    "BaseClientException",
    "ClientResponseErrorException",
    "ClientTimeoutException",
    "ClientProxyException",
    "ClientConnectionException",
    "ClientDNSException",
    "BaseBusinessErrorException",
    "BaseServerErrorException",
    "BaseNetworkErrorException",
    "BaseProxyErrorException",
    "AuthorizationException",
    "ValidationException",
    "ResourceNotFoundException",
    "ServerException",
    "TooManyRequestsException",
    "ServiceUnavailableException",
    # Logging
    "TracingLogger",
    "Logger",
]
|
|
File without changes
|
|
@@ -0,0 +1,585 @@
|
|
|
1
|
+
import asyncio
import json as json_lib
import logging
import time
from datetime import UTC, datetime
from typing import Any, Literal, TypeVar

from urllib.parse import urlencode

import aiohttp
from aiohttp import (
    ClientConnectionError,
    ClientConnectorDNSError,
    ClientHttpProxyError,
    ClientProxyConnectionError,
    ServerTimeoutError,
)

# FIX: the package layout is "tp_common/base_client/..." (per the wheel
# RECORD); the original "tp_common.base.base_client..." path does not exist.
from tp_common.base_client.client_exceptions import (
    BaseClientException,
    ClientConnectionException,
    ClientDNSException,
    ClientProxyException,
    ClientResponseErrorException,
    ClientTimeoutException,
)

# Bound TypeVar so __aenter__ returns the concrete subclass type.
T = TypeVar("T", bound="BaseClient")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class BaseClient:
    """
    Base HTTP client for working with external APIs.

    Supports:
    - Session reuse
    - Proxy
    - Cookies
    - Browser impersonation (default headers)
    - Logging (a logger is created automatically when none is supplied)
    - Base-URL override for testing

    Args:
        cookies: Cookie string in the "name=value; name2=value2" format
        proxy: Proxy server URL
        logger: External logger (created automatically when not provided)
        base_url: Base URL for requests (required)
    """

    DEFAULT_TIMEOUT = 30.0  # seconds; total timeout for every request

    def __init__(
        self,
        base_url: str,
        cookies: str | None = None,
        proxy: str | None = None,
        logger: logging.Logger | None = None,
    ):
        # Raw cookie string is kept as-is; parsed lazily by _prepare_cookies_dict().
        self._raw_cookies = cookies
        self._proxy: str | None = proxy
        # Session is created lazily by _get_session(), not here.
        self._session: aiohttp.ClientSession | None = None
        # Fall back to a per-class logger when the caller does not inject one.
        self._logger = logger or self._create_logger()
        self._base_url = base_url
|
|
63
|
+
|
|
64
|
+
def _create_logger(self) -> logging.Logger:
|
|
65
|
+
"""Создает логгер на основе имени класса, если не передан внешний."""
|
|
66
|
+
class_name = self.__class__.__name__
|
|
67
|
+
module_name = self.__class__.__module__
|
|
68
|
+
logger_name = f"{module_name}.{class_name}"
|
|
69
|
+
return logging.getLogger(logger_name)
|
|
70
|
+
|
|
71
|
+
    @property
    def logger(self) -> logging.Logger:
        """Return the logger for use by subclasses."""
        return self._logger

    @logger.setter
    def logger(self, logger: logging.Logger) -> None:
        # Allows late injection of an externally configured logger.
        self._logger = logger
|
|
79
|
+
|
|
80
|
+
    async def __aenter__(self: T) -> T:
        """Initialize the session when entering the async context manager."""
        _ = await self._get_session()
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Close the session when leaving the async context manager."""
        await self.close()
|
|
88
|
+
|
|
89
|
+
    async def _get_session(self) -> aiohttp.ClientSession:
        """Return the reusable session, creating it on first use or after close."""
        if self._session is None or self._session.closed:
            timeout = aiohttp.ClientTimeout(total=self.DEFAULT_TIMEOUT)
            # Connection-pool limits: 100 connections total, 30 per host.
            connector = aiohttp.TCPConnector(limit=100, limit_per_host=30)

            self._session = aiohttp.ClientSession(
                timeout=timeout,
                connector=connector,
                cookies=self._prepare_cookies_dict(),
            )
        return self._session
|
|
101
|
+
|
|
102
|
+
async def close(self) -> None:
|
|
103
|
+
"""Закрывает HTTP сессию"""
|
|
104
|
+
if self._session and not self._session.closed:
|
|
105
|
+
await self._session.close()
|
|
106
|
+
self._session = None
|
|
107
|
+
|
|
108
|
+
    def _get_default_headers(self) -> dict[str, str]:
        """Return default headers that mimic a Chrome browser request."""
        return {
            "accept": "*/*",
            "accept-language": "ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7",
            "sec-ch-ua": '"Chromium";v="122"',
            "sec-ch-ua-mobile": "?0",
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            # Referer points at the API's own base URL (same-origin imitation).
            "referer": f"{self._base_url}/",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
        }
|
|
121
|
+
|
|
122
|
+
@property
|
|
123
|
+
def cookies(self) -> str | None:
|
|
124
|
+
"""Возвращает текущие cookies"""
|
|
125
|
+
return self._raw_cookies
|
|
126
|
+
|
|
127
|
+
@cookies.setter
|
|
128
|
+
def cookies(self, cookies: str) -> None:
|
|
129
|
+
"""Устанавливает новые cookies (требует пересоздания сессии)"""
|
|
130
|
+
self._raw_cookies = cookies
|
|
131
|
+
# Сразу обнуляем _session, чтобы следующий _get_session() создал новую сессию
|
|
132
|
+
# с новыми cookies; старую сессию закрываем в фоне (иначе гонка: запрос
|
|
133
|
+
# может получить уже закрываемую сессию → "Connector is closed")
|
|
134
|
+
old_session = self._session
|
|
135
|
+
self._session = None
|
|
136
|
+
if old_session and not old_session.closed:
|
|
137
|
+
|
|
138
|
+
async def _close_old() -> None:
|
|
139
|
+
await old_session.close()
|
|
140
|
+
|
|
141
|
+
asyncio.create_task(_close_old())
|
|
142
|
+
|
|
143
|
+
@property
|
|
144
|
+
def proxy(self) -> str | None:
|
|
145
|
+
"""Возвращает текущий прокси"""
|
|
146
|
+
return self._proxy
|
|
147
|
+
|
|
148
|
+
@proxy.setter
|
|
149
|
+
def proxy(self, proxy: str) -> None:
|
|
150
|
+
"""Устанавливает новый прокси"""
|
|
151
|
+
self._proxy = proxy
|
|
152
|
+
|
|
153
|
+
def _prepare_cookies_dict(self) -> dict[str, str] | None:
|
|
154
|
+
"""Подготавливает словарь cookies из строки для aiohttp"""
|
|
155
|
+
if not self._raw_cookies:
|
|
156
|
+
return None
|
|
157
|
+
|
|
158
|
+
cookies_dict: dict[str, str] = {}
|
|
159
|
+
|
|
160
|
+
# Парсим строку cookies в формате "name=value; name2=value2"
|
|
161
|
+
for cookie_str in self._raw_cookies.split(";"):
|
|
162
|
+
cookie_str = cookie_str.strip()
|
|
163
|
+
if not cookie_str:
|
|
164
|
+
continue
|
|
165
|
+
if "=" in cookie_str:
|
|
166
|
+
name, value = cookie_str.split("=", 1)
|
|
167
|
+
cookies_dict[name.strip()] = value.strip()
|
|
168
|
+
|
|
169
|
+
return cookies_dict if cookies_dict else None
|
|
170
|
+
|
|
171
|
+
def extract_cookies_from_session(self) -> str | None:
|
|
172
|
+
"""
|
|
173
|
+
Извлекает cookies из текущей сессии в строку формата 'name=value; name2=value2'.
|
|
174
|
+
|
|
175
|
+
Returns:
|
|
176
|
+
Строка с кукисами в формате 'name=value; name2=value2' или None
|
|
177
|
+
"""
|
|
178
|
+
if not self._session or self._session.closed:
|
|
179
|
+
return self._raw_cookies
|
|
180
|
+
|
|
181
|
+
cookies_list: list[str] = []
|
|
182
|
+
for cookie in self._session.cookie_jar:
|
|
183
|
+
cookies_list.append(f"{cookie.key}={cookie.value}")
|
|
184
|
+
|
|
185
|
+
if not cookies_list:
|
|
186
|
+
return self._raw_cookies
|
|
187
|
+
|
|
188
|
+
return "; ".join(cookies_list)
|
|
189
|
+
|
|
190
|
+
def _build_url(self, uri: str, params: str | dict[str, Any] | None = None) -> str:
|
|
191
|
+
"""Строит полный URL из базового адреса, URI и параметров"""
|
|
192
|
+
url = f"{self._base_url}/{uri.lstrip('/')}"
|
|
193
|
+
|
|
194
|
+
if params and isinstance(params, dict):
|
|
195
|
+
# Фильтруем None значения и конвертируем все значения в строки для urlencode
|
|
196
|
+
filtered_params: dict[str, str] = {}
|
|
197
|
+
for key, val in params.items():
|
|
198
|
+
if val is not None:
|
|
199
|
+
# Для datetime объектов используем ISO формат с Z и миллисекундами
|
|
200
|
+
if isinstance(val, datetime):
|
|
201
|
+
# Формат: 2026-01-20T00:00:00.000Z
|
|
202
|
+
# Если datetime имеет timezone, конвертируем в UTC, иначе используем как есть
|
|
203
|
+
if val.tzinfo is not None:
|
|
204
|
+
# Конвертируем в UTC
|
|
205
|
+
val_utc = val.astimezone(UTC)
|
|
206
|
+
filtered_params[key] = (
|
|
207
|
+
val_utc.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
|
|
208
|
+
)
|
|
209
|
+
else:
|
|
210
|
+
# Naive datetime - используем как есть и добавляем Z
|
|
211
|
+
filtered_params[key] = (
|
|
212
|
+
val.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
|
|
213
|
+
)
|
|
214
|
+
else:
|
|
215
|
+
filtered_params[key] = str(val)
|
|
216
|
+
if filtered_params:
|
|
217
|
+
# Используем urlencode для правильного кодирования параметров URL
|
|
218
|
+
query_string = urlencode(filtered_params, doseq=False)
|
|
219
|
+
url += "?" + query_string
|
|
220
|
+
elif params and isinstance(params, str):
|
|
221
|
+
url += f"?{params}"
|
|
222
|
+
|
|
223
|
+
return url
|
|
224
|
+
|
|
225
|
+
def _merge_headers(self, custom_headers: dict[str, Any] | None) -> dict[str, str]:
|
|
226
|
+
"""Объединяет заголовки по умолчанию с пользовательскими"""
|
|
227
|
+
headers = self._get_default_headers()
|
|
228
|
+
if custom_headers:
|
|
229
|
+
headers.update(custom_headers)
|
|
230
|
+
return headers
|
|
231
|
+
|
|
232
|
+
def _prepare_payload(
|
|
233
|
+
self, json: dict[str, Any] | None, data: Any | None
|
|
234
|
+
) -> dict[str, Any] | list[Any] | str | None:
|
|
235
|
+
"""Подготавливает payload для логирования"""
|
|
236
|
+
if json is not None:
|
|
237
|
+
return json
|
|
238
|
+
if data is not None:
|
|
239
|
+
if isinstance(data, str):
|
|
240
|
+
try:
|
|
241
|
+
return json_lib.loads(data)
|
|
242
|
+
except (json_lib.JSONDecodeError, TypeError):
|
|
243
|
+
return None
|
|
244
|
+
elif isinstance(data, (dict, list)):
|
|
245
|
+
return data
|
|
246
|
+
return None
|
|
247
|
+
return None
|
|
248
|
+
|
|
249
|
+
def _prepare_response(self, response_text: str) -> dict[str, Any] | list[Any] | str:
|
|
250
|
+
"""Подготавливает response для логирования: пытается распарсить JSON, иначе возвращает текст"""
|
|
251
|
+
if not response_text:
|
|
252
|
+
return response_text
|
|
253
|
+
try:
|
|
254
|
+
return json_lib.loads(response_text)
|
|
255
|
+
except (json_lib.JSONDecodeError, TypeError):
|
|
256
|
+
return response_text
|
|
257
|
+
|
|
258
|
+
def _decode_response_body(
|
|
259
|
+
self, body_bytes: bytes, response: aiohttp.ClientResponse
|
|
260
|
+
) -> str:
|
|
261
|
+
"""
|
|
262
|
+
Декодирует тело ответа в строку с учётом кодировки из заголовков
|
|
263
|
+
и резервных кодировок при ошибке UTF-8 (например, cp1251 для российских API).
|
|
264
|
+
"""
|
|
265
|
+
encodings_to_try: list[str | tuple[str, str]] = []
|
|
266
|
+
if response.charset:
|
|
267
|
+
encodings_to_try.append(response.charset)
|
|
268
|
+
encodings_to_try.append("utf-8")
|
|
269
|
+
encodings_to_try.append("cp1251") # Windows-1251, типично для российских API
|
|
270
|
+
encodings_to_try.append(("iso-8859-1", "replace"))
|
|
271
|
+
for entry in encodings_to_try:
|
|
272
|
+
if isinstance(entry, tuple):
|
|
273
|
+
encoding, errors = entry
|
|
274
|
+
else:
|
|
275
|
+
encoding, errors = entry, "strict"
|
|
276
|
+
try:
|
|
277
|
+
return body_bytes.decode(encoding, errors=errors)
|
|
278
|
+
except (UnicodeDecodeError, LookupError):
|
|
279
|
+
continue
|
|
280
|
+
return body_bytes.decode("iso-8859-1", errors="replace")
|
|
281
|
+
|
|
282
|
+
async def _make_request(
|
|
283
|
+
self,
|
|
284
|
+
method: Literal["GET", "POST", "PUT", "DELETE", "PATCH"],
|
|
285
|
+
uri: str,
|
|
286
|
+
headers: dict[str, Any] | None = None,
|
|
287
|
+
params: str | dict[str, Any] | None = None,
|
|
288
|
+
json: dict[str, Any] | None = None,
|
|
289
|
+
data: Any | None = None,
|
|
290
|
+
retry_on_proxy_error: bool = False,
|
|
291
|
+
max_retries: int = 5,
|
|
292
|
+
) -> str:
|
|
293
|
+
"""
|
|
294
|
+
Базовый метод для выполнения HTTP запросов. Возвращает только строку.
|
|
295
|
+
|
|
296
|
+
Args:
|
|
297
|
+
method: HTTP метод
|
|
298
|
+
uri: URI endpoint (будет добавлен к base_url)
|
|
299
|
+
headers: Дополнительные заголовки
|
|
300
|
+
params: Query параметры
|
|
301
|
+
json: JSON тело запроса (для POST/PUT/PATCH)
|
|
302
|
+
data: Данные тела запроса (альтернатива json)
|
|
303
|
+
retry_on_proxy_error: Повторять ли запрос при ошибках прокси
|
|
304
|
+
max_retries: Максимальное количество попыток при retry_on_proxy_error
|
|
305
|
+
|
|
306
|
+
Returns:
|
|
307
|
+
Текст ответа как строка
|
|
308
|
+
|
|
309
|
+
Raises:
|
|
310
|
+
ClientResponseErrorException: При неуспешном HTTP статусе (>=400)
|
|
311
|
+
ClientTimeoutException: При таймауте соединения
|
|
312
|
+
ClientProxyException: При ошибке прокси
|
|
313
|
+
ClientConnectionException: При ошибке соединения
|
|
314
|
+
ClientDNSException: При ошибке DNS
|
|
315
|
+
"""
|
|
316
|
+
url = self._build_url(uri, params)
|
|
317
|
+
merged_headers = self._merge_headers(headers)
|
|
318
|
+
session = await self._get_session()
|
|
319
|
+
|
|
320
|
+
# Определяем kwargs для запроса
|
|
321
|
+
request_kwargs: dict[str, Any] = {
|
|
322
|
+
"url": url,
|
|
323
|
+
"headers": merged_headers,
|
|
324
|
+
"proxy": self._proxy,
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
if json is not None:
|
|
328
|
+
request_kwargs["json"] = json
|
|
329
|
+
elif data is not None:
|
|
330
|
+
request_kwargs["data"] = data
|
|
331
|
+
|
|
332
|
+
payload = self._prepare_payload(json, data)
|
|
333
|
+
amount_failed = 0
|
|
334
|
+
|
|
335
|
+
while True:
|
|
336
|
+
response_body_text = ""
|
|
337
|
+
start_time = time.perf_counter()
|
|
338
|
+
try:
|
|
339
|
+
async with session.request(method, **request_kwargs) as response:
|
|
340
|
+
body_bytes = await response.read()
|
|
341
|
+
response_body_text = self._decode_response_body(
|
|
342
|
+
body_bytes, response
|
|
343
|
+
)
|
|
344
|
+
end_time = time.perf_counter()
|
|
345
|
+
duration_ms = int((end_time - start_time) * 1000)
|
|
346
|
+
|
|
347
|
+
response_data = self._prepare_response(response_body_text)
|
|
348
|
+
|
|
349
|
+
self._logger.info(
|
|
350
|
+
"HTTP запрос выполнен",
|
|
351
|
+
extra={
|
|
352
|
+
"url": url,
|
|
353
|
+
"method": method,
|
|
354
|
+
"status_code": response.status,
|
|
355
|
+
"payload": payload,
|
|
356
|
+
"response": response_data,
|
|
357
|
+
"duration": duration_ms,
|
|
358
|
+
},
|
|
359
|
+
)
|
|
360
|
+
|
|
361
|
+
if response.status >= 400:
|
|
362
|
+
raise ClientResponseErrorException(
|
|
363
|
+
f"HTTP {response.status} error: {response.reason}",
|
|
364
|
+
url=url,
|
|
365
|
+
status_code=response.status,
|
|
366
|
+
response_body=response_body_text,
|
|
367
|
+
)
|
|
368
|
+
|
|
369
|
+
return response_body_text
|
|
370
|
+
|
|
371
|
+
except (ClientHttpProxyError, ClientProxyConnectionError) as e:
|
|
372
|
+
if not retry_on_proxy_error:
|
|
373
|
+
self._logger.error(
|
|
374
|
+
"Ошибка прокси при HTTP запросе",
|
|
375
|
+
extra={
|
|
376
|
+
"method": method,
|
|
377
|
+
"url": url,
|
|
378
|
+
"proxy": self._proxy,
|
|
379
|
+
"error": str(e),
|
|
380
|
+
},
|
|
381
|
+
)
|
|
382
|
+
raise ClientProxyException(
|
|
383
|
+
f"Proxy error: {str(e)}",
|
|
384
|
+
url=url,
|
|
385
|
+
proxy=self._proxy,
|
|
386
|
+
) from e
|
|
387
|
+
|
|
388
|
+
amount_failed += 1
|
|
389
|
+
if amount_failed >= max_retries:
|
|
390
|
+
self._logger.error(
|
|
391
|
+
"Ошибка прокси после исчерпания попыток",
|
|
392
|
+
extra={
|
|
393
|
+
"method": method,
|
|
394
|
+
"url": url,
|
|
395
|
+
"proxy": self._proxy,
|
|
396
|
+
"retries": amount_failed,
|
|
397
|
+
"error": str(e),
|
|
398
|
+
},
|
|
399
|
+
)
|
|
400
|
+
raise ClientProxyException(
|
|
401
|
+
f"Proxy error after {max_retries} retries: {str(e)}",
|
|
402
|
+
url=url,
|
|
403
|
+
proxy=self._proxy,
|
|
404
|
+
) from e
|
|
405
|
+
|
|
406
|
+
self._logger.warning(
|
|
407
|
+
"Повтор запроса после ошибки прокси",
|
|
408
|
+
extra={
|
|
409
|
+
"method": method,
|
|
410
|
+
"url": url,
|
|
411
|
+
"proxy": self._proxy,
|
|
412
|
+
"retry": amount_failed,
|
|
413
|
+
"max_retries": max_retries,
|
|
414
|
+
},
|
|
415
|
+
)
|
|
416
|
+
continue
|
|
417
|
+
|
|
418
|
+
except (TimeoutError, ServerTimeoutError) as e:
|
|
419
|
+
# self._logger.error(
|
|
420
|
+
# "Таймаут HTTP запроса",
|
|
421
|
+
# extra={
|
|
422
|
+
# "method": method,
|
|
423
|
+
# "url": url,
|
|
424
|
+
# "timeout": self.DEFAULT_TIMEOUT,
|
|
425
|
+
# "error": str(e),
|
|
426
|
+
# },
|
|
427
|
+
# )
|
|
428
|
+
raise ClientTimeoutException(
|
|
429
|
+
f"Request timeout after {self.DEFAULT_TIMEOUT}s: {str(e)}",
|
|
430
|
+
url=url,
|
|
431
|
+
) from e
|
|
432
|
+
|
|
433
|
+
except ClientConnectorDNSError as e:
|
|
434
|
+
self._logger.error(
|
|
435
|
+
"Ошибка DNS при HTTP запросе",
|
|
436
|
+
extra={
|
|
437
|
+
"method": method,
|
|
438
|
+
"url": url,
|
|
439
|
+
"error": str(e),
|
|
440
|
+
},
|
|
441
|
+
)
|
|
442
|
+
raise ClientDNSException(
|
|
443
|
+
f"DNS resolution failed: {str(e)}",
|
|
444
|
+
url=url,
|
|
445
|
+
) from e
|
|
446
|
+
|
|
447
|
+
except ClientConnectionError as e:
|
|
448
|
+
self._logger.error(
|
|
449
|
+
"Ошибка соединения при HTTP запросе",
|
|
450
|
+
extra={
|
|
451
|
+
"method": method,
|
|
452
|
+
"url": url,
|
|
453
|
+
"error": str(e),
|
|
454
|
+
},
|
|
455
|
+
)
|
|
456
|
+
raise ClientConnectionException(
|
|
457
|
+
f"Connection error: {str(e)}",
|
|
458
|
+
url=url,
|
|
459
|
+
) from e
|
|
460
|
+
except ClientResponseErrorException as e:
|
|
461
|
+
raise e
|
|
462
|
+
except Exception as e:
|
|
463
|
+
# Неожиданные ошибки
|
|
464
|
+
self._logger.error(
|
|
465
|
+
"Неожиданная ошибка HTTP запроса",
|
|
466
|
+
extra={
|
|
467
|
+
"method": method,
|
|
468
|
+
"url": url,
|
|
469
|
+
"error_type": type(e).__name__,
|
|
470
|
+
"error": str(e),
|
|
471
|
+
"response": response_body_text or "",
|
|
472
|
+
},
|
|
473
|
+
)
|
|
474
|
+
raise BaseClientException(
|
|
475
|
+
f"Неожиданная HTTP-ошибка: {str(e)}",
|
|
476
|
+
url=url,
|
|
477
|
+
) from e
|
|
478
|
+
|
|
479
|
+
    async def request_text(
        self,
        method: Literal["GET", "POST", "PUT", "DELETE", "PATCH"],
        uri: str,
        headers: dict[str, Any] | None = None,
        params: str | dict[str, Any] | None = None,
        json: dict[str, Any] | None = None,
        data: Any | None = None,
        retry_on_proxy_error: bool = False,
        max_retries: int = 5,
    ) -> str:
        """
        Executes an HTTP request and returns the response as a string.

        Used to fetch HTML, XML or other text formats.

        Args:
            method: HTTP method
            uri: Endpoint URI (appended to base_url)
            headers: Extra headers
            params: Query parameters
            json: JSON request body (for POST/PUT/PATCH)
            data: Raw request body (alternative to json)
            retry_on_proxy_error: Whether to retry the request on proxy errors
            max_retries: Maximum attempts when retry_on_proxy_error is set

        Returns:
            Response body as a string

        Raises:
            ClientResponseErrorException: On an unsuccessful HTTP status (>=400)
            ClientTimeoutException: On a connection timeout
            ClientProxyException: On a proxy error
            ClientConnectionException: On a connection error
            ClientDNSException: On a DNS error
        """
        return await self._make_request(
            method=method,
            uri=uri,
            headers=headers,
            params=params,
            json=json,
            data=data,
            retry_on_proxy_error=retry_on_proxy_error,
            max_retries=max_retries,
        )
|
|
525
|
+
|
|
526
|
+
    async def request_json(
        self,
        method: Literal["GET", "POST", "PUT", "DELETE", "PATCH"],
        uri: str,
        headers: dict[str, Any] | None = None,
        params: str | dict[str, Any] | None = None,
        json: dict[str, Any] | None = None,
        data: Any | None = None,
        retry_on_proxy_error: bool = False,
        max_retries: int = 5,
    ) -> dict[str, Any] | list[Any]:
        """
        Executes an HTTP request and returns the response as parsed JSON.

        Args:
            method: HTTP method
            uri: Endpoint URI (appended to base_url)
            headers: Extra headers
            params: Query parameters
            json: JSON request body (for POST/PUT/PATCH)
            data: Raw request body (alternative to json)
            retry_on_proxy_error: Whether to retry the request on proxy errors
            max_retries: Maximum attempts when retry_on_proxy_error is set

        Returns:
            Parsed JSON (dict or list)

        Raises:
            ClientResponseErrorException: On an unsuccessful HTTP status (>=400)
            ClientTimeoutException: On a connection timeout
            ClientProxyException: On a proxy error
            ClientConnectionException: On a connection error
            ClientDNSException: On a DNS error
            json.JSONDecodeError: If the response is not valid JSON
        """
        response_text = await self._make_request(
            method=method,
            uri=uri,
            headers=headers,
            params=params,
            json=json,
            data=data,
            retry_on_proxy_error=retry_on_proxy_error,
            max_retries=max_retries,
        )

        try:
            return json_lib.loads(response_text)
        except json_lib.JSONDecodeError as e:
            # Rebuild the URL only for the error log (not needed on success).
            url = self._build_url(uri, params)
            self._logger.error(
                "Не удалось распарсить ответ как JSON",
                extra={
                    "method": method,
                    "url": url,
                    "response": response_text,
                    "error": str(e),
                },
            )
            raise
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
from pydantic import BaseModel, ConfigDict
|
|
2
|
+
from pydantic.alias_generators import to_camel
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class BaseRequest(BaseModel):
    # Shared configuration for outgoing request models: snake_case Python
    # fields are accepted and serialized as camelCase via the alias generator.
    # NOTE(review): populate_by_name appears redundant with validate_by_name
    # on recent pydantic versions — presumably kept for backward
    # compatibility; confirm against the pinned pydantic release.
    model_config = ConfigDict(
        alias_generator=to_camel,
        validate_by_name=True,
        validate_by_alias=True,
        serialize_by_alias=True,
        populate_by_name=True,
    )
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from pydantic import BaseModel, ConfigDict
|
|
2
|
+
from pydantic.alias_generators import to_camel
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class BaseResponse(BaseModel):
    # Shared configuration for incoming response models: camelCase payload
    # keys map onto snake_case Python fields via the alias generator.
    # NOTE(review): unlike BaseRequest, populate_by_name is not set here —
    # presumably intentional; confirm the asymmetry is wanted.
    model_config = ConfigDict(
        alias_generator=to_camel,
        validate_by_name=True,
        validate_by_alias=True,
        serialize_by_alias=True,
    )
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"""Исключения для базового HTTP клиента."""
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class BaseClientException(Exception):
|
|
5
|
+
"""Базовое исключение для всех ошибок клиента."""
|
|
6
|
+
|
|
7
|
+
def __init__(
|
|
8
|
+
self,
|
|
9
|
+
message: str,
|
|
10
|
+
url: str | None = None,
|
|
11
|
+
status_code: int | None = None,
|
|
12
|
+
response_body: str | None = None,
|
|
13
|
+
) -> None:
|
|
14
|
+
super().__init__(message)
|
|
15
|
+
self.url = url
|
|
16
|
+
self.status_code = status_code
|
|
17
|
+
self.response_body = response_body
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ClientResponseErrorException(BaseClientException):
|
|
21
|
+
"""Исключение при неуспешном HTTP статусе (>=400)."""
|
|
22
|
+
|
|
23
|
+
def __init__(
|
|
24
|
+
self,
|
|
25
|
+
message: str,
|
|
26
|
+
url: str | None = None,
|
|
27
|
+
status_code: int | None = None,
|
|
28
|
+
response_body: str | None = None,
|
|
29
|
+
) -> None:
|
|
30
|
+
super().__init__(message, url, status_code, response_body)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class ClientTimeoutException(BaseClientException):
|
|
34
|
+
"""Исключение при таймауте соединения."""
|
|
35
|
+
|
|
36
|
+
def __init__(
|
|
37
|
+
self,
|
|
38
|
+
message: str,
|
|
39
|
+
url: str | None = None,
|
|
40
|
+
) -> None:
|
|
41
|
+
super().__init__(message, url)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class ClientProxyException(BaseClientException):
|
|
45
|
+
"""Исключение при ошибке прокси."""
|
|
46
|
+
|
|
47
|
+
def __init__(
|
|
48
|
+
self,
|
|
49
|
+
message: str,
|
|
50
|
+
url: str | None = None,
|
|
51
|
+
proxy: str | None = None,
|
|
52
|
+
) -> None:
|
|
53
|
+
super().__init__(message, url)
|
|
54
|
+
self.proxy = proxy
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class ClientConnectionException(BaseClientException):
|
|
58
|
+
"""Исключение при ошибке соединения (не удалось установить соединение)."""
|
|
59
|
+
|
|
60
|
+
def __init__(
|
|
61
|
+
self,
|
|
62
|
+
message: str,
|
|
63
|
+
url: str | None = None,
|
|
64
|
+
) -> None:
|
|
65
|
+
super().__init__(message, url)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class ClientDNSException(BaseClientException):
|
|
69
|
+
"""Исключение при ошибке DNS (не удалось разрешить доменное имя)."""
|
|
70
|
+
|
|
71
|
+
def __init__(
|
|
72
|
+
self,
|
|
73
|
+
message: str,
|
|
74
|
+
url: str | None = None,
|
|
75
|
+
) -> None:
|
|
76
|
+
super().__init__(message, url)
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"""Domain-level exceptions for error handling in workers."""

# FIX: the package layout is "tp_common/base_client/..." (per the wheel
# RECORD); the original "tp_common.base.base_client..." path does not exist.
from tp_common.base_client.client_exceptions import BaseClientException


class BaseBusinessErrorException(BaseClientException):
    """Base exception for business errors (400, 401, 403, 404, 422)."""


class BaseServerErrorException(BaseClientException):
    """Base exception for server errors (500, 502, 503, 504)."""


class BaseNetworkErrorException(BaseClientException):
    """Base exception for network errors (timeouts, connection, DNS)."""


class BaseProxyErrorException(BaseClientException):
    """Base exception for proxy errors."""


class AuthorizationException(BaseBusinessErrorException):
    """Raised on an authorization failure (401, 403)."""


class ValidationException(BaseBusinessErrorException):
    """Raised on a data validation failure (400, 422)."""


class ResourceNotFoundException(BaseBusinessErrorException):
    """Raised when a resource is missing (404)."""


class ServerException(BaseServerErrorException):
    """Raised on a server error (500, 502, 503, 504)."""


class TooManyRequestsException(BaseProxyErrorException):
    """Raised when the request rate limit is exceeded (429)."""


class ServiceUnavailableException(BaseServerErrorException):
    """Raised when the service is unavailable (502, 503, 504)."""
|
tp_common/logging.py
ADDED
|
@@ -0,0 +1,339 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Централизованный модуль для получения логгеров в проекте.
|
|
3
|
+
|
|
4
|
+
Единая точка входа — класс Logger(env, job=...).get_logger(name).
|
|
5
|
+
Все настройки форматирования и обработки логов находятся здесь.
|
|
6
|
+
Очередь и поток для асинхронного логирования общие для всех экземпляров (синглтоны).
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import logging
|
|
12
|
+
import queue
|
|
13
|
+
import sys
|
|
14
|
+
import threading
|
|
15
|
+
import traceback
|
|
16
|
+
import uuid
|
|
17
|
+
from typing import Any
|
|
18
|
+
|
|
19
|
+
# from logging_loki import LokiHandler
|
|
20
|
+
from pythonjsonlogger import json
|
|
21
|
+
from tp_helper.types.environment_type import EnvironmentType
|
|
22
|
+
|
|
23
|
+
# ============================================================================
|
|
24
|
+
# ЕДИНАЯ КОНФИГУРАЦИЯ ФОРМАТТЕРА
|
|
25
|
+
# ============================================================================
|
|
26
|
+
# Все изменения формата логов должны производиться здесь
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _create_formatter(
    env: EnvironmentType, job: str | int | uuid.UUID | None = None
) -> json.JsonFormatter:
    """
    Create the JSON formatter for log records.

    Single point of log-format configuration for the whole project.

    Args:
        env: Environment (emitted as a static field on every record).
        job: Job identifier (optional static fields).

    Returns:
        Configured JSON formatter.
    """
    static_fields = {"env": env.value}
    if job is not None:
        static_fields["job"] = str(job)

    return json.JsonFormatter(
        # NOTE(review): "{trace_id}" is not a standard LogRecord attribute —
        # presumably injected by TracingLogger or a filter upstream; confirm
        # every record reaching this formatter carries it.
        "{asctime}{levelname}{message}{env}{trace_id}",
        style="{",
        json_ensure_ascii=False,
        rename_fields={
            "asctime": "timestamp",
            "levelname": "level",
            "message": "msg",
        },
        static_fields=static_fields,
    )
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
# ============================================================================
|
|
62
|
+
# ФИЛЬТР ДЛЯ ОБРАБОТКИ ИСКЛЮЧЕНИЙ
|
|
63
|
+
# ============================================================================
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class ExceptionFormatterFilter(logging.Filter):
    """Filter that extracts exception info into structured record fields.

    When a record carries ``exc_info``, the filter adds:
      - ``last_tb_line``: "<file>:<line> — <func> → <source>" for the
        innermost traceback frame;
      - ``error``: the exception class *name* (e.g. "ValueError");
      - ``error_message``: ``str()`` of the exception instance.

    Always returns True — no record is ever dropped.
    """

    def filter(self, record: logging.LogRecord) -> bool:
        """Attach structured exception fields to *record* if exc_info is set."""
        if record.exc_info and record.exc_info[0] is not None:
            e_type, e, exc_traceback = record.exc_info

            if exc_traceback:
                tb = traceback.extract_tb(exc_traceback)
                if tb:
                    last = tb[-1]
                    record.last_tb_line = (
                        f"{last.filename}:{last.lineno} — {last.name} → {last.line}"
                    )

            # Bug fix: the original assigned the exception *class object*,
            # which is not JSON-serializable and breaks the JSON formatter.
            # Use the class name instead.
            record.error = e_type.__name__ if e_type else ""
            record.error_message = str(e) if e else ""

        return True
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# ============================================================================
|
|
89
|
+
# АСИНХРОННАЯ ОБРАБОТКА ЛОГОВ
|
|
90
|
+
# ============================================================================
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class QueueHandler(logging.Handler):
|
|
94
|
+
"""Handler that sends log records to a queue for async processing."""
|
|
95
|
+
|
|
96
|
+
def __init__(self, log_queue: queue.Queue[logging.LogRecord | None]) -> None:
|
|
97
|
+
super().__init__()
|
|
98
|
+
self.queue = log_queue
|
|
99
|
+
|
|
100
|
+
def emit(self, record: logging.LogRecord) -> None:
|
|
101
|
+
"""Put the record into the queue."""
|
|
102
|
+
try:
|
|
103
|
+
self.queue.put_nowait(record)
|
|
104
|
+
except queue.Full:
|
|
105
|
+
self.handleError(record)
|
|
106
|
+
except Exception:
|
|
107
|
+
self.handleError(record)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def _log_listener_thread(
    log_queue: queue.Queue[logging.LogRecord | None],
    env: EnvironmentType,
) -> None:
    """Drain the log queue and write formatted records to stdout.

    Runs as a daemon thread; exits when a ``None`` sentinel is received.

    Args:
        log_queue: Shared queue of records; ``None`` means "shut down".
        env: Environment, embedded as a static field by the formatter.

    NOTE(review): the formatter here is built without a ``job`` value, so a
    ``job`` passed to ``Logger.get_logger`` never reaches queued output —
    confirm whether that is intended.
    """
    formatter = _create_formatter(env)
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(formatter)

    while True:
        try:
            record = log_queue.get()
            if record is None:  # Shutdown sentinel.
                break
            stdout_handler.emit(record)
        except (KeyboardInterrupt, SystemExit):
            break
        except Exception:
            # Fix: the original re-imported `traceback` locally although it is
            # already imported at module level; the redundant import is removed.
            traceback.print_exc(file=sys.stderr)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
# -----------------------------------------------------------------------------
|
|
135
|
+
# Общая очередь и поток (синглтоны)
|
|
136
|
+
# -----------------------------------------------------------------------------
|
|
137
|
+
# Все экземпляры Logger с use_queue=True используют одну очередь и один поток.
|
|
138
|
+
# Это стандартная практика для асинхронного логирования: один listener на всё
|
|
139
|
+
# приложение — меньше памяти и контекстных переключений, единый формат вывода.
|
|
140
|
+
|
|
141
|
+
# Shared queue used by every queue-enabled logger; None until first use.
_log_queue: queue.Queue[logging.LogRecord | None] | None = None
# Single background listener thread draining the shared queue.
_log_thread: threading.Thread | None = None
# Guards lazy one-time creation of the queue and listener thread.
_log_lock = threading.Lock()
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _get_log_queue(env: EnvironmentType) -> queue.Queue[logging.LogRecord | None]:
    """Return the process-wide log queue, creating it and its listener once.

    Double-checked locking: the fast path reads the singleton without the
    lock; initialization happens exactly once under the lock.

    NOTE(review): only the first caller's ``env`` is used; later callers with
    a different environment share the already-started listener.
    """
    global _log_queue, _log_thread

    existing = _log_queue
    if existing is not None:
        return existing

    with _log_lock:
        if _log_queue is None:
            _log_queue = queue.Queue(-1)  # -1 = unbounded
            listener = threading.Thread(
                target=_log_listener_thread,
                args=(_log_queue, env),
                daemon=True,
                name="LogListenerThread",
            )
            _log_thread = listener
            listener.start()

    return _log_queue
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
# ============================================================================
|
|
166
|
+
# ОБЁРТКА ЛОГГЕРА С TRACE_ID
|
|
167
|
+
# ============================================================================
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
class TracingLogger:
|
|
171
|
+
"""
|
|
172
|
+
Обёртка над logging.Logger с методами set_trace_id / clear_trace_id.
|
|
173
|
+
|
|
174
|
+
trace_id хранится в экземпляре: простой вариант, не зависит от contextvars.
|
|
175
|
+
Передавайте этот логгер по цепочке вызовов — везде будет один и тот же trace_id.
|
|
176
|
+
|
|
177
|
+
Ограничение: один экземпляр логгера не должен использоваться из нескольких
|
|
178
|
+
параллельных asyncio-задач с разными trace_id (будут перезаписывать друг друга).
|
|
179
|
+
Для цикла «одна задача за раз» — подходит идеально.
|
|
180
|
+
"""
|
|
181
|
+
|
|
182
|
+
def __init__(self, logger: logging.Logger) -> None:
|
|
183
|
+
self._logger = logger
|
|
184
|
+
self._trace_id: str | uuid.UUID | None = None
|
|
185
|
+
|
|
186
|
+
def set_trace_id(self, value: str | uuid.UUID | None) -> None:
|
|
187
|
+
"""Устанавливает trace_id для этого экземпляра логгера."""
|
|
188
|
+
self._trace_id = value
|
|
189
|
+
|
|
190
|
+
def clear_trace_id(self) -> None:
|
|
191
|
+
"""Сбрасывает trace_id."""
|
|
192
|
+
self._trace_id = None
|
|
193
|
+
|
|
194
|
+
def _merge_extra(self, kwargs: dict) -> dict:
|
|
195
|
+
extra = dict(kwargs.get("extra") or {})
|
|
196
|
+
if self._trace_id is not None:
|
|
197
|
+
extra["trace_id"] = str(self._trace_id)
|
|
198
|
+
return {**kwargs, "extra": extra}
|
|
199
|
+
|
|
200
|
+
def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
201
|
+
self._logger.debug(msg, *args, **self._merge_extra(kwargs))
|
|
202
|
+
|
|
203
|
+
def info(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
204
|
+
self._logger.info(msg, *args, **self._merge_extra(kwargs))
|
|
205
|
+
|
|
206
|
+
def warning(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
207
|
+
self._logger.warning(msg, *args, **self._merge_extra(kwargs))
|
|
208
|
+
|
|
209
|
+
def error(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
210
|
+
self._logger.error(msg, *args, **self._merge_extra(kwargs))
|
|
211
|
+
|
|
212
|
+
def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
213
|
+
self._logger.exception(msg, *args, **self._merge_extra(kwargs))
|
|
214
|
+
|
|
215
|
+
def critical(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
216
|
+
self._logger.critical(msg, *args, **self._merge_extra(kwargs))
|
|
217
|
+
|
|
218
|
+
def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
|
|
219
|
+
self._logger.log(level, msg, *args, **self._merge_extra(kwargs))
|
|
220
|
+
|
|
221
|
+
@property
|
|
222
|
+
def logger(self) -> logging.Logger:
|
|
223
|
+
"""Доступ к исходному logging.Logger при необходимости."""
|
|
224
|
+
return self._logger
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
# ============================================================================
|
|
228
|
+
# КЛАСС LOGGER — ЕДИНАЯ ТОЧКА ВХОДА ДЛЯ ЛОГГЕРОВ
|
|
229
|
+
# ============================================================================
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
class Logger:
    """
    Logger factory bound to a fixed environment (env) with a shared queue.

    Create one instance per application with the desired environment
    (DEV/PROD/...). Obtain loggers via get_logger(name, job=...);
    ``job`` belongs to the individual logger, not to the factory.

    All Logger instances with use_queue=True write into one shared queue and
    one background listener thread (module-level singletons).

    Usage:
        log_factory = Logger(EnvironmentType.DEV)

        logger = log_factory.get_logger("api_service")
        worker_logger = log_factory.get_logger("task_worker", job="task_123")

        # trace_id for end-to-end tracing (e.g. one task in a worker)
        # logger.set_trace_id(uuid.uuid4())
        # logger.debug("Task received", extra=task)  # record carries trace_id

        # Configure uvicorn/fastapi at startup
        Logger.setup_standard_loggers(EnvironmentType.DEV)
    """

    def __init__(self, env: EnvironmentType) -> None:
        """
        Args:
            env: Environment (local/dev/staging/prod), emitted as a log field.
        """
        self._env = env

    def get_logger(
        self,
        name: str | int,
        job: str | uuid.UUID | int | None = None,
        # loki_handler: LokiHandler | None = None,
        use_queue: bool = True,
    ) -> TracingLogger:
        """
        Return a TracingLogger named *name* with an optional *job* field.

        The returned wrapper exposes set_trace_id/clear_trace_id; the trace id
        is stored on the instance, included in every record, and the logger
        can be passed along the call chain.

        NOTE(review): on the queued path (use_queue=True) the listener thread
        builds its formatter without *job*, so *job* only reaches the output
        when use_queue=False — confirm this is intended.

        Args:
            name: Logger name (identification in the logs).
            job: Optional task identifier, added to the static fields.
            use_queue: True — async writes through the shared queue (default).

        Returns:
            TracingLogger with set_trace_id/clear_trace_id.
        """
        log_queue = _get_log_queue(self._env) if use_queue else None

        logger = logging.getLogger(f"app_logger_{name}")
        logger.setLevel(logging.DEBUG)
        logger.propagate = False

        # Bug fix: logging.getLogger caches loggers by name, so calling
        # get_logger twice with the same name used to stack duplicate
        # filters/handlers and emit every record multiple times.
        # Reset before (re)configuring.
        logger.filters.clear()
        logger.handlers.clear()

        logger.addFilter(ExceptionFormatterFilter())

        if use_queue and log_queue:
            logger.addHandler(QueueHandler(log_queue))
        else:
            formatter = _create_formatter(self._env, job)
            stdout_handler = logging.StreamHandler(sys.stdout)
            stdout_handler.setFormatter(formatter)
            logger.addHandler(stdout_handler)

        # if loki_handler:
        #     formatter = _create_formatter(self._env, job)
        #     loki_handler.setFormatter(formatter)
        #     logger.addHandler(loki_handler)

        return TracingLogger(logger)

    @staticmethod
    def setup_standard_loggers(env: EnvironmentType) -> None:
        """
        Configure uvicorn and fastapi loggers to use the unified JSON format.

        Call once during API application startup.
        """
        json_formatter = _create_formatter(env)
        standard_handler = logging.StreamHandler(sys.stdout)
        standard_handler.setFormatter(json_formatter)

        uvicorn_logger = logging.getLogger("uvicorn")
        uvicorn_logger.setLevel(logging.INFO)
        uvicorn_logger.handlers = [standard_handler]
        uvicorn_logger.propagate = False

        # Access logs get no handler of their own: only warnings and above,
        # and they do not propagate to the root logger.
        uvicorn_access_logger = logging.getLogger("uvicorn.access")
        uvicorn_access_logger.setLevel(logging.WARNING)
        uvicorn_access_logger.propagate = False

        fastapi_logger = logging.getLogger("fastapi")
        fastapi_logger.setLevel(logging.INFO)
        fastapi_logger.handlers = [standard_handler]
        fastapi_logger.propagate = False

        logging.getLogger("src.infrastructure.clients").setLevel(logging.INFO)
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: tp-common
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary:
|
|
5
|
+
Author: Developer
|
|
6
|
+
Author-email: front-gold@mail.ru
|
|
7
|
+
Requires-Python: >=3.12
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
11
|
+
Requires-Dist: aiohttp (>=3.13.3,<4.0.0)
|
|
12
|
+
Requires-Dist: pydantic (>=2.12.5,<3.0.0)
|
|
13
|
+
Requires-Dist: python-json-logger (>=4.0.0,<5.0.0)
|
|
14
|
+
Requires-Dist: tp-helper (>=0.4.87,<0.5.0)
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
|
|
17
|
+
# tp-common
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
## Getting started
|
|
22
|
+
|
|
23
|
+
To make it easy for you to get started with GitLab, here's a list of recommended next steps.
|
|
24
|
+
|
|
25
|
+
Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)!
|
|
26
|
+
|
|
27
|
+
## Add your files
|
|
28
|
+
|
|
29
|
+
* [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files
|
|
30
|
+
* [Add files using the command line](https://docs.gitlab.com/topics/git/add_files/#add-files-to-a-git-repository) or push an existing Git repository with the following command:
|
|
31
|
+
|
|
32
|
+
```
|
|
33
|
+
cd existing_repo
|
|
34
|
+
git remote add origin https://gitlab.8525.ru/modules/tp-common.git
|
|
35
|
+
git branch -M main
|
|
36
|
+
git push -uf origin main
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
## Integrate with your tools
|
|
40
|
+
|
|
41
|
+
* [Set up project integrations](https://gitlab.8525.ru/modules/tp-common/-/settings/integrations)
|
|
42
|
+
|
|
43
|
+
## Collaborate with your team
|
|
44
|
+
|
|
45
|
+
* [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/)
|
|
46
|
+
* [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html)
|
|
47
|
+
* [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically)
|
|
48
|
+
* [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
|
|
49
|
+
* [Set auto-merge](https://docs.gitlab.com/user/project/merge_requests/auto_merge/)
|
|
50
|
+
|
|
51
|
+
## Test and Deploy
|
|
52
|
+
|
|
53
|
+
Use the built-in continuous integration in GitLab.
|
|
54
|
+
|
|
55
|
+
* [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/)
|
|
56
|
+
* [Analyze your code for known vulnerabilities with Static Application Security Testing (SAST)](https://docs.gitlab.com/ee/user/application_security/sast/)
|
|
57
|
+
* [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html)
|
|
58
|
+
* [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/)
|
|
59
|
+
* [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html)
|
|
60
|
+
|
|
61
|
+
***
|
|
62
|
+
|
|
63
|
+
# Editing this README
|
|
64
|
+
|
|
65
|
+
When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thanks to [makeareadme.com](https://www.makeareadme.com/) for this template.
|
|
66
|
+
|
|
67
|
+
## Suggestions for a good README
|
|
68
|
+
|
|
69
|
+
Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information.
|
|
70
|
+
|
|
71
|
+
## Name
|
|
72
|
+
Choose a self-explaining name for your project.
|
|
73
|
+
|
|
74
|
+
## Description
|
|
75
|
+
Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors.
|
|
76
|
+
|
|
77
|
+
## Badges
|
|
78
|
+
On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge.
|
|
79
|
+
|
|
80
|
+
## Visuals
|
|
81
|
+
Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method.
|
|
82
|
+
|
|
83
|
+
## Installation
|
|
84
|
+
Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection.
|
|
85
|
+
|
|
86
|
+
## Usage
|
|
87
|
+
Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README.
|
|
88
|
+
|
|
89
|
+
## Support
|
|
90
|
+
Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc.
|
|
91
|
+
|
|
92
|
+
## Roadmap
|
|
93
|
+
If you have ideas for releases in the future, it is a good idea to list them in the README.
|
|
94
|
+
|
|
95
|
+
## Contributing
|
|
96
|
+
State if you are open to contributions and what your requirements are for accepting them.
|
|
97
|
+
|
|
98
|
+
For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self.
|
|
99
|
+
|
|
100
|
+
You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser.
|
|
101
|
+
|
|
102
|
+
## Authors and acknowledgment
|
|
103
|
+
Show your appreciation to those who have contributed to the project.
|
|
104
|
+
|
|
105
|
+
## License
|
|
106
|
+
For open source projects, say how it is licensed.
|
|
107
|
+
|
|
108
|
+
## Project status
|
|
109
|
+
If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers.
|
|
110
|
+
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
tp_common/__init__.py,sha256=2PQG0ZnJt11WcRYF5rBYhYjXq_1oKrjM760LTEBW8sU,1621
|
|
2
|
+
tp_common/base_client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
3
|
+
tp_common/base_client/base_client.py,sha256=WYjuANyoczyoavnxXIQzvd3GIzeczG6w8jRxdsEpOG4,24996
|
|
4
|
+
tp_common/base_client/base_exception.py,sha256=vN7Qq-2QIHPsMNUdQ-2G_WDnF-UEcM9gA0L5LwETPlA,57
|
|
5
|
+
tp_common/base_client/base_request.py,sha256=U_ucIFkxVbRMe2gjx_bXATZU4-OvK7r3u6vk1LVanCM,332
|
|
6
|
+
tp_common/base_client/base_response.py,sha256=Y65bNcNIswRi1x49jhC7nuoxs9wuzdzYvZWZ4eFTy4Q,301
|
|
7
|
+
tp_common/base_client/client_exceptions.py,sha256=pC5KmcDEnP28r8lfMzuQNKsGfZhSoVua2JXmyGwaUxM,2234
|
|
8
|
+
tp_common/base_client/domain_exceptions.py,sha256=JoDklz4Uxvk3BJZDxjWsWc5zjASisypQvtA0wEPngsA,1880
|
|
9
|
+
tp_common/logging.py,sha256=YiH3NJmWKIYKB7Uoe81w-Xlov7Rtz9SF7r9yKTuXnbc,14538
|
|
10
|
+
tp_common-0.0.1.dist-info/METADATA,sha256=N6T0o_UdcHpoURKID2U70c7b5iLV5JI23v9sr8EGceQ,6582
|
|
11
|
+
tp_common-0.0.1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
|
|
12
|
+
tp_common-0.0.1.dist-info/RECORD,,
|