apitally 0.13.0__py3-none-any.whl → 0.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- apitally/client/{asyncio.py → client_asyncio.py} +55 -16
- apitally/client/client_base.py +97 -0
- apitally/client/{threading.py → client_threading.py} +51 -10
- apitally/client/consumers.py +66 -0
- apitally/client/request_logging.py +340 -0
- apitally/client/requests.py +86 -0
- apitally/client/server_errors.py +126 -0
- apitally/client/validation_errors.py +58 -0
- apitally/common.py +10 -1
- apitally/django.py +112 -46
- apitally/django_ninja.py +2 -2
- apitally/django_rest_framework.py +2 -2
- apitally/fastapi.py +2 -2
- apitally/flask.py +100 -26
- apitally/litestar.py +122 -54
- apitally/starlette.py +90 -29
- {apitally-0.13.0.dist-info → apitally-0.14.0.dist-info}/METADATA +1 -2
- apitally-0.14.0.dist-info/RECORD +24 -0
- {apitally-0.13.0.dist-info → apitally-0.14.0.dist-info}/WHEEL +1 -1
- apitally/client/base.py +0 -404
- apitally-0.13.0.dist-info/RECORD +0 -19
- {apitally-0.13.0.dist-info → apitally-0.14.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,340 @@
|
|
1
|
+
import base64
|
2
|
+
import gzip
|
3
|
+
import re
|
4
|
+
import tempfile
|
5
|
+
import threading
|
6
|
+
import time
|
7
|
+
from collections import deque
|
8
|
+
from contextlib import suppress
|
9
|
+
from dataclasses import dataclass, field
|
10
|
+
from functools import lru_cache
|
11
|
+
from io import BufferedReader
|
12
|
+
from pathlib import Path
|
13
|
+
from typing import Any, AsyncIterator, Callable, Dict, List, Mapping, Optional, Tuple, TypedDict
|
14
|
+
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
|
15
|
+
from uuid import uuid4
|
16
|
+
|
17
|
+
from apitally.client.logging import get_logger
|
18
|
+
|
19
|
+
|
20
|
+
logger = get_logger(__name__)

# Size limits for captured bodies and buffered log files.
MAX_BODY_SIZE = 50_000  # 50 KB (uncompressed)
MAX_FILE_SIZE = 1_000_000  # 1 MB (compressed)
MAX_REQUESTS_IN_DEQUE = 100  # Written to file every second, so limits logging to 100 rps
MAX_FILES_IN_DEQUE = 50

# Placeholder values substituted for dropped or masked content.
BODY_TOO_LARGE = b"<body too large>"
BODY_MASKED = b"<masked>"
MASKED = "******"

# Bodies are only logged for these content types (prefix match).
ALLOWED_CONTENT_TYPES = ["application/json", "text/plain"]

# Paths excluded from request logging by default (health/liveness probes).
# Matched case-insensitively as regexes via RequestLogger._match_patterns.
EXCLUDE_PATH_PATTERNS = [
    r"/_?healthz?$",
    r"/_?health[_-]?checks?$",
    r"/_?heart[_-]?beats?$",
    r"/ping$",
    r"/ready$",
    r"/live$",
]
# Query parameter names masked by default (case-insensitive regexes).
MASK_QUERY_PARAM_PATTERNS = [
    r"auth",
    r"api-?key",
    r"secret",
    r"token",
    r"password",
    r"pwd",
]
# Header names masked by default (case-insensitive regexes).
MASK_HEADER_PATTERNS = [
    r"auth",
    r"api-?key",
    r"secret",
    r"token",
    r"cookie",
]
|
53
|
+
|
54
|
+
|
55
|
+
class RequestDict(TypedDict):
    """Captured details of one HTTP request, as consumed by RequestLogger."""

    # Request start time; presumably epoch seconds, supplied by the framework
    # integrations — TODO confirm against callers.
    timestamp: float
    method: str
    # Resolved route pattern if available, otherwise None (the URL path is
    # used as a fallback for exclusion checks in RequestLogger.log_request).
    path: Optional[str]
    url: str
    headers: List[Tuple[str, str]]
    # Request body size in bytes, if known.
    size: Optional[int]
    # Identifier of the API consumer, if one was resolved.
    consumer: Optional[str]
    body: Optional[bytes]
|
64
|
+
|
65
|
+
|
66
|
+
class ResponseDict(TypedDict):
    """Captured details of one HTTP response, as consumed by RequestLogger."""

    status_code: int
    # Time taken to produce the response; integrations appear to supply
    # seconds — confirm against callers.
    response_time: float
    headers: List[Tuple[str, str]]
    # Response body size in bytes, if known.
    size: Optional[int]
    body: Optional[bytes]
|
72
|
+
|
73
|
+
|
74
|
+
@dataclass
class RequestLoggingConfig:
    """
    Configuration for request logging.

    Attributes:
        enabled: Whether request logging is enabled
        log_query_params: Whether to log query parameter values
        log_request_headers: Whether to log request header values
        log_request_body: Whether to log the request body (only if JSON or plain text)
        log_response_headers: Whether to log response header values
        log_response_body: Whether to log the response body (only if JSON or plain text)
        mask_query_params: Query parameter names to mask in logs. Expects regular expressions.
        mask_headers: Header names to mask in logs. Expects regular expressions.
        mask_request_body_callback: Callback to mask the request body. Receives the request dict and returns the masked body as bytes, or None to mask the body entirely.
        mask_response_body_callback: Callback to mask the response body. Receives the request and response dicts and returns the masked body as bytes, or None to mask the body entirely.
        exclude_paths: Paths to exclude from logging. Expects regular expressions.
        exclude_callback: Callback to exclude requests from logging. Should expect two arguments, `request: RequestDict` and `response: ResponseDict`, and return True to exclude the request.
    """

    enabled: bool = False
    log_query_params: bool = True
    log_request_headers: bool = False
    log_request_body: bool = False
    log_response_headers: bool = True
    log_response_body: bool = False
    mask_query_params: List[str] = field(default_factory=list)
    mask_headers: List[str] = field(default_factory=list)
    mask_request_body_callback: Optional[Callable[[RequestDict], Optional[bytes]]] = None
    mask_response_body_callback: Optional[Callable[[RequestDict, ResponseDict], Optional[bytes]]] = None
    exclude_paths: List[str] = field(default_factory=list)
    exclude_callback: Optional[Callable[[RequestDict, ResponseDict], bool]] = None
|
106
|
+
|
107
|
+
|
108
|
+
class TempGzipFile:
|
109
|
+
def __init__(self) -> None:
|
110
|
+
self.uuid = uuid4()
|
111
|
+
self.file = tempfile.NamedTemporaryFile(
|
112
|
+
suffix=".gz",
|
113
|
+
prefix="apitally-",
|
114
|
+
delete=False,
|
115
|
+
)
|
116
|
+
self.gzip_file = gzip.open(self.file, "wb")
|
117
|
+
|
118
|
+
@property
|
119
|
+
def path(self) -> Path:
|
120
|
+
return Path(self.file.name)
|
121
|
+
|
122
|
+
@property
|
123
|
+
def size(self) -> int:
|
124
|
+
return self.file.tell()
|
125
|
+
|
126
|
+
def write_line(self, data: bytes) -> None:
|
127
|
+
self.gzip_file.write(data + b"\n")
|
128
|
+
|
129
|
+
def open_compressed(self) -> BufferedReader:
|
130
|
+
return open(self.path, "rb")
|
131
|
+
|
132
|
+
async def stream_lines_compressed(self) -> AsyncIterator[bytes]:
|
133
|
+
with open(self.path, "rb") as fp:
|
134
|
+
for line in fp:
|
135
|
+
yield line
|
136
|
+
|
137
|
+
def close(self) -> None:
|
138
|
+
self.gzip_file.close()
|
139
|
+
self.file.close()
|
140
|
+
|
141
|
+
def delete(self) -> None:
|
142
|
+
self.close()
|
143
|
+
self.path.unlink(missing_ok=True)
|
144
|
+
|
145
|
+
|
146
|
+
class RequestLogger:
    """Buffers request/response records for upload to Apitally.

    Records accepted by `log_request` are filtered and masked according to the
    `RequestLoggingConfig`, serialized to JSON and held in a bounded in-memory
    deque. `write_to_file` flushes the deque to a gzipped temp file, which
    `rotate_file` moves into `file_deque` for pickup via `get_file`.
    """

    def __init__(self, config: Optional[RequestLoggingConfig]) -> None:
        self.config = config or RequestLoggingConfig()
        # Logging is force-disabled if temp files cannot be created.
        self.enabled = self.config.enabled and _check_writable_fs()
        self.serialize = _get_json_serializer()
        # Bounded deque: oldest entries are dropped if requests outpace the
        # once-per-second flush (caps logging at MAX_REQUESTS_IN_DEQUE rps).
        self.write_deque: deque[bytes] = deque([], MAX_REQUESTS_IN_DEQUE)
        self.file_deque: deque[TempGzipFile] = deque([])
        self.file: Optional[TempGzipFile] = None
        self.lock = threading.Lock()
        # While set (epoch seconds), logging is paused; cleared by maintain().
        self.suspend_until: Optional[float] = None
        # Wrap the pattern-matching helpers in per-instance LRU caches instead
        # of decorating the methods with @lru_cache: a cache on the unbound
        # method keys on `self` and keeps every instance alive for the global
        # cache's lifetime (ruff B019). Wrapping the bound methods here gives
        # identical caching behavior without the leak.
        self._should_exclude_path = lru_cache(maxsize=1000)(self._should_exclude_path)  # type: ignore[method-assign]
        self._should_mask_query_param = lru_cache(maxsize=100)(self._should_mask_query_param)  # type: ignore[method-assign]
        self._should_mask_header = lru_cache(maxsize=100)(self._should_mask_header)  # type: ignore[method-assign]

    @property
    def current_file_size(self) -> int:
        """Compressed size of the file currently being written, or 0."""
        return self.file.size if self.file is not None else 0

    def log_request(self, request: RequestDict, response: ResponseDict) -> None:
        """Apply exclusion and masking rules, then enqueue one record.

        Mutates `request` and `response` in place (headers/bodies/url are
        replaced by their masked forms before serialization).
        """
        if not self.enabled or self.suspend_until is not None:
            return
        parsed_url = urlparse(request["url"])
        if self._should_exclude_path(request["path"] or parsed_url.path) or self._should_exclude(request, response):
            return

        query = self._mask_query_params(parsed_url.query) if self.config.log_query_params else ""
        request["url"] = urlunparse(parsed_url._replace(query=query))
        request["headers"] = self._mask_headers(request["headers"]) if self.config.log_request_headers else []
        response["headers"] = self._mask_headers(response["headers"]) if self.config.log_response_headers else []

        if not self.config.log_request_body or not self._has_supported_content_type(request["headers"]):
            request["body"] = None
        elif (
            self.config.mask_request_body_callback is not None
            and request["body"] is not None
            and request["body"] != BODY_TOO_LARGE
        ):
            try:
                request["body"] = self.config.mask_request_body_callback(request)
            except Exception:  # pragma: no cover
                logger.exception("User-provided mask_request_body_callback function raised an exception")
                request["body"] = None
            if request["body"] is None:
                # Callback returning None means "mask the whole body".
                request["body"] = BODY_MASKED
        if request["body"] is not None and len(request["body"]) > MAX_BODY_SIZE:
            request["body"] = BODY_TOO_LARGE

        if not self.config.log_response_body or not self._has_supported_content_type(response["headers"]):
            response["body"] = None
        elif (
            self.config.mask_response_body_callback is not None
            and response["body"] is not None
            and response["body"] != BODY_TOO_LARGE
        ):
            try:
                response["body"] = self.config.mask_response_body_callback(request, response)
            except Exception:  # pragma: no cover
                logger.exception("User-provided mask_response_body_callback function raised an exception")
                response["body"] = None
            if response["body"] is None:
                response["body"] = BODY_MASKED
        if response["body"] is not None and len(response["body"]) > MAX_BODY_SIZE:
            response["body"] = BODY_TOO_LARGE

        item = {
            "uuid": str(uuid4()),
            "request": _skip_empty_values(request),
            "response": _skip_empty_values(response),
        }
        serialized_item = self.serialize(item)
        self.write_deque.append(serialized_item)

    def write_to_file(self) -> None:
        """Drain the in-memory deque into the current gzip file."""
        if not self.enabled or len(self.write_deque) == 0:
            return
        with self.lock:
            if self.file is None:
                self.file = TempGzipFile()
            while True:
                try:
                    item = self.write_deque.popleft()
                    self.file.write_line(item)
                except IndexError:
                    break

    def get_file(self) -> Optional[TempGzipFile]:
        """Pop the oldest completed file for upload, or None if there is none."""
        try:
            return self.file_deque.popleft()
        except IndexError:
            return None

    def retry_file_later(self, file: TempGzipFile) -> None:
        """Put a file back at the front of the queue after a failed upload."""
        self.file_deque.appendleft(file)

    def rotate_file(self) -> None:
        """Close the current file and queue it for upload."""
        if self.file is not None:
            with self.lock:
                self.file.close()
                self.file_deque.append(self.file)
                self.file = None

    def maintain(self) -> None:
        """Periodic housekeeping: rotate oversized files, cap the queue,
        and lift an expired suspension."""
        if self.current_file_size > MAX_FILE_SIZE:
            self.rotate_file()
        while len(self.file_deque) > MAX_FILES_IN_DEQUE:
            # Drop the oldest files first to bound disk usage.
            file = self.file_deque.popleft()
            file.delete()
        if self.suspend_until is not None and self.suspend_until < time.time():
            self.suspend_until = None

    def clear(self) -> None:
        """Discard all pending records and delete all buffered files."""
        self.write_deque.clear()
        self.rotate_file()
        for file in self.file_deque:
            file.delete()
        self.file_deque.clear()

    def close(self) -> None:
        """Disable logging and discard all buffered data."""
        self.enabled = False
        self.clear()

    def _should_exclude(self, request: RequestDict, response: ResponseDict) -> bool:
        # User-provided exclusion hook; no exclusion if not configured.
        if self.config.exclude_callback is not None:
            return self.config.exclude_callback(request, response)
        return False

    def _should_exclude_path(self, url_path: str) -> bool:
        # Cached per instance via lru_cache wrapping in __init__.
        patterns = self.config.exclude_paths + EXCLUDE_PATH_PATTERNS
        return self._match_patterns(url_path, patterns)

    def _mask_query_params(self, query: str) -> str:
        query_params = parse_qsl(query)
        masked_query_params = [(k, v if not self._should_mask_query_param(k) else MASKED) for k, v in query_params]
        return urlencode(masked_query_params)

    def _mask_headers(self, headers: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
        return [(k, v if not self._should_mask_header(k) else MASKED) for k, v in headers]

    def _should_mask_query_param(self, query_param_name: str) -> bool:
        # Cached per instance via lru_cache wrapping in __init__.
        patterns = self.config.mask_query_params + MASK_QUERY_PARAM_PATTERNS
        return self._match_patterns(query_param_name, patterns)

    def _should_mask_header(self, header_name: str) -> bool:
        # Cached per instance via lru_cache wrapping in __init__.
        patterns = self.config.mask_headers + MASK_HEADER_PATTERNS
        return self._match_patterns(header_name, patterns)

    @staticmethod
    def _match_patterns(value: str, patterns: List[str]) -> bool:
        # Case-insensitive regex search; invalid user patterns are ignored.
        for pattern in patterns:
            with suppress(re.error):
                if re.search(pattern, value, re.I) is not None:
                    return True
        return False

    @staticmethod
    def _has_supported_content_type(headers: List[Tuple[str, str]]) -> bool:
        content_type = next((v for k, v in headers if k.lower() == "content-type"), None)
        return content_type is not None and any(content_type.startswith(t) for t in ALLOWED_CONTENT_TYPES)
|
304
|
+
|
305
|
+
|
306
|
+
def _check_writable_fs() -> bool:
|
307
|
+
try:
|
308
|
+
with tempfile.NamedTemporaryFile():
|
309
|
+
return True
|
310
|
+
except (IOError, OSError): # pragma: no cover
|
311
|
+
logger.error("Unable to create temporary file for request logging")
|
312
|
+
return False
|
313
|
+
|
314
|
+
|
315
|
+
def _get_json_serializer() -> Callable[[Any], bytes]:
|
316
|
+
def default(obj: Any) -> Any:
|
317
|
+
if isinstance(obj, bytes):
|
318
|
+
return base64.b64encode(obj).decode()
|
319
|
+
raise TypeError # pragma: no cover
|
320
|
+
|
321
|
+
try:
|
322
|
+
import orjson # type: ignore
|
323
|
+
|
324
|
+
def orjson_dumps(obj: Any) -> bytes:
|
325
|
+
return orjson.dumps(obj, default=default)
|
326
|
+
|
327
|
+
return orjson_dumps
|
328
|
+
except ImportError:
|
329
|
+
import json
|
330
|
+
|
331
|
+
def json_dumps(obj: Any) -> bytes:
|
332
|
+
return json.dumps(obj, separators=(",", ":"), default=default).encode()
|
333
|
+
|
334
|
+
return json_dumps
|
335
|
+
|
336
|
+
|
337
|
+
def _skip_empty_values(data: Mapping) -> Dict:
|
338
|
+
return {
|
339
|
+
k: v for k, v in data.items() if v is not None and not (isinstance(v, (list, dict, bytes, str)) and len(v) == 0)
|
340
|
+
}
|
@@ -0,0 +1,86 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import contextlib
|
4
|
+
import threading
|
5
|
+
from collections import Counter
|
6
|
+
from dataclasses import dataclass
|
7
|
+
from math import floor
|
8
|
+
from typing import Any, Dict, List, Optional
|
9
|
+
|
10
|
+
|
11
|
+
@dataclass(frozen=True)
class RequestInfo:
    """Immutable key identifying a logical endpoint call for aggregation.

    Frozen so instances are hashable and usable as Counter/dict keys.
    """

    consumer: Optional[str]
    method: str
    path: str
    status_code: int


class RequestCounter:
    """Thread-safe aggregation of request counts, sizes and response times."""

    def __init__(self) -> None:
        self.request_counts: Counter[RequestInfo] = Counter()
        self.request_size_sums: Counter[RequestInfo] = Counter()
        self.response_size_sums: Counter[RequestInfo] = Counter()
        self.response_times: Dict[RequestInfo, Counter[int]] = {}
        self.request_sizes: Dict[RequestInfo, Counter[int]] = {}
        self.response_sizes: Dict[RequestInfo, Counter[int]] = {}
        self._lock = threading.Lock()

    def add_request(
        self,
        consumer: Optional[str],
        method: str,
        path: str,
        status_code: int,
        response_time: float,
        request_size: str | int | None = None,
        response_size: str | int | None = None,
    ) -> None:
        """Record one request. *response_time* is in seconds; sizes in bytes."""
        key = RequestInfo(
            consumer=consumer,
            method=method.upper(),
            path=path,
            status_code=status_code,
        )
        # Histogram bin for the response time: ms, rounded down to nearest 10 ms.
        time_bin_ms = int(floor(response_time / 0.01) * 10)
        with self._lock:
            self.request_counts[key] += 1
            self.response_times.setdefault(key, Counter())[time_bin_ms] += 1
            if request_size is not None:
                # Sizes may arrive as strings (e.g. header values); silently
                # skip values that are not valid integers.
                with contextlib.suppress(ValueError):
                    parsed_size = int(request_size)
                    self.request_size_sums[key] += parsed_size
                    # Histogram bin in KB, rounded down to nearest 1 KB.
                    self.request_sizes.setdefault(key, Counter())[parsed_size // 1000] += 1
            if response_size is not None:
                with contextlib.suppress(ValueError):
                    parsed_size = int(response_size)
                    self.response_size_sums[key] += parsed_size
                    self.response_sizes.setdefault(key, Counter())[parsed_size // 1000] += 1

    def get_and_reset_requests(self) -> List[Dict[str, Any]]:
        """Return the aggregated data as a list of dicts and reset all state."""
        with self._lock:
            data: List[Dict[str, Any]] = [
                {
                    "consumer": key.consumer,
                    "method": key.method,
                    "path": key.path,
                    "status_code": key.status_code,
                    "request_count": count,
                    "request_size_sum": self.request_size_sums.get(key, 0),
                    "response_size_sum": self.response_size_sums.get(key, 0),
                    "response_times": self.response_times.get(key) or Counter(),
                    "request_sizes": self.request_sizes.get(key) or Counter(),
                    "response_sizes": self.response_sizes.get(key) or Counter(),
                }
                for key, count in self.request_counts.items()
            ]
            for container in (
                self.request_counts,
                self.request_size_sums,
                self.response_size_sums,
                self.response_times,
                self.request_sizes,
                self.response_sizes,
            ):
                container.clear()
        return data
|
@@ -0,0 +1,126 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import asyncio
|
4
|
+
import contextlib
|
5
|
+
import sys
|
6
|
+
import threading
|
7
|
+
import traceback
|
8
|
+
from collections import Counter
|
9
|
+
from dataclasses import dataclass
|
10
|
+
from typing import Any, Dict, List, Optional, Set
|
11
|
+
|
12
|
+
|
13
|
+
MAX_EXCEPTION_MSG_LENGTH = 2048
|
14
|
+
MAX_EXCEPTION_TRACEBACK_LENGTH = 65536
|
15
|
+
|
16
|
+
|
17
|
+
@dataclass(frozen=True)
class ServerError:
    """Immutable key identifying one distinct server error for aggregation.

    Frozen so instances are hashable and usable as Counter/dict keys.
    """

    consumer: Optional[str]
    method: str
    path: str
    # Fully qualified exception type, e.g. "builtins.ValueError".
    type: str
    # Exception message, truncated to MAX_EXCEPTION_MSG_LENGTH.
    msg: str
    # Formatted traceback, truncated to MAX_EXCEPTION_TRACEBACK_LENGTH.
    traceback: str
|
25
|
+
|
26
|
+
|
27
|
+
class ServerErrorCounter:
    """Aggregates unhandled server errors per endpoint for periodic reporting."""

    def __init__(self) -> None:
        self.error_counts: Counter[ServerError] = Counter()
        # Maps an aggregated error to the Sentry event ID of one occurrence.
        self.sentry_event_ids: Dict[ServerError, str] = {}
        self._lock = threading.Lock()
        # Strong references to background tasks so they are not garbage
        # collected before completing (see capture_sentry_event_id).
        self._tasks: Set[asyncio.Task] = set()

    def add_server_error(self, consumer: Optional[str], method: str, path: str, exception: BaseException) -> None:
        """Record one occurrence of *exception* for the given endpoint."""
        if not isinstance(exception, BaseException):
            return  # pragma: no cover
        exception_type = type(exception)
        with self._lock:
            server_error = ServerError(
                consumer=consumer,
                method=method.upper(),
                path=path,
                type=f"{exception_type.__module__}.{exception_type.__qualname__}",
                msg=self._get_truncated_exception_msg(exception),
                traceback=self._get_truncated_exception_traceback(exception),
            )
            self.error_counts[server_error] += 1
            self.capture_sentry_event_id(server_error)

    def capture_sentry_event_id(self, server_error: ServerError) -> None:
        """Associate the most recent Sentry event ID with *server_error*.

        No-op if sentry-sdk is missing, older than 2.2.0, or not initialized.
        If the event ID is not yet available, polls for it briefly in a
        background asyncio task (only when an event loop is running).
        """
        try:
            from sentry_sdk.hub import Hub
            from sentry_sdk.scope import Scope
        except ImportError:
            return  # pragma: no cover
        if not hasattr(Scope, "get_isolation_scope") or not hasattr(Scope, "_last_event_id"):
            # sentry-sdk < 2.2.0 is not supported
            return  # pragma: no cover
        if Hub.current.client is None:
            return  # sentry-sdk not initialized

        scope = Scope.get_isolation_scope()
        if event_id := scope._last_event_id:
            self.sentry_event_ids[server_error] = event_id
            return

        async def _wait_for_sentry_event_id(scope: Scope) -> None:
            # Sentry may not have processed the event yet; poll the scope for
            # up to ~100 ms (100 iterations x 1 ms) before giving up.
            i = 0
            while not (event_id := scope._last_event_id) and i < 100:
                i += 1
                await asyncio.sleep(0.001)
            if event_id:
                self.sentry_event_ids[server_error] = event_id

        with contextlib.suppress(RuntimeError):  # ignore no running loop
            loop = asyncio.get_running_loop()
            task = loop.create_task(_wait_for_sentry_event_id(scope))
            self._tasks.add(task)
            task.add_done_callback(self._tasks.discard)

    def get_and_reset_server_errors(self) -> List[Dict[str, Any]]:
        """Return accumulated errors as dicts and reset all state."""
        data: List[Dict[str, Any]] = []
        with self._lock:
            for server_error, count in self.error_counts.items():
                data.append(
                    {
                        "consumer": server_error.consumer,
                        "method": server_error.method,
                        "path": server_error.path,
                        "type": server_error.type,
                        "msg": server_error.msg,
                        "traceback": server_error.traceback,
                        "sentry_event_id": self.sentry_event_ids.get(server_error),
                        "error_count": count,
                    }
                )
            self.error_counts.clear()
            self.sentry_event_ids.clear()
        return data

    @staticmethod
    def _get_truncated_exception_msg(exception: BaseException) -> str:
        # Truncate the message to MAX_EXCEPTION_MSG_LENGTH, marking the cut.
        msg = str(exception).strip()
        if len(msg) <= MAX_EXCEPTION_MSG_LENGTH:
            return msg
        suffix = "... (truncated)"
        cutoff = MAX_EXCEPTION_MSG_LENGTH - len(suffix)
        return msg[:cutoff] + suffix

    @staticmethod
    def _get_truncated_exception_traceback(exception: BaseException) -> str:
        # Keep the end of the traceback (the most relevant frames), dropping
        # the oldest frames if the total exceeds the length limit.
        prefix = "... (truncated) ...\n"
        cutoff = MAX_EXCEPTION_TRACEBACK_LENGTH - len(prefix)
        lines = []
        length = 0
        if sys.version_info >= (3, 10):
            traceback_lines = traceback.format_exception(exception)
        else:
            # Older format_exception signature requires (type, value, tb).
            traceback_lines = traceback.format_exception(type(exception), exception, exception.__traceback__)
        for line in traceback_lines[::-1]:
            if length + len(line) > cutoff:
                lines.append(prefix)
                break
            lines.append(line)
            length += len(line)
        return "".join(lines[::-1]).strip()
|
@@ -0,0 +1,58 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import threading
|
4
|
+
from collections import Counter
|
5
|
+
from dataclasses import dataclass
|
6
|
+
from typing import Any, Dict, List, Optional, Tuple
|
7
|
+
|
8
|
+
|
9
|
+
@dataclass(frozen=True)
class ValidationError:
    """Immutable key identifying one distinct validation error for aggregation.

    Frozen so instances are hashable and usable as Counter keys.
    """

    consumer: Optional[str]
    method: str
    path: str
    # Location of the error within the request, e.g. ("body", "name").
    loc: Tuple[str, ...]
    msg: str
    type: str


class ValidationErrorCounter:
    """Thread-safe aggregation of request validation errors per endpoint."""

    def __init__(self) -> None:
        self.error_counts: Counter[ValidationError] = Counter()
        self._lock = threading.Lock()

    def add_validation_errors(
        self, consumer: Optional[str], method: str, path: str, detail: List[Dict[str, Any]]
    ) -> None:
        """Record validation errors from a detail list of dicts with
        "loc", "msg" and "type" keys."""
        with self._lock:
            for item in detail:
                # Skip malformed entries instead of failing the whole batch.
                try:
                    key = ValidationError(
                        consumer=consumer,
                        method=method.upper(),
                        path=path,
                        loc=tuple(str(part) for part in item["loc"]),
                        msg=item["msg"],
                        type=item["type"],
                    )
                except (KeyError, TypeError):  # pragma: no cover
                    continue
                self.error_counts[key] += 1

    def get_and_reset_validation_errors(self) -> List[Dict[str, Any]]:
        """Return the aggregated errors as dicts and reset the counter."""
        with self._lock:
            data: List[Dict[str, Any]] = [
                {
                    "consumer": err.consumer,
                    "method": err.method,
                    "path": err.path,
                    "loc": err.loc,
                    "msg": err.msg,
                    "type": err.type,
                    "error_count": count,
                }
                for err, count in self.error_counts.items()
            ]
            self.error_counts.clear()
        return data
|
apitally/common.py
CHANGED
@@ -1,6 +1,15 @@
|
|
1
1
|
import sys
|
2
2
|
from importlib.metadata import PackageNotFoundError, version
|
3
|
-
from typing import Dict, Optional
|
3
|
+
from typing import Dict, Optional, Union
|
4
|
+
|
5
|
+
|
6
|
+
def parse_int(x: Union[str, int, None]) -> Optional[int]:
    """Parse *x* as an integer, returning None if it is missing or invalid.

    Intended for values read from external sources (e.g. header values) that
    may be absent or malformed.
    """
    if x is None:
        return None
    try:
        return int(x)
    except (ValueError, TypeError):
        # ValueError: non-numeric string (e.g. "abc" or "3.5").
        # TypeError: unexpected non-str/int input slipping past the annotation.
        return None
|
4
13
|
|
5
14
|
|
6
15
|
def get_versions(*packages, app_version: Optional[str] = None) -> Dict[str, str]:
|