apitally 0.13.0__py3-none-any.whl → 0.14.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,344 @@
1
+ import base64
2
+ import gzip
3
+ import re
4
+ import tempfile
5
+ import threading
6
+ import time
7
+ from collections import deque
8
+ from contextlib import suppress
9
+ from dataclasses import dataclass, field
10
+ from functools import lru_cache
11
+ from io import BufferedReader
12
+ from pathlib import Path
13
+ from typing import Any, AsyncIterator, Callable, Dict, List, Mapping, Optional, Tuple, TypedDict
14
+ from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
15
+ from uuid import uuid4
16
+
17
+ from apitally.client.logging import get_logger
18
+
19
+
20
logger = get_logger(__name__)

# Limits for captured payloads and on-disk buffering
MAX_BODY_SIZE = 50_000  # 50 KB (uncompressed)
MAX_FILE_SIZE = 1_000_000  # 1 MB (compressed)
MAX_REQUESTS_IN_DEQUE = 100  # Written to file every second, so limits logging to 100 rps
MAX_FILES_IN_DEQUE = 50
# Sentinels substituted for bodies that cannot be logged verbatim
BODY_TOO_LARGE = b"<body too large>"
BODY_MASKED = b"<masked>"
MASKED = "******"
# Only bodies with these content-type prefixes are ever logged
ALLOWED_CONTENT_TYPES = ["application/json", "text/plain"]
# Paths excluded from logging by default (health/liveness endpoints); regular expressions
EXCLUDE_PATH_PATTERNS = [
    r"/_?healthz?$",
    r"/_?health[_-]?checks?$",
    r"/_?heart[_-]?beats?$",
    r"/ping$",
    r"/ready$",
    r"/live$",
]
# Query parameter names masked by default; case-insensitive regular expressions
MASK_QUERY_PARAM_PATTERNS = [
    r"auth",
    r"api-?key",
    r"secret",
    r"token",
    r"password",
    r"pwd",
]
# Header names masked by default; case-insensitive regular expressions
MASK_HEADER_PATTERNS = [
    r"auth",
    r"api-?key",
    r"secret",
    r"token",
    r"cookie",
]
53
+
54
+
55
class RequestDict(TypedDict):
    # Snapshot of an HTTP request as passed to RequestLogger.log_request().
    timestamp: float  # request time (presumably Unix epoch seconds — confirm in integrations)
    method: str
    path: Optional[str]  # route path if known; log_request falls back to the parsed URL path
    url: str  # full request URL; its query string may be masked or stripped during logging
    headers: List[Tuple[str, str]]
    size: Optional[int]  # request body size in bytes, if known — confirm in integrations
    consumer: Optional[str]
    body: Optional[bytes]
64
+
65
+
66
class ResponseDict(TypedDict):
    # Snapshot of an HTTP response as passed to RequestLogger.log_request().
    status_code: int
    response_time: float  # presumably in seconds — confirm in integrations
    headers: List[Tuple[str, str]]
    size: Optional[int]  # response body size in bytes, if known
    body: Optional[bytes]
72
+
73
+
74
@dataclass
class RequestLoggingConfig:
    """
    Configuration for request logging.

    Note: All pattern options (mask_query_params, mask_headers, exclude_paths) are
    matched with a case-insensitive regular expression search, and invalid
    patterns are silently ignored (see RequestLogger._match_patterns).

    Attributes:
        enabled: Whether request logging is enabled
        log_query_params: Whether to log query parameter values
        log_request_headers: Whether to log request header values
        log_request_body: Whether to log the request body (only if JSON or plain text)
        log_response_headers: Whether to log response header values
        log_response_body: Whether to log the response body (only if JSON or plain text)
        mask_query_params: Query parameter names to mask in logs. Expects regular expressions.
        mask_headers: Header names to mask in logs. Expects regular expressions.
        mask_request_body_callback: Callback to mask the request body. Expects (method, path, body) and returns the masked body as bytes or None.
        mask_response_body_callback: Callback to mask the response body. Expects (method, path, body) and returns the masked body as bytes or None.
        exclude_paths: Paths to exclude from logging. Expects regular expressions.
        exclude_callback: Callback to exclude requests from logging. Should expect two arguments, `request: RequestDict` and `response: ResponseDict`, and return True to exclude the request.
    """

    enabled: bool = False
    log_query_params: bool = True
    log_request_headers: bool = False
    log_request_body: bool = False
    log_response_headers: bool = True
    log_response_body: bool = False
    mask_query_params: List[str] = field(default_factory=list)
    mask_headers: List[str] = field(default_factory=list)
    mask_request_body_callback: Optional[Callable[[str, str, bytes], Optional[bytes]]] = None
    mask_response_body_callback: Optional[Callable[[str, str, bytes], Optional[bytes]]] = None
    exclude_paths: List[str] = field(default_factory=list)
    exclude_callback: Optional[Callable[[RequestDict, ResponseDict], bool]] = None
106
+
107
+
108
class TempGzipFile:
    """Gzip-compressed temporary file used to buffer serialized log records on disk."""

    def __init__(self) -> None:
        self.uuid = uuid4()  # identifies this file, e.g. across upload retries
        # delete=False so the file survives close() until delete() is called explicitly
        self.file = tempfile.NamedTemporaryFile(
            suffix=".gz",
            prefix="apitally-",
            delete=False,
        )
        self.gzip_file = gzip.open(self.file, "wb")

    @property
    def path(self) -> Path:
        return Path(self.file.name)

    @property
    def size(self) -> int:
        # Write position of the underlying file, i.e. compressed bytes written so far.
        # NOTE(review): may lag slightly behind write_line() calls due to gzip buffering.
        return self.file.tell()

    def write_line(self, data: bytes) -> None:
        """Append one newline-terminated record to the compressed stream."""
        self.gzip_file.write(data + b"\n")

    def open_compressed(self) -> BufferedReader:
        """Open the file for reading its raw (still compressed) bytes."""
        return open(self.path, "rb")

    async def stream_lines_compressed(self) -> AsyncIterator[bytes]:
        """Yield chunks of the raw compressed file, split at newline bytes (for streaming uploads)."""
        with open(self.path, "rb") as fp:
            for line in fp:
                yield line

    def close(self) -> None:
        # Close the gzip layer first so the trailer is flushed to the underlying file
        self.gzip_file.close()
        self.file.close()

    def delete(self) -> None:
        """Close and remove the file from disk (no-op if already removed)."""
        self.close()
        self.path.unlink(missing_ok=True)
144
+
145
+
146
class RequestLogger:
    """Captures request/response pairs according to a RequestLoggingConfig.

    Records are masked/filtered, serialized to JSON, buffered in a bounded
    in-memory deque, periodically drained to a gzip temp file (write_to_file),
    which is rotated and handed out for upload via get_file().
    """

    def __init__(self, config: Optional[RequestLoggingConfig]) -> None:
        self.config = config or RequestLoggingConfig()
        # Disabled entirely if no writable filesystem is available for temp files
        self.enabled = self.config.enabled and _check_writable_fs()
        self.serialize = _get_json_serializer()
        # Bounded: oldest records are dropped once MAX_REQUESTS_IN_DEQUE is exceeded
        self.write_deque: deque[bytes] = deque([], MAX_REQUESTS_IN_DEQUE)
        self.file_deque: deque[TempGzipFile] = deque([])
        self.file: Optional[TempGzipFile] = None
        self.lock = threading.Lock()
        # While set, log_request() is a no-op; cleared by maintain() once the time has passed
        self.suspend_until: Optional[float] = None

    @property
    def current_file_size(self) -> int:
        # Compressed size of the file currently being written; 0 if none is open
        return self.file.size if self.file is not None else 0

    def log_request(self, request: RequestDict, response: ResponseDict) -> None:
        """Apply exclusion and masking rules, then enqueue the serialized record.

        Mutates the given request/response dicts in place (URL, headers, bodies).
        """
        if not self.enabled or self.suspend_until is not None:
            return
        parsed_url = urlparse(request["url"])
        # Prefer the route path if provided, falling back to the raw URL path
        if self._should_exclude_path(request["path"] or parsed_url.path) or self._should_exclude(request, response):
            return

        query = self._mask_query_params(parsed_url.query) if self.config.log_query_params else ""
        request["url"] = urlunparse(parsed_url._replace(query=query))
        request["headers"] = self._mask_headers(request["headers"]) if self.config.log_request_headers else []
        response["headers"] = self._mask_headers(response["headers"]) if self.config.log_response_headers else []

        # NOTE(review): headers are replaced with [] above when header logging is off,
        # so the content-type check below then fails and bodies are never logged in
        # that case — confirm this interaction is intended.
        if not self.config.log_request_body or not self._has_supported_content_type(request["headers"]):
            request["body"] = None
        elif (
            self.config.mask_request_body_callback is not None
            and request["body"] is not None
            and request["body"] != BODY_TOO_LARGE
        ):
            try:
                request["body"] = self.config.mask_request_body_callback(
                    request["method"], request["path"] or parsed_url.path, request["body"]
                )
            except Exception:  # pragma: no cover
                logger.exception("User-provided mask_request_body_callback function raised an exception")
                request["body"] = None
            # Callback returning None means "mask the whole body"
            if request["body"] is None:
                request["body"] = BODY_MASKED
        if request["body"] is not None and len(request["body"]) > MAX_BODY_SIZE:
            request["body"] = BODY_TOO_LARGE

        if not self.config.log_response_body or not self._has_supported_content_type(response["headers"]):
            response["body"] = None
        elif (
            self.config.mask_response_body_callback is not None
            and response["body"] is not None
            and response["body"] != BODY_TOO_LARGE
        ):
            try:
                response["body"] = self.config.mask_response_body_callback(
                    request["method"], request["path"] or parsed_url.path, response["body"]
                )
            except Exception:  # pragma: no cover
                logger.exception("User-provided mask_response_body_callback function raised an exception")
                response["body"] = None
            if response["body"] is None:
                response["body"] = BODY_MASKED
        if response["body"] is not None and len(response["body"]) > MAX_BODY_SIZE:
            response["body"] = BODY_TOO_LARGE

        item = {
            "uuid": str(uuid4()),
            "request": _skip_empty_values(request),
            "response": _skip_empty_values(response),
        }
        serialized_item = self.serialize(item)
        self.write_deque.append(serialized_item)

    def write_to_file(self) -> None:
        """Drain the in-memory deque into the current gzip temp file (creating one if needed)."""
        if not self.enabled or len(self.write_deque) == 0:
            return
        with self.lock:
            if self.file is None:
                self.file = TempGzipFile()
            while True:
                try:
                    item = self.write_deque.popleft()
                    self.file.write_line(item)
                except IndexError:
                    # Deque drained
                    break

    def get_file(self) -> Optional[TempGzipFile]:
        """Pop the oldest completed file, or None if there is none."""
        try:
            return self.file_deque.popleft()
        except IndexError:
            return None

    def retry_file_later(self, file: TempGzipFile) -> None:
        """Put a file back at the front of the queue, e.g. after a failed upload."""
        self.file_deque.appendleft(file)

    def rotate_file(self) -> None:
        """Close the current file and queue it for pickup via get_file()."""
        if self.file is not None:
            with self.lock:
                self.file.close()
                self.file_deque.append(self.file)
                self.file = None

    def maintain(self) -> None:
        """Housekeeping: rotate oversized files, cap the file queue, lift expired suspension."""
        if self.current_file_size > MAX_FILE_SIZE:
            self.rotate_file()
        # Drop the oldest files once the queue grows beyond the limit
        while len(self.file_deque) > MAX_FILES_IN_DEQUE:
            file = self.file_deque.popleft()
            file.delete()
        if self.suspend_until is not None and self.suspend_until < time.time():
            self.suspend_until = None

    def clear(self) -> None:
        """Discard all buffered records and delete all temp files."""
        self.write_deque.clear()
        self.rotate_file()
        for file in self.file_deque:
            file.delete()
        self.file_deque.clear()

    def close(self) -> None:
        """Permanently disable logging and clean up all buffered data."""
        self.enabled = False
        self.clear()

    def _should_exclude(self, request: RequestDict, response: ResponseDict) -> bool:
        # The user-provided callback decides; default is to not exclude
        if self.config.exclude_callback is not None:
            return self.config.exclude_callback(request, response)
        return False

    # NOTE(review): lru_cache on instance methods keys on `self` and keeps the
    # instance alive for the cache's lifetime; acceptable if a single long-lived
    # RequestLogger is used per app — confirm.
    @lru_cache(maxsize=1000)
    def _should_exclude_path(self, url_path: str) -> bool:
        # User-configured patterns plus the built-in health-check exclusions
        patterns = self.config.exclude_paths + EXCLUDE_PATH_PATTERNS
        return self._match_patterns(url_path, patterns)

    def _mask_query_params(self, query: str) -> str:
        """Re-encode the query string with matching parameter values replaced by MASKED."""
        query_params = parse_qsl(query)
        masked_query_params = [(k, v if not self._should_mask_query_param(k) else MASKED) for k, v in query_params]
        return urlencode(masked_query_params)

    def _mask_headers(self, headers: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
        """Return headers with matching header values replaced by MASKED."""
        return [(k, v if not self._should_mask_header(k) else MASKED) for k, v in headers]

    @lru_cache(maxsize=100)
    def _should_mask_query_param(self, query_param_name: str) -> bool:
        patterns = self.config.mask_query_params + MASK_QUERY_PARAM_PATTERNS
        return self._match_patterns(query_param_name, patterns)

    @lru_cache(maxsize=100)
    def _should_mask_header(self, header_name: str) -> bool:
        patterns = self.config.mask_headers + MASK_HEADER_PATTERNS
        return self._match_patterns(header_name, patterns)

    @staticmethod
    def _match_patterns(value: str, patterns: List[str]) -> bool:
        # Case-insensitive substring search; invalid user-supplied regexes are ignored
        for pattern in patterns:
            with suppress(re.error):
                if re.search(pattern, value, re.I) is not None:
                    return True
        return False

    @staticmethod
    def _has_supported_content_type(headers: List[Tuple[str, str]]) -> bool:
        # True only if a Content-Type header exists and starts with an allowed type
        content_type = next((v for k, v in headers if k.lower() == "content-type"), None)
        return content_type is not None and any(content_type.startswith(t) for t in ALLOWED_CONTENT_TYPES)
308
+
309
+
310
+ def _check_writable_fs() -> bool:
311
+ try:
312
+ with tempfile.NamedTemporaryFile():
313
+ return True
314
+ except (IOError, OSError): # pragma: no cover
315
+ logger.error("Unable to create temporary file for request logging")
316
+ return False
317
+
318
+
319
+ def _get_json_serializer() -> Callable[[Any], bytes]:
320
+ def default(obj: Any) -> Any:
321
+ if isinstance(obj, bytes):
322
+ return base64.b64encode(obj).decode()
323
+ raise TypeError # pragma: no cover
324
+
325
+ try:
326
+ import orjson # type: ignore
327
+
328
+ def orjson_dumps(obj: Any) -> bytes:
329
+ return orjson.dumps(obj, default=default)
330
+
331
+ return orjson_dumps
332
+ except ImportError:
333
+ import json
334
+
335
+ def json_dumps(obj: Any) -> bytes:
336
+ return json.dumps(obj, separators=(",", ":"), default=default).encode()
337
+
338
+ return json_dumps
339
+
340
+
341
+ def _skip_empty_values(data: Mapping) -> Dict:
342
+ return {
343
+ k: v for k, v in data.items() if v is not None and not (isinstance(v, (list, dict, bytes, str)) and len(v) == 0)
344
+ }
@@ -0,0 +1,86 @@
1
+ from __future__ import annotations
2
+
3
+ import contextlib
4
+ import threading
5
+ from collections import Counter
6
+ from dataclasses import dataclass
7
+ from math import floor
8
+ from typing import Any, Dict, List, Optional
9
+
10
+
11
@dataclass(frozen=True)
class RequestInfo:
    """Identity of a unique consumer/method/path/status combination (used as a counter key)."""

    consumer: Optional[str]
    method: str
    path: str
    status_code: int


class RequestCounter:
    """Thread-safe aggregation of request metrics, binned for compact reporting."""

    def __init__(self) -> None:
        self.request_counts: Counter[RequestInfo] = Counter()
        self.request_size_sums: Counter[RequestInfo] = Counter()
        self.response_size_sums: Counter[RequestInfo] = Counter()
        self.response_times: Dict[RequestInfo, Counter[int]] = {}
        self.request_sizes: Dict[RequestInfo, Counter[int]] = {}
        self.response_sizes: Dict[RequestInfo, Counter[int]] = {}
        self._lock = threading.Lock()

    def add_request(
        self,
        consumer: Optional[str],
        method: str,
        path: str,
        status_code: int,
        response_time: float,
        request_size: str | int | None = None,
        response_size: str | int | None = None,
    ) -> None:
        """Record one request. Sizes may be strings (e.g. header values); unparsable sizes are ignored."""
        info = RequestInfo(
            consumer=consumer,
            method=method.upper(),
            path=path,
            status_code=status_code,
        )
        # Response time in ms, rounded down to the nearest 10 ms
        time_bin = int(floor(response_time / 0.01) * 10)
        with self._lock:
            self.request_counts[info] += 1
            self.response_times.setdefault(info, Counter())[time_bin] += 1
            if request_size is not None:
                with contextlib.suppress(ValueError):
                    size = int(request_size)
                    self.request_size_sums[info] += size
                    # Size bin in KB, rounded down to the nearest 1 KB
                    self.request_sizes.setdefault(info, Counter())[size // 1000] += 1
            if response_size is not None:
                with contextlib.suppress(ValueError):
                    size = int(response_size)
                    self.response_size_sums[info] += size
                    self.response_sizes.setdefault(info, Counter())[size // 1000] += 1

    def get_and_reset_requests(self) -> List[Dict[str, Any]]:
        """Return aggregated metrics per endpoint and reset all counters."""
        with self._lock:
            result = [
                {
                    "consumer": info.consumer,
                    "method": info.method,
                    "path": info.path,
                    "status_code": info.status_code,
                    "request_count": count,
                    "request_size_sum": self.request_size_sums.get(info, 0),
                    "response_size_sum": self.response_size_sums.get(info, 0),
                    "response_times": self.response_times.get(info) or Counter(),
                    "request_sizes": self.request_sizes.get(info) or Counter(),
                    "response_sizes": self.response_sizes.get(info) or Counter(),
                }
                for info, count in self.request_counts.items()
            ]
            self.request_counts.clear()
            self.request_size_sums.clear()
            self.response_size_sums.clear()
            self.response_times.clear()
            self.request_sizes.clear()
            self.response_sizes.clear()
        return result
@@ -0,0 +1,126 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import contextlib
5
+ import sys
6
+ import threading
7
+ import traceback
8
+ from collections import Counter
9
+ from dataclasses import dataclass
10
+ from typing import Any, Dict, List, Optional, Set
11
+
12
+
13
# Upper bounds for captured exception details, to keep reported payloads small
MAX_EXCEPTION_MSG_LENGTH = 2048
MAX_EXCEPTION_TRACEBACK_LENGTH = 65536
15
+
16
+
17
@dataclass(frozen=True)
class ServerError:
    # Identity of a unique server error (used as a counter key);
    # msg and traceback are pre-truncated by ServerErrorCounter.
    consumer: Optional[str]
    method: str
    path: str
    type: str  # fully qualified exception class name, e.g. "builtins.ValueError"
    msg: str
    traceback: str
25
+
26
+
27
class ServerErrorCounter:
    """Thread-safe aggregation of server errors, with optional Sentry event ID capture."""

    def __init__(self) -> None:
        self.error_counts: Counter[ServerError] = Counter()
        self.sentry_event_ids: Dict[ServerError, str] = {}
        self._lock = threading.Lock()
        # Keep strong references to pending tasks so they aren't garbage collected early
        self._tasks: Set[asyncio.Task] = set()

    def add_server_error(self, consumer: Optional[str], method: str, path: str, exception: BaseException) -> None:
        """Count an exception for the given endpoint, truncating its message and traceback."""
        if not isinstance(exception, BaseException):
            return  # pragma: no cover
        exception_type = type(exception)
        with self._lock:
            server_error = ServerError(
                consumer=consumer,
                method=method.upper(),
                path=path,
                type=f"{exception_type.__module__}.{exception_type.__qualname__}",
                msg=self._get_truncated_exception_msg(exception),
                traceback=self._get_truncated_exception_traceback(exception),
            )
            self.error_counts[server_error] += 1
            self.capture_sentry_event_id(server_error)

    def capture_sentry_event_id(self, server_error: ServerError) -> None:
        """Associate the most recent Sentry event ID with the error, if sentry-sdk is in use.

        If no event ID is available yet, polls for it briefly in a background task
        on the running event loop (if any).
        """
        try:
            from sentry_sdk.hub import Hub
            from sentry_sdk.scope import Scope
        except ImportError:
            return  # pragma: no cover
        if not hasattr(Scope, "get_isolation_scope") or not hasattr(Scope, "_last_event_id"):
            # sentry-sdk < 2.2.0 is not supported
            return  # pragma: no cover
        if Hub.current.client is None:
            return  # sentry-sdk not initialized

        scope = Scope.get_isolation_scope()
        if event_id := scope._last_event_id:
            self.sentry_event_ids[server_error] = event_id
            return

        async def _wait_for_sentry_event_id(scope: Scope) -> None:
            # Poll for up to ~100 ms (100 x 1 ms) for the event ID to appear
            i = 0
            while not (event_id := scope._last_event_id) and i < 100:
                i += 1
                await asyncio.sleep(0.001)
            if event_id:
                self.sentry_event_ids[server_error] = event_id

        with contextlib.suppress(RuntimeError):  # ignore no running loop
            loop = asyncio.get_running_loop()
            task = loop.create_task(_wait_for_sentry_event_id(scope))
            self._tasks.add(task)
            task.add_done_callback(self._tasks.discard)

    def get_and_reset_server_errors(self) -> List[Dict[str, Any]]:
        """Return aggregated error data and reset all counters."""
        data: List[Dict[str, Any]] = []
        with self._lock:
            for server_error, count in self.error_counts.items():
                data.append(
                    {
                        "consumer": server_error.consumer,
                        "method": server_error.method,
                        "path": server_error.path,
                        "type": server_error.type,
                        "msg": server_error.msg,
                        "traceback": server_error.traceback,
                        "sentry_event_id": self.sentry_event_ids.get(server_error),
                        "error_count": count,
                    }
                )
            self.error_counts.clear()
            self.sentry_event_ids.clear()
        return data

    @staticmethod
    def _get_truncated_exception_msg(exception: BaseException) -> str:
        # Truncate the end of overly long messages, marking the cut
        msg = str(exception).strip()
        if len(msg) <= MAX_EXCEPTION_MSG_LENGTH:
            return msg
        suffix = "... (truncated)"
        cutoff = MAX_EXCEPTION_MSG_LENGTH - len(suffix)
        return msg[:cutoff] + suffix

    @staticmethod
    def _get_truncated_exception_traceback(exception: BaseException) -> str:
        # Keep the end of the traceback (innermost frames), truncating from the start
        prefix = "... (truncated) ...\n"
        cutoff = MAX_EXCEPTION_TRACEBACK_LENGTH - len(prefix)
        lines = []
        length = 0
        if sys.version_info >= (3, 10):
            # The single-argument form of format_exception requires Python 3.10+
            traceback_lines = traceback.format_exception(exception)
        else:
            traceback_lines = traceback.format_exception(type(exception), exception, exception.__traceback__)
        for line in traceback_lines[::-1]:
            if length + len(line) > cutoff:
                lines.append(prefix)
                break
            lines.append(line)
            length += len(line)
        return "".join(lines[::-1]).strip()
@@ -0,0 +1,58 @@
1
+ from __future__ import annotations
2
+
3
+ import threading
4
+ from collections import Counter
5
+ from dataclasses import dataclass
6
+ from typing import Any, Dict, List, Optional, Tuple
7
+
8
+
9
@dataclass(frozen=True)
class ValidationError:
    """Identity of a unique validation error (used as a counter key)."""

    consumer: Optional[str]
    method: str
    path: str
    loc: Tuple[str, ...]
    msg: str
    type: str


class ValidationErrorCounter:
    """Thread-safe aggregation of request validation errors per endpoint."""

    def __init__(self) -> None:
        self.error_counts: Counter[ValidationError] = Counter()
        self._lock = threading.Lock()

    def add_validation_errors(
        self, consumer: Optional[str], method: str, path: str, detail: List[Dict[str, Any]]
    ) -> None:
        """Count each item in *detail* (dicts with "loc", "msg" and "type" keys); malformed items are skipped."""
        with self._lock:
            for item in detail:
                try:
                    error = ValidationError(
                        consumer=consumer,
                        method=method.upper(),
                        path=path,
                        loc=tuple(str(part) for part in item["loc"]),
                        msg=item["msg"],
                        type=item["type"],
                    )
                except (KeyError, TypeError):  # pragma: no cover
                    continue
                self.error_counts[error] += 1

    def get_and_reset_validation_errors(self) -> List[Dict[str, Any]]:
        """Return aggregated validation error data and reset the counter."""
        with self._lock:
            data = [
                {
                    "consumer": err.consumer,
                    "method": err.method,
                    "path": err.path,
                    "loc": err.loc,
                    "msg": err.msg,
                    "type": err.type,
                    "error_count": count,
                }
                for err, count in self.error_counts.items()
            ]
            self.error_counts.clear()
        return data
apitally/common.py CHANGED
@@ -1,6 +1,15 @@
1
1
  import sys
2
2
  from importlib.metadata import PackageNotFoundError, version
3
- from typing import Dict, Optional
3
+ from typing import Dict, Optional, Union
4
+
5
+
6
def parse_int(x: Union[str, int, None]) -> Optional[int]:
    """Best-effort conversion to int; returns None for None or unparsable values."""
    if x is None:
        return None
    try:
        result = int(x)
    except ValueError:
        return None
    return result
4
13
 
5
14
 
6
15
  def get_versions(*packages, app_version: Optional[str] = None) -> Dict[str, str]: