timetracer 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- timetracer/__init__.py +29 -0
- timetracer/cassette/__init__.py +6 -0
- timetracer/cassette/io.py +421 -0
- timetracer/cassette/naming.py +69 -0
- timetracer/catalog/__init__.py +288 -0
- timetracer/cli/__init__.py +5 -0
- timetracer/cli/commands/__init__.py +1 -0
- timetracer/cli/main.py +692 -0
- timetracer/config.py +297 -0
- timetracer/constants.py +129 -0
- timetracer/context.py +93 -0
- timetracer/dashboard/__init__.py +14 -0
- timetracer/dashboard/generator.py +229 -0
- timetracer/dashboard/server.py +244 -0
- timetracer/dashboard/template.py +874 -0
- timetracer/diff/__init__.py +6 -0
- timetracer/diff/engine.py +311 -0
- timetracer/diff/report.py +113 -0
- timetracer/exceptions.py +113 -0
- timetracer/integrations/__init__.py +27 -0
- timetracer/integrations/fastapi.py +537 -0
- timetracer/integrations/flask.py +507 -0
- timetracer/plugins/__init__.py +42 -0
- timetracer/plugins/base.py +73 -0
- timetracer/plugins/httpx_plugin.py +413 -0
- timetracer/plugins/redis_plugin.py +297 -0
- timetracer/plugins/requests_plugin.py +333 -0
- timetracer/plugins/sqlalchemy_plugin.py +280 -0
- timetracer/policies/__init__.py +16 -0
- timetracer/policies/capture.py +64 -0
- timetracer/policies/redaction.py +165 -0
- timetracer/replay/__init__.py +6 -0
- timetracer/replay/engine.py +75 -0
- timetracer/replay/errors.py +9 -0
- timetracer/replay/matching.py +83 -0
- timetracer/session.py +390 -0
- timetracer/storage/__init__.py +18 -0
- timetracer/storage/s3.py +364 -0
- timetracer/timeline/__init__.py +6 -0
- timetracer/timeline/generator.py +150 -0
- timetracer/timeline/template.py +370 -0
- timetracer/types.py +197 -0
- timetracer/utils/__init__.py +6 -0
- timetracer/utils/hashing.py +68 -0
- timetracer/utils/time.py +106 -0
- timetracer-1.1.0.dist-info/METADATA +286 -0
- timetracer-1.1.0.dist-info/RECORD +51 -0
- timetracer-1.1.0.dist-info/WHEEL +5 -0
- timetracer-1.1.0.dist-info/entry_points.txt +2 -0
- timetracer-1.1.0.dist-info/licenses/LICENSE +21 -0
- timetracer-1.1.0.dist-info/top_level.txt +1 -0
timetracer/plugins/requests_plugin.py
@@ -0,0 +1,333 @@
"""
requests plugin for Timetracer.

Captures and replays requests library HTTP calls.
This provides compatibility for codebases using requests instead of httpx.
"""

from __future__ import annotations

import time
from typing import TYPE_CHECKING, Any, Callable
from urllib.parse import parse_qs, urlparse

from timetracer.constants import EventType
from timetracer.context import get_current_session, has_active_session
from timetracer.policies import redact_headers_allowlist
from timetracer.types import (
    BodySnapshot,
    DependencyEvent,
    EventResult,
    EventSignature,
)
from timetracer.utils.hashing import hash_body

if TYPE_CHECKING:
    import requests

# Store original method for restoration
_original_request: Callable | None = None
_enabled = False


def enable_requests() -> None:
    """
    Enable requests interception for recording and replay.

    This patches requests.Session.request.
    Call disable_requests() to restore original behavior.
    """
    global _original_request, _enabled

    if _enabled:
        return

    try:
        import requests
    except ImportError:
        raise ImportError(
            "requests is required for the requests plugin. "
            "Install it with: pip install requests"
        )

    # Patch Session.request (all requests go through this)
    _original_request = requests.Session.request
    requests.Session.request = _patched_request

    _enabled = True


def disable_requests() -> None:
    """
    Disable requests interception and restore original behavior.
    """
    global _original_request, _enabled

    if not _enabled:
        return

    try:
        import requests
    except ImportError:
        return

    # Restore original
    if _original_request is not None:
        requests.Session.request = _original_request

    _original_request = None
    _enabled = False


def _patched_request(
    self: "requests.Session",
    method: str,
    url: str,
    **kwargs: Any,
) -> "requests.Response":
    """Patched request method."""
    # If no session, call original
    if not has_active_session():
        return _original_request(self, method, url, **kwargs)  # type: ignore

    session = get_current_session()

    # Handle based on session mode
    if session.is_recording:
        return _record_request(self, method, url, kwargs)
    elif session.is_replaying:
        # Check for hybrid replay - if plugin should stay live, make real call
        from timetracer.session import ReplaySession
        if isinstance(session, ReplaySession) and not session.should_mock_plugin("http"):
            return _original_request(self, method, url, **kwargs)  # type: ignore
        return _replay_request(method, url, kwargs)
    else:
        return _original_request(self, method, url, **kwargs)  # type: ignore


def _record_request(
    client: "requests.Session",
    method: str,
    url: str,
    kwargs: dict[str, Any],
) -> "requests.Response":
    """Record a requests call."""
    from timetracer.session import TraceSession

    session = get_current_session()
    if not isinstance(session, TraceSession):
        return _original_request(client, method, url, **kwargs)  # type: ignore

    start_offset = session.elapsed_ms
    start_time = time.perf_counter()

    # Make the actual request
    error_info = None
    response = None
    try:
        response = _original_request(client, method, url, **kwargs)  # type: ignore
    except Exception as e:
        error_info = (type(e).__name__, str(e))
        raise
    finally:
        duration_ms = (time.perf_counter() - start_time) * 1000

        # Build event
        event = _build_event(
            method=method,
            url=url,
            kwargs=kwargs,
            response=response,
            start_offset_ms=start_offset,
            duration_ms=duration_ms,
            error_info=error_info,
        )

        session.add_event(event)

    return response


def _replay_request(
    method: str,
    url: str,
    kwargs: dict[str, Any],
) -> "requests.Response":
    """Replay a requests call from cassette."""
    from timetracer.session import ReplaySession

    session = get_current_session()
    if not isinstance(session, ReplaySession):
        raise RuntimeError("Expected ReplaySession for replay")

    # Build signature for matching
    actual_signature = _make_signature_dict(method, url, kwargs)

    # Get expected event
    event = session.get_next_event(EventType.HTTP_CLIENT, actual_signature)

    # Build synthetic response
    return _build_synthetic_response(event)


def _build_event(
    method: str,
    url: str,
    kwargs: dict[str, Any],
    response: "requests.Response | None",
    start_offset_ms: float,
    duration_ms: float,
    error_info: tuple[str, str] | None,
) -> DependencyEvent:
    """Build a DependencyEvent from requests call."""
    # Build signature
    signature = _make_signature(method, url, kwargs)

    # Build result
    result = _make_result(response, error_info)

    return DependencyEvent(
        eid=0,  # Will be set by session
        event_type=EventType.HTTP_CLIENT,
        start_offset_ms=start_offset_ms,
        duration_ms=duration_ms,
        signature=signature,
        result=result,
    )


def _make_signature(method: str, url: str, kwargs: dict[str, Any]) -> EventSignature:
    """Create EventSignature from requests call."""
    # Normalize URL
    parsed = urlparse(url)
    normalized_url = f"{parsed.scheme}://{parsed.netloc}{parsed.path}"

    # Parse query params (from URL or kwargs)
    query = dict(parse_qs(parsed.query))
    if "params" in kwargs and kwargs["params"]:
        params = kwargs["params"]
        if isinstance(params, dict):
            query.update({k: [v] if not isinstance(v, list) else v for k, v in params.items()})
    # Flatten single-value lists
    query = {k: v[0] if len(v) == 1 else v for k, v in query.items()}

    # Hash request body
    body_hash = None
    if "data" in kwargs and kwargs["data"]:
        body_hash = hash_body(kwargs["data"])
    elif "json" in kwargs and kwargs["json"]:
        body_hash = hash_body(kwargs["json"])

    # Hash relevant headers
    headers_hash = None
    if "headers" in kwargs and kwargs["headers"]:
        allowed_headers = redact_headers_allowlist(kwargs["headers"])
        if allowed_headers:
            headers_hash = hash_body(str(sorted(allowed_headers.items())))

    return EventSignature(
        lib="requests",
        method=method.upper(),
        url=normalized_url,
        query=query,
        headers_hash=headers_hash,
        body_hash=body_hash,
    )


def _make_signature_dict(method: str, url: str, kwargs: dict[str, Any]) -> dict[str, Any]:
    """Create signature dict for matching."""
    sig = _make_signature(method, url, kwargs)
    return {
        "lib": sig.lib,
        "method": sig.method,
        "url": sig.url,
        "query": sig.query,
        "body_hash": sig.body_hash,
    }


def _make_result(
    response: "requests.Response | None",
    error_info: tuple[str, str] | None,
) -> EventResult:
    """Create EventResult from requests response."""
    if error_info:
        return EventResult(
            error_type=error_info[0],
            error=error_info[1],
        )

    if response is None:
        return EventResult()

    # Capture response headers (allow-list)
    headers = dict(response.headers)
    filtered_headers = redact_headers_allowlist(headers)

    # Capture response body
    body_snapshot = None
    try:
        content = response.content
        if content:
            body_snapshot = BodySnapshot(
                captured=True,
                encoding="bytes",
                data=None,
                hash=hash_body(content),
                size_bytes=len(content),
            )

            # Try to parse as JSON for storage
            try:
                import json
                data = json.loads(content.decode("utf-8"))
                body_snapshot.encoding = "json"
                body_snapshot.data = data
            except (json.JSONDecodeError, UnicodeDecodeError):
                pass
    except Exception:
        pass

    return EventResult(
        status=response.status_code,
        headers=filtered_headers,
        body=body_snapshot,
    )


def _build_synthetic_response(event: DependencyEvent) -> "requests.Response":
    """Build a synthetic requests.Response from recorded event."""
    import requests
    from requests.structures import CaseInsensitiveDict

    result = event.result

    # Handle error case
    if result.error:
        raise requests.RequestException(f"Recorded error: {result.error}")

    # Build response content
    content = b""
    if result.body and result.body.captured and result.body.data is not None:
        import json
        if result.body.encoding == "json":
            content = json.dumps(result.body.data).encode("utf-8")
        elif isinstance(result.body.data, str):
            content = result.body.data.encode("utf-8")
        elif isinstance(result.body.data, bytes):
            content = result.body.data

    # Build headers
    headers = CaseInsensitiveDict(result.headers or {})
    if "content-length" not in headers:
        headers["content-length"] = str(len(content))

    # Create response object
    response = requests.Response()
    response.status_code = result.status or 200
    response.headers = headers
    response._content = content
    response.encoding = "utf-8"

    return response
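For context, a minimal usage sketch of the plugin above. enable_requests() and disable_requests() are defined in this file; how a recording session is opened belongs to timetracer's session API and is not shown in this hunk, so no session setup is included here and the URL is illustrative only.

import requests
from timetracer.plugins.requests_plugin import enable_requests, disable_requests

enable_requests()  # patches requests.Session.request
try:
    # With a recording session active on the current context, this call is
    # captured as an HTTP_CLIENT DependencyEvent; with no active session the
    # original Session.request is used unchanged.
    requests.get("https://example.com/api/items", params={"page": 1})
finally:
    disable_requests()  # restores the original Session.request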
timetracer/plugins/sqlalchemy_plugin.py
@@ -0,0 +1,280 @@
"""
SQLAlchemy plugin for Timetracer.

Captures and replays SQLAlchemy database queries.
This enables recording database interactions for deterministic replay.
"""

from __future__ import annotations

import threading
import time
from typing import TYPE_CHECKING, Any

from timetracer.constants import EventType
from timetracer.context import get_current_session, has_active_session
from timetracer.types import (
    DependencyEvent,
    EventResult,
    EventSignature,
)
from timetracer.utils.hashing import hash_body

if TYPE_CHECKING:
    from sqlalchemy.engine import Connection, Engine

# Store original state for restoration
_enabled = False
_listeners_attached = False


def enable_sqlalchemy(engine: "Engine | None" = None) -> None:
    """
    Enable SQLAlchemy interception for recording and replay.

    This attaches event listeners to capture query execution.
    Call disable_sqlalchemy() to restore original behavior.

    Args:
        engine: Optional specific engine. If None, uses global event.
    """
    global _enabled, _listeners_attached

    if _enabled:
        return

    try:
        from sqlalchemy import event as sa_event
        from sqlalchemy.engine import Engine
    except ImportError:
        raise ImportError(
            "sqlalchemy is required for the sqlalchemy plugin. "
            "Install it with: pip install sqlalchemy"
        )

    # Attach listeners
    if engine is not None:
        sa_event.listen(engine, "before_cursor_execute", _before_cursor_execute)
        sa_event.listen(engine, "after_cursor_execute", _after_cursor_execute)
    else:
        # Global listener for all engines
        sa_event.listen(Engine, "before_cursor_execute", _before_cursor_execute)
        sa_event.listen(Engine, "after_cursor_execute", _after_cursor_execute)

    _listeners_attached = True
    _enabled = True


def disable_sqlalchemy(engine: "Engine | None" = None) -> None:
    """
    Disable SQLAlchemy interception and restore original behavior.
    """
    global _enabled, _listeners_attached

    if not _enabled:
        return

    try:
        from sqlalchemy import event as sa_event
        from sqlalchemy.engine import Engine
    except ImportError:
        return

    # Remove listeners
    if engine is not None:
        sa_event.remove(engine, "before_cursor_execute", _before_cursor_execute)
        sa_event.remove(engine, "after_cursor_execute", _after_cursor_execute)
    else:
        sa_event.remove(Engine, "before_cursor_execute", _before_cursor_execute)
        sa_event.remove(Engine, "after_cursor_execute", _after_cursor_execute)

    _listeners_attached = False
    _enabled = False


# Thread-local storage for timing
_query_timing = threading.local()


def _before_cursor_execute(
    conn: "Connection",
    cursor: Any,
    statement: str,
    parameters: Any,
    context: Any,
    executemany: bool,
) -> None:
    """Event listener called before query execution."""
    if not has_active_session():
        return

    session = get_current_session()

    # Store timing info
    _query_timing.start_time = time.perf_counter()
    _query_timing.start_offset = session.elapsed_ms if hasattr(session, 'elapsed_ms') else 0
    _query_timing.statement = statement
    _query_timing.parameters = parameters


def _after_cursor_execute(
    conn: "Connection",
    cursor: Any,
    statement: str,
    parameters: Any,
    context: Any,
    executemany: bool,
) -> None:
    """Event listener called after query execution."""
    if not has_active_session():
        return

    session = get_current_session()

    # Check if we have timing info
    if not hasattr(_query_timing, 'start_time'):
        return

    # Handle based on session mode
    if session.is_recording:
        _record_query(
            conn, cursor, statement, parameters, context, executemany
        )
    # Replay is handled differently for DB - we can't easily mock cursor results
    # For now, just record and skip replay mocking


def _record_query(
    conn: "Connection",
    cursor: Any,
    statement: str,
    parameters: Any,
    context: Any,
    executemany: bool,
) -> None:
    """Record a database query."""
    from timetracer.session import TraceSession

    session = get_current_session()
    if not isinstance(session, TraceSession):
        return

    # Calculate duration
    duration_ms = (time.perf_counter() - _query_timing.start_time) * 1000
    start_offset = getattr(_query_timing, 'start_offset', 0)

    # Build event
    event = _build_event(
        statement=statement,
        parameters=parameters,
        cursor=cursor,
        start_offset_ms=start_offset,
        duration_ms=duration_ms,
        executemany=executemany,
    )

    session.add_event(event)


def _build_event(
    statement: str,
    parameters: Any,
    cursor: Any,
    start_offset_ms: float,
    duration_ms: float,
    executemany: bool,
) -> DependencyEvent:
    """Build a DependencyEvent from SQLAlchemy query."""
    # Build signature
    signature = _make_signature(statement, parameters)

    # Build result (basic - just rowcount)
    result = _make_result(cursor)

    return DependencyEvent(
        eid=0,  # Will be set by session
        event_type=EventType.DB_QUERY,
        start_offset_ms=start_offset_ms,
        duration_ms=duration_ms,
        signature=signature,
        result=result,
    )


def _make_signature(statement: str, parameters: Any) -> EventSignature:
    """Create EventSignature from SQLAlchemy query."""
    # Normalize statement (strip whitespace)
    normalized = " ".join(statement.split())

    # Extract operation type
    operation = normalized.split()[0].upper() if normalized else "UNKNOWN"

    # Hash parameters
    params_hash = None
    if parameters:
        try:
            params_hash = hash_body(str(parameters))
        except Exception:
            params_hash = None

    # Extract table name (basic parsing)
    table = _extract_table_name(normalized, operation)

    return EventSignature(
        lib="sqlalchemy",
        method=operation,  # SELECT, INSERT, UPDATE, DELETE
        url=table,  # Use url field for table name
        query={},
        body_hash=params_hash,
    )


def _extract_table_name(statement: str, operation: str) -> str:
    """Extract table name from SQL statement."""
    statement_upper = statement.upper()

    try:
        if operation == "SELECT":
            # SELECT ... FROM table_name
            from_idx = statement_upper.find(" FROM ")
            if from_idx != -1:
                after_from = statement[from_idx + 6:].strip()
                return after_from.split()[0].strip("();")

        elif operation == "INSERT":
            # INSERT INTO table_name
            into_idx = statement_upper.find(" INTO ")
            if into_idx != -1:
                after_into = statement[into_idx + 6:].strip()
                return after_into.split()[0].strip("();")

        elif operation == "UPDATE":
            # UPDATE table_name
            parts = statement.split()
            if len(parts) > 1:
                return parts[1].strip("();")

        elif operation == "DELETE":
            # DELETE FROM table_name
            from_idx = statement_upper.find(" FROM ")
            if from_idx != -1:
                after_from = statement[from_idx + 6:].strip()
                return after_from.split()[0].strip("();")
    except Exception:
        pass

    return "unknown"


def _make_result(cursor: Any) -> EventResult:
    """Create EventResult from cursor."""
    rowcount = -1
    try:
        rowcount = cursor.rowcount
    except Exception:
        pass

    return EventResult(
        status=rowcount,  # Use status for rowcount
        headers={"rowcount": str(rowcount)},
    )
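Likewise, a minimal sketch of wiring the SQLAlchemy plugin to a single engine; the in-memory SQLite engine and query are illustrative only, and recording still requires an active TraceSession on the current context.

from sqlalchemy import create_engine, text
from timetracer.plugins.sqlalchemy_plugin import enable_sqlalchemy, disable_sqlalchemy

engine = create_engine("sqlite:///:memory:")
enable_sqlalchemy(engine)  # listen on this engine only; omit the argument to listen on all engines
try:
    with engine.connect() as conn:
        # Executed during recording, this produces a DB_QUERY event whose
        # signature carries the operation, table name, and a hash of the parameters.
        conn.execute(text("SELECT 1"))
finally:
    disable_sqlalchemy(engine)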
timetracer/policies/__init__.py
@@ -0,0 +1,16 @@
"""Policies module for redaction and capture control."""

from timetracer.policies.capture import should_store_body, truncate_body
from timetracer.policies.redaction import (
    redact_body,
    redact_headers,
    redact_headers_allowlist,
)

__all__ = [
    "redact_headers",
    "redact_headers_allowlist",
    "redact_body",
    "should_store_body",
    "truncate_body",
]
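For orientation, a small hedged sketch of the public surface re-exported above; the header values are made up, and the exact allow-list applied by redact_headers_allowlist lives in timetracer/policies/redaction.py, which is not part of this hunk.

from timetracer.policies import redact_headers_allowlist

headers = {"Authorization": "Bearer secret-token", "Content-Type": "application/json"}
safe = redact_headers_allowlist(headers)  # keeps only allow-listed headers; sensitive ones are dropped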
timetracer/policies/capture.py
@@ -0,0 +1,64 @@
"""
Capture policies for body data.

Controls when and how much body data is captured.
"""

from __future__ import annotations

from timetracer.constants import CapturePolicy


def should_store_body(
    policy: CapturePolicy | str,
    is_error: bool = False,
) -> bool:
    """
    Determine if body should be stored based on policy.

    Args:
        policy: The capture policy (never, on_error, always).
        is_error: Whether the request resulted in an error.

    Returns:
        True if body should be stored.
    """
    # Handle string policy
    if isinstance(policy, str):
        policy = CapturePolicy(policy.lower())

    if policy == CapturePolicy.ALWAYS:
        return True
    elif policy == CapturePolicy.NEVER:
        return False
    elif policy == CapturePolicy.ON_ERROR:
        return is_error

    return False


def truncate_body(
    data: bytes,
    max_kb: int,
) -> tuple[bytes, bool]:
    """
    Truncate body data if it exceeds size limit.

    Args:
        data: The body bytes.
        max_kb: Maximum size in kilobytes.

    Returns:
        Tuple of (truncated_data, was_truncated).
    """
    max_bytes = max_kb * 1024

    if len(data) <= max_bytes:
        return data, False

    return data[:max_bytes], True


def get_body_size_kb(data: bytes) -> float:
    """Get body size in kilobytes."""
    return len(data) / 1024
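A short sketch of how the capture helpers compose; the "on_error" policy string and the 64 KB limit are arbitrary example values.

from timetracer.policies.capture import should_store_body, truncate_body

body = b'{"detail": "upstream timeout"}'
if should_store_body("on_error", is_error=True):  # ON_ERROR policy plus a failed call -> store the body
    data, was_truncated = truncate_body(body, max_kb=64)  # payload is under 64 KB, so was_truncated is False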