timetracer 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- timetracer/__init__.py +29 -0
- timetracer/cassette/__init__.py +6 -0
- timetracer/cassette/io.py +421 -0
- timetracer/cassette/naming.py +69 -0
- timetracer/catalog/__init__.py +288 -0
- timetracer/cli/__init__.py +5 -0
- timetracer/cli/commands/__init__.py +1 -0
- timetracer/cli/main.py +692 -0
- timetracer/config.py +297 -0
- timetracer/constants.py +129 -0
- timetracer/context.py +93 -0
- timetracer/dashboard/__init__.py +14 -0
- timetracer/dashboard/generator.py +229 -0
- timetracer/dashboard/server.py +244 -0
- timetracer/dashboard/template.py +874 -0
- timetracer/diff/__init__.py +6 -0
- timetracer/diff/engine.py +311 -0
- timetracer/diff/report.py +113 -0
- timetracer/exceptions.py +113 -0
- timetracer/integrations/__init__.py +27 -0
- timetracer/integrations/fastapi.py +537 -0
- timetracer/integrations/flask.py +507 -0
- timetracer/plugins/__init__.py +42 -0
- timetracer/plugins/base.py +73 -0
- timetracer/plugins/httpx_plugin.py +413 -0
- timetracer/plugins/redis_plugin.py +297 -0
- timetracer/plugins/requests_plugin.py +333 -0
- timetracer/plugins/sqlalchemy_plugin.py +280 -0
- timetracer/policies/__init__.py +16 -0
- timetracer/policies/capture.py +64 -0
- timetracer/policies/redaction.py +165 -0
- timetracer/replay/__init__.py +6 -0
- timetracer/replay/engine.py +75 -0
- timetracer/replay/errors.py +9 -0
- timetracer/replay/matching.py +83 -0
- timetracer/session.py +390 -0
- timetracer/storage/__init__.py +18 -0
- timetracer/storage/s3.py +364 -0
- timetracer/timeline/__init__.py +6 -0
- timetracer/timeline/generator.py +150 -0
- timetracer/timeline/template.py +370 -0
- timetracer/types.py +197 -0
- timetracer/utils/__init__.py +6 -0
- timetracer/utils/hashing.py +68 -0
- timetracer/utils/time.py +106 -0
- timetracer-1.1.0.dist-info/METADATA +286 -0
- timetracer-1.1.0.dist-info/RECORD +51 -0
- timetracer-1.1.0.dist-info/WHEEL +5 -0
- timetracer-1.1.0.dist-info/entry_points.txt +2 -0
- timetracer-1.1.0.dist-info/licenses/LICENSE +21 -0
- timetracer-1.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,413 @@
|
|
|
1
|
+
"""
|
|
2
|
+
httpx plugin for Timetracer.
|
|
3
|
+
|
|
4
|
+
Captures and replays httpx HTTP client calls.
|
|
5
|
+
This is the primary plugin for mocking external API dependencies.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import time
|
|
11
|
+
from typing import TYPE_CHECKING, Any, Callable
|
|
12
|
+
from urllib.parse import parse_qs, urlparse
|
|
13
|
+
|
|
14
|
+
from timetracer.constants import EventType
|
|
15
|
+
from timetracer.context import get_current_session, has_active_session
|
|
16
|
+
from timetracer.policies import redact_headers_allowlist
|
|
17
|
+
from timetracer.types import (
|
|
18
|
+
BodySnapshot,
|
|
19
|
+
DependencyEvent,
|
|
20
|
+
EventResult,
|
|
21
|
+
EventSignature,
|
|
22
|
+
)
|
|
23
|
+
from timetracer.utils.hashing import hash_body
|
|
24
|
+
|
|
25
|
+
if TYPE_CHECKING:
|
|
26
|
+
import httpx
|
|
27
|
+
|
|
28
|
+
# Store original methods for restoration
|
|
29
|
+
_original_async_send: Callable | None = None
|
|
30
|
+
_original_sync_send: Callable | None = None
|
|
31
|
+
_enabled = False
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def enable_httpx() -> None:
    """
    Enable httpx interception for recording and replay.

    This patches httpx.AsyncClient.send and httpx.Client.send.
    Call disable_httpx() to restore original behavior.

    Raises:
        ImportError: If httpx is not installed.
    """
    global _original_async_send, _original_sync_send, _enabled

    # Idempotent: patching twice would capture the patched method as the
    # "original" and make restoration impossible.
    if _enabled:
        return

    try:
        import httpx
    except ImportError:
        # The extras name must match this distribution ("timetracer", not
        # "timetrace"); suppress the chained traceback for a cleaner error.
        raise ImportError(
            "httpx is required for the httpx plugin. "
            "Install it with: pip install timetracer[httpx]"
        ) from None

    # Patch AsyncClient, keeping the original for disable_httpx().
    _original_async_send = httpx.AsyncClient.send
    httpx.AsyncClient.send = _patched_async_send

    # Patch sync Client the same way.
    _original_sync_send = httpx.Client.send
    httpx.Client.send = _patched_sync_send

    _enabled = True
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def disable_httpx() -> None:
    """Disable httpx interception and restore the unpatched send methods."""
    global _original_async_send, _original_sync_send, _enabled

    if not _enabled:
        return

    try:
        import httpx
    except ImportError:
        # httpx unavailable means nothing was ever patched.
        return

    # Put the saved originals back on the client classes.
    if _original_async_send is not None:
        httpx.AsyncClient.send = _original_async_send
    if _original_sync_send is not None:
        httpx.Client.send = _original_sync_send

    _original_async_send, _original_sync_send = None, None
    _enabled = False
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
async def _patched_async_send(
    self: "httpx.AsyncClient",
    request: "httpx.Request",
    **kwargs: Any,
) -> "httpx.Response":
    """Replacement for AsyncClient.send: records, replays, or passes through."""
    # Outside any timetracer session, behave exactly like stock httpx.
    if not has_active_session():
        return await _original_async_send(self, request, **kwargs)  # type: ignore

    session = get_current_session()

    if session.is_recording:
        return await _record_async_request(self, request, kwargs)

    if session.is_replaying:
        from timetracer.session import ReplaySession

        # Hybrid replay: a plugin configured to stay live bypasses the cassette.
        stay_live = isinstance(session, ReplaySession) and not session.should_mock_plugin("http")
        if stay_live:
            return await _original_async_send(self, request, **kwargs)  # type: ignore
        return await _replay_async_request(request)

    # Session in neither mode: pass through untouched.
    return await _original_async_send(self, request, **kwargs)  # type: ignore
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _patched_sync_send(
    self: "httpx.Client",
    request: "httpx.Request",
    **kwargs: Any,
) -> "httpx.Response":
    """Replacement for Client.send: records, replays, or passes through."""
    # Outside any timetracer session, behave exactly like stock httpx.
    if not has_active_session():
        return _original_sync_send(self, request, **kwargs)  # type: ignore

    session = get_current_session()

    if session.is_recording:
        return _record_sync_request(self, request, kwargs)

    if session.is_replaying:
        from timetracer.session import ReplaySession

        # Hybrid replay: a plugin configured to stay live bypasses the cassette.
        stay_live = isinstance(session, ReplaySession) and not session.should_mock_plugin("http")
        if stay_live:
            return _original_sync_send(self, request, **kwargs)  # type: ignore
        return _replay_sync_request(request)

    # Session in neither mode: pass through untouched.
    return _original_sync_send(self, request, **kwargs)  # type: ignore
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
async def _record_async_request(
    client: "httpx.AsyncClient",
    request: "httpx.Request",
    kwargs: dict[str, Any],
) -> "httpx.Response":
    """Perform the real async request and append a DependencyEvent to the session."""
    from timetracer.session import TraceSession

    session = get_current_session()
    if not isinstance(session, TraceSession):
        # Not actually a recording session; fall back to plain behavior.
        return await _original_async_send(client, request, **kwargs)  # type: ignore

    start_offset = session.elapsed_ms
    started = time.perf_counter()

    error_info: tuple[str, str] | None = None
    response = None
    try:
        response = await _original_async_send(client, request, **kwargs)  # type: ignore
    except Exception as exc:
        error_info = (type(exc).__name__, str(exc))
        raise
    finally:
        # Runs on success and failure alike so errored calls are recorded too.
        elapsed_ms = (time.perf_counter() - started) * 1000
        session.add_event(
            _build_event(
                request=request,
                response=response,
                start_offset_ms=start_offset,
                duration_ms=elapsed_ms,
                error_info=error_info,
            )
        )

    return response
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _record_sync_request(
    client: "httpx.Client",
    request: "httpx.Request",
    kwargs: dict[str, Any],
) -> "httpx.Response":
    """Perform the real sync request and append a DependencyEvent to the session."""
    from timetracer.session import TraceSession

    session = get_current_session()
    if not isinstance(session, TraceSession):
        # Not actually a recording session; fall back to plain behavior.
        return _original_sync_send(client, request, **kwargs)  # type: ignore

    start_offset = session.elapsed_ms
    started = time.perf_counter()

    error_info: tuple[str, str] | None = None
    response = None
    try:
        response = _original_sync_send(client, request, **kwargs)  # type: ignore
    except Exception as exc:
        error_info = (type(exc).__name__, str(exc))
        raise
    finally:
        # Runs on success and failure alike so errored calls are recorded too.
        elapsed_ms = (time.perf_counter() - started) * 1000
        session.add_event(
            _build_event(
                request=request,
                response=response,
                start_offset_ms=start_offset,
                duration_ms=elapsed_ms,
                error_info=error_info,
            )
        )

    return response
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
async def _replay_async_request(
    request: "httpx.Request",
) -> "httpx.Response":
    """Serve an async httpx request from the cassette instead of the network."""
    from timetracer.session import ReplaySession

    session = get_current_session()
    if not isinstance(session, ReplaySession):
        raise RuntimeError("Expected ReplaySession for replay")

    # Match the live request against the recorded stream of HTTP events,
    # then synthesize a response from the matched event.
    matched = session.get_next_event(EventType.HTTP_CLIENT, _make_signature_dict(request))
    return _build_synthetic_response(request, matched)
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def _replay_sync_request(
    request: "httpx.Request",
) -> "httpx.Response":
    """Serve a sync httpx request from the cassette instead of the network."""
    from timetracer.session import ReplaySession

    session = get_current_session()
    if not isinstance(session, ReplaySession):
        raise RuntimeError("Expected ReplaySession for replay")

    # Match the live request against the recorded stream of HTTP events,
    # then synthesize a response from the matched event.
    matched = session.get_next_event(EventType.HTTP_CLIENT, _make_signature_dict(request))
    return _build_synthetic_response(request, matched)
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def _build_event(
    request: "httpx.Request",
    response: "httpx.Response | None",
    start_offset_ms: float,
    duration_ms: float,
    error_info: tuple[str, str] | None,
) -> DependencyEvent:
    """Assemble a DependencyEvent describing one httpx request/response pair."""
    return DependencyEvent(
        eid=0,  # The session assigns the real event id on add_event().
        event_type=EventType.HTTP_CLIENT,
        start_offset_ms=start_offset_ms,
        duration_ms=duration_ms,
        signature=_make_signature(request),
        result=_make_result(response, error_info),
    )
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def _make_signature(request: "httpx.Request") -> EventSignature:
    """Create an EventSignature from an httpx request."""
    # Normalize the URL to scheme + host + path; query goes in its own field.
    parsed = urlparse(str(request.url))
    normalized_url = f"{parsed.scheme}://{parsed.netloc}{parsed.path}"

    # parse_qs yields lists; collapse single-valued params to scalars.
    query = {
        name: values[0] if len(values) == 1 else values
        for name, values in parse_qs(parsed.query).items()
    }

    # Hash the request body rather than storing it.
    body_hash = hash_body(request.content) if request.content else None

    # Only allow-listed headers participate in the signature hash.
    allowed = redact_headers_allowlist(dict(request.headers))
    headers_hash = hash_body(str(sorted(allowed.items()))) if allowed else None

    return EventSignature(
        lib="httpx",
        method=request.method,
        url=normalized_url,
        query=query,
        headers_hash=headers_hash,
        body_hash=body_hash,
    )
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def _make_signature_dict(request: "httpx.Request") -> dict[str, Any]:
    """Project the request signature onto the fields used for cassette matching."""
    sig = _make_signature(request)
    # headers_hash is intentionally excluded from matching.
    return {
        field: getattr(sig, field)
        for field in ("lib", "method", "url", "query", "body_hash")
    }
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
def _make_result(
    response: "httpx.Response | None",
    error_info: tuple[str, str] | None,
) -> EventResult:
    """Create EventResult from httpx response.

    Args:
        response: The completed response, or None when the request raised.
        error_info: (exception class name, message) when the request raised.

    Returns:
        An EventResult holding either the error details, or the response
        status, allow-listed headers, and a body snapshot.
    """
    # An error takes precedence: record only the exception details.
    if error_info:
        return EventResult(
            error_type=error_info[0],
            error=error_info[1],
        )

    # Neither a response nor an error: nothing to capture.
    if response is None:
        return EventResult()

    # Capture response headers (allow-list)
    headers_dict = dict(response.headers)
    headers = redact_headers_allowlist(headers_dict)

    # Capture response body
    body_snapshot = None
    try:
        # NOTE(review): .content forces the body to be read; for streaming
        # responses this may raise — the outer except deliberately swallows it.
        content = response.content
        if content:
            # Start with a hash-only snapshot; upgraded below if JSON parses.
            body_snapshot = BodySnapshot(
                captured=True,
                encoding="bytes",
                data=None,  # Don't store full body by default
                hash=hash_body(content),
                size_bytes=len(content),
            )

            # Try to parse as JSON for storage
            try:
                import json
                data = json.loads(content.decode("utf-8"))
                body_snapshot.encoding = "json"
                body_snapshot.data = data
            except (json.JSONDecodeError, UnicodeDecodeError):
                # Non-JSON / non-UTF-8 body: keep the hash-only snapshot.
                pass
    except Exception:
        # Best-effort capture: a snapshot failure must never break the request.
        pass

    return EventResult(
        status=response.status_code,
        headers=headers,
        body=body_snapshot,
    )
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
def _build_synthetic_response(
    request: "httpx.Request",
    event: DependencyEvent,
) -> "httpx.Response":
    """Reconstruct an httpx.Response from a recorded event (re-raising errors)."""
    import httpx

    result = event.result

    # Recorded failures are re-raised rather than answered.
    if result.error:
        raise httpx.HTTPError(f"Recorded error: {result.error}")

    # Rebuild the body bytes from whichever snapshot form was stored.
    content = b""
    body = result.body
    if body and body.captured and body.data is not None:
        if body.encoding == "json":
            import json
            content = json.dumps(body.data).encode("utf-8")
        elif isinstance(body.data, str):
            content = body.data.encode("utf-8")
        elif isinstance(body.data, bytes):
            content = body.data

    # Keep content-length consistent with the reconstructed body.
    headers = dict(result.headers) if result.headers else {}
    if not any(name.lower() == "content-length" for name in headers):
        headers["content-length"] = str(len(content))

    return httpx.Response(
        status_code=result.status or 200,
        headers=headers,
        content=content,
        request=request,
    )
|
|
@@ -0,0 +1,297 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Redis plugin for Timetracer.
|
|
3
|
+
|
|
4
|
+
Captures and replays Redis commands.
|
|
5
|
+
This enables recording Redis interactions for deterministic replay.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import time
|
|
11
|
+
from typing import TYPE_CHECKING, Any, Callable
|
|
12
|
+
|
|
13
|
+
from timetracer.constants import EventType
|
|
14
|
+
from timetracer.context import get_current_session, has_active_session
|
|
15
|
+
from timetracer.types import (
|
|
16
|
+
BodySnapshot,
|
|
17
|
+
DependencyEvent,
|
|
18
|
+
EventResult,
|
|
19
|
+
EventSignature,
|
|
20
|
+
)
|
|
21
|
+
from timetracer.utils.hashing import hash_body
|
|
22
|
+
|
|
23
|
+
if TYPE_CHECKING:
|
|
24
|
+
import redis
|
|
25
|
+
|
|
26
|
+
# Store original methods for restoration
|
|
27
|
+
_original_execute_command: Callable | None = None
|
|
28
|
+
_original_pipeline_execute: Callable | None = None
|
|
29
|
+
_enabled = False
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def enable_redis() -> None:
    """
    Enable Redis interception for recording and replay.

    This patches redis.Redis.execute_command.
    Call disable_redis() to restore original behavior.

    Raises:
        ImportError: If redis is not installed.
    """
    global _original_execute_command, _original_pipeline_execute, _enabled

    # Idempotent: patching twice would capture the patched method as the
    # "original" and make restoration impossible.
    if _enabled:
        return

    try:
        import redis as redis_lib
    except ImportError:
        # Suppress the chained traceback for a cleaner install hint.
        raise ImportError(
            "redis is required for the redis plugin. "
            "Install it with: pip install redis"
        ) from None

    # Patch Redis.execute_command, keeping the original for disable_redis().
    _original_execute_command = redis_lib.Redis.execute_command
    redis_lib.Redis.execute_command = _patched_execute_command

    # Patch Pipeline.execute (for pipeline commands)
    _original_pipeline_execute = redis_lib.client.Pipeline.execute
    redis_lib.client.Pipeline.execute = _patched_pipeline_execute

    _enabled = True
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def disable_redis() -> None:
    """Disable Redis interception and restore the unpatched methods."""
    global _original_execute_command, _original_pipeline_execute, _enabled

    if not _enabled:
        return

    try:
        import redis as redis_lib
    except ImportError:
        # redis unavailable means nothing was ever patched.
        return

    # Put the saved originals back on the client classes.
    if _original_execute_command is not None:
        redis_lib.Redis.execute_command = _original_execute_command

    if _original_pipeline_execute is not None:
        redis_lib.client.Pipeline.execute = _original_pipeline_execute

    _original_execute_command, _original_pipeline_execute = None, None
    _enabled = False
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _patched_execute_command(
    self: "redis.Redis",
    *args: Any,
    **kwargs: Any,
) -> Any:
    """Replacement for Redis.execute_command: records, replays, or passes through."""
    # Outside any timetracer session, behave exactly like stock redis-py.
    if not has_active_session():
        return _original_execute_command(self, *args, **kwargs)  # type: ignore

    session = get_current_session()

    if session.is_recording:
        return _record_command(self, args, kwargs)

    if session.is_replaying:
        from timetracer.session import ReplaySession

        # Hybrid replay: a plugin configured to stay live bypasses the cassette.
        stay_live = isinstance(session, ReplaySession) and not session.should_mock_plugin("redis")
        if stay_live:
            return _original_execute_command(self, *args, **kwargs)  # type: ignore
        return _replay_command(args, kwargs)

    # Session in neither mode: pass through untouched.
    return _original_execute_command(self, *args, **kwargs)  # type: ignore
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _patched_pipeline_execute(
    self: "redis.client.Pipeline",
    raise_on_error: bool = True,
) -> list:
    """Patched pipeline execute method.

    Pipelined commands are NOT yet captured or replayed: this is a transparent
    pass-through to the saved original, so during replay a pipeline still hits
    the real server.
    """
    # For now, just call original - pipeline tracking is complex
    # TODO: Add full pipeline support in v2.1
    return _original_pipeline_execute(self, raise_on_error)  # type: ignore
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _record_command(
    client: "redis.Redis",
    args: tuple,
    kwargs: dict[str, Any],
) -> Any:
    """Execute the real Redis command and append a DependencyEvent to the session."""
    from timetracer.session import TraceSession

    session = get_current_session()
    if not isinstance(session, TraceSession):
        # Not actually a recording session; fall back to plain behavior.
        return _original_execute_command(client, *args, **kwargs)  # type: ignore

    start_offset = session.elapsed_ms
    started = time.perf_counter()

    error_info: tuple[str, str] | None = None
    result = None
    try:
        result = _original_execute_command(client, *args, **kwargs)  # type: ignore
    except Exception as exc:
        error_info = (type(exc).__name__, str(exc))
        raise
    finally:
        # Runs on success and failure alike so errored commands are recorded too.
        elapsed_ms = (time.perf_counter() - started) * 1000
        session.add_event(
            _build_event(
                args=args,
                kwargs=kwargs,
                result=result,
                start_offset_ms=start_offset,
                duration_ms=elapsed_ms,
                error_info=error_info,
            )
        )

    return result
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def _replay_command(
    args: tuple,
    kwargs: dict[str, Any],
) -> Any:
    """Answer a Redis command from the cassette instead of the server."""
    from timetracer.session import ReplaySession

    session = get_current_session()
    if not isinstance(session, ReplaySession):
        raise RuntimeError("Expected ReplaySession for replay")

    # Match the live command against the recorded stream of Redis events,
    # then hand back the recorded result.
    matched = session.get_next_event(EventType.REDIS, _make_signature_dict(args, kwargs))
    return _extract_result(matched)
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
def _build_event(
    args: tuple,
    kwargs: dict[str, Any],
    result: Any,
    start_offset_ms: float,
    duration_ms: float,
    error_info: tuple[str, str] | None,
) -> DependencyEvent:
    """Assemble a DependencyEvent describing one Redis command."""
    return DependencyEvent(
        eid=0,  # The session assigns the real event id on add_event().
        event_type=EventType.REDIS,
        start_offset_ms=start_offset_ms,
        duration_ms=duration_ms,
        signature=_make_signature(args, kwargs),
        result=_make_result(result, error_info),
    )
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def _make_signature(args: tuple, kwargs: dict[str, Any]) -> EventSignature:
    """Create an EventSignature from a Redis command invocation.

    args layout is (COMMAND, key, *rest); kwargs are ignored for matching.
    """
    command = str(args[0]).upper() if args else "UNKNOWN"
    key = str(args[1]) if len(args) > 1 else ""

    # Remaining positional args are hashed rather than stored verbatim.
    args_hash = None
    if len(args) > 2:
        try:
            args_hash = hash_body(str(args[2:]))
        except Exception:
            # Unstringifiable args: match on command + key only.
            args_hash = None

    return EventSignature(
        lib="redis",
        method=command,  # GET, SET, HGET, etc.
        url=key,  # The Redis key rides in the url field.
        query={},
        body_hash=args_hash,
    )
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def _make_signature_dict(args: tuple, kwargs: dict[str, Any]) -> dict[str, Any]:
    """Project the command signature onto the fields used for cassette matching."""
    sig = _make_signature(args, kwargs)
    return {
        field: getattr(sig, field)
        for field in ("lib", "method", "url", "body_hash")
    }
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def _make_result(
    result: Any,
    error_info: tuple[str, str] | None,
) -> EventResult:
    """Create an EventResult from a Redis command outcome."""
    # An error takes precedence: record only the exception details.
    if error_info:
        return EventResult(
            error_type=error_info[0],
            error=error_info[1],
        )

    # Snapshot the returned value in a serializable form (best effort).
    body_snapshot = None
    if result is not None:
        try:
            if isinstance(result, bytes):
                data = result.decode("utf-8", errors="replace")
            elif isinstance(result, (list, dict)):
                data = result
            else:
                data = str(result)

            body_snapshot = BodySnapshot(
                captured=True,
                encoding="json" if isinstance(data, (list, dict)) else "text",
                data=data,
            )
        except Exception:
            # Unserializable result: record the event without a body.
            body_snapshot = None

    # status encodes presence: 1 when a value came back, 0 otherwise.
    return EventResult(
        status=1 if result is not None else 0,
        body=body_snapshot,
    )
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
def _extract_result(event: DependencyEvent) -> Any:
    """Turn a recorded event back into the value a Redis call would return.

    NOTE: bytes results were decoded to str at record time, so replay hands
    back str where a live client may have returned bytes.
    """
    recorded = event.result

    # Recorded failures re-raise as a generic RedisError.
    if recorded.error:
        import redis
        raise redis.RedisError(f"Recorded error: {recorded.error}")

    body = recorded.body
    if body and body.captured and body.data is not None:
        return body.data

    return None
|