timetracer 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- timetracer/__init__.py +29 -0
- timetracer/cassette/__init__.py +6 -0
- timetracer/cassette/io.py +421 -0
- timetracer/cassette/naming.py +69 -0
- timetracer/catalog/__init__.py +288 -0
- timetracer/cli/__init__.py +5 -0
- timetracer/cli/commands/__init__.py +1 -0
- timetracer/cli/main.py +692 -0
- timetracer/config.py +297 -0
- timetracer/constants.py +129 -0
- timetracer/context.py +93 -0
- timetracer/dashboard/__init__.py +14 -0
- timetracer/dashboard/generator.py +229 -0
- timetracer/dashboard/server.py +244 -0
- timetracer/dashboard/template.py +874 -0
- timetracer/diff/__init__.py +6 -0
- timetracer/diff/engine.py +311 -0
- timetracer/diff/report.py +113 -0
- timetracer/exceptions.py +113 -0
- timetracer/integrations/__init__.py +27 -0
- timetracer/integrations/fastapi.py +537 -0
- timetracer/integrations/flask.py +507 -0
- timetracer/plugins/__init__.py +42 -0
- timetracer/plugins/base.py +73 -0
- timetracer/plugins/httpx_plugin.py +413 -0
- timetracer/plugins/redis_plugin.py +297 -0
- timetracer/plugins/requests_plugin.py +333 -0
- timetracer/plugins/sqlalchemy_plugin.py +280 -0
- timetracer/policies/__init__.py +16 -0
- timetracer/policies/capture.py +64 -0
- timetracer/policies/redaction.py +165 -0
- timetracer/replay/__init__.py +6 -0
- timetracer/replay/engine.py +75 -0
- timetracer/replay/errors.py +9 -0
- timetracer/replay/matching.py +83 -0
- timetracer/session.py +390 -0
- timetracer/storage/__init__.py +18 -0
- timetracer/storage/s3.py +364 -0
- timetracer/timeline/__init__.py +6 -0
- timetracer/timeline/generator.py +150 -0
- timetracer/timeline/template.py +370 -0
- timetracer/types.py +197 -0
- timetracer/utils/__init__.py +6 -0
- timetracer/utils/hashing.py +68 -0
- timetracer/utils/time.py +106 -0
- timetracer-1.1.0.dist-info/METADATA +286 -0
- timetracer-1.1.0.dist-info/RECORD +51 -0
- timetracer-1.1.0.dist-info/WHEEL +5 -0
- timetracer-1.1.0.dist-info/entry_points.txt +2 -0
- timetracer-1.1.0.dist-info/licenses/LICENSE +21 -0
- timetracer-1.1.0.dist-info/top_level.txt +1 -0
timetracer/__init__.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Timetracer - Time-travel debugging for FastAPI and Flask
|
|
3
|
+
|
|
4
|
+
Record API requests into portable cassettes and replay them
|
|
5
|
+
with mocked dependencies for deterministic debugging.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from timetracer.config import TraceConfig
|
|
9
|
+
from timetracer.constants import CapturePolicy, TraceMode
|
|
10
|
+
from timetracer.exceptions import (
|
|
11
|
+
CassetteError,
|
|
12
|
+
CassetteNotFoundError,
|
|
13
|
+
CassetteSchemaError,
|
|
14
|
+
ReplayMismatchError,
|
|
15
|
+
TimetracerError,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
__version__ = "1.1.0"
|
|
19
|
+
__all__ = [
|
|
20
|
+
"__version__",
|
|
21
|
+
"TraceConfig",
|
|
22
|
+
"TraceMode",
|
|
23
|
+
"CapturePolicy",
|
|
24
|
+
"TimetracerError",
|
|
25
|
+
"ReplayMismatchError",
|
|
26
|
+
"CassetteError",
|
|
27
|
+
"CassetteNotFoundError",
|
|
28
|
+
"CassetteSchemaError",
|
|
29
|
+
]
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
"""Cassette module for storage and retrieval."""
|
|
2
|
+
|
|
3
|
+
from timetracer.cassette.io import read_cassette, write_cassette
|
|
4
|
+
from timetracer.cassette.naming import cassette_filename, sanitize_route
|
|
5
|
+
|
|
6
|
+
__all__ = ["write_cassette", "read_cassette", "cassette_filename", "sanitize_route"]
|
|
@@ -0,0 +1,421 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Cassette I/O - reading and writing cassette files.
|
|
3
|
+
|
|
4
|
+
Handles serialization, deserialization, and file management.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
from dataclasses import asdict
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import TYPE_CHECKING, Any
|
|
13
|
+
|
|
14
|
+
from timetracer.cassette.naming import cassette_filename, get_date_directory
|
|
15
|
+
from timetracer.constants import SCHEMA_VERSION, EventType
|
|
16
|
+
from timetracer.exceptions import CassetteNotFoundError, CassetteSchemaError
|
|
17
|
+
from timetracer.types import (
|
|
18
|
+
AppliedPolicies,
|
|
19
|
+
BodySnapshot,
|
|
20
|
+
CaptureStats,
|
|
21
|
+
Cassette,
|
|
22
|
+
DependencyEvent,
|
|
23
|
+
EventResult,
|
|
24
|
+
EventSignature,
|
|
25
|
+
RequestSnapshot,
|
|
26
|
+
ResponseSnapshot,
|
|
27
|
+
SessionMeta,
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
if TYPE_CHECKING:
|
|
31
|
+
from timetracer.config import TraceConfig
|
|
32
|
+
from timetracer.session import TraceSession
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class CassetteEncoder(json.JSONEncoder):
    """Custom JSON encoder for cassette data.

    Extends the stock encoder to cover the two non-JSON-native value
    kinds that appear in cassettes: dataclass instances and Enum members.
    """

    def default(self, obj: Any) -> Any:
        import enum

        # Dataclass instances serialize as their (recursive) field dict.
        if hasattr(obj, "__dataclass_fields__"):
            return asdict(obj)

        # Enum members serialize as their underlying value.  An explicit
        # isinstance check replaces the previous hasattr(obj, "value")
        # duck-test, which would silently serialize any arbitrary object
        # that happened to expose a `.value` attribute.
        if isinstance(obj, enum.Enum):
            return obj.value

        return super().default(obj)
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def write_cassette(session: TraceSession, config: TraceConfig) -> str:
    """
    Write a trace session to a cassette file.

    Creates a date-based subdirectory under the configured cassette
    directory and uses the standardized cassette filename.

    Args:
        session: The completed trace session.
        config: Configuration supplying the cassette directory.

    Returns:
        Absolute path to the written cassette file.
    """
    # Finalize the session first so stats/duration are complete.
    if not session._finalized:
        session.finalize()

    cassette = session.to_cassette()

    # Cassettes are grouped by UTC date: <cassette_dir>/<YYYY-MM-DD>/
    target_dir = Path(config.cassette_dir).resolve() / get_date_directory()
    target_dir.mkdir(parents=True, exist_ok=True)

    http_method = cassette.request.method or "UNKNOWN"
    route = cassette.request.route_template or cassette.request.path or "unknown"
    target = target_dir / cassette_filename(http_method, route, session.session_id)

    payload = json.dumps(_cassette_to_dict(cassette), indent=2, cls=CassetteEncoder)
    target.write_text(payload, encoding="utf-8")

    return str(target)
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def read_cassette(path: str) -> Cassette:
    """
    Read a cassette from file.

    Args:
        path: Path to the cassette file.

    Returns:
        Loaded Cassette object.

    Raises:
        CassetteNotFoundError: If the file doesn't exist.
        CassetteSchemaError: If the schema version is incompatible.
    """
    # Imported here to avoid widening the module-level constants import.
    from timetracer.constants import SUPPORTED_SCHEMA_VERSIONS

    file_path = Path(path)
    if not file_path.exists():
        raise CassetteNotFoundError(path)

    data = json.loads(file_path.read_text(encoding="utf-8"))

    version = data.get("schema_version")
    if version not in SUPPORTED_SCHEMA_VERSIONS:
        raise CassetteSchemaError(path, SCHEMA_VERSION, version)

    # Older-but-supported schemas are upgraded in-memory before parsing.
    if version != SCHEMA_VERSION:
        data = _migrate_cassette(data, version)

    return _dict_to_cassette(data)
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def _migrate_cassette(data: dict[str, Any], from_version: str) -> dict[str, Any]:
    """
    Migrate cassette data from an older schema version to the current one.

    Args:
        data: Cassette data dict.
        from_version: Schema version the data was written with.

    Returns:
        Migrated cassette data dict (mutated in place).
    """
    if from_version == "0.1":
        # Schemas 0.1 and 1.0 are structurally identical; only the
        # version tag needs updating.
        data["schema_version"] = SCHEMA_VERSION
    return data
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _cassette_to_dict(cassette: Cassette) -> dict[str, Any]:
    """Serialize a Cassette into a plain dict ready for JSON output."""
    # Top-level key order mirrors the on-disk schema for stable diffs.
    return {
        "schema_version": cassette.schema_version,
        "session": _session_meta_to_dict(cassette.session),
        "request": _request_to_dict(cassette.request),
        "response": _response_to_dict(cassette.response),
        "events": list(map(_event_to_dict, cassette.events)),
        "policies": _policies_to_dict(cassette.policies),
        "stats": _stats_to_dict(cassette.stats),
    }
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _session_meta_to_dict(meta: SessionMeta) -> dict[str, Any]:
    """Serialize SessionMeta; all fields are written, even when None."""
    field_names = (
        "id",
        "recorded_at",
        "service",
        "env",
        "framework",
        "timetracer_version",
        "python_version",
        "git_sha",
    )
    return {name: getattr(meta, name) for name in field_names}
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _request_to_dict(req: RequestSnapshot) -> dict[str, Any]:
    """Serialize a RequestSnapshot, omitting empty optional fields."""
    out: dict[str, Any] = {"method": req.method, "path": req.path}
    # Optional string/dict fields are skipped entirely when falsy so the
    # written cassette stays compact; key order matches the schema.
    for key, value in (
        ("route_template", req.route_template),
        ("headers", req.headers),
        ("query", req.query),
    ):
        if value:
            out[key] = value
    if req.body:
        out["body"] = _body_to_dict(req.body)
    for key, value in (("client_ip", req.client_ip), ("user_agent", req.user_agent)):
        if value:
            out[key] = value
    return out
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def _response_to_dict(res: ResponseSnapshot) -> dict[str, Any]:
    """Serialize a ResponseSnapshot, omitting empty headers/body."""
    snapshot: dict[str, Any] = {
        "status": res.status,
        "duration_ms": res.duration_ms,
    }
    if res.headers:
        snapshot["headers"] = res.headers
    if res.body:
        snapshot["body"] = _body_to_dict(res.body)
    return snapshot
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def _body_to_dict(body: BodySnapshot) -> dict[str, Any]:
    """Serialize a BodySnapshot; only populated fields are emitted."""
    out: dict[str, Any] = {"_captured": body.captured}
    # `data` and `size_bytes` may legitimately be falsy (empty payload,
    # zero bytes), so they are gated on `is not None` rather than truthiness.
    for key, value, keep in (
        ("encoding", body.encoding, bool(body.encoding)),
        ("data", body.data, body.data is not None),
        ("truncated", body.truncated, bool(body.truncated)),
        ("size_bytes", body.size_bytes, body.size_bytes is not None),
        ("hash", body.hash, bool(body.hash)),
    ):
        if keep:
            out[key] = value
    return out
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def _event_to_dict(event: DependencyEvent) -> dict[str, Any]:
    """Serialize a DependencyEvent; the enum event_type is stored by value."""
    serialized: dict[str, Any] = {
        "eid": event.eid,
        "type": event.event_type.value,
        "start_offset_ms": event.start_offset_ms,
        "duration_ms": event.duration_ms,
    }
    serialized["signature"] = _signature_to_dict(event.signature)
    serialized["result"] = _result_to_dict(event.result)
    return serialized
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def _signature_to_dict(sig: EventSignature) -> dict[str, Any]:
    """Serialize an EventSignature, dropping unset optional fields."""
    out: dict[str, Any] = {"lib": sig.lib, "method": sig.method}
    for key, value in (
        ("url", sig.url),
        ("query", sig.query),
        ("headers_hash", sig.headers_hash),
        ("body_hash", sig.body_hash),
    ):
        if value:
            out[key] = value
    return out
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def _result_to_dict(result: EventResult) -> dict[str, Any]:
    """Serialize an EventResult; only populated fields are emitted."""
    out: dict[str, Any] = {}
    # status may be 0-like but never falsy-valid here besides None,
    # so it keeps an explicit None gate.
    if result.status is not None:
        out["status"] = result.status
    if result.headers:
        out["headers"] = result.headers
    if result.body:
        out["body"] = _body_to_dict(result.body)
    for key, value in (("error", result.error), ("error_type", result.error_type)):
        if value:
            out[key] = value
    return out
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def _policies_to_dict(policies: AppliedPolicies) -> dict[str, Any]:
    """Serialize AppliedPolicies into the nested policy schema."""
    redaction = {
        "mode": policies.redaction_mode,
        "rules": policies.redaction_rules,
    }
    capture = {
        "max_body_kb": policies.max_body_kb,
        "store_request_body": policies.store_request_body,
        "store_response_body": policies.store_response_body,
    }
    sampling = {
        "rate": policies.sample_rate,
        "errors_only": policies.errors_only,
    }
    return {"redaction": redaction, "capture": capture, "sampling": sampling}
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def _stats_to_dict(stats: CaptureStats) -> dict[str, Any]:
    """Serialize CaptureStats."""
    keys = ("event_counts", "total_events", "total_duration_ms")
    return {key: getattr(stats, key) for key in keys}
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
# =============================================================================
|
|
297
|
+
# DESERIALIZATION (dict -> Cassette)
|
|
298
|
+
# =============================================================================
|
|
299
|
+
|
|
300
|
+
def _dict_to_cassette(data: dict[str, Any]) -> Cassette:
    """Reconstruct a Cassette from its JSON dict form."""
    # events/policies/stats may be absent in minimal cassettes, hence .get().
    return Cassette(
        schema_version=data["schema_version"],
        session=_dict_to_session_meta(data["session"]),
        request=_dict_to_request(data["request"]),
        response=_dict_to_response(data["response"]),
        events=[_dict_to_event(entry) for entry in data.get("events", [])],
        policies=_dict_to_policies(data.get("policies", {})),
        stats=_dict_to_stats(data.get("stats", {})),
    )
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def _dict_to_session_meta(data: dict[str, Any]) -> SessionMeta:
    """Reconstruct SessionMeta; tolerates fields missing from older files."""
    # Some cassettes recorded the version under the legacy key
    # "timetrace_version"; fall back to it for compatibility.
    version = data.get("timetracer_version") or data.get("timetrace_version", "")
    return SessionMeta(
        id=data["id"],
        recorded_at=data["recorded_at"],
        service=data.get("service", ""),
        env=data.get("env", ""),
        framework=data.get("framework", "fastapi"),
        timetracer_version=version,
        python_version=data.get("python_version", ""),
        git_sha=data.get("git_sha"),
    )
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def _dict_to_request(data: dict[str, Any]) -> RequestSnapshot:
    """Reconstruct a RequestSnapshot from its dict form."""
    # An absent "body" key means the body was never captured.
    body = _dict_to_body(data["body"]) if "body" in data else None
    return RequestSnapshot(
        method=data["method"],
        path=data["path"],
        route_template=data.get("route_template"),
        headers=data.get("headers", {}),
        query=data.get("query", {}),
        body=body,
        client_ip=data.get("client_ip"),
        user_agent=data.get("user_agent"),
    )
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def _dict_to_response(data: dict[str, Any]) -> ResponseSnapshot:
    """Reconstruct a ResponseSnapshot from its dict form."""
    body = _dict_to_body(data["body"]) if "body" in data else None
    return ResponseSnapshot(
        status=data["status"],
        headers=data.get("headers", {}),
        body=body,
        duration_ms=data.get("duration_ms", 0.0),
    )
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def _dict_to_body(data: dict[str, Any]) -> BodySnapshot:
    """Reconstruct a BodySnapshot; absent keys fall back to defaults."""
    # "_captured" is the serializer's marker key (see _body_to_dict).
    return BodySnapshot(
        captured=data.get("_captured", False),
        encoding=data.get("encoding"),
        data=data.get("data"),
        truncated=data.get("truncated", False),
        size_bytes=data.get("size_bytes"),
        hash=data.get("hash"),
    )
|
|
361
|
+
|
|
362
|
+
|
|
363
|
+
def _dict_to_event(data: dict[str, Any]) -> DependencyEvent:
    """Reconstruct a DependencyEvent; "type" round-trips through EventType."""
    return DependencyEvent(
        eid=data["eid"],
        event_type=EventType(data["type"]),
        start_offset_ms=data["start_offset_ms"],
        duration_ms=data["duration_ms"],
        signature=_dict_to_signature(data["signature"]),
        # A missing result block deserializes as an all-empty EventResult.
        result=_dict_to_result(data.get("result", {})),
    )
|
|
373
|
+
|
|
374
|
+
|
|
375
|
+
def _dict_to_signature(data: dict[str, Any]) -> EventSignature:
    """Reconstruct an EventSignature from its dict form."""
    return EventSignature(
        lib=data["lib"],
        method=data["method"],
        url=data.get("url"),
        query=data.get("query", {}),
        headers_hash=data.get("headers_hash"),
        body_hash=data.get("body_hash"),
    )
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
def _dict_to_result(data: dict[str, Any]) -> EventResult:
    """Reconstruct an EventResult from its dict form."""
    body = _dict_to_body(data["body"]) if "body" in data else None
    return EventResult(
        status=data.get("status"),
        headers=data.get("headers", {}),
        body=body,
        error=data.get("error"),
        error_type=data.get("error_type"),
    )
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
def _dict_to_policies(data: dict[str, Any]) -> AppliedPolicies:
    """Reconstruct AppliedPolicies; each sub-section may be missing."""
    redaction = data.get("redaction", {})
    capture = data.get("capture", {})
    sampling = data.get("sampling", {})

    # Defaults mirror the recording-side policy defaults.
    return AppliedPolicies(
        redaction_mode=redaction.get("mode", "default"),
        redaction_rules=redaction.get("rules", []),
        max_body_kb=capture.get("max_body_kb", 64),
        store_request_body=capture.get("store_request_body", "on_error"),
        store_response_body=capture.get("store_response_body", "on_error"),
        sample_rate=sampling.get("rate", 1.0),
        errors_only=sampling.get("errors_only", False),
    )
|
|
413
|
+
|
|
414
|
+
|
|
415
|
+
def _dict_to_stats(data: dict[str, Any]) -> CaptureStats:
    """Reconstruct CaptureStats; absent keys fall back to zero values."""
    return CaptureStats(
        event_counts=data.get("event_counts", {}),
        total_events=data.get("total_events", 0),
        total_duration_ms=data.get("total_duration_ms", 0.0),
    )
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Cassette naming utilities.
|
|
3
|
+
|
|
4
|
+
Provides consistent naming for cassette files and directories.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import re
|
|
8
|
+
from datetime import datetime, timezone
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def sanitize_route(route: str) -> str:
    """
    Convert a route template to a filesystem-safe string.

    Examples:
        /checkout -> checkout
        /users/{id} -> users_id
        /v1/payments/confirm -> v1_payments_confirm
        /api/v2/orders/{order_id}/items -> api_v2_orders_order_id_items
    """
    cleaned = route.strip("/")
    cleaned = re.sub(r"\{(\w+)\}", r"\1", cleaned)   # {id} -> id
    cleaned = re.sub(r"[^a-zA-Z0-9]", "_", cleaned)  # separators -> _
    cleaned = re.sub(r"_+", "_", cleaned).strip("_")  # collapse and trim _
    # An all-separator route (e.g. "/") collapses to nothing; call it "root".
    return (cleaned or "root").lower()
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def cassette_filename(
    method: str,
    route_template: str,
    session_id: str,
    extension: str = "json"
) -> str:
    """
    Generate a standardized cassette filename.

    Format: {METHOD}__{sanitized_route}__{short_id}.{extension}

    Example:
        POST /checkout abc123... -> POST__checkout__abc123.json
    """
    route_part = sanitize_route(route_template or "unknown")
    # Only the first 8 chars of the session id keep filenames short while
    # remaining unique enough in practice.
    id_part = session_id[:8] if session_id else "unknown"
    return f"{method.upper()}__{route_part}__{id_part}.{extension}"
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def get_date_directory() -> str:
    """
    Get the date-based subdirectory name for today (UTC).

    Format: YYYY-MM-DD
    """
    today = datetime.now(timezone.utc)
    return today.strftime("%Y-%m-%d")
|