timetracer 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- timetracer/__init__.py +29 -0
- timetracer/cassette/__init__.py +6 -0
- timetracer/cassette/io.py +421 -0
- timetracer/cassette/naming.py +69 -0
- timetracer/catalog/__init__.py +288 -0
- timetracer/cli/__init__.py +5 -0
- timetracer/cli/commands/__init__.py +1 -0
- timetracer/cli/main.py +692 -0
- timetracer/config.py +297 -0
- timetracer/constants.py +129 -0
- timetracer/context.py +93 -0
- timetracer/dashboard/__init__.py +14 -0
- timetracer/dashboard/generator.py +229 -0
- timetracer/dashboard/server.py +244 -0
- timetracer/dashboard/template.py +874 -0
- timetracer/diff/__init__.py +6 -0
- timetracer/diff/engine.py +311 -0
- timetracer/diff/report.py +113 -0
- timetracer/exceptions.py +113 -0
- timetracer/integrations/__init__.py +27 -0
- timetracer/integrations/fastapi.py +537 -0
- timetracer/integrations/flask.py +507 -0
- timetracer/plugins/__init__.py +42 -0
- timetracer/plugins/base.py +73 -0
- timetracer/plugins/httpx_plugin.py +413 -0
- timetracer/plugins/redis_plugin.py +297 -0
- timetracer/plugins/requests_plugin.py +333 -0
- timetracer/plugins/sqlalchemy_plugin.py +280 -0
- timetracer/policies/__init__.py +16 -0
- timetracer/policies/capture.py +64 -0
- timetracer/policies/redaction.py +165 -0
- timetracer/replay/__init__.py +6 -0
- timetracer/replay/engine.py +75 -0
- timetracer/replay/errors.py +9 -0
- timetracer/replay/matching.py +83 -0
- timetracer/session.py +390 -0
- timetracer/storage/__init__.py +18 -0
- timetracer/storage/s3.py +364 -0
- timetracer/timeline/__init__.py +6 -0
- timetracer/timeline/generator.py +150 -0
- timetracer/timeline/template.py +370 -0
- timetracer/types.py +197 -0
- timetracer/utils/__init__.py +6 -0
- timetracer/utils/hashing.py +68 -0
- timetracer/utils/time.py +106 -0
- timetracer-1.1.0.dist-info/METADATA +286 -0
- timetracer-1.1.0.dist-info/RECORD +51 -0
- timetracer-1.1.0.dist-info/WHEEL +5 -0
- timetracer-1.1.0.dist-info/entry_points.txt +2 -0
- timetracer-1.1.0.dist-info/licenses/LICENSE +21 -0
- timetracer-1.1.0.dist-info/top_level.txt +1 -0
timetracer/cli/main.py
ADDED
@@ -0,0 +1,692 @@
"""
Timetracer CLI.

Provides commands for managing and inspecting cassettes.
"""

from __future__ import annotations

import argparse
import sys
from pathlib import Path

from timetracer import __version__


def main(args: list[str] | None = None) -> int:
    """Main CLI entry point."""
    parser = argparse.ArgumentParser(
        prog="timetracer",
        description="Time-travel debugging for FastAPI - manage and inspect cassettes",
    )
    parser.add_argument(
        "--version", "-v",
        action="version",
        version=f"timetracer {__version__}",
    )

    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # List command
    list_parser = subparsers.add_parser(
        "list",
        help="List recorded cassettes",
    )
    list_parser.add_argument(
        "--dir", "-d",
        default="./cassettes",
        help="Cassette directory (default: ./cassettes)",
    )
    list_parser.add_argument(
        "--last", "-n",
        type=int,
        default=10,
        help="Number of recent cassettes to show (default: 10)",
    )

    # Show command
    show_parser = subparsers.add_parser(
        "show",
        help="Show cassette details",
    )
    show_parser.add_argument(
        "cassette",
        help="Path to cassette file",
    )
    show_parser.add_argument(
        "--events", "-e",
        action="store_true",
        help="Show event details",
    )

    # Diff command
    diff_parser = subparsers.add_parser(
        "diff",
        help="Compare two cassettes",
    )
    diff_parser.add_argument(
        "--a", "-a",
        required=True,
        dest="cassette_a",
        help="Path to baseline cassette",
    )
    diff_parser.add_argument(
        "--b", "-b",
        required=True,
        dest="cassette_b",
        help="Path to comparison cassette",
    )
    diff_parser.add_argument(
        "--json", "-j",
        action="store_true",
        help="Output as JSON",
    )
    diff_parser.add_argument(
        "--out", "-o",
        dest="output",
        help="Write report to file",
    )
    diff_parser.add_argument(
        "--threshold", "-t",
        type=float,
        default=20.0,
        help="Duration change threshold percentage (default: 20)",
    )

    # Timeline command
    timeline_parser = subparsers.add_parser(
        "timeline",
        help="Generate HTML timeline visualization",
    )
    timeline_parser.add_argument(
        "cassette",
        help="Path to cassette file",
    )
    timeline_parser.add_argument(
        "--out", "-o",
        dest="output",
        help="Output HTML file (default: cassette name + .html)",
    )
    timeline_parser.add_argument(
        "--open",
        action="store_true",
        help="Open in browser after generating",
    )

    # S3 commands
    s3_parser = subparsers.add_parser(
        "s3",
        help="S3 storage operations",
    )
    s3_subparsers = s3_parser.add_subparsers(dest="s3_command", help="S3 commands")

    # S3 upload
    s3_upload = s3_subparsers.add_parser("upload", help="Upload cassettes to S3")
    s3_upload.add_argument("path", help="Local cassette file or directory")
    s3_upload.add_argument("--bucket", "-b", help="S3 bucket (or TIMETRACER_S3_BUCKET)")
    s3_upload.add_argument("--prefix", "-p", default="cassettes", help="S3 prefix")

    # S3 download
    s3_download = s3_subparsers.add_parser("download", help="Download cassette from S3")
    s3_download.add_argument("key", help="S3 key to download")
    s3_download.add_argument("--out", "-o", help="Local output path")
    s3_download.add_argument("--bucket", "-b", help="S3 bucket")
    s3_download.add_argument("--prefix", "-p", default="cassettes", help="S3 prefix")

    # S3 list
    s3_list = s3_subparsers.add_parser("list", help="List cassettes in S3")
    s3_list.add_argument("--bucket", "-b", help="S3 bucket")
    s3_list.add_argument("--prefix", "-p", default="cassettes", help="S3 prefix")
    s3_list.add_argument("--limit", "-n", type=int, default=20, help="Max results")

    # S3 sync
    s3_sync = s3_subparsers.add_parser("sync", help="Sync cassettes with S3")
    s3_sync.add_argument("direction", choices=["up", "down"], help="Sync direction")
    s3_sync.add_argument("--dir", "-d", default="./cassettes", help="Local directory")
    s3_sync.add_argument("--bucket", "-b", help="S3 bucket")
    s3_sync.add_argument("--prefix", "-p", default="cassettes", help="S3 prefix")

    # Search command
    search_parser = subparsers.add_parser(
        "search",
        help="Search cassettes by endpoint, status, etc.",
    )
    search_parser.add_argument(
        "--dir", "-d",
        default="./cassettes",
        help="Cassette directory",
    )
    search_parser.add_argument(
        "--method", "-m",
        help="Filter by HTTP method (GET, POST, etc.)",
    )
    search_parser.add_argument(
        "--endpoint", "-e",
        help="Filter by endpoint path (partial match)",
    )
    search_parser.add_argument(
        "--status",
        type=int,
        help="Filter by exact status code",
    )
    search_parser.add_argument(
        "--errors",
        action="store_true",
        help="Only show error responses (4xx, 5xx)",
    )
    search_parser.add_argument(
        "--service", "-s",
        help="Filter by service name",
    )
    search_parser.add_argument(
        "--limit", "-n",
        type=int,
        default=20,
        help="Maximum results (default: 20)",
    )
    search_parser.add_argument(
        "--json", "-j",
        action="store_true",
        help="Output as JSON",
    )

    # Index command
    index_parser = subparsers.add_parser(
        "index",
        help="Build cassette index for fast searching",
    )
    index_parser.add_argument(
        "--dir", "-d",
        default="./cassettes",
        help="Cassette directory",
    )
    index_parser.add_argument(
        "--out", "-o",
        help="Output index file (default: <dir>/index.json)",
    )

    # Dashboard command
    dashboard_parser = subparsers.add_parser(
        "dashboard",
        help="Generate HTML dashboard to browse cassettes",
    )
    dashboard_parser.add_argument(
        "--dir", "-d",
        default="./cassettes",
        help="Cassette directory (default: ./cassettes)",
    )
    dashboard_parser.add_argument(
        "--out", "-o",
        dest="output",
        help="Output HTML file (default: dashboard.html)",
    )
    dashboard_parser.add_argument(
        "--limit", "-n",
        type=int,
        default=500,
        help="Maximum cassettes to include (default: 500)",
    )
    dashboard_parser.add_argument(
        "--open",
        action="store_true",
        help="Open in browser after generating",
    )

    # Serve command (live dashboard)
    serve_parser = subparsers.add_parser(
        "serve",
        help="Start live dashboard server with replay capability",
    )
    serve_parser.add_argument(
        "--dir", "-d",
        default="./cassettes",
        help="Cassette directory (default: ./cassettes)",
    )
    serve_parser.add_argument(
        "--port", "-p",
        type=int,
        default=8765,
        help="Server port (default: 8765)",
    )
    serve_parser.add_argument(
        "--open",
        action="store_true",
        help="Open in browser after starting",
    )

    parsed = parser.parse_args(args)

    if parsed.command == "list":
        return _cmd_list(parsed.dir, parsed.last)
    elif parsed.command == "show":
        return _cmd_show(parsed.cassette, parsed.events)
    elif parsed.command == "diff":
        return _cmd_diff(
            parsed.cassette_a,
            parsed.cassette_b,
            parsed.json,
            parsed.output,
            parsed.threshold,
        )
    elif parsed.command == "timeline":
        return _cmd_timeline(
            parsed.cassette,
            parsed.output,
            parsed.open,
        )
    elif parsed.command == "s3":
        return _cmd_s3(parsed)
    elif parsed.command == "search":
        return _cmd_search(parsed)
    elif parsed.command == "index":
        return _cmd_index(parsed)
    elif parsed.command == "dashboard":
        return _cmd_dashboard(parsed)
    elif parsed.command == "serve":
        return _cmd_serve(parsed)
    else:
        parser.print_help()
        return 0


def _cmd_list(directory: str, limit: int) -> int:
    """List cassettes in directory."""
    dir_path = Path(directory)

    if not dir_path.exists():
        print(f"Directory not found: {directory}", file=sys.stderr)
        return 1

    # Find all cassette files
    cassettes: list[tuple[Path, float]] = []

    for json_file in dir_path.rglob("*.json"):
        try:
            mtime = json_file.stat().st_mtime
            cassettes.append((json_file, mtime))
        except OSError:
            continue

    if not cassettes:
        print(f"No cassettes found in {directory}")
        return 0

    # Sort by modification time (newest first)
    cassettes.sort(key=lambda x: x[1], reverse=True)

    # Limit results
    cassettes = cassettes[:limit]

    print(f"\nRecent cassettes in {directory}:\n")
    print(f"{'#':<4} {'Filename':<50} {'Size':>10}")
    print("-" * 70)

    for i, (path, _) in enumerate(cassettes, 1):
        relative = path.relative_to(dir_path)
        size = path.stat().st_size
        size_str = _format_size(size)
        print(f"{i:<4} {str(relative):<50} {size_str:>10}")

    print(f"\nShowing {len(cassettes)} of {sum(1 for _ in dir_path.rglob('*.json'))} total cassettes")

    return 0


def _cmd_show(cassette_path: str, show_events: bool) -> int:
    """Show cassette details."""
    from timetracer.cassette import read_cassette
    from timetracer.exceptions import CassetteNotFoundError, CassetteSchemaError

    try:
        cassette = read_cassette(cassette_path)
    except CassetteNotFoundError:
        print(f"Cassette not found: {cassette_path}", file=sys.stderr)
        return 1
    except CassetteSchemaError as e:
        print(f"Schema error: {e}", file=sys.stderr)
        return 1

    # Header
    print(f"\nCassette: {cassette_path}\n")
    print(f"Schema Version: {cassette.schema_version}")

    # Session info
    session = cassette.session
    print("\nSession:")
    print(f" ID: {session.id}")
    print(f" Recorded: {session.recorded_at}")
    print(f" Service: {session.service}")
    print(f" Environment: {session.env}")

    # Request
    req = cassette.request
    print("\nRequest:")
    print(f" {req.method} {req.path}")
    if req.route_template and req.route_template != req.path:
        print(f" Route: {req.route_template}")
    if req.query:
        print(f" Query: {req.query}")

    # Response
    res = cassette.response
    status_icon = "[OK]" if res.status < 400 else "[WARN]"
    print("\nResponse:")
    print(f" {status_icon} Status: {res.status}")
    print(f" Duration: {res.duration_ms:.2f}ms")

    # Events summary
    print(f"\nEvents: {len(cassette.events)} total")

    if cassette.stats.event_counts:
        for event_type, count in cassette.stats.event_counts.items():
            print(f" {event_type}: {count}")

    # Event details (optional)
    if show_events and cassette.events:
        print("\nEvent Details:")
        for event in cassette.events:
            sig = event.signature
            print(f"\n #{event.eid} [{event.event_type.value}]")
            print(f" {sig.method} {sig.url}")
            print(f" Offset: +{event.start_offset_ms:.1f}ms, Duration: {event.duration_ms:.1f}ms")
            if event.result.status:
                result_icon = "[OK]" if event.result.status < 400 else "[WARN]"
                print(f" {result_icon} Response: {event.result.status}")

    print()  # Final newline
    return 0


def _format_size(size_bytes: int) -> str:
    """Format bytes as human-readable size."""
    if size_bytes < 1024:
        return f"{size_bytes}B"
    elif size_bytes < 1024 * 1024:
        return f"{size_bytes / 1024:.1f}KB"
    else:
        return f"{size_bytes / (1024 * 1024):.1f}MB"


def _cmd_diff(
    cassette_a: str,
    cassette_b: str,
    as_json: bool,
    output: str | None,
    threshold: float,
) -> int:
    """Compare two cassettes and show differences."""
    import json

    from timetracer.diff import diff_cassettes, format_diff_report
    from timetracer.exceptions import CassetteNotFoundError, CassetteSchemaError

    try:
        report = diff_cassettes(cassette_a, cassette_b, duration_threshold_pct=threshold)
    except CassetteNotFoundError as e:
        print(f"Cassette not found: {e.path}", file=sys.stderr)
        return 1
    except CassetteSchemaError as e:
        print(f"Schema error: {e}", file=sys.stderr)
        return 1

    # Format output
    if as_json:
        output_text = json.dumps(report.to_dict(), indent=2)
    else:
        output_text = format_diff_report(report)

    # Write to file or stdout
    if output:
        with open(output, "w", encoding="utf-8") as f:
            f.write(output_text)
        print(f"Report written to: {output}")
    else:
        print(output_text)

    # Return code based on result
    if report.is_regression:
        return 2  # Regression detected
    elif report.has_differences:
        return 1  # Differences found
    else:
        return 0  # No differences


def _cmd_timeline(
    cassette_path: str,
    output: str | None,
    open_browser: bool,
) -> int:
    """Generate HTML timeline visualization."""
    from timetracer.exceptions import CassetteNotFoundError, CassetteSchemaError
    from timetracer.timeline import generate_timeline, render_timeline_html

    try:
        timeline_data = generate_timeline(cassette_path)
    except CassetteNotFoundError:
        print(f"Cassette not found: {cassette_path}", file=sys.stderr)
        return 1
    except CassetteSchemaError as e:
        print(f"Schema error: {e}", file=sys.stderr)
        return 1

    # Generate HTML
    html_content = render_timeline_html(timeline_data)

    # Determine output path
    if output:
        output_path = output
    else:
        # Default: same name as cassette but .html
        output_path = cassette_path.rsplit(".", 1)[0] + ".html"

    # Write HTML file
    with open(output_path, "w", encoding="utf-8") as f:
        f.write(html_content)

    print(f"Timeline generated: {output_path}")
    print(f" Request: {timeline_data.method} {timeline_data.path}")
    print(f" Duration: {timeline_data.total_duration_ms:.1f}ms")
    print(f" Events: {timeline_data.event_count}")

    # Open in browser if requested
    if open_browser:
        import webbrowser
        webbrowser.open(f"file://{Path(output_path).absolute()}")
        print(" Opened in browser")

    return 0


def _cmd_s3(parsed) -> int:
    """Handle S3 commands."""
    import os

    try:
        from timetracer.storage.s3 import S3Config, S3Store
    except ImportError:
        print("boto3 is required for S3 storage. Install with: pip install timetracer[s3]", file=sys.stderr)
        return 1

    # Get bucket from args or env
    bucket = getattr(parsed, 'bucket', None) or os.environ.get("TIMETRACER_S3_BUCKET")
    if not bucket:
        print("S3 bucket required. Set --bucket or TIMETRACER_S3_BUCKET", file=sys.stderr)
        return 1

    prefix = getattr(parsed, 'prefix', 'cassettes')

    config = S3Config(bucket=bucket, prefix=prefix)
    store = S3Store(config)

    cmd = parsed.s3_command

    if cmd == "upload":
        path = Path(parsed.path)
        if path.is_file():
            key = store.upload(str(path))
            print(f"Uploaded: s3://{bucket}/{key}")
        elif path.is_dir():
            keys = store.sync_upload(str(path))
            print(f"Uploaded {len(keys)} cassettes to s3://{bucket}/{prefix}/")
        else:
            print(f"Path not found: {path}", file=sys.stderr)
            return 1

    elif cmd == "download":
        out = parsed.out or f"./{parsed.key}"
        local = store.download(parsed.key, out)
        print(f"Downloaded: {local}")

    elif cmd == "list":
        limit = parsed.limit
        print(f"\nCassettes in s3://{bucket}/{prefix}/\n")
        count = 0
        for key in store.list(limit=limit):
            print(f" {key}")
            count += 1
        print(f"\nShowing {count} cassettes")

    elif cmd == "sync":
        local_dir = parsed.dir
        if parsed.direction == "up":
            keys = store.sync_upload(local_dir)
            print(f"Synced {len(keys)} cassettes to s3://{bucket}/{prefix}/")
        else:
            paths = store.sync_download(local_dir)
            print(f"Downloaded {len(paths)} cassettes to {local_dir}")

    else:
        print("Unknown S3 command. Use: upload, download, list, sync", file=sys.stderr)
        return 1

    return 0


def _cmd_search(parsed) -> int:
    """Search cassettes."""
    import json as json_module

    from timetracer.catalog import search_cassettes

    results = search_cassettes(
        cassette_dir=parsed.dir,
        method=parsed.method,
        endpoint=parsed.endpoint,
        status_min=parsed.status,
        status_max=parsed.status,
        errors_only=parsed.errors,
        service=parsed.service,
        limit=parsed.limit,
    )

    if parsed.json:
        output = [r.to_dict() for r in results]
        print(json_module.dumps(output, indent=2))
    else:
        if not results:
            print("No cassettes found matching criteria.")
            return 0

        print(f"\nFound {len(results)} cassettes:\n")
        print(f"{'#':<4} {'Method':<8} {'Endpoint':<30} {'Status':<8} {'Duration':<10}")
        print("-" * 70)

        for i, entry in enumerate(results, 1):
            status_icon = "[OK]" if entry.status < 400 else "[ERR]"
            print(
                f"{i:<4} {entry.method:<8} {entry.endpoint[:28]:<30} "
                f"{status_icon}{entry.status:<6} {entry.duration_ms:.0f}ms"
            )

        print(f"\nShowing {len(results)} results")

    return 0


def _cmd_index(parsed) -> int:
    """Build cassette index."""
    from timetracer.catalog import build_index, save_index

    print(f"Building index for {parsed.dir}...")

    index = build_index(parsed.dir)

    output_path = parsed.out or f"{parsed.dir}/index.json"
    save_index(index, output_path)

    print(f"Indexed {index.total_count} cassettes")
    print(f" Output: {output_path}")

    return 0


def _cmd_dashboard(parsed) -> int:
    """Generate HTML dashboard for browsing cassettes."""
    from pathlib import Path

    from timetracer.dashboard import generate_dashboard, render_dashboard_html

    print(f"Generating dashboard for {parsed.dir}...")

    # Generate dashboard data
    dashboard_data = generate_dashboard(parsed.dir, limit=parsed.limit)

    if dashboard_data.total_count == 0:
        print(f"No cassettes found in {parsed.dir}")
        return 1

    # Render HTML
    html_content = render_dashboard_html(dashboard_data)

    # Determine output path
    output_path = parsed.output or "dashboard.html"

    # Write file
    with open(output_path, "w", encoding="utf-8") as f:
        f.write(html_content)

    print(f"Dashboard generated: {output_path}")
    print(f" Cassettes: {dashboard_data.total_count}")
    print(f" Success: {dashboard_data.success_count}")
    print(f" Errors: {dashboard_data.error_count}")

    # Open in browser if requested
    if parsed.open:
        import webbrowser
        webbrowser.open(f"file://{Path(output_path).absolute()}")
        print(" Opened in browser")

    return 0


def _cmd_serve(parsed) -> int:
    """Start live dashboard server with replay capability."""
    import threading
    import webbrowser

    from timetracer.dashboard.server import start_server

    port = parsed.port
    url = f"http://localhost:{port}"

    # Open browser in a separate thread after a short delay
    if parsed.open:
        def open_browser():
            import time
            time.sleep(1)
            webbrowser.open(url)
        threading.Thread(target=open_browser, daemon=True).start()

    # Start server (blocks)
    start_server(parsed.dir, port)

    return 0


if __name__ == "__main__":
    sys.exit(main())
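Because main() takes an explicit argument list and returns a process exit code, the CLI in this file can also be driven programmatically, for example from a test. A minimal sketch, assuming the package is installed; the cassette paths below are hypothetical, and the "timetracer" command name in the comments is inferred from the parser's prog setting rather than from entry_points.txt:

from timetracer.cli.main import main

# Roughly equivalent to running "timetracer list --dir ./cassettes --last 5" in a shell.
exit_code = main(["list", "--dir", "./cassettes", "--last", "5"])

# Compare two cassettes; per _cmd_diff above, 0 = no differences, 1 = differences, 2 = regression.
exit_code = main([
    "diff",
    "-a", "cassettes/baseline.json",  # hypothetical baseline cassette
    "-b", "cassettes/latest.json",    # hypothetical comparison cassette
    "--threshold", "25",
])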