timetracer-1.1.0-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. timetracer/__init__.py +29 -0
  2. timetracer/cassette/__init__.py +6 -0
  3. timetracer/cassette/io.py +421 -0
  4. timetracer/cassette/naming.py +69 -0
  5. timetracer/catalog/__init__.py +288 -0
  6. timetracer/cli/__init__.py +5 -0
  7. timetracer/cli/commands/__init__.py +1 -0
  8. timetracer/cli/main.py +692 -0
  9. timetracer/config.py +297 -0
  10. timetracer/constants.py +129 -0
  11. timetracer/context.py +93 -0
  12. timetracer/dashboard/__init__.py +14 -0
  13. timetracer/dashboard/generator.py +229 -0
  14. timetracer/dashboard/server.py +244 -0
  15. timetracer/dashboard/template.py +874 -0
  16. timetracer/diff/__init__.py +6 -0
  17. timetracer/diff/engine.py +311 -0
  18. timetracer/diff/report.py +113 -0
  19. timetracer/exceptions.py +113 -0
  20. timetracer/integrations/__init__.py +27 -0
  21. timetracer/integrations/fastapi.py +537 -0
  22. timetracer/integrations/flask.py +507 -0
  23. timetracer/plugins/__init__.py +42 -0
  24. timetracer/plugins/base.py +73 -0
  25. timetracer/plugins/httpx_plugin.py +413 -0
  26. timetracer/plugins/redis_plugin.py +297 -0
  27. timetracer/plugins/requests_plugin.py +333 -0
  28. timetracer/plugins/sqlalchemy_plugin.py +280 -0
  29. timetracer/policies/__init__.py +16 -0
  30. timetracer/policies/capture.py +64 -0
  31. timetracer/policies/redaction.py +165 -0
  32. timetracer/replay/__init__.py +6 -0
  33. timetracer/replay/engine.py +75 -0
  34. timetracer/replay/errors.py +9 -0
  35. timetracer/replay/matching.py +83 -0
  36. timetracer/session.py +390 -0
  37. timetracer/storage/__init__.py +18 -0
  38. timetracer/storage/s3.py +364 -0
  39. timetracer/timeline/__init__.py +6 -0
  40. timetracer/timeline/generator.py +150 -0
  41. timetracer/timeline/template.py +370 -0
  42. timetracer/types.py +197 -0
  43. timetracer/utils/__init__.py +6 -0
  44. timetracer/utils/hashing.py +68 -0
  45. timetracer/utils/time.py +106 -0
  46. timetracer-1.1.0.dist-info/METADATA +286 -0
  47. timetracer-1.1.0.dist-info/RECORD +51 -0
  48. timetracer-1.1.0.dist-info/WHEEL +5 -0
  49. timetracer-1.1.0.dist-info/entry_points.txt +2 -0
  50. timetracer-1.1.0.dist-info/licenses/LICENSE +21 -0
  51. timetracer-1.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,364 @@
+ """
+ S3 storage backend for Timetracer cassettes.
+
+ Enables storing and retrieving cassettes from AWS S3 or S3-compatible storage.
+ """
+
+ from __future__ import annotations
+
+ import json
+ import os
+ from dataclasses import dataclass
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, Iterator
+
+ if TYPE_CHECKING:
+     from mypy_boto3_s3 import S3Client
+
+
+ @dataclass
+ class S3Config:
+     """Configuration for S3 storage."""
+     bucket: str
+     prefix: str = "cassettes"
+     region: str | None = None
+     endpoint_url: str | None = None  # For S3-compatible (MinIO, LocalStack)
+     access_key: str | None = None
+     secret_key: str | None = None
+
+     @classmethod
+     def from_env(cls) -> "S3Config":
+         """
+         Load S3 configuration from environment variables.
+
+         Environment variables:
+             TIMETRACER_S3_BUCKET: S3 bucket name (required)
+             TIMETRACER_S3_PREFIX: Key prefix (default: "cassettes")
+             TIMETRACER_S3_REGION: AWS region
+             TIMETRACER_S3_ENDPOINT: Custom endpoint URL (for MinIO, etc.)
+             AWS_ACCESS_KEY_ID: AWS access key
+             AWS_SECRET_ACCESS_KEY: AWS secret key
+         """
+         bucket = os.environ.get("TIMETRACER_S3_BUCKET")
+         if not bucket:
+             raise ValueError("TIMETRACER_S3_BUCKET environment variable is required")
+
+         return cls(
+             bucket=bucket,
+             prefix=os.environ.get("TIMETRACER_S3_PREFIX", "cassettes"),
+             region=os.environ.get("TIMETRACER_S3_REGION") or os.environ.get("AWS_REGION"),
+             endpoint_url=os.environ.get("TIMETRACER_S3_ENDPOINT"),
+             access_key=os.environ.get("AWS_ACCESS_KEY_ID"),
+             secret_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
+         )
+
+
+ class S3Store:
+     """
+     S3 storage backend for cassettes.
+
+     Usage:
+         from timetracer.storage.s3 import S3Store, S3Config
+
+         config = S3Config(bucket="my-cassettes", prefix="api-traces")
+         store = S3Store(config)
+
+         # Upload a local cassette
+         store.upload("./cassettes/2026-01-15/POST__checkout__a91c.json")
+
+         # Download a cassette
+         store.download("2026-01-15/POST__checkout__a91c.json", "./local_copy.json")
+
+         # List cassettes
+         for key in store.list():
+             print(key)
+     """
+
+     def __init__(self, config: S3Config) -> None:
+         """
+         Initialize S3 store.
+
+         Args:
+             config: S3 configuration.
+         """
+         self.config = config
+         self._client: "S3Client | None" = None
+
+     @property
+     def client(self) -> "S3Client":
+         """Get or create S3 client."""
+         if self._client is None:
+             self._client = self._create_client()
+         return self._client
+
+     def _create_client(self) -> "S3Client":
+         """Create boto3 S3 client."""
+         try:
+             import boto3
+         except ImportError:
+             raise ImportError(
+                 "boto3 is required for S3 storage. "
+                 "Install with: pip install timetracer[s3]"
+             )
+
+         kwargs: dict[str, Any] = {}
+
+         if self.config.region:
+             kwargs["region_name"] = self.config.region
+
+         if self.config.endpoint_url:
+             kwargs["endpoint_url"] = self.config.endpoint_url
+
+         if self.config.access_key and self.config.secret_key:
+             kwargs["aws_access_key_id"] = self.config.access_key
+             kwargs["aws_secret_access_key"] = self.config.secret_key
+
+         return boto3.client("s3", **kwargs)
+
+     def _make_key(self, path: str) -> str:
+         """Create S3 key from path."""
+         # Normalize path
+         path = path.replace("\\", "/")
+
+         # Remove leading slashes
+         path = path.lstrip("/")
+
+         # Add prefix
+         if self.config.prefix:
+             return f"{self.config.prefix.rstrip('/')}/{path}"
+         return path
+
+     def upload(
+         self,
+         local_path: str,
+         remote_key: str | None = None,
+     ) -> str:
+         """
+         Upload a cassette to S3.
+
+         Args:
+             local_path: Path to local cassette file.
+             remote_key: Optional S3 key. If None, uses filename.
+
+         Returns:
+             S3 key where cassette was uploaded.
+         """
+         local_path = Path(local_path)
+
+         if not local_path.exists():
+             raise FileNotFoundError(f"Cassette not found: {local_path}")
+
+         # Determine key
+         if remote_key is None:
+             # Use date/filename structure
+             remote_key = local_path.name
+             if local_path.parent.name and local_path.parent.name != ".":
+                 remote_key = f"{local_path.parent.name}/{local_path.name}"
+
+         s3_key = self._make_key(remote_key)
+
+         # Upload
+         self.client.upload_file(
+             str(local_path),
+             self.config.bucket,
+             s3_key,
+             ExtraArgs={"ContentType": "application/json"},
+         )
+
+         return s3_key
+
+     def download(
+         self,
+         remote_key: str,
+         local_path: str,
+     ) -> str:
+         """
+         Download a cassette from S3.
+
+         Args:
+             remote_key: S3 key (without prefix).
+             local_path: Where to save locally.
+
+         Returns:
+             Local path where file was saved.
+         """
+         s3_key = self._make_key(remote_key)
+         local_path = Path(local_path)
+
+         # Create parent directory
+         local_path.parent.mkdir(parents=True, exist_ok=True)
+
+         # Download
+         self.client.download_file(
+             self.config.bucket,
+             s3_key,
+             str(local_path),
+         )
+
+         return str(local_path)
+
+     def read(self, remote_key: str) -> dict[str, Any]:
+         """
+         Read a cassette directly from S3.
+
+         Args:
+             remote_key: S3 key (without prefix).
+
+         Returns:
+             Cassette data as dict.
+         """
+         s3_key = self._make_key(remote_key)
+
+         response = self.client.get_object(
+             Bucket=self.config.bucket,
+             Key=s3_key,
+         )
+
+         body = response["Body"].read()
+         return json.loads(body.decode("utf-8"))
+
+     def write(self, remote_key: str, data: dict[str, Any]) -> str:
+         """
+         Write cassette data directly to S3.
+
+         Args:
+             remote_key: S3 key (without prefix).
+             data: Cassette data as dict.
+
+         Returns:
+             S3 key where cassette was written.
+         """
+         s3_key = self._make_key(remote_key)
+
+         body = json.dumps(data, indent=2).encode("utf-8")
+
+         self.client.put_object(
+             Bucket=self.config.bucket,
+             Key=s3_key,
+             Body=body,
+             ContentType="application/json",
+         )
+
+         return s3_key
+
+     def list(
+         self,
+         prefix: str = "",
+         limit: int = 100,
+     ) -> Iterator[str]:
+         """
+         List cassettes in S3.
+
+         Args:
+             prefix: Additional prefix filter.
+             limit: Maximum number of results.
+
+         Yields:
+             S3 keys (relative to store prefix).
+         """
+         s3_prefix = self._make_key(prefix)
+
+         paginator = self.client.get_paginator("list_objects_v2")
+         pages = paginator.paginate(
+             Bucket=self.config.bucket,
+             Prefix=s3_prefix,
+             PaginationConfig={"MaxItems": limit},
+         )
+
+         base_prefix = self.config.prefix.rstrip("/") + "/" if self.config.prefix else ""
+
+         for page in pages:
+             for obj in page.get("Contents", []):
+                 key = obj["Key"]
+                 # Remove base prefix for relative key
+                 if key.startswith(base_prefix):
+                     key = key[len(base_prefix):]
+                 yield key
+
+     def delete(self, remote_key: str) -> None:
+         """
+         Delete a cassette from S3.
+
+         Args:
+             remote_key: S3 key (without prefix).
+         """
+         s3_key = self._make_key(remote_key)
+
+         self.client.delete_object(
+             Bucket=self.config.bucket,
+             Key=s3_key,
+         )
+
+     def exists(self, remote_key: str) -> bool:
+         """
+         Check if a cassette exists in S3.
+
+         Args:
+             remote_key: S3 key (without prefix).
+
+         Returns:
+             True if cassette exists.
+         """
+         s3_key = self._make_key(remote_key)
+
+         try:
+             self.client.head_object(
+                 Bucket=self.config.bucket,
+                 Key=s3_key,
+             )
+             return True
+         except Exception:
+             return False
+
+     def sync_upload(
+         self,
+         local_dir: str,
+         remote_prefix: str = "",
+     ) -> list[str]:
+         """
+         Sync local cassettes to S3.
+
+         Args:
+             local_dir: Local directory with cassettes.
+             remote_prefix: Optional prefix in S3.
+
+         Returns:
+             List of uploaded S3 keys.
+         """
+         local_dir = Path(local_dir)
+         uploaded = []
+
+         for json_file in local_dir.rglob("*.json"):
+             relative = json_file.relative_to(local_dir)
+             remote_key = f"{remote_prefix}/{relative}" if remote_prefix else str(relative)
+             remote_key = remote_key.replace("\\", "/")
+
+             key = self.upload(str(json_file), remote_key)
+             uploaded.append(key)
+
+         return uploaded
+
+     def sync_download(
+         self,
+         local_dir: str,
+         remote_prefix: str = "",
+     ) -> list[str]:
+         """
+         Sync S3 cassettes to local directory.
+
+         Args:
+             local_dir: Local directory to download to.
+             remote_prefix: Optional prefix filter in S3.
+
+         Returns:
+             List of downloaded local paths.
+         """
+         local_dir = Path(local_dir)
+         downloaded = []
+
+         for remote_key in self.list(prefix=remote_prefix, limit=1000):
+             local_path = local_dir / remote_key
+             self.download(remote_key, str(local_path))
+             downloaded.append(str(local_path))
+
+         return downloaded
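
For orientation, a minimal round-trip sketch against the S3Store API in the file above. The bucket name, endpoint URL, object keys, and the cassette payload are illustrative assumptions (e.g. a local MinIO reached via endpoint_url), not values shipped with the package; S3Config.from_env() could be used instead when the TIMETRACER_S3_* environment variables are set.

# Hypothetical usage sketch; bucket, endpoint, keys, and payload are made up.
from timetracer.storage.s3 import S3Config, S3Store

config = S3Config(
    bucket="example-cassettes",            # assumed bucket name
    prefix="api-traces",
    endpoint_url="http://localhost:9000",  # assumed S3-compatible endpoint (MinIO/LocalStack)
)
store = S3Store(config)

# Write a cassette dict directly, read it back, and confirm it exists.
key = store.write("2026-01-15/GET__health__0000.json", {"request": {"method": "GET"}})
data = store.read("2026-01-15/GET__health__0000.json")
assert store.exists("2026-01-15/GET__health__0000.json")

# list() yields keys relative to the configured "api-traces" prefix.
for k in store.list(prefix="2026-01-15/", limit=10):
    print(k)

store.delete("2026-01-15/GET__health__0000.json")
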
@@ -0,0 +1,6 @@
+ """Timeline module for HTML visualization."""
+
+ from timetracer.timeline.generator import generate_timeline
+ from timetracer.timeline.template import render_timeline_html
+
+ __all__ = ["generate_timeline", "render_timeline_html"]
@@ -0,0 +1,150 @@
+ """
+ Timeline data generator.
+
+ Converts cassette data into timeline-friendly format for visualization.
+ """
+
+ from __future__ import annotations
+
+ from dataclasses import dataclass, field
+ from typing import Any
+
+ from timetracer.cassette import read_cassette
+ from timetracer.types import Cassette
+
+
+ @dataclass
+ class TimelineEvent:
+     """A single event on the timeline."""
+     id: int
+     label: str
+     event_type: str
+     start_ms: float
+     duration_ms: float
+     end_ms: float
+     status: int | None = None
+     url: str | None = None
+     is_error: bool = False
+     details: dict[str, Any] = field(default_factory=dict)
+
+
+ @dataclass
+ class TimelineData:
+     """Complete timeline data for visualization."""
+     title: str
+     method: str
+     path: str
+     total_duration_ms: float
+     recorded_at: str
+
+     # Main request span
+     request_start: float = 0.0
+     request_end: float = 0.0
+     response_status: int = 0
+
+     # All events
+     events: list[TimelineEvent] = field(default_factory=list)
+
+     # Stats
+     event_count: int = 0
+     error_count: int = 0
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert to dictionary for JSON/template use."""
+         return {
+             "title": self.title,
+             "method": self.method,
+             "path": self.path,
+             "total_duration_ms": self.total_duration_ms,
+             "recorded_at": self.recorded_at,
+             "request": {
+                 "start": self.request_start,
+                 "end": self.request_end,
+                 "status": self.response_status,
+             },
+             "events": [
+                 {
+                     "id": e.id,
+                     "label": e.label,
+                     "type": e.event_type,
+                     "start_ms": e.start_ms,
+                     "duration_ms": e.duration_ms,
+                     "end_ms": e.end_ms,
+                     "status": e.status,
+                     "url": e.url,
+                     "is_error": e.is_error,
+                 }
+                 for e in self.events
+             ],
+             "stats": {
+                 "event_count": self.event_count,
+                 "error_count": self.error_count,
+             },
+         }
+
+
+ def generate_timeline(cassette_path: str) -> TimelineData:
+     """
+     Generate timeline data from a cassette file.
+
+     Args:
+         cassette_path: Path to the cassette file.
+
+     Returns:
+         TimelineData ready for visualization.
+     """
+     cassette = read_cassette(cassette_path)
+     return _cassette_to_timeline(cassette)
+
+
+ def _cassette_to_timeline(cassette: Cassette) -> TimelineData:
+     """Convert Cassette to TimelineData."""
+     req = cassette.request
+     res = cassette.response
+
+     timeline = TimelineData(
+         title=f"{req.method} {req.path}",
+         method=req.method,
+         path=req.path,
+         total_duration_ms=res.duration_ms,
+         recorded_at=cassette.session.recorded_at,
+         request_start=0.0,
+         request_end=res.duration_ms,
+         response_status=res.status,
+     )
+
+     # Convert events
+     error_count = 0
+     for event in cassette.events:
+         is_error = (event.result.status or 0) >= 400 or event.result.error is not None
+         if is_error:
+             error_count += 1
+
+         # Create label
+         sig = event.signature
+         if sig.url:
+             # Shorten URL for display
+             url_short = sig.url
+             if len(url_short) > 50:
+                 url_short = url_short[:47] + "..."
+             label = f"{sig.method} {url_short}"
+         else:
+             label = f"{event.event_type.value}"
+
+         timeline_event = TimelineEvent(
+             id=event.eid,
+             label=label,
+             event_type=event.event_type.value,
+             start_ms=event.start_offset_ms,
+             duration_ms=event.duration_ms,
+             end_ms=event.start_offset_ms + event.duration_ms,
+             status=event.result.status,
+             url=sig.url,
+             is_error=is_error,
+         )
+         timeline.events.append(timeline_event)
+
+     timeline.event_count = len(timeline.events)
+     timeline.error_count = error_count
+
+     return timeline
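
As a quick illustration of generator.py above, a sketch that builds timeline data from a recorded cassette and prints the dictionary form that to_dict() produces for JSON/template use. The cassette path is a placeholder borrowed from the S3Store docstring and is assumed to point at a file produced by timetracer's recording flow.

# Hypothetical usage sketch; the cassette path is a placeholder.
import json

from timetracer.timeline import generate_timeline

timeline = generate_timeline("./cassettes/2026-01-15/POST__checkout__a91c.json")

# Summary fields populated by _cassette_to_timeline().
print(timeline.title, f"({timeline.total_duration_ms:.1f} ms)")
print(f"{timeline.event_count} events, {timeline.error_count} errors")

# to_dict() is the JSON/template-friendly form of the timeline.
print(json.dumps(timeline.to_dict(), indent=2))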