prela 0.1.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- prela/__init__.py +394 -0
- prela/_version.py +3 -0
- prela/contrib/CLI.md +431 -0
- prela/contrib/README.md +118 -0
- prela/contrib/__init__.py +5 -0
- prela/contrib/cli.py +1063 -0
- prela/contrib/explorer.py +571 -0
- prela/core/__init__.py +64 -0
- prela/core/clock.py +98 -0
- prela/core/context.py +228 -0
- prela/core/replay.py +403 -0
- prela/core/sampler.py +178 -0
- prela/core/span.py +295 -0
- prela/core/tracer.py +498 -0
- prela/evals/__init__.py +94 -0
- prela/evals/assertions/README.md +484 -0
- prela/evals/assertions/__init__.py +78 -0
- prela/evals/assertions/base.py +90 -0
- prela/evals/assertions/multi_agent.py +625 -0
- prela/evals/assertions/semantic.py +223 -0
- prela/evals/assertions/structural.py +443 -0
- prela/evals/assertions/tool.py +380 -0
- prela/evals/case.py +370 -0
- prela/evals/n8n/__init__.py +69 -0
- prela/evals/n8n/assertions.py +450 -0
- prela/evals/n8n/runner.py +497 -0
- prela/evals/reporters/README.md +184 -0
- prela/evals/reporters/__init__.py +32 -0
- prela/evals/reporters/console.py +251 -0
- prela/evals/reporters/json.py +176 -0
- prela/evals/reporters/junit.py +278 -0
- prela/evals/runner.py +525 -0
- prela/evals/suite.py +316 -0
- prela/exporters/__init__.py +27 -0
- prela/exporters/base.py +189 -0
- prela/exporters/console.py +443 -0
- prela/exporters/file.py +322 -0
- prela/exporters/http.py +394 -0
- prela/exporters/multi.py +154 -0
- prela/exporters/otlp.py +388 -0
- prela/instrumentation/ANTHROPIC.md +297 -0
- prela/instrumentation/LANGCHAIN.md +480 -0
- prela/instrumentation/OPENAI.md +59 -0
- prela/instrumentation/__init__.py +49 -0
- prela/instrumentation/anthropic.py +1436 -0
- prela/instrumentation/auto.py +129 -0
- prela/instrumentation/base.py +436 -0
- prela/instrumentation/langchain.py +959 -0
- prela/instrumentation/llamaindex.py +719 -0
- prela/instrumentation/multi_agent/__init__.py +48 -0
- prela/instrumentation/multi_agent/autogen.py +357 -0
- prela/instrumentation/multi_agent/crewai.py +404 -0
- prela/instrumentation/multi_agent/langgraph.py +299 -0
- prela/instrumentation/multi_agent/models.py +203 -0
- prela/instrumentation/multi_agent/swarm.py +231 -0
- prela/instrumentation/n8n/__init__.py +68 -0
- prela/instrumentation/n8n/code_node.py +534 -0
- prela/instrumentation/n8n/models.py +336 -0
- prela/instrumentation/n8n/webhook.py +489 -0
- prela/instrumentation/openai.py +1198 -0
- prela/license.py +245 -0
- prela/replay/__init__.py +31 -0
- prela/replay/comparison.py +390 -0
- prela/replay/engine.py +1227 -0
- prela/replay/loader.py +231 -0
- prela/replay/result.py +196 -0
- prela-0.1.0.dist-info/METADATA +399 -0
- prela-0.1.0.dist-info/RECORD +71 -0
- prela-0.1.0.dist-info/WHEEL +4 -0
- prela-0.1.0.dist-info/entry_points.txt +2 -0
- prela-0.1.0.dist-info/licenses/LICENSE +190 -0
prela/exporters/file.py
ADDED
@@ -0,0 +1,322 @@
"""File exporter for writing spans to JSONL files."""

from __future__ import annotations

import json
import threading
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Iterator

from prela.core.span import Span
from prela.exporters.base import BaseExporter, ExportResult


class FileExporter(BaseExporter):
    """
    Export spans to JSONL files with rotation and trace management.

    Features:
    - Thread-safe writes using a lock
    - Automatic directory creation
    - Date-based file naming with sequence numbers
    - Optional file rotation based on size
    - Trace retrieval by trace_id
    - Trace listing by date range
    - Old trace cleanup

    File naming: traces-{date}-{sequence}.jsonl
    Example: traces-2025-01-26-001.jsonl

    The JSONL format writes one JSON object per line, making it easy to
    stream and process large trace files.

    Example:
        ```python
        from prela.core.tracer import Tracer
        from prela.exporters.file import FileExporter

        tracer = Tracer(
            service_name="my-app",
            exporter=FileExporter(
                directory="./traces",
                max_file_size_mb=100,
                rotate=True
            )
        )

        with tracer.span("operation") as span:
            span.set_attribute("key", "value")
            # Span is automatically written to ./traces/traces-2025-01-26-001.jsonl
        ```
    """

    def __init__(
        self,
        directory: str | Path = "./traces",
        format: str = "jsonl",
        max_file_size_mb: int = 100,
        rotate: bool = True,
    ):
        """
        Initialize file exporter.

        Args:
            directory: Directory to store trace files (e.g., "./traces")
            format: File format - "jsonl" or "ndjson" (both are equivalent)
            max_file_size_mb: Maximum file size in MB before rotation
            rotate: Whether to rotate files when size exceeded
        """
        self.directory = Path(directory)
        self.format = format if format in ("jsonl", "ndjson") else "jsonl"
        self.max_file_size_bytes = max_file_size_mb * 1024 * 1024
        self.rotate = rotate
        self._lock = threading.Lock()
        self._current_file: Path | None = None
        self._current_sequence = 1

        # Create directory if needed
        self.directory.mkdir(parents=True, exist_ok=True)

        # Initialize current file path
        self._update_current_file()

    def _update_current_file(self) -> None:
        """Update the current file path based on date and sequence."""
        date_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")

        # Find the next available sequence number for today
        self._current_sequence = 1
        while True:
            filename = f"traces-{date_str}-{self._current_sequence:03d}.{self.format}"
            file_path = self.directory / filename

            # If file doesn't exist or is under size limit, use it
            if not file_path.exists():
                self._current_file = file_path
                break

            if self.rotate and file_path.stat().st_size >= self.max_file_size_bytes:
                # File is full, try next sequence
                self._current_sequence += 1
            else:
                # File exists and has space
                self._current_file = file_path
                break

    def _check_rotation(self) -> None:
        """Check if current file needs rotation and update if needed."""
        if not self.rotate or not self._current_file:
            return

        # Check if date changed
        current_date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
        file_date = self._current_file.stem.split("-")[1:4]  # ["2025", "01", "26"]
        file_date_str = "-".join(file_date)

        if current_date != file_date_str:
            # Date changed, reset to sequence 1
            self._update_current_file()
            return

        # Check file size
        if self._current_file.exists() and self._current_file.stat().st_size >= self.max_file_size_bytes:
            self._current_sequence += 1
            self._update_current_file()

    def export(self, spans: list[Span]) -> ExportResult:
        """
        Export spans to file.

        Args:
            spans: List of spans to export

        Returns:
            ExportResult.SUCCESS if successful, ExportResult.FAILURE otherwise
        """
        if not spans:
            return ExportResult.SUCCESS

        try:
            with self._lock:
                # Check if rotation needed
                self._check_rotation()

                # Append spans
                with open(self._current_file, "a", encoding="utf-8") as f:
                    for span in spans:
                        span_dict = span.to_dict()
                        json_line = json.dumps(span_dict)
                        f.write(json_line + "\n")

            return ExportResult.SUCCESS
        except Exception:
            # Silently fail - don't crash user code due to export failures
            return ExportResult.FAILURE

    def get_trace_file(self, trace_id: str) -> Path | None:
        """
        Find the file containing a specific trace.

        Args:
            trace_id: Trace ID to search for

        Returns:
            Path to the file containing the trace, or None if not found
        """
        # Search through all trace files
        pattern = f"traces-*.{self.format}"
        for file_path in sorted(self.directory.glob(pattern)):
            try:
                with open(file_path, "r", encoding="utf-8") as f:
                    for line in f:
                        if not line.strip():
                            continue
                        try:
                            data = json.loads(line)
                            if data.get("trace_id") == trace_id:
                                return file_path
                        except json.JSONDecodeError:
                            continue
            except Exception:
                continue

        return None

    def read_traces(self, trace_id: str | None = None) -> Iterator[Span]:
        """
        Read traces from files.

        Args:
            trace_id: Optional trace ID to filter by. If None, reads all traces.

        Yields:
            Span objects from the trace files
        """
        pattern = f"traces-*.{self.format}"

        # If trace_id provided, only read from that file
        if trace_id:
            file_path = self.get_trace_file(trace_id)
            if file_path:
                files_to_read = [file_path]
            else:
                files_to_read = []
        else:
            files_to_read = sorted(self.directory.glob(pattern))

        for file_path in files_to_read:
            try:
                with open(file_path, "r", encoding="utf-8") as f:
                    for line in f:
                        if not line.strip():
                            continue
                        try:
                            data = json.loads(line)
                            if trace_id is None or data.get("trace_id") == trace_id:
                                span = Span.from_dict(data)
                                yield span
                        except (json.JSONDecodeError, KeyError, ValueError):
                            # Skip malformed lines
                            continue
            except Exception:
                # Skip files that can't be read
                continue

    def list_traces(self, start: datetime, end: datetime) -> list[str]:
        """
        List trace IDs within a date range.

        Args:
            start: Start datetime (inclusive)
            end: End datetime (inclusive)

        Returns:
            List of unique trace IDs found in the date range
        """
        trace_ids = set()
        pattern = f"traces-*.{self.format}"

        for file_path in sorted(self.directory.glob(pattern)):
            # Extract date from filename: traces-2025-01-26-001.jsonl
            try:
                parts = file_path.stem.split("-")
                if len(parts) < 5:
                    continue

                file_date_str = f"{parts[1]}-{parts[2]}-{parts[3]}"
                file_date = datetime.strptime(file_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc)

                # Skip files whose date falls outside the range (inclusive by date)
                if not (start.date() <= file_date.date() <= end.date()):
                    continue

                # Read traces from this file
                with open(file_path, "r", encoding="utf-8") as f:
                    for line in f:
                        if not line.strip():
                            continue
                        try:
                            data = json.loads(line)
                            # Check if span timestamp is in range (inclusive)
                            span_time = datetime.fromisoformat(data["started_at"])
                            if start <= span_time <= end:
                                trace_ids.add(data["trace_id"])
                        except (json.JSONDecodeError, KeyError, ValueError):
                            continue
            except Exception:
                continue

        return sorted(trace_ids)

    def cleanup_old_traces(self, days: int) -> int:
        """
        Delete trace files older than specified days.

        Args:
            days: Delete files older than this many days (0 means keep today and delete all older)

        Returns:
            Number of files deleted
        """
        if days < 0:
            raise ValueError("days must be non-negative")

        # Calculate cutoff date (start of day)
        cutoff_date = (datetime.now(timezone.utc) - timedelta(days=days)).date()
        deleted_count = 0
        pattern = f"traces-*.{self.format}"

        with self._lock:
            for file_path in self.directory.glob(pattern):
                try:
                    # Extract date from filename
                    parts = file_path.stem.split("-")
                    if len(parts) < 5:
                        continue

                    file_date_str = f"{parts[1]}-{parts[2]}-{parts[3]}"
                    file_date = datetime.strptime(file_date_str, "%Y-%m-%d").date()

                    # Delete if older than cutoff (strictly less than)
                    if file_date < cutoff_date:
                        file_path.unlink()
                        deleted_count += 1

                        # Reset current file if we deleted it
                        if file_path == self._current_file:
                            self._update_current_file()
                except Exception:
                    # Skip files that can't be processed
                    continue

        return deleted_count

    def shutdown(self) -> None:
        """
        Shutdown the exporter.

        No cleanup needed for file exporter - file handle is closed
        after each write.
        """
        pass
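
For reference, a minimal sketch of driving this exporter's read-side API directly, using only the public methods defined in the file above; the trace ID `"abc123"` is a placeholder, and the directory/size values are arbitrary:

```python
from datetime import datetime, timedelta, timezone

from prela.exporters.file import FileExporter

exporter = FileExporter(directory="./traces", max_file_size_mb=50, rotate=True)

# Stream every span of one trace back out of the JSONL files.
for span in exporter.read_traces(trace_id="abc123"):  # placeholder trace ID
    print(span)

# Collect the unique trace IDs recorded over the last 24 hours.
now = datetime.now(timezone.utc)
recent_ids = exporter.list_traces(start=now - timedelta(days=1), end=now)

# Delete trace files more than 30 days old; returns the number deleted.
deleted = exporter.cleanup_old_traces(days=30)
```

Note that `export()` deliberately swallows exceptions and returns `ExportResult.FAILURE` instead of raising, so callers that need delivery guarantees should check the returned `ExportResult` rather than rely on errors surfacing.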