RouteKitAI 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- routekitai/__init__.py +53 -0
- routekitai/cli/__init__.py +18 -0
- routekitai/cli/main.py +40 -0
- routekitai/cli/replay.py +80 -0
- routekitai/cli/run.py +95 -0
- routekitai/cli/serve.py +966 -0
- routekitai/cli/test_agent.py +178 -0
- routekitai/cli/trace.py +209 -0
- routekitai/cli/trace_analyze.py +120 -0
- routekitai/cli/trace_search.py +126 -0
- routekitai/core/__init__.py +58 -0
- routekitai/core/agent.py +325 -0
- routekitai/core/errors.py +49 -0
- routekitai/core/hooks.py +174 -0
- routekitai/core/memory.py +54 -0
- routekitai/core/message.py +132 -0
- routekitai/core/model.py +91 -0
- routekitai/core/policies.py +373 -0
- routekitai/core/policy.py +85 -0
- routekitai/core/policy_adapter.py +133 -0
- routekitai/core/runtime.py +1403 -0
- routekitai/core/tool.py +148 -0
- routekitai/core/tools.py +180 -0
- routekitai/evals/__init__.py +13 -0
- routekitai/evals/dataset.py +75 -0
- routekitai/evals/metrics.py +101 -0
- routekitai/evals/runner.py +184 -0
- routekitai/graphs/__init__.py +12 -0
- routekitai/graphs/executors.py +457 -0
- routekitai/graphs/graph.py +164 -0
- routekitai/memory/__init__.py +13 -0
- routekitai/memory/episodic.py +242 -0
- routekitai/memory/kv.py +34 -0
- routekitai/memory/retrieval.py +192 -0
- routekitai/memory/vector.py +700 -0
- routekitai/memory/working.py +66 -0
- routekitai/message.py +29 -0
- routekitai/model.py +48 -0
- routekitai/observability/__init__.py +21 -0
- routekitai/observability/analyzer.py +314 -0
- routekitai/observability/exporters/__init__.py +10 -0
- routekitai/observability/exporters/base.py +30 -0
- routekitai/observability/exporters/jsonl.py +81 -0
- routekitai/observability/exporters/otel.py +119 -0
- routekitai/observability/spans.py +111 -0
- routekitai/observability/streaming.py +117 -0
- routekitai/observability/trace.py +144 -0
- routekitai/providers/__init__.py +9 -0
- routekitai/providers/anthropic.py +227 -0
- routekitai/providers/azure_openai.py +243 -0
- routekitai/providers/local.py +196 -0
- routekitai/providers/openai.py +321 -0
- routekitai/py.typed +0 -0
- routekitai/sandbox/__init__.py +12 -0
- routekitai/sandbox/filesystem.py +131 -0
- routekitai/sandbox/network.py +142 -0
- routekitai/sandbox/permissions.py +70 -0
- routekitai/tool.py +33 -0
- routekitai-0.1.0.dist-info/METADATA +328 -0
- routekitai-0.1.0.dist-info/RECORD +64 -0
- routekitai-0.1.0.dist-info/WHEEL +5 -0
- routekitai-0.1.0.dist-info/entry_points.txt +2 -0
- routekitai-0.1.0.dist-info/licenses/LICENSE +21 -0
- routekitai-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""OpenTelemetry trace exporter."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
from routekitai.core.errors import RuntimeError as RouteKitRuntimeError
|
|
9
|
+
from routekitai.observability.trace import Trace
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class OTELExporterError(RouteKitRuntimeError):
    """Raised when converting or shipping a trace to OpenTelemetry fails."""
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class OTELExporter(BaseModel):
    """Exports traces to OpenTelemetry format.

    Converts routekitai traces to OpenTelemetry format and exports them.
    For MVP, exports in JSON format compatible with OTEL collectors.
    """

    endpoint: str | None = Field(default=None, description="OTEL collector endpoint")
    headers: dict[str, str] = Field(default_factory=dict, description="Export headers")

    def _convert_trace_to_otel(self, trace: Trace) -> dict[str, Any]:
        """Convert a routekitai trace to OpenTelemetry JSON format.

        Args:
            trace: routekitai trace

        Returns:
            OTEL-compatible trace data (resourceSpans payload)
        """
        spans = []
        for event in trace.events:
            span = {
                "traceId": trace.trace_id,
                "spanId": event.data.get("step_id", "unknown"),
                "name": event.type,
                # Trace events are point-in-time, so start == end.
                "startTimeUnixNano": int(event.timestamp * 1_000_000_000),
                "endTimeUnixNano": int(event.timestamp * 1_000_000_000),
                "attributes": [
                    {"key": "event.type", "value": {"stringValue": event.type}},
                    {"key": "trace.id", "value": {"stringValue": trace.trace_id}},
                ],
            }

            # Add event data as attributes: scalars are stringified, dicts are
            # serialized as JSON; other value types are silently skipped.
            for key, value in event.data.items():
                if isinstance(value, (str, int, float, bool)):
                    span["attributes"].append({"key": key, "value": {"stringValue": str(value)}})
                elif isinstance(value, dict):
                    span["attributes"].append(
                        {"key": key, "value": {"stringValue": json.dumps(value)}}
                    )

            spans.append(span)

        return {
            "resourceSpans": [
                {
                    "resource": {
                        "attributes": [
                            {"key": "service.name", "value": {"stringValue": "routekit"}},
                        ]
                    },
                    "scopeSpans": [{"spans": spans}],
                }
            ]
        }

    async def export(self, trace: Trace) -> None:
        """Export trace to OpenTelemetry.

        Posts the converted payload to ``endpoint`` when set; otherwise logs
        it at DEBUG level for inspection.

        Args:
            trace: Trace to export

        Raises:
            OTELExporterError: If export fails
        """
        try:
            otel_data = self._convert_trace_to_otel(trace)

            if self.endpoint:
                # Export to OTEL collector endpoint
                try:
                    import httpx

                    async with httpx.AsyncClient() as client:
                        response = await client.post(
                            self.endpoint,
                            json=otel_data,
                            headers=self.headers,
                            timeout=10.0,
                        )
                        response.raise_for_status()
                except ImportError:
                    raise OTELExporterError(
                        "httpx is required for OTEL export. Install with: pip install httpx",
                        context={"endpoint": self.endpoint},
                    ) from None
                except Exception as e:
                    raise OTELExporterError(
                        f"Failed to export trace to OTEL endpoint: {e}",
                        context={"endpoint": self.endpoint, "trace_id": trace.trace_id},
                    ) from e
            else:
                # No endpoint specified - just log the OTEL format (for debugging)
                import logging

                logger = logging.getLogger(__name__)
                # Lazy %-formatting so json.dumps only runs when DEBUG is on.
                logger.debug("OTEL trace (no endpoint): %s", json.dumps(otel_data, indent=2))
        except OTELExporterError:
            # BUG FIX: the errors raised above used to fall through to the
            # generic handler below and get wrapped a second time (doubled
            # message, endpoint context lost). Re-raise them unchanged.
            raise
        except Exception as e:
            raise OTELExporterError(
                f"OTEL export failed: {e}", context={"trace_id": trace.trace_id}
            ) from e
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""Span management for distributed tracing."""
|
|
2
|
+
|
|
3
|
+
import contextlib
|
|
4
|
+
import time
|
|
5
|
+
from collections.abc import AsyncGenerator, Generator
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, Field
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class SpanContext(BaseModel):
    """Context for span propagation.

    Carries the identifiers needed to link a span to its trace and to its
    parent when context is propagated across task or service boundaries.
    """

    trace_id: str = Field(..., description="Trace ID")
    span_id: str = Field(..., description="Span ID")
    parent_span_id: str | None = Field(default=None, description="Parent span ID")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class Span(BaseModel):
    """Execution span for tracing.

    Records a named unit of work with start/end timestamps, arbitrary
    attributes, and a list of timestamped events.
    """

    span_id: str = Field(..., description="Span ID")
    name: str = Field(..., description="Span name")
    start_time: float = Field(..., description="Start timestamp")
    end_time: float | None = Field(default=None, description="End timestamp")
    attributes: dict[str, Any] = Field(default_factory=dict, description="Span attributes")
    events: list[dict[str, Any]] = Field(default_factory=list, description="Span events")

    def add_event(self, name: str, attributes: dict[str, Any] | None = None) -> None:
        """Record a named, timestamped event on this span.

        Args:
            name: Event name
            attributes: Optional event attributes
        """
        entry = {
            "name": name,
            "timestamp": time.time(),
            "attributes": attributes or {},
        }
        self.events.append(entry)

    @property
    def duration(self) -> float | None:
        """Span duration in seconds.

        Returns:
            Elapsed time once the span has ended, otherwise None.
        """
        end = self.end_time
        return None if end is None else end - self.start_time
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@contextlib.contextmanager
def span(name: str, attributes: dict[str, Any] | None = None) -> Generator[Span, None, None]:
    """Context manager that opens a Span and stamps its end time on exit.

    Args:
        name: Span name
        attributes: Optional span attributes

    Yields:
        Span instance

    Example:
        >>> with span("operation") as s:
        ...     # do work
        ...     s.add_event("milestone")
    """
    # Microsecond timestamp doubles as a (best-effort) unique span id.
    current = Span(
        span_id=f"span_{int(time.time() * 1000000)}",
        name=name,
        start_time=time.time(),
        attributes=attributes or {},
    )
    try:
        yield current
    finally:
        # Always record the end time, even when the body raised.
        current.end_time = time.time()
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@contextlib.asynccontextmanager
async def async_span(
    name: str, attributes: dict[str, Any] | None = None
) -> AsyncGenerator[Span, None]:
    """Async context manager that opens a Span and stamps its end time on exit.

    Args:
        name: Span name
        attributes: Optional span attributes

    Yields:
        Span instance

    Example:
        >>> async with async_span("async_operation") as s:
        ...     # do async work
        ...     s.add_event("milestone")
    """
    # Microsecond timestamp doubles as a (best-effort) unique span id.
    opened = Span(
        span_id=f"span_{int(time.time() * 1000000)}",
        name=name,
        start_time=time.time(),
        attributes=attributes or {},
    )
    try:
        yield opened
    finally:
        # Always record the end time, even when the body raised.
        opened.end_time = time.time()
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
"""Streaming support for trace events."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
from collections.abc import AsyncIterator
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class TraceEventBroadcaster:
|
|
10
|
+
"""Broadcasts trace events to multiple subscribers via WebSocket/SSE."""
|
|
11
|
+
|
|
12
|
+
def __init__(self) -> None:
|
|
13
|
+
"""Initialize broadcaster."""
|
|
14
|
+
self._subscribers: set[asyncio.Queue[Any]] = set()
|
|
15
|
+
self._lock = asyncio.Lock()
|
|
16
|
+
|
|
17
|
+
async def subscribe(self) -> asyncio.Queue[Any]:
|
|
18
|
+
"""Subscribe to trace events.
|
|
19
|
+
|
|
20
|
+
Returns:
|
|
21
|
+
Queue that will receive trace events
|
|
22
|
+
"""
|
|
23
|
+
queue: asyncio.Queue[Any] = asyncio.Queue()
|
|
24
|
+
async with self._lock:
|
|
25
|
+
self._subscribers.add(queue)
|
|
26
|
+
return queue
|
|
27
|
+
|
|
28
|
+
async def unsubscribe(self, queue: asyncio.Queue[Any]) -> None:
|
|
29
|
+
"""Unsubscribe from trace events.
|
|
30
|
+
|
|
31
|
+
Args:
|
|
32
|
+
queue: Queue to remove from subscribers
|
|
33
|
+
"""
|
|
34
|
+
async with self._lock:
|
|
35
|
+
self._subscribers.discard(queue)
|
|
36
|
+
|
|
37
|
+
async def broadcast(self, event: Any) -> None:
|
|
38
|
+
"""Broadcast an event to all subscribers.
|
|
39
|
+
|
|
40
|
+
Args:
|
|
41
|
+
event: Trace event to broadcast
|
|
42
|
+
"""
|
|
43
|
+
async with self._lock:
|
|
44
|
+
# Create a copy of subscribers to avoid modification during iteration
|
|
45
|
+
subscribers = list(self._subscribers)
|
|
46
|
+
|
|
47
|
+
# Send to all subscribers, removing dead ones
|
|
48
|
+
dead_subscribers = []
|
|
49
|
+
for queue in subscribers:
|
|
50
|
+
try:
|
|
51
|
+
queue.put_nowait(event)
|
|
52
|
+
except Exception:
|
|
53
|
+
dead_subscribers.append(queue)
|
|
54
|
+
|
|
55
|
+
# Clean up dead subscribers
|
|
56
|
+
if dead_subscribers:
|
|
57
|
+
async with self._lock:
|
|
58
|
+
for queue in dead_subscribers:
|
|
59
|
+
self._subscribers.discard(queue)
|
|
60
|
+
|
|
61
|
+
async def stream_events(
|
|
62
|
+
self, queue: asyncio.Queue[Any], trace_id: str | None = None
|
|
63
|
+
) -> AsyncIterator[str]:
|
|
64
|
+
"""Stream events as SSE format.
|
|
65
|
+
|
|
66
|
+
Args:
|
|
67
|
+
queue: Queue to read events from
|
|
68
|
+
trace_id: Optional trace ID to filter events
|
|
69
|
+
|
|
70
|
+
Yields:
|
|
71
|
+
SSE-formatted strings
|
|
72
|
+
"""
|
|
73
|
+
try:
|
|
74
|
+
while True:
|
|
75
|
+
try:
|
|
76
|
+
# Wait for event with timeout to allow periodic checks
|
|
77
|
+
event = await asyncio.wait_for(queue.get(), timeout=1.0)
|
|
78
|
+
queue.task_done()
|
|
79
|
+
|
|
80
|
+
# Filter by trace_id if provided
|
|
81
|
+
if (
|
|
82
|
+
trace_id
|
|
83
|
+
and hasattr(event, "data")
|
|
84
|
+
and event.data.get("trace_id") != trace_id
|
|
85
|
+
):
|
|
86
|
+
continue
|
|
87
|
+
|
|
88
|
+
# Format as SSE
|
|
89
|
+
event_data = {
|
|
90
|
+
"type": event.type,
|
|
91
|
+
"timestamp": event.timestamp,
|
|
92
|
+
"data": event.data,
|
|
93
|
+
}
|
|
94
|
+
yield f"data: {json.dumps(event_data)}\n\n"
|
|
95
|
+
except TimeoutError:
|
|
96
|
+
# Send keepalive
|
|
97
|
+
yield ": keepalive\n\n"
|
|
98
|
+
except asyncio.CancelledError:
|
|
99
|
+
break
|
|
100
|
+
finally:
|
|
101
|
+
await self.unsubscribe(queue)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
# Global broadcaster instance, created lazily by get_broadcaster().
_broadcaster: TraceEventBroadcaster | None = None
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def get_broadcaster() -> TraceEventBroadcaster:
    """Return the process-wide trace event broadcaster, creating it lazily.

    Returns:
        TraceEventBroadcaster instance (singleton)
    """
    global _broadcaster
    broadcaster = _broadcaster
    if broadcaster is None:
        broadcaster = TraceEventBroadcaster()
        _broadcaster = broadcaster
    return broadcaster
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
"""Trace collection and management."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import time
|
|
5
|
+
from collections.abc import Awaitable, Callable
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, Field
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class TraceEvent(BaseModel):
    """Immutable event in a trace.

    A single typed, timestamped record appended to a Trace's event log.
    """

    type: str = Field(..., description="Event type")
    timestamp: float = Field(..., description="Event timestamp")
    data: dict[str, Any] = Field(..., description="Event data")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# Callback invoked for each new TraceEvent; may be synchronous (returns None)
# or asynchronous (returns an Awaitable that Trace.add_event schedules).
TraceEventCallback = Callable[[TraceEvent], Awaitable[None] | None]
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class Trace(BaseModel):
    """Immutable trace of agent execution.

    A trace is an append-only event log that records all execution events.
    Registered callbacks and the streaming broadcaster are notified on every
    appended event; failures in either are deliberately swallowed so that
    observability never breaks execution.
    """

    trace_id: str = Field(..., description="Trace ID")
    events: list[TraceEvent] = Field(default_factory=list, description="Trace events")
    metadata: dict[str, Any] = Field(default_factory=dict, description="Trace metadata")

    def __init__(self, **data: Any) -> None:
        """Initialize trace with an empty callback list."""
        super().__init__(**data)
        # Runtime-only state, intentionally kept outside the pydantic fields.
        self._event_callbacks: list[TraceEventCallback] = []

    def add_event_callback(self, callback: TraceEventCallback) -> None:
        """Add a callback to be notified of new events.

        Args:
            callback: Callback function that receives TraceEvent
        """
        self._event_callbacks.append(callback)

    def remove_event_callback(self, callback: TraceEventCallback) -> None:
        """Remove an event callback.

        Args:
            callback: Callback function to remove
        """
        if callback in self._event_callbacks:
            self._event_callbacks.remove(callback)

    def add_event(self, event_type: str, data: dict[str, Any] | None = None) -> None:
        """Append an event to the trace and notify observers.

        Args:
            event_type: Type of event (run_started, model_called, tool_called, etc.)
            data: Event data
        """
        event = TraceEvent(
            type=event_type,
            timestamp=time.time(),
            data=data or {},
        )
        self.events.append(event)

        # Notify callbacks; a failing callback must not break trace collection.
        for callback in self._event_callbacks:
            try:
                result = callback(event)
                # If callback is async, schedule it on the running loop.
                if asyncio.iscoroutine(result):
                    try:
                        # BUG FIX: asyncio.get_event_loop() is deprecated when
                        # no loop is running (and raises on 3.12+). Probe with
                        # get_running_loop(), which raises RuntimeError in
                        # exactly the case the handler below already covers.
                        asyncio.get_running_loop()
                        # NOTE(review): the task reference is not retained, so
                        # the loop may garbage-collect it before completion.
                        asyncio.create_task(result)
                    except RuntimeError:
                        # No running loop: close the coroutine so it does not
                        # emit a "never awaited" warning.
                        result.close()
            except Exception:
                # Don't let callback errors break trace collection
                pass

        # Broadcast to streaming subscribers (lazy import to avoid circular dependency)
        try:
            from routekitai.observability.streaming import get_broadcaster

            broadcaster = get_broadcaster()
            # Schedule broadcast only when an event loop is running.
            try:
                asyncio.get_running_loop()
                asyncio.create_task(broadcaster.broadcast(event))
            except RuntimeError:
                # No event loop, skip broadcasting
                pass
        except Exception:
            # Don't let broadcasting errors break trace collection
            pass

    def get_events_by_type(self, event_type: str) -> list[TraceEvent]:
        """Get all events of a specific type.

        Args:
            event_type: Event type to filter by

        Returns:
            List of matching events
        """
        return [event for event in self.events if event.type == event_type]
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
class TraceCollector(BaseModel):
    """Collects and manages traces, keyed by trace ID."""

    traces: dict[str, Trace] = Field(default_factory=dict, description="Collected traces")

    def start_trace(self, trace_id: str, metadata: dict[str, Any] | None = None) -> Trace:
        """Create, register, and return a new trace.

        The new trace immediately receives a ``run_started`` event carrying
        its ID and metadata.

        Args:
            trace_id: Unique trace ID
            metadata: Optional trace metadata

        Returns:
            New trace instance
        """
        new_trace = Trace(trace_id=trace_id, metadata=metadata or {})
        new_trace.add_event("run_started", {"trace_id": trace_id, "metadata": metadata or {}})
        self.traces[trace_id] = new_trace
        return new_trace

    def get_trace(self, trace_id: str) -> Trace | None:
        """Look up a previously collected trace.

        Args:
            trace_id: Trace ID

        Returns:
            Trace if found, None otherwise
        """
        return self.traces.get(trace_id)
|