agentreplay-0.1.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,281 @@
+ # Copyright 2025 Sushanth (https://github.com/sushanthpy)
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Bridge between Agentreplay spans and OpenTelemetry spans.
+
+ This module provides utilities to convert Agentreplay's custom span format
+ to OpenTelemetry format and vice versa. It also handles the setup of
+ the OpenTelemetry tracer provider with Agentreplay-specific configuration.
+
+ The bridge ensures that:
+ 1. Agentreplay spans are compatible with standard OTLP exporters
+ 2. Agent/session/workflow context is preserved in span attributes
+ 3. Agentreplay's span types map to appropriate OTEL span kinds
+ """
+
+ from typing import Optional, Dict, Any
+ import logging
+ import os
+ import atexit
+
+ from opentelemetry import trace
+ from opentelemetry.sdk.trace import TracerProvider, SpanProcessor
+ from opentelemetry.sdk.trace.export import BatchSpanProcessor
+ from opentelemetry.sdk.resources import Resource, SERVICE_NAME
+ from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+ from opentelemetry.trace import SpanKind, Status, StatusCode
+
+ logger = logging.getLogger(__name__)
+
+
+ def setup_tracer_provider(
+     service_name: str,
+     otlp_endpoint: str,
+     tenant_id: int = 1,
+     project_id: int = 0,
+     debug: bool = False,
+ ) -> TracerProvider:
+     """Set up OpenTelemetry tracer provider with Agentreplay configuration.
+
+     Args:
+         service_name: Service name for resource attributes
+         otlp_endpoint: OTLP gRPC endpoint (e.g., 'localhost:47117')
+         tenant_id: Agentreplay tenant ID
+         project_id: Agentreplay project ID
+         debug: Enable debug logging
+
+     Returns:
+         Configured TracerProvider
+
+     Example:
+         >>> provider = setup_tracer_provider(
+         ...     service_name="my-agent",
+         ...     otlp_endpoint="localhost:47117",
+         ...     project_id=27986
+         ... )
+     """
+     # Create resource with Agentreplay-specific attributes
+     # NOTE: Server expects "tenant_id" and "project_id" (with underscores, not dots)
+     resource = Resource.create({
+         SERVICE_NAME: service_name,
+         "tenant_id": tenant_id,  # Server looks for "tenant.id" or "tenant_id"
+         "project_id": project_id,  # Server looks for "project.id" or "project_id"
+         "agentreplay.sdk.name": "agentreplay-python",
+         "agentreplay.sdk.version": "0.1.0",
+     })
+
+     # Create tracer provider
+     provider = TracerProvider(resource=resource)
+
+     # Configure OTLP exporter
+     # Note: The endpoint should be gRPC format (no http:// prefix)
+     # Standard OTLP uses port 4317 for gRPC, 4318 for HTTP
+     if otlp_endpoint.startswith("http://"):
+         otlp_endpoint = otlp_endpoint.replace("http://", "")
+     if otlp_endpoint.startswith("https://"):
+         otlp_endpoint = otlp_endpoint.replace("https://", "")
+
+     # Headers for authentication/routing
+     headers = {
+         "x-agentreplay-tenant-id": str(tenant_id),
+         "x-agentreplay-project-id": str(project_id),
+     }
+
+     # Create OTLP exporter
+     # insecure=True because we're using localhost (change for production)
+     otlp_exporter = OTLPSpanExporter(
+         endpoint=otlp_endpoint,
+         headers=headers,
+         insecure=True,  # TODO: Make configurable
+     )
+
+     # Use batch processor for better performance
+     span_processor = BatchSpanProcessor(
+         otlp_exporter,
+         max_queue_size=2048,
+         max_export_batch_size=512,
+         schedule_delay_millis=5000,  # Export every 5 seconds
+     )
+
+     provider.add_span_processor(span_processor)
+
+     # Set as global tracer provider
+     trace.set_tracer_provider(provider)
+
+     # Register atexit handler to flush spans when script exits
+     # This ensures short-lived scripts export their spans before terminating
+     def _flush_on_exit():
+         """Flush any pending spans before process exits."""
+         try:
+             provider.force_flush(timeout_millis=5000)
+             if debug:
+                 logger.debug("✓ Spans flushed on exit")
+         except Exception as e:
+             logger.debug(f"Failed to flush spans on exit: {e}")
+
+     atexit.register(_flush_on_exit)
+
+     if debug:
+         logger.debug(f"✓ TracerProvider configured:")
+         logger.debug(f"  Service: {service_name}")
+         logger.debug(f"  Endpoint: {otlp_endpoint}")
+         logger.debug(f"  Tenant ID: {tenant_id}")
+         logger.debug(f"  Project ID: {project_id}")
+
+     return provider
+
+
+ def agentreplay_span_type_to_otel_kind(span_type: str) -> SpanKind:
+     """Convert Agentreplay span type to OTEL span kind.
+
+     Args:
+         span_type: Agentreplay span type (e.g., 'Planning', 'Reasoning', 'ToolCall')
+
+     Returns:
+         Appropriate SpanKind
+     """
+     # Map Agentreplay types to OTEL span kinds
+     mapping = {
+         "Planning": SpanKind.INTERNAL,
+         "Reasoning": SpanKind.INTERNAL,
+         "ToolCall": SpanKind.CLIENT,
+         "Synthesis": SpanKind.INTERNAL,
+         "Root": SpanKind.SERVER,
+         "Error": SpanKind.INTERNAL,
+         "Response": SpanKind.SERVER,
+     }
+     return mapping.get(span_type, SpanKind.INTERNAL)
+
+
+ def inject_agent_context_to_span(span: trace.Span, context: Dict[str, Any]):
+     """Inject Agentreplay agent context into an OTEL span.
+
+     Args:
+         span: OpenTelemetry span
+         context: Context dictionary with agent_id, session_id, etc.
+
+     Example:
+         >>> with tracer.start_as_current_span("operation") as span:
+         ...     inject_agent_context_to_span(span, {
+         ...         "agent_id": "researcher",
+         ...         "session_id": "sess-123"
+         ...     })
+     """
+     # Add context as span attributes
+     if "agent_id" in context:
+         span.set_attribute("agentreplay.agent_id", context["agent_id"])
+
+     if "session_id" in context:
+         span.set_attribute("agentreplay.session_id", context["session_id"])
+
+     if "workflow_id" in context:
+         span.set_attribute("agentreplay.workflow_id", context["workflow_id"])
+
+     if "user_id" in context:
+         span.set_attribute("agentreplay.user_id", context["user_id"])
+
+
+ def _inject_agent_context(span: trace.Span):
+     """Inject current agent context from contextvars into span.
+
+     This reads from the global context variables set by AgentContext
+     and adds them to the span automatically.
+
+     Args:
+         span: OpenTelemetry span to annotate
+     """
+     try:
+         from agentreplay.context import (
+             get_current_agent_id,
+             get_current_session_id,
+             get_current_workflow_id,
+             get_current_user_id,
+         )
+
+         agent_id = get_current_agent_id()
+         if agent_id:
+             span.set_attribute("agentreplay.agent_id", agent_id)
+
+         session_id = get_current_session_id()
+         if session_id:
+             span.set_attribute("agentreplay.session_id", session_id)
+
+         workflow_id = get_current_workflow_id()
+         if workflow_id:
+             span.set_attribute("agentreplay.workflow_id", workflow_id)
+
+         user_id = get_current_user_id()
+         if user_id:
+             span.set_attribute("agentreplay.user_id", user_id)
+
+     except ImportError:
+         # Context module not available, skip
+         pass
+
+
+ def get_tracer(name: str = "agentreplay") -> trace.Tracer:
+     """Get an OpenTelemetry tracer for Agentreplay.
+
+     Args:
+         name: Tracer name (default: 'agentreplay')
+
+     Returns:
+         Configured tracer
+
+     Example:
+         >>> tracer = get_tracer("my-component")
+         >>> with tracer.start_as_current_span("operation") as span:
+         ...     span.set_attribute("key", "value")
+     """
+     return trace.get_tracer(name)
+
+
+ def create_span_with_context(
+     tracer: trace.Tracer,
+     name: str,
+     kind: Optional[SpanKind] = None,
+     attributes: Optional[Dict[str, Any]] = None,
+ ) -> trace.Span:
+     """Create a span with automatic agent context injection.
+
+     Args:
+         tracer: OpenTelemetry tracer
+         name: Span name
+         kind: Span kind (default: INTERNAL)
+         attributes: Additional attributes
+
+     Returns:
+         Started span with context
+
+     Example:
+         >>> tracer = get_tracer()
+         >>> span = create_span_with_context(
+         ...     tracer,
+         ...     "llm_call",
+         ...     kind=SpanKind.CLIENT,
+         ...     attributes={"model": "gpt-4"}
+         ... )
+     """
+     kind = kind or SpanKind.INTERNAL
+     span = tracer.start_span(name, kind=kind)
+
+     # Inject agent context
+     _inject_agent_context(span)
+
+     # Add custom attributes
+     if attributes:
+         for key, value in attributes.items():
+             span.set_attribute(key, str(value))
+
+     return span
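
For orientation, the sketch below ties the helpers above together: configure the global provider once at startup, get a tracer, and open a span that carries the current agent context. It is not part of the package diff; the import path is an assumption (this hunk does not name the file), and the endpoint, tenant, and project values are placeholders.

# Usage sketch only. The module path below is assumed; this hunk does not
# name the file that defines these helpers.
from agentreplay.otel_bridge import (  # hypothetical import path
    setup_tracer_provider,
    get_tracer,
    create_span_with_context,
    agentreplay_span_type_to_otel_kind,
)

# Configure the global tracer provider once at startup (placeholder values).
provider = setup_tracer_provider(
    service_name="my-agent",
    otlp_endpoint="localhost:47117",
    tenant_id=1,
    project_id=27986,
    debug=True,
)

tracer = get_tracer("my-component")

# Map an Agentreplay span type to an OTEL kind and start a span that carries
# the current agent context as attributes.
span = create_span_with_context(
    tracer,
    "tool_call",
    kind=agentreplay_span_type_to_otel_kind("ToolCall"),
    attributes={"tool": "web_search"},
)
try:
    ...  # the work being traced
finally:
    span.end()

# BatchSpanProcessor exports on a timer, so short-lived scripts should flush
# explicitly; setup_tracer_provider also registers an atexit flush.
provider.force_flush()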
agentreplay/patch.py ADDED
@@ -0,0 +1,308 @@
+ # Copyright 2025 Sushanth (https://github.com/sushanthpy)
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Convenient one-liner patch functions for popular agent frameworks.
+
+ These functions provide the simplest possible integration - just one line of code
+ to enable full observability for each framework.
+
+ Examples:
+     >>> from agentreplay import patch_langgraph, patch_llamaindex, patch_crewai
+     >>>
+     >>> # LangGraph
+     >>> patch_langgraph()
+     >>> # Now all LangGraph workflows are automatically traced!
+     >>>
+     >>> # LlamaIndex
+     >>> patch_llamaindex()
+     >>> # Now all LlamaIndex queries are automatically traced!
+     >>>
+     >>> # CrewAI (coming soon)
+     >>> patch_crewai()
+ """
+
+ from typing import Optional
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ def patch_langgraph(
+     service_name: str = "langgraph-app",
+     agentreplay_url: str = "http://localhost:8080",
+     tenant_id: int = 1,
+     project_id: int = 0,
+ ) -> None:
+     """Enable Agentreplay observability for LangGraph with one line of code.
+
+     This automatically traces:
+     - Node executions
+     - State transitions
+     - LLM calls within nodes
+     - Tool/function invocations
+     - Routing decisions
+
+     Args:
+         service_name: Name for this LangGraph application
+         agentreplay_url: Agentreplay server URL
+         tenant_id: Your tenant ID
+         project_id: Your project ID
+
+     Example:
+         >>> from agentreplay import patch_langgraph
+         >>> patch_langgraph()
+         >>>
+         >>> # Now use LangGraph normally - everything is traced!
+         >>> from langgraph.graph import StateGraph
+         >>> # ... your LangGraph code ...
+     """
+     from agentreplay import auto_instrument
+
+     logger.info(f"Patching LangGraph for {service_name}")
+     auto_instrument(
+         service_name=service_name,
+         agentreplay_url=agentreplay_url,
+         tenant_id=tenant_id,
+         project_id=project_id,
+         frameworks=["langgraph", "openai", "anthropic"],  # LangGraph commonly uses these
+     )
+     logger.info("✓ LangGraph patched successfully")
+
+
+ def patch_llamaindex(
+     service_name: str = "llamaindex-app",
+     agentreplay_url: str = "http://localhost:8080",
+     tenant_id: int = 1,
+     project_id: int = 0,
+ ) -> None:
+     """Enable Agentreplay observability for LlamaIndex with one line of code.
+
+     This automatically traces:
+     - Query engine executions
+     - Retrieval operations
+     - LLM synthesis
+     - Embedding generation
+     - Index operations
+
+     Args:
+         service_name: Name for this LlamaIndex application
+         agentreplay_url: Agentreplay server URL
+         tenant_id: Your tenant ID
+         project_id: Your project ID
+
+     Example:
+         >>> from agentreplay import patch_llamaindex
+         >>> patch_llamaindex()
+         >>>
+         >>> # Now use LlamaIndex normally - everything is traced!
+         >>> from llama_index import VectorStoreIndex
+         >>> # ... your LlamaIndex code ...
+     """
+     from agentreplay import auto_instrument
+
+     logger.info(f"Patching LlamaIndex for {service_name}")
+     auto_instrument(
+         service_name=service_name,
+         agentreplay_url=agentreplay_url,
+         tenant_id=tenant_id,
+         project_id=project_id,
+         frameworks=["llamaindex", "openai", "anthropic", "retrieval"],
+     )
+     logger.info("✓ LlamaIndex patched successfully")
+
+
+ def patch_crewai(
+     service_name: str = "crewai-app",
+     agentreplay_url: str = "http://localhost:8080",
+     tenant_id: int = 1,
+     project_id: int = 0,
+ ) -> None:
+     """Enable Agentreplay observability for CrewAI with one line of code.
+
+     This automatically traces:
+     - Agent executions
+     - Task assignments
+     - LLM calls
+     - Tool invocations
+     - Inter-agent communication
+
+     Args:
+         service_name: Name for this CrewAI application
+         agentreplay_url: Agentreplay server URL
+         tenant_id: Your tenant ID
+         project_id: Your project ID
+
+     Example:
+         >>> from agentreplay import patch_crewai
+         >>> patch_crewai()
+         >>>
+         >>> # Now use CrewAI normally - everything is traced!
+         >>> from crewai import Crew, Agent, Task
+         >>> # ... your CrewAI code ...
+
+     Note:
+         CrewAI integration uses their callback system since they don't have
+         native OpenTelemetry support yet.
+     """
+     from agentreplay import auto_instrument
+
+     logger.info(f"Patching CrewAI for {service_name}")
+
+     # CrewAI doesn't have native OTEL support, so we instrument the underlying LLMs
+     auto_instrument(
+         service_name=service_name,
+         agentreplay_url=agentreplay_url,
+         tenant_id=tenant_id,
+         project_id=project_id,
+         frameworks=["openai", "anthropic"],  # CrewAI typically uses these
+     )
+
+     # TODO: Add CrewAI-specific callback integration when available
+     logger.warning(
+         "CrewAI direct integration not yet implemented. "
+         "Currently tracing underlying LLM calls only. "
+         "For full agent-level tracing, use manual span creation."
+     )
+     logger.info("✓ CrewAI LLM calls patched successfully")
+
+
+ def patch_autogen(
+     service_name: str = "autogen-app",
+     agentreplay_url: str = "http://localhost:8080",
+     tenant_id: int = 1,
+     project_id: int = 0,
+ ) -> None:
+     """Enable Agentreplay observability for AutoGen with one line of code.
+
+     This automatically traces:
+     - Agent conversations
+     - LLM calls
+     - Function/tool executions
+     - Group chat interactions
+
+     Args:
+         service_name: Name for this AutoGen application
+         agentreplay_url: Agentreplay server URL
+         tenant_id: Your tenant ID
+         project_id: Your project ID
+
+     Example:
+         >>> from agentreplay import patch_autogen
+         >>> patch_autogen()
+         >>>
+         >>> # Now use AutoGen normally - everything is traced!
+         >>> from autogen import AssistantAgent, UserProxyAgent
+         >>> # ... your AutoGen code ...
+     """
+     from agentreplay import auto_instrument
+
+     logger.info(f"Patching AutoGen for {service_name}")
+
+     # AutoGen uses callbacks, instrument underlying LLMs
+     auto_instrument(
+         service_name=service_name,
+         agentreplay_url=agentreplay_url,
+         tenant_id=tenant_id,
+         project_id=project_id,
+         frameworks=["openai", "anthropic"],
+     )
+
+     logger.info("✓ AutoGen LLM calls patched successfully")
+     logger.info("For conversation-level tracing, consider manual span creation")
+
+
+ def patch_haystack(
+     service_name: str = "haystack-app",
+     agentreplay_url: str = "http://localhost:8080",
+     tenant_id: int = 1,
+     project_id: int = 0,
+ ) -> None:
+     """Enable Agentreplay observability for Haystack with one line of code.
+
+     This automatically traces:
+     - Pipeline executions
+     - Retrieval operations
+     - LLM generations
+     - Document processing
+
+     Args:
+         service_name: Name for this Haystack application
+         agentreplay_url: Agentreplay server URL
+         tenant_id: Your tenant ID
+         project_id: Your project ID
+
+     Example:
+         >>> from agentreplay import patch_haystack
+         >>> patch_haystack()
+         >>>
+         >>> # Now use Haystack normally - everything is traced!
+         >>> from haystack import Pipeline
+         >>> # ... your Haystack code ...
+     """
+     from agentreplay import auto_instrument
+
+     logger.info(f"Patching Haystack for {service_name}")
+
+     auto_instrument(
+         service_name=service_name,
+         agentreplay_url=agentreplay_url,
+         tenant_id=tenant_id,
+         project_id=project_id,
+         frameworks=["openai", "anthropic", "retrieval"],
+     )
+
+     logger.info("✓ Haystack components patched successfully")
+
+
+ def patch_dspy(
+     service_name: str = "dspy-app",
+     agentreplay_url: str = "http://localhost:8080",
+     tenant_id: int = 1,
+     project_id: int = 0,
+ ) -> None:
+     """Enable Agentreplay observability for DSPy with one line of code.
+
+     This automatically traces:
+     - Module executions
+     - LLM calls
+     - Optimizer runs
+     - Signature invocations
+
+     Args:
+         service_name: Name for this DSPy application
+         agentreplay_url: Agentreplay server URL
+         tenant_id: Your tenant ID
+         project_id: Your project ID
+
+     Example:
+         >>> from agentreplay import patch_dspy
+         >>> patch_dspy()
+         >>>
+         >>> # Now use DSPy normally - everything is traced!
+         >>> import dspy
+         >>> # ... your DSPy code ...
+     """
+     from agentreplay import auto_instrument
+
+     logger.info(f"Patching DSPy for {service_name}")
+
+     auto_instrument(
+         service_name=service_name,
+         agentreplay_url=agentreplay_url,
+         tenant_id=tenant_id,
+         project_id=project_id,
+         frameworks=["openai", "anthropic"],
+     )
+
+     logger.info("✓ DSPy LLM calls patched successfully")
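
As a closing illustration of the patch helpers above, here is a minimal, hypothetical call site: patch once at startup with explicit settings, then use the framework as usual. It is not part of the package diff; the URL and IDs are placeholder values and the framework code itself is elided.

# Illustrative only; values are placeholders.
from agentreplay import patch_langgraph

# One call at startup wires auto_instrument for LangGraph plus the common
# LLM clients, as implemented above.
patch_langgraph(
    service_name="research-workflow",
    agentreplay_url="http://localhost:8080",
    tenant_id=1,
    project_id=27986,
)

# From here on, build and run the LangGraph workflow normally; instrumented
# framework and LLM calls are exported to the Agentreplay server given above.
from langgraph.graph import StateGraph  # as in the docstring example
# ... your LangGraph code ...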