mseep-agentops 0.4.18__py3-none-any.whl → 0.4.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. agentops/__init__.py +0 -0
  2. agentops/client/api/base.py +28 -30
  3. agentops/client/api/versions/v3.py +29 -25
  4. agentops/client/api/versions/v4.py +87 -46
  5. agentops/client/client.py +98 -29
  6. agentops/client/http/README.md +87 -0
  7. agentops/client/http/http_client.py +126 -172
  8. agentops/config.py +8 -2
  9. agentops/instrumentation/OpenTelemetry.md +133 -0
  10. agentops/instrumentation/README.md +167 -0
  11. agentops/instrumentation/__init__.py +13 -1
  12. agentops/instrumentation/agentic/ag2/__init__.py +18 -0
  13. agentops/instrumentation/agentic/ag2/instrumentor.py +922 -0
  14. agentops/instrumentation/agentic/agno/__init__.py +19 -0
  15. agentops/instrumentation/agentic/agno/attributes/__init__.py +20 -0
  16. agentops/instrumentation/agentic/agno/attributes/agent.py +250 -0
  17. agentops/instrumentation/agentic/agno/attributes/metrics.py +214 -0
  18. agentops/instrumentation/agentic/agno/attributes/storage.py +158 -0
  19. agentops/instrumentation/agentic/agno/attributes/team.py +195 -0
  20. agentops/instrumentation/agentic/agno/attributes/tool.py +210 -0
  21. agentops/instrumentation/agentic/agno/attributes/workflow.py +254 -0
  22. agentops/instrumentation/agentic/agno/instrumentor.py +1313 -0
  23. agentops/instrumentation/agentic/crewai/LICENSE +201 -0
  24. agentops/instrumentation/agentic/crewai/NOTICE.md +10 -0
  25. agentops/instrumentation/agentic/crewai/__init__.py +6 -0
  26. agentops/instrumentation/agentic/crewai/crewai_span_attributes.py +335 -0
  27. agentops/instrumentation/agentic/crewai/instrumentation.py +535 -0
  28. agentops/instrumentation/agentic/crewai/version.py +1 -0
  29. agentops/instrumentation/agentic/google_adk/__init__.py +19 -0
  30. agentops/instrumentation/agentic/google_adk/instrumentor.py +68 -0
  31. agentops/instrumentation/agentic/google_adk/patch.py +767 -0
  32. agentops/instrumentation/agentic/haystack/__init__.py +1 -0
  33. agentops/instrumentation/agentic/haystack/instrumentor.py +186 -0
  34. agentops/instrumentation/agentic/langgraph/__init__.py +3 -0
  35. agentops/instrumentation/agentic/langgraph/attributes.py +54 -0
  36. agentops/instrumentation/agentic/langgraph/instrumentation.py +598 -0
  37. agentops/instrumentation/agentic/langgraph/version.py +1 -0
  38. agentops/instrumentation/agentic/openai_agents/README.md +156 -0
  39. agentops/instrumentation/agentic/openai_agents/SPANS.md +145 -0
  40. agentops/instrumentation/agentic/openai_agents/TRACING_API.md +144 -0
  41. agentops/instrumentation/agentic/openai_agents/__init__.py +30 -0
  42. agentops/instrumentation/agentic/openai_agents/attributes/common.py +549 -0
  43. agentops/instrumentation/agentic/openai_agents/attributes/completion.py +172 -0
  44. agentops/instrumentation/agentic/openai_agents/attributes/model.py +58 -0
  45. agentops/instrumentation/agentic/openai_agents/attributes/tokens.py +275 -0
  46. agentops/instrumentation/agentic/openai_agents/exporter.py +469 -0
  47. agentops/instrumentation/agentic/openai_agents/instrumentor.py +107 -0
  48. agentops/instrumentation/agentic/openai_agents/processor.py +58 -0
  49. agentops/instrumentation/agentic/smolagents/README.md +88 -0
  50. agentops/instrumentation/agentic/smolagents/__init__.py +12 -0
  51. agentops/instrumentation/agentic/smolagents/attributes/agent.py +354 -0
  52. agentops/instrumentation/agentic/smolagents/attributes/model.py +205 -0
  53. agentops/instrumentation/agentic/smolagents/instrumentor.py +286 -0
  54. agentops/instrumentation/agentic/smolagents/stream_wrapper.py +258 -0
  55. agentops/instrumentation/agentic/xpander/__init__.py +15 -0
  56. agentops/instrumentation/agentic/xpander/context.py +112 -0
  57. agentops/instrumentation/agentic/xpander/instrumentor.py +877 -0
  58. agentops/instrumentation/agentic/xpander/trace_probe.py +86 -0
  59. agentops/instrumentation/agentic/xpander/version.py +3 -0
  60. agentops/instrumentation/common/README.md +65 -0
  61. agentops/instrumentation/common/attributes.py +1 -2
  62. agentops/instrumentation/providers/anthropic/__init__.py +24 -0
  63. agentops/instrumentation/providers/anthropic/attributes/__init__.py +23 -0
  64. agentops/instrumentation/providers/anthropic/attributes/common.py +64 -0
  65. agentops/instrumentation/providers/anthropic/attributes/message.py +541 -0
  66. agentops/instrumentation/providers/anthropic/attributes/tools.py +231 -0
  67. agentops/instrumentation/providers/anthropic/event_handler_wrapper.py +90 -0
  68. agentops/instrumentation/providers/anthropic/instrumentor.py +146 -0
  69. agentops/instrumentation/providers/anthropic/stream_wrapper.py +436 -0
  70. agentops/instrumentation/providers/google_genai/README.md +33 -0
  71. agentops/instrumentation/providers/google_genai/__init__.py +24 -0
  72. agentops/instrumentation/providers/google_genai/attributes/__init__.py +25 -0
  73. agentops/instrumentation/providers/google_genai/attributes/chat.py +125 -0
  74. agentops/instrumentation/providers/google_genai/attributes/common.py +88 -0
  75. agentops/instrumentation/providers/google_genai/attributes/model.py +284 -0
  76. agentops/instrumentation/providers/google_genai/instrumentor.py +170 -0
  77. agentops/instrumentation/providers/google_genai/stream_wrapper.py +238 -0
  78. agentops/instrumentation/providers/ibm_watsonx_ai/__init__.py +28 -0
  79. agentops/instrumentation/providers/ibm_watsonx_ai/attributes/__init__.py +27 -0
  80. agentops/instrumentation/providers/ibm_watsonx_ai/attributes/attributes.py +277 -0
  81. agentops/instrumentation/providers/ibm_watsonx_ai/attributes/common.py +104 -0
  82. agentops/instrumentation/providers/ibm_watsonx_ai/instrumentor.py +162 -0
  83. agentops/instrumentation/providers/ibm_watsonx_ai/stream_wrapper.py +302 -0
  84. agentops/instrumentation/providers/mem0/__init__.py +45 -0
  85. agentops/instrumentation/providers/mem0/common.py +377 -0
  86. agentops/instrumentation/providers/mem0/instrumentor.py +270 -0
  87. agentops/instrumentation/providers/mem0/memory.py +430 -0
  88. agentops/instrumentation/providers/openai/__init__.py +21 -0
  89. agentops/instrumentation/providers/openai/attributes/__init__.py +7 -0
  90. agentops/instrumentation/providers/openai/attributes/common.py +55 -0
  91. agentops/instrumentation/providers/openai/attributes/response.py +607 -0
  92. agentops/instrumentation/providers/openai/config.py +36 -0
  93. agentops/instrumentation/providers/openai/instrumentor.py +312 -0
  94. agentops/instrumentation/providers/openai/stream_wrapper.py +941 -0
  95. agentops/instrumentation/providers/openai/utils.py +44 -0
  96. agentops/instrumentation/providers/openai/v0.py +176 -0
  97. agentops/instrumentation/providers/openai/v0_wrappers.py +483 -0
  98. agentops/instrumentation/providers/openai/wrappers/__init__.py +30 -0
  99. agentops/instrumentation/providers/openai/wrappers/assistant.py +277 -0
  100. agentops/instrumentation/providers/openai/wrappers/chat.py +259 -0
  101. agentops/instrumentation/providers/openai/wrappers/completion.py +109 -0
  102. agentops/instrumentation/providers/openai/wrappers/embeddings.py +94 -0
  103. agentops/instrumentation/providers/openai/wrappers/image_gen.py +75 -0
  104. agentops/instrumentation/providers/openai/wrappers/responses.py +191 -0
  105. agentops/instrumentation/providers/openai/wrappers/shared.py +81 -0
  106. agentops/instrumentation/utilities/concurrent_futures/__init__.py +10 -0
  107. agentops/instrumentation/utilities/concurrent_futures/instrumentation.py +206 -0
  108. agentops/integration/callbacks/dspy/__init__.py +11 -0
  109. agentops/integration/callbacks/dspy/callback.py +471 -0
  110. agentops/integration/callbacks/langchain/README.md +59 -0
  111. agentops/integration/callbacks/langchain/__init__.py +15 -0
  112. agentops/integration/callbacks/langchain/callback.py +791 -0
  113. agentops/integration/callbacks/langchain/utils.py +54 -0
  114. agentops/legacy/crewai.md +121 -0
  115. agentops/logging/instrument_logging.py +4 -0
  116. agentops/sdk/README.md +220 -0
  117. agentops/sdk/core.py +75 -32
  118. agentops/sdk/descriptors/classproperty.py +28 -0
  119. agentops/sdk/exporters.py +152 -33
  120. agentops/semconv/README.md +125 -0
  121. agentops/semconv/span_kinds.py +0 -2
  122. agentops/validation.py +102 -63
  123. {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/METADATA +30 -40
  124. mseep_agentops-0.4.22.dist-info/RECORD +178 -0
  125. {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/WHEEL +1 -2
  126. mseep_agentops-0.4.18.dist-info/RECORD +0 -94
  127. mseep_agentops-0.4.18.dist-info/top_level.txt +0 -2
  128. tests/conftest.py +0 -10
  129. tests/unit/client/__init__.py +0 -1
  130. tests/unit/client/test_http_adapter.py +0 -221
  131. tests/unit/client/test_http_client.py +0 -206
  132. tests/unit/conftest.py +0 -54
  133. tests/unit/sdk/__init__.py +0 -1
  134. tests/unit/sdk/instrumentation_tester.py +0 -207
  135. tests/unit/sdk/test_attributes.py +0 -392
  136. tests/unit/sdk/test_concurrent_instrumentation.py +0 -468
  137. tests/unit/sdk/test_decorators.py +0 -763
  138. tests/unit/sdk/test_exporters.py +0 -241
  139. tests/unit/sdk/test_factory.py +0 -1188
  140. tests/unit/sdk/test_internal_span_processor.py +0 -397
  141. tests/unit/sdk/test_resource_attributes.py +0 -35
  142. tests/unit/test_config.py +0 -82
  143. tests/unit/test_context_manager.py +0 -777
  144. tests/unit/test_events.py +0 -27
  145. tests/unit/test_host_env.py +0 -54
  146. tests/unit/test_init_py.py +0 -501
  147. tests/unit/test_serialization.py +0 -433
  148. tests/unit/test_session.py +0 -676
  149. tests/unit/test_user_agent.py +0 -34
  150. tests/unit/test_validation.py +0 -405
  151. {tests → agentops/instrumentation/agentic/openai_agents/attributes}/__init__.py +0 -0
  152. /tests/unit/__init__.py → /agentops/instrumentation/providers/openai/attributes/tools.py +0 -0
  153. {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/licenses/LICENSE +0 -0
agentops/instrumentation/providers/anthropic/stream_wrapper.py
@@ -0,0 +1,436 @@
+"""Anthropic stream wrapper implementation.
+
+This module provides wrappers for Anthropic's streaming functionality,
+focusing on the MessageStreamManager for both sync and async operations.
+It instruments streams to collect telemetry data for monitoring and analysis.
+"""
+
+import logging
+from typing import TypeVar
+
+from opentelemetry import context as context_api
+from opentelemetry.trace import SpanKind
+from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
+
+from agentops.semconv import SpanAttributes, LLMRequestTypeValues, CoreAttributes, MessageAttributes
+from agentops.instrumentation.common.wrappers import _with_tracer_wrapper
+from agentops.instrumentation.providers.anthropic.attributes.message import (
+    get_message_request_attributes,
+    get_stream_attributes,
+)
+from agentops.instrumentation.providers.anthropic.event_handler_wrapper import EventHandleWrapper
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+
+
+@_with_tracer_wrapper
+def messages_stream_wrapper(tracer, wrapped, instance, args, kwargs):
+    """Wrapper for the Messages.stream method.
+
+    This wrapper creates spans for tracking stream performance and injects
+    an event handler wrapper to capture streaming events.
+
+    Args:
+        tracer: The OpenTelemetry tracer to use
+        wrapped: The original stream method
+        instance: The instance the method is bound to
+        args: Positional arguments to the method
+        kwargs: Keyword arguments to the method
+
+    Returns:
+        A wrapped stream manager that captures telemetry data
+    """
+    if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
+        return wrapped(*args, **kwargs)
+
+    span = tracer.start_span(
+        "anthropic.messages.stream",
+        kind=SpanKind.CLIENT,
+        attributes={SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.CHAT.value},
+    )
+
+    request_attributes = get_message_request_attributes(kwargs)
+    for key, value in request_attributes.items():
+        span.set_attribute(key, value)
+
+    span.set_attribute(SpanAttributes.LLM_REQUEST_STREAMING, True)
+
+    original_event_handler = kwargs.get("event_handler")
+
+    if original_event_handler is not None:
+        wrapped_handler = EventHandleWrapper(original_handler=original_event_handler, span=span)
+        kwargs["event_handler"] = wrapped_handler
+
+    try:
+
+        class TracedStreamManager:
+            """A wrapper for Anthropic's MessageStreamManager that adds telemetry.
+
+            This class wraps the original stream manager to capture metrics about
+            the streaming process, including token counts, content, and errors.
+            """
+
+            def __init__(self, original_manager):
+                """Initialize with the original manager.
+
+                Args:
+                    original_manager: The Anthropic MessageStreamManager to wrap
+                """
+                self.original_manager = original_manager
+                self.stream = None
+
+            def __enter__(self):
+                """Context manager entry that initializes stream monitoring.
+
+                Returns:
+                    The original stream with instrumentation added
+                """
+                self.stream = self.original_manager.__enter__()
+
+                try:
+                    stream_attributes = get_stream_attributes(self.stream)
+                    for key, value in stream_attributes.items():
+                        span.set_attribute(key, value)
+                except Exception as e:
+                    logger.debug(f"Error getting stream attributes: {e}")
+
+                # Set the event handler on the stream if provided
+                if original_event_handler is not None:
+                    self.stream.event_handler = kwargs["event_handler"]
+                else:
+                    try:
+                        original_text_stream = self.stream.text_stream
+                        token_count = 0
+
+                        class InstrumentedTextStream:
+                            """A wrapper for Anthropic's text stream that counts tokens."""
+
+                            def __iter__(self):
+                                """Iterate through text chunks, counting tokens.
+
+                                Yields:
+                                    Text chunks from the original stream
+                                """
+                                nonlocal token_count
+                                for text in original_text_stream:
+                                    token_count += len(text.split())
+                                    span.set_attribute(SpanAttributes.LLM_USAGE_STREAMING_TOKENS, token_count)
+                                    yield text
+
+                        self.stream.text_stream = InstrumentedTextStream()
+                    except Exception as e:
+                        logger.debug(f"Error patching text_stream: {e}")
+
+                return self.stream
+
+            def __exit__(self, exc_type, exc_val, exc_tb):
+                """Context manager exit that records final metrics.
+
+                Args:
+                    exc_type: Exception type, if an exception occurred
+                    exc_val: Exception value, if an exception occurred
+                    exc_tb: Exception traceback, if an exception occurred
+
+                Returns:
+                    Result of the original context manager's __exit__
+                """
+                try:
+                    if exc_type is not None:
+                        span.record_exception(exc_val)
+                        span.set_attribute(CoreAttributes.ERROR_MESSAGE, str(exc_val))
+                        span.set_attribute(CoreAttributes.ERROR_TYPE, exc_type.__name__)
+
+                    try:
+                        final_message = None
+
+                        if hasattr(self.original_manager, "_MessageStreamManager__stream") and hasattr(
+                            self.original_manager._MessageStreamManager__stream,
+                            "_MessageStream__final_message_snapshot",
+                        ):
+                            final_message = self.original_manager._MessageStreamManager__stream._MessageStream__final_message_snapshot
+
+                        if final_message:
+                            if hasattr(final_message, "content"):
+                                content_text = ""
+                                if isinstance(final_message.content, list):
+                                    for content_block in final_message.content:
+                                        if hasattr(content_block, "text"):
+                                            content_text += content_block.text
+
+                                if content_text:
+                                    span.set_attribute(MessageAttributes.COMPLETION_TYPE.format(i=0), "text")
+                                    span.set_attribute(MessageAttributes.COMPLETION_ROLE.format(i=0), "assistant")
+                                    span.set_attribute(MessageAttributes.COMPLETION_CONTENT.format(i=0), content_text)
+
+                            if hasattr(final_message, "usage"):
+                                usage = final_message.usage
+                                if hasattr(usage, "input_tokens"):
+                                    span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, usage.input_tokens)
+
+                                if hasattr(usage, "output_tokens"):
+                                    span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, usage.output_tokens)
+
+                                if hasattr(usage, "input_tokens") and hasattr(usage, "output_tokens"):
+                                    total_tokens = usage.input_tokens + usage.output_tokens
+                                    span.set_attribute(SpanAttributes.LLM_USAGE_TOTAL_TOKENS, total_tokens)
+                    except Exception as e:
+                        logger.debug(f"Failed to extract final message data: {e}")
+                finally:
+                    if span.is_recording():
+                        span.end()
+                return self.original_manager.__exit__(exc_type, exc_val, exc_tb)
+
+        stream_manager = wrapped(*args, **kwargs)
+
+        return TracedStreamManager(stream_manager)
+
+    except Exception as e:
+        span.record_exception(e)
+        span.set_attribute(CoreAttributes.ERROR_MESSAGE, str(e))
+        span.set_attribute(CoreAttributes.ERROR_TYPE, e.__class__.__name__)
+        span.end()
+        raise
+
+
+class AsyncStreamContextManagerWrapper:
+    """A wrapper that implements both async context manager and awaitable protocols.
+
+    This wrapper allows the instrumented async stream to be used either with
+    'async with' or by awaiting it first, preserving compatibility with
+    different usage patterns.
+    """
+
+    def __init__(self, coro):
+        """Initialize with a coroutine.
+
+        Args:
+            coro: The coroutine that will return a stream manager
+        """
+        self._coro = coro
+        self._stream_manager = None
+
+    def __await__(self):
+        """Make this wrapper awaitable.
+
+        This allows users to do:
+            stream_manager = await client.messages.stream(...)
+
+        Returns:
+            An awaitable that yields the traced stream manager
+        """
+
+        async def get_stream_manager():
+            self._stream_manager = await self._coro
+            return self._stream_manager
+
+        return get_stream_manager().__await__()
+
+    async def __aenter__(self):
+        """Async context manager enter.
+
+        This allows users to do:
+            async with client.messages.stream(...) as stream:
+
+        Returns:
+            The result of the stream manager's __aenter__
+        """
+        if self._stream_manager is None:
+            self._stream_manager = await self._coro
+
+        return await self._stream_manager.__aenter__()
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit.
+
+        Args:
+            exc_type: Exception type
+            exc_val: Exception value
+            exc_tb: Exception traceback
+
+        Returns:
+            The result of the stream manager's __aexit__
+        """
+        if self._stream_manager is not None:
+            return await self._stream_manager.__aexit__(exc_type, exc_val, exc_tb)
+        return False
+
+
+@_with_tracer_wrapper
+def messages_stream_async_wrapper(tracer, wrapped, instance, args, kwargs):
+    """Wrapper for the async Messages.stream method.
+
+    This wrapper creates spans for tracking stream performance and injects
+    an event handler wrapper to capture streaming events in async contexts.
+
+    Args:
+        tracer: The OpenTelemetry tracer to use
+        wrapped: The original async stream method
+        instance: The instance the method is bound to
+        args: Positional arguments to the method
+        kwargs: Keyword arguments to the method
+
+    Returns:
+        An object that can be used with async with or awaited
+    """
+    if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
+        return wrapped(*args, **kwargs)
+
+    span = tracer.start_span(
+        "anthropic.messages.stream",
+        kind=SpanKind.CLIENT,
+        attributes={SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.CHAT.value},
+    )
+
+    request_attributes = get_message_request_attributes(kwargs)
+    for key, value in request_attributes.items():
+        span.set_attribute(key, value)
+
+    span.set_attribute(SpanAttributes.LLM_REQUEST_STREAMING, True)
+
+    original_event_handler = kwargs.get("event_handler")
+
+    if original_event_handler is not None:
+        wrapped_handler = EventHandleWrapper(original_handler=original_event_handler, span=span)
+        kwargs["event_handler"] = wrapped_handler
+
+    async def _wrapped_stream():
+        """Async wrapper function for the stream method.
+
+        Returns:
+            A traced async stream manager
+        """
+        try:
+            # Don't await wrapped(*args, **kwargs) - it returns an async context manager, not a coroutine
+            stream_manager = wrapped(*args, **kwargs)
+
+            class TracedAsyncStreamManager:
+                """A wrapper for Anthropic's AsyncMessageStreamManager that adds telemetry.
+
+                This class wraps the original async stream manager to capture metrics
+                about the streaming process, including token counts, content, and errors.
+                """
+
+                def __init__(self, original_manager):
+                    """Initialize with the original manager.
+
+                    Args:
+                        original_manager: The Anthropic AsyncMessageStreamManager to wrap
+                    """
+                    self.original_manager = original_manager
+                    self.stream = None
+
+                async def __aenter__(self):
+                    """Async context manager entry that initializes stream monitoring.
+
+                    Returns:
+                        The original stream with instrumentation added
+                    """
+                    self.stream = await self.original_manager.__aenter__()
+
+                    try:
+                        stream_attributes = get_stream_attributes(self.stream)
+                        for key, value in stream_attributes.items():
+                            span.set_attribute(key, value)
+                    except Exception as e:
+                        logger.debug(f"Error getting async stream attributes: {e}")
+
+                    if original_event_handler is None:
+                        try:
+                            original_text_stream = self.stream.text_stream
+                            token_count = 0
+
+                            class InstrumentedAsyncTextStream:
+                                """A wrapper for Anthropic's async text stream that counts tokens."""
+
+                                async def __aiter__(self):
+                                    """Async iterate through text chunks, counting tokens.
+
+                                    Yields:
+                                        Text chunks from the original async stream
+                                    """
+                                    nonlocal token_count
+                                    async for text in original_text_stream:
+                                        token_count += len(text.split())
+                                        span.set_attribute(SpanAttributes.LLM_USAGE_STREAMING_TOKENS, token_count)
+                                        yield text
+
+                            self.stream.text_stream = InstrumentedAsyncTextStream()
+                        except Exception as e:
+                            logger.debug(f"Error patching async text_stream: {e}")
+
+                    return self.stream
+
+                async def __aexit__(self, exc_type, exc_val, exc_tb):
+                    """Async context manager exit that records final metrics.
+
+                    Args:
+                        exc_type: Exception type, if an exception occurred
+                        exc_val: Exception value, if an exception occurred
+                        exc_tb: Exception traceback, if an exception occurred
+
+                    Returns:
+                        Result of the original async context manager's __aexit__
+                    """
+                    try:
+                        if exc_type is not None:
+                            span.record_exception(exc_val)
+                            span.set_attribute(CoreAttributes.ERROR_MESSAGE, str(exc_val))
+                            span.set_attribute(CoreAttributes.ERROR_TYPE, exc_type.__name__)
+
+                        try:
+                            final_message = None
+
+                            if hasattr(self.original_manager, "_AsyncMessageStreamManager__stream") and hasattr(
+                                self.original_manager._AsyncMessageStreamManager__stream,
+                                "_AsyncMessageStream__final_message_snapshot",
+                            ):
+                                final_message = self.original_manager._AsyncMessageStreamManager__stream._AsyncMessageStream__final_message_snapshot
+
+                            if final_message:
+                                if hasattr(final_message, "content"):
+                                    content_text = ""
+                                    if isinstance(final_message.content, list):
+                                        for content_block in final_message.content:
+                                            if hasattr(content_block, "text"):
+                                                content_text += content_block.text
+
+                                    if content_text:
+                                        span.set_attribute(MessageAttributes.COMPLETION_TYPE.format(i=0), "text")
+                                        span.set_attribute(MessageAttributes.COMPLETION_ROLE.format(i=0), "assistant")
+                                        span.set_attribute(
+                                            MessageAttributes.COMPLETION_CONTENT.format(i=0), content_text
+                                        )
+
+                                if hasattr(final_message, "usage"):
+                                    usage = final_message.usage
+                                    if hasattr(usage, "input_tokens"):
+                                        span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, usage.input_tokens)
+
+                                    if hasattr(usage, "output_tokens"):
+                                        span.set_attribute(
+                                            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, usage.output_tokens
+                                        )
+
+                                    if hasattr(usage, "input_tokens") and hasattr(usage, "output_tokens"):
+                                        total_tokens = usage.input_tokens + usage.output_tokens
+                                        span.set_attribute(SpanAttributes.LLM_USAGE_TOTAL_TOKENS, total_tokens)
+                        except Exception as e:
+                            logger.debug(f"Failed to extract final async message data: {e}")
+                    finally:
+                        if span.is_recording():
+                            span.end()
+                    return await self.original_manager.__aexit__(exc_type, exc_val, exc_tb)
+
+            return TracedAsyncStreamManager(stream_manager)
+
+        except Exception as e:
+            span.record_exception(e)
+            span.set_attribute(CoreAttributes.ERROR_MESSAGE, str(e))
+            span.set_attribute(CoreAttributes.ERROR_TYPE, e.__class__.__name__)
+            span.end()
+            raise
+
+    # Return a wrapper that implements both async context manager and awaitable protocols
+    return AsyncStreamContextManagerWrapper(_wrapped_stream())
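
For orientation, the docstrings above point at the two call shapes these wrappers are built to preserve. Below is a minimal sketch of both, assuming the `anthropic` SDK is installed and `ANTHROPIC_API_KEY` is set; the model name is only an example.

```python
# Illustrative sketch of the calls the wrappers above intercept.
# Assumes the `anthropic` package is installed and ANTHROPIC_API_KEY is set;
# the model name is an example.
import asyncio

import anthropic


def sync_example() -> None:
    # TracedStreamManager transparently wraps the manager returned by Messages.stream
    client = anthropic.Anthropic()
    with client.messages.stream(
        model="claude-3-5-sonnet-latest",
        max_tokens=128,
        messages=[{"role": "user", "content": "Say hello"}],
    ) as stream:
        for text in stream.text_stream:  # instrumented to record streaming token counts
            print(text, end="")


async def async_example() -> None:
    # AsyncStreamContextManagerWrapper keeps both `await ...` and `async with ...` working
    client = anthropic.AsyncAnthropic()
    async with client.messages.stream(
        model="claude-3-5-sonnet-latest",
        max_tokens=128,
        messages=[{"role": "user", "content": "Say hello"}],
    ) as stream:
        async for text in stream.text_stream:
            print(text, end="")


if __name__ == "__main__":
    sync_example()
    asyncio.run(async_example())
```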
agentops/instrumentation/providers/google_genai/README.md
@@ -0,0 +1,33 @@
+# Google Generative AI (Gemini) Instrumentation
+
+This module provides OpenTelemetry instrumentation for Google's Generative AI (Gemini) API. The instrumentation allows you to trace all API calls made using the `google-genai` Python SDK, capturing:
+
+- Model parameters (temperature, max_tokens, etc.)
+- Prompt content (with privacy controls)
+- Response text and token usage
+- Streaming metrics
+- Token counting
+- Performance and error data
+
+## Supported Features
+
+The instrumentation covers all major API methods including:
+
+### Client-Based API
+- `client.models.generate_content`
+- `client.models.generate_content_stream`
+- `client.models.count_tokens`
+- `client.models.compute_tokens`
+- And their corresponding async variants
+
+## Metrics
+
+The instrumentation captures the following metrics:
+
+- Input tokens used
+- Output tokens generated
+- Total tokens consumed
+- Operation duration
+- Exception counts
+
+These metrics are available as OpenTelemetry span attributes and can be viewed in your observability platform of choice when properly configured.
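
For context, a traced call of the kind this README describes might look like the sketch below, assuming `agentops` and `google-genai` are installed, `AGENTOPS_API_KEY` and `GEMINI_API_KEY` are configured, and using an illustrative model name.

```python
# Minimal sketch: agentops.init() activates the instrumentation described above,
# and the generate_content call below is then traced automatically.
# Assumes AGENTOPS_API_KEY and GEMINI_API_KEY are set; the model name is illustrative.
import agentops
from google import genai

agentops.init()

client = genai.Client()
response = client.models.generate_content(
    model="gemini-2.0-flash",
    contents="Write a haiku about observability.",
)
print(response.text)
```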
agentops/instrumentation/providers/google_genai/__init__.py
@@ -0,0 +1,24 @@
+"""Google Generative AI (Gemini) API instrumentation.
+
+This module provides instrumentation for the Google Generative AI (Gemini) API,
+including content generation, streaming, and chat functionality.
+"""
+
+import logging
+from agentops.instrumentation.common import LibraryInfo
+
+logger = logging.getLogger(__name__)
+
+# Library information
+_library_info = LibraryInfo(name="google-genai")
+LIBRARY_NAME = _library_info.name
+LIBRARY_VERSION = _library_info.version
+
+# Import after defining constants to avoid circular imports
+from agentops.instrumentation.providers.google_genai.instrumentor import GoogleGenaiInstrumentor  # noqa: E402
+
+__all__ = [
+    "LIBRARY_NAME",
+    "LIBRARY_VERSION",
+    "GoogleGenaiInstrumentor",
+]
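
For manual wiring (normally `agentops.init()` handles activation), a sketch of using the exported instrumentor, assuming it follows the standard `opentelemetry-instrumentation` `BaseInstrumentor` contract:

```python
# Hypothetical manual activation; the instrument() call is assumed from the
# standard OpenTelemetry BaseInstrumentor interface rather than confirmed by this diff.
from agentops.instrumentation.providers.google_genai import (
    LIBRARY_NAME,
    LIBRARY_VERSION,
    GoogleGenaiInstrumentor,
)

instrumentor = GoogleGenaiInstrumentor()
instrumentor.instrument()
print(f"Instrumented {LIBRARY_NAME} {LIBRARY_VERSION}")
```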
agentops/instrumentation/providers/google_genai/attributes/__init__.py
@@ -0,0 +1,25 @@
+"""Attribute extractors for Google Generative AI instrumentation."""
+
+from agentops.instrumentation.providers.google_genai.attributes.common import (
+    get_common_instrumentation_attributes,
+    extract_request_attributes,
+)
+from agentops.instrumentation.providers.google_genai.attributes.model import (
+    get_model_attributes,
+    get_generate_content_attributes,
+    get_stream_attributes,
+    get_token_counting_attributes,
+)
+from agentops.instrumentation.providers.google_genai.attributes.chat import (
+    get_chat_attributes,
+)
+
+__all__ = [
+    "get_common_instrumentation_attributes",
+    "extract_request_attributes",
+    "get_model_attributes",
+    "get_generate_content_attributes",
+    "get_stream_attributes",
+    "get_chat_attributes",
+    "get_token_counting_attributes",
+]
agentops/instrumentation/providers/google_genai/attributes/chat.py
@@ -0,0 +1,125 @@
+"""Chat attribute extraction for Google Generative AI instrumentation."""
+
+from typing import Dict, Any, Optional, Tuple
+
+from agentops.logging import logger
+from agentops.semconv import SpanAttributes, LLMRequestTypeValues, MessageAttributes
+from agentops.instrumentation.common.attributes import AttributeMap
+from agentops.instrumentation.providers.google_genai.attributes.common import (
+    extract_request_attributes,
+    get_common_instrumentation_attributes,
+)
+from agentops.instrumentation.providers.google_genai.attributes.model import (
+    _extract_content_from_prompt,
+    _set_response_attributes,
+)
+
+
+def _extract_message_content(message: Any) -> str:
+    """Extract text content from a chat message.
+
+    Handles the various message formats in the Gemini chat API.
+
+    Args:
+        message: The message to extract content from
+
+    Returns:
+        Extracted text as a string
+    """
+    if isinstance(message, str):
+        return message
+
+    if isinstance(message, dict):
+        if "content" in message:
+            return _extract_content_from_prompt(message["content"])
+        if "text" in message:
+            return message["text"]
+
+    if hasattr(message, "content"):
+        return _extract_content_from_prompt(message.content)
+
+    if hasattr(message, "text"):
+        return message.text
+
+    return ""
+
+
+def _set_chat_history_attributes(attributes: AttributeMap, args: Tuple, kwargs: Dict[str, Any]) -> None:
+    """Extract and set chat history attributes from the request.
+
+    Args:
+        attributes: The attribute dictionary to update
+        args: Positional arguments to the method
+        kwargs: Keyword arguments to the method
+    """
+    messages = []
+    if "message" in kwargs:
+        messages = [kwargs["message"]]
+    elif args and len(args) > 0:
+        messages = [args[0]]
+    elif "messages" in kwargs:
+        messages = kwargs["messages"]
+
+    if not messages:
+        return
+
+    for i, message in enumerate(messages):
+        try:
+            content = _extract_message_content(message)
+            if content:
+                role = "user"
+
+                if isinstance(message, dict) and "role" in message:
+                    role = message["role"]
+                elif hasattr(message, "role"):
+                    role = message.role
+
+                attributes[MessageAttributes.PROMPT_CONTENT.format(i=i)] = content
+                attributes[MessageAttributes.PROMPT_ROLE.format(i=i)] = role
+        except Exception as e:
+            logger.debug(f"Error extracting chat message at index {i}: {e}")
+
+
+def get_chat_attributes(
+    args: Optional[Tuple] = None,
+    kwargs: Optional[Dict[str, Any]] = None,
+    return_value: Optional[Any] = None,
+) -> AttributeMap:
+    """Extract attributes for chat session methods.
+
+    This function handles attribute extraction for chat session operations,
+    particularly the send_message method.
+
+    Args:
+        args: Positional arguments to the method
+        kwargs: Keyword arguments to the method
+        return_value: Return value from the method
+
+    Returns:
+        Dictionary of extracted attributes
+    """
+    attributes = get_common_instrumentation_attributes()
+    attributes[SpanAttributes.LLM_SYSTEM] = "Gemini"
+    attributes[SpanAttributes.LLM_REQUEST_TYPE] = LLMRequestTypeValues.CHAT.value
+
+    if kwargs:
+        kwargs_attributes = extract_request_attributes(kwargs)
+        attributes.update(kwargs_attributes)
+
+    chat_session = None
+    if args and len(args) >= 1:
+        chat_session = args[0]
+
+    if chat_session and hasattr(chat_session, "model"):
+        if isinstance(chat_session.model, str):
+            attributes[SpanAttributes.LLM_REQUEST_MODEL] = chat_session.model
+        elif hasattr(chat_session.model, "name"):
+            attributes[SpanAttributes.LLM_REQUEST_MODEL] = chat_session.model.name
+
+    if args or kwargs:
+        _set_chat_history_attributes(attributes, args or (), kwargs or {})
+
+    if return_value is not None:
+        _set_response_attributes(attributes, return_value)
+
+    return attributes
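
To illustrate the duck-typed message handling above, a small sketch that calls `_extract_message_content` directly with made-up inputs, assuming the package from this diff is importable:

```python
# Made-up inputs demonstrating the extraction branches shown above.
from agentops.instrumentation.providers.google_genai.attributes.chat import _extract_message_content

print(_extract_message_content("Hello Gemini"))                    # plain string -> "Hello Gemini"
print(_extract_message_content({"role": "user", "text": "Hi"}))    # dict with "text" -> "Hi"


class _FakeMessage:
    # Hypothetical object with only a .text attribute
    text = "object with a .text attribute"


print(_extract_message_content(_FakeMessage()))  # falls through to the .text branch
```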