mseep-agentops 0.4.18__py3-none-any.whl → 0.4.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. agentops/__init__.py +0 -0
  2. agentops/client/api/base.py +28 -30
  3. agentops/client/api/versions/v3.py +29 -25
  4. agentops/client/api/versions/v4.py +87 -46
  5. agentops/client/client.py +98 -29
  6. agentops/client/http/README.md +87 -0
  7. agentops/client/http/http_client.py +126 -172
  8. agentops/config.py +8 -2
  9. agentops/instrumentation/OpenTelemetry.md +133 -0
  10. agentops/instrumentation/README.md +167 -0
  11. agentops/instrumentation/__init__.py +13 -1
  12. agentops/instrumentation/agentic/ag2/__init__.py +18 -0
  13. agentops/instrumentation/agentic/ag2/instrumentor.py +922 -0
  14. agentops/instrumentation/agentic/agno/__init__.py +19 -0
  15. agentops/instrumentation/agentic/agno/attributes/__init__.py +20 -0
  16. agentops/instrumentation/agentic/agno/attributes/agent.py +250 -0
  17. agentops/instrumentation/agentic/agno/attributes/metrics.py +214 -0
  18. agentops/instrumentation/agentic/agno/attributes/storage.py +158 -0
  19. agentops/instrumentation/agentic/agno/attributes/team.py +195 -0
  20. agentops/instrumentation/agentic/agno/attributes/tool.py +210 -0
  21. agentops/instrumentation/agentic/agno/attributes/workflow.py +254 -0
  22. agentops/instrumentation/agentic/agno/instrumentor.py +1313 -0
  23. agentops/instrumentation/agentic/crewai/LICENSE +201 -0
  24. agentops/instrumentation/agentic/crewai/NOTICE.md +10 -0
  25. agentops/instrumentation/agentic/crewai/__init__.py +6 -0
  26. agentops/instrumentation/agentic/crewai/crewai_span_attributes.py +335 -0
  27. agentops/instrumentation/agentic/crewai/instrumentation.py +535 -0
  28. agentops/instrumentation/agentic/crewai/version.py +1 -0
  29. agentops/instrumentation/agentic/google_adk/__init__.py +19 -0
  30. agentops/instrumentation/agentic/google_adk/instrumentor.py +68 -0
  31. agentops/instrumentation/agentic/google_adk/patch.py +767 -0
  32. agentops/instrumentation/agentic/haystack/__init__.py +1 -0
  33. agentops/instrumentation/agentic/haystack/instrumentor.py +186 -0
  34. agentops/instrumentation/agentic/langgraph/__init__.py +3 -0
  35. agentops/instrumentation/agentic/langgraph/attributes.py +54 -0
  36. agentops/instrumentation/agentic/langgraph/instrumentation.py +598 -0
  37. agentops/instrumentation/agentic/langgraph/version.py +1 -0
  38. agentops/instrumentation/agentic/openai_agents/README.md +156 -0
  39. agentops/instrumentation/agentic/openai_agents/SPANS.md +145 -0
  40. agentops/instrumentation/agentic/openai_agents/TRACING_API.md +144 -0
  41. agentops/instrumentation/agentic/openai_agents/__init__.py +30 -0
  42. agentops/instrumentation/agentic/openai_agents/attributes/common.py +549 -0
  43. agentops/instrumentation/agentic/openai_agents/attributes/completion.py +172 -0
  44. agentops/instrumentation/agentic/openai_agents/attributes/model.py +58 -0
  45. agentops/instrumentation/agentic/openai_agents/attributes/tokens.py +275 -0
  46. agentops/instrumentation/agentic/openai_agents/exporter.py +469 -0
  47. agentops/instrumentation/agentic/openai_agents/instrumentor.py +107 -0
  48. agentops/instrumentation/agentic/openai_agents/processor.py +58 -0
  49. agentops/instrumentation/agentic/smolagents/README.md +88 -0
  50. agentops/instrumentation/agentic/smolagents/__init__.py +12 -0
  51. agentops/instrumentation/agentic/smolagents/attributes/agent.py +354 -0
  52. agentops/instrumentation/agentic/smolagents/attributes/model.py +205 -0
  53. agentops/instrumentation/agentic/smolagents/instrumentor.py +286 -0
  54. agentops/instrumentation/agentic/smolagents/stream_wrapper.py +258 -0
  55. agentops/instrumentation/agentic/xpander/__init__.py +15 -0
  56. agentops/instrumentation/agentic/xpander/context.py +112 -0
  57. agentops/instrumentation/agentic/xpander/instrumentor.py +877 -0
  58. agentops/instrumentation/agentic/xpander/trace_probe.py +86 -0
  59. agentops/instrumentation/agentic/xpander/version.py +3 -0
  60. agentops/instrumentation/common/README.md +65 -0
  61. agentops/instrumentation/common/attributes.py +1 -2
  62. agentops/instrumentation/providers/anthropic/__init__.py +24 -0
  63. agentops/instrumentation/providers/anthropic/attributes/__init__.py +23 -0
  64. agentops/instrumentation/providers/anthropic/attributes/common.py +64 -0
  65. agentops/instrumentation/providers/anthropic/attributes/message.py +541 -0
  66. agentops/instrumentation/providers/anthropic/attributes/tools.py +231 -0
  67. agentops/instrumentation/providers/anthropic/event_handler_wrapper.py +90 -0
  68. agentops/instrumentation/providers/anthropic/instrumentor.py +146 -0
  69. agentops/instrumentation/providers/anthropic/stream_wrapper.py +436 -0
  70. agentops/instrumentation/providers/google_genai/README.md +33 -0
  71. agentops/instrumentation/providers/google_genai/__init__.py +24 -0
  72. agentops/instrumentation/providers/google_genai/attributes/__init__.py +25 -0
  73. agentops/instrumentation/providers/google_genai/attributes/chat.py +125 -0
  74. agentops/instrumentation/providers/google_genai/attributes/common.py +88 -0
  75. agentops/instrumentation/providers/google_genai/attributes/model.py +284 -0
  76. agentops/instrumentation/providers/google_genai/instrumentor.py +170 -0
  77. agentops/instrumentation/providers/google_genai/stream_wrapper.py +238 -0
  78. agentops/instrumentation/providers/ibm_watsonx_ai/__init__.py +28 -0
  79. agentops/instrumentation/providers/ibm_watsonx_ai/attributes/__init__.py +27 -0
  80. agentops/instrumentation/providers/ibm_watsonx_ai/attributes/attributes.py +277 -0
  81. agentops/instrumentation/providers/ibm_watsonx_ai/attributes/common.py +104 -0
  82. agentops/instrumentation/providers/ibm_watsonx_ai/instrumentor.py +162 -0
  83. agentops/instrumentation/providers/ibm_watsonx_ai/stream_wrapper.py +302 -0
  84. agentops/instrumentation/providers/mem0/__init__.py +45 -0
  85. agentops/instrumentation/providers/mem0/common.py +377 -0
  86. agentops/instrumentation/providers/mem0/instrumentor.py +270 -0
  87. agentops/instrumentation/providers/mem0/memory.py +430 -0
  88. agentops/instrumentation/providers/openai/__init__.py +21 -0
  89. agentops/instrumentation/providers/openai/attributes/__init__.py +7 -0
  90. agentops/instrumentation/providers/openai/attributes/common.py +55 -0
  91. agentops/instrumentation/providers/openai/attributes/response.py +607 -0
  92. agentops/instrumentation/providers/openai/config.py +36 -0
  93. agentops/instrumentation/providers/openai/instrumentor.py +312 -0
  94. agentops/instrumentation/providers/openai/stream_wrapper.py +941 -0
  95. agentops/instrumentation/providers/openai/utils.py +44 -0
  96. agentops/instrumentation/providers/openai/v0.py +176 -0
  97. agentops/instrumentation/providers/openai/v0_wrappers.py +483 -0
  98. agentops/instrumentation/providers/openai/wrappers/__init__.py +30 -0
  99. agentops/instrumentation/providers/openai/wrappers/assistant.py +277 -0
  100. agentops/instrumentation/providers/openai/wrappers/chat.py +259 -0
  101. agentops/instrumentation/providers/openai/wrappers/completion.py +109 -0
  102. agentops/instrumentation/providers/openai/wrappers/embeddings.py +94 -0
  103. agentops/instrumentation/providers/openai/wrappers/image_gen.py +75 -0
  104. agentops/instrumentation/providers/openai/wrappers/responses.py +191 -0
  105. agentops/instrumentation/providers/openai/wrappers/shared.py +81 -0
  106. agentops/instrumentation/utilities/concurrent_futures/__init__.py +10 -0
  107. agentops/instrumentation/utilities/concurrent_futures/instrumentation.py +206 -0
  108. agentops/integration/callbacks/dspy/__init__.py +11 -0
  109. agentops/integration/callbacks/dspy/callback.py +471 -0
  110. agentops/integration/callbacks/langchain/README.md +59 -0
  111. agentops/integration/callbacks/langchain/__init__.py +15 -0
  112. agentops/integration/callbacks/langchain/callback.py +791 -0
  113. agentops/integration/callbacks/langchain/utils.py +54 -0
  114. agentops/legacy/crewai.md +121 -0
  115. agentops/logging/instrument_logging.py +4 -0
  116. agentops/sdk/README.md +220 -0
  117. agentops/sdk/core.py +75 -32
  118. agentops/sdk/descriptors/classproperty.py +28 -0
  119. agentops/sdk/exporters.py +152 -33
  120. agentops/semconv/README.md +125 -0
  121. agentops/semconv/span_kinds.py +0 -2
  122. agentops/validation.py +102 -63
  123. {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/METADATA +30 -40
  124. mseep_agentops-0.4.22.dist-info/RECORD +178 -0
  125. {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/WHEEL +1 -2
  126. mseep_agentops-0.4.18.dist-info/RECORD +0 -94
  127. mseep_agentops-0.4.18.dist-info/top_level.txt +0 -2
  128. tests/conftest.py +0 -10
  129. tests/unit/client/__init__.py +0 -1
  130. tests/unit/client/test_http_adapter.py +0 -221
  131. tests/unit/client/test_http_client.py +0 -206
  132. tests/unit/conftest.py +0 -54
  133. tests/unit/sdk/__init__.py +0 -1
  134. tests/unit/sdk/instrumentation_tester.py +0 -207
  135. tests/unit/sdk/test_attributes.py +0 -392
  136. tests/unit/sdk/test_concurrent_instrumentation.py +0 -468
  137. tests/unit/sdk/test_decorators.py +0 -763
  138. tests/unit/sdk/test_exporters.py +0 -241
  139. tests/unit/sdk/test_factory.py +0 -1188
  140. tests/unit/sdk/test_internal_span_processor.py +0 -397
  141. tests/unit/sdk/test_resource_attributes.py +0 -35
  142. tests/unit/test_config.py +0 -82
  143. tests/unit/test_context_manager.py +0 -777
  144. tests/unit/test_events.py +0 -27
  145. tests/unit/test_host_env.py +0 -54
  146. tests/unit/test_init_py.py +0 -501
  147. tests/unit/test_serialization.py +0 -433
  148. tests/unit/test_session.py +0 -676
  149. tests/unit/test_user_agent.py +0 -34
  150. tests/unit/test_validation.py +0 -405
  151. {tests → agentops/instrumentation/agentic/openai_agents/attributes}/__init__.py +0 -0
  152. /tests/unit/__init__.py → /agentops/instrumentation/providers/openai/attributes/tools.py +0 -0
  153. {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/licenses/LICENSE +0 -0
@@ -1,468 +0,0 @@
1
- """
2
- Unit tests for concurrent instrumentation and context propagation.
3
-
4
- This module tests the behavior of OpenTelemetry spans when using concurrent.futures.ThreadPoolExecutor,
5
- specifically testing context propagation across thread boundaries.
6
- """
7
-
8
- import concurrent.futures
9
- import time
10
- import unittest
11
- from unittest.mock import patch
12
- import threading
13
-
14
- from opentelemetry import context, trace
15
- from opentelemetry.sdk.trace import TracerProvider
16
- from opentelemetry.sdk.trace.export import SimpleSpanProcessor
17
- from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
18
-
19
- from agentops.sdk.processors import InternalSpanProcessor
20
-
21
-
22
class IsolatedInstrumentationTester:
    """
    Lightweight span-testing harness that leaves global tracing untouched.

    Builds a private ``TracerProvider`` wired to an in-memory exporter, so
    tests can inspect finished spans without registering a global provider
    or shutting down any shared tracing core. Safer to run alongside other
    tests than a full instrumentation tester.
    """

    def __init__(self):
        """Create an isolated exporter/processor/provider chain and a tracer."""
        self.memory_exporter = InMemorySpanExporter()
        self.span_processor = SimpleSpanProcessor(self.memory_exporter)
        self.tracer_provider = TracerProvider()
        self.tracer_provider.add_span_processor(self.span_processor)
        # Deliberately NOT installed as the global provider — keeps isolation.
        self.tracer = self.tracer_provider.get_tracer(__name__)

    def get_tracer(self):
        """Return the tracer bound to the isolated provider."""
        return self.tracer

    def clear_spans(self):
        """Flush any pending spans, then drop everything the exporter holds."""
        self.span_processor.force_flush()
        self.memory_exporter.clear()

    def get_finished_spans(self):
        """Flush pending spans and return the finished ones as a list."""
        self.span_processor.force_flush()
        return list(self.memory_exporter.get_finished_spans())
54
-
55
-
56
class TestConcurrentInstrumentation(unittest.TestCase):
    """Tests for concurrent instrumentation and context propagation.

    Exercises OpenTelemetry span behavior with ``ThreadPoolExecutor``:
    spans started in worker threads do NOT inherit the caller's trace
    unless the OTel context is explicitly attached in the worker.
    """

    def setUp(self):
        """Set up test environment with isolated instrumentation tester."""
        self.tester = IsolatedInstrumentationTester()
        self.tracer = self.tester.get_tracer()

    def tearDown(self):
        """Clean up test environment without affecting global state."""
        # Only clear our isolated spans
        self.tester.clear_spans()

    def _create_simple_span(self, name: str, sleep_duration: float = 0.01) -> int:
        """Helper to create a simple root span and return its integer trace_id."""
        with self.tracer.start_as_current_span(name) as span:
            time.sleep(sleep_duration)  # Simulate work
            return span.get_span_context().trace_id

    def _create_nested_spans(self, parent_name: str, child_name: str) -> tuple:
        """Helper to create nested spans and return their (parent, child) trace_ids."""
        with self.tracer.start_as_current_span(parent_name) as parent_span:
            parent_trace_id = parent_span.get_span_context().trace_id
            time.sleep(0.01)

            with self.tracer.start_as_current_span(child_name) as child_span:
                child_trace_id = child_span.get_span_context().trace_id
                time.sleep(0.01)

        return parent_trace_id, child_trace_id

    def test_sequential_spans_same_trace(self):
        """Sequential top-level spans in the same thread are independent root traces.

        NOTE(review): despite the method name, the assertions below check
        independence — each span created with no active parent becomes its
        own root span.
        """
        self._create_simple_span("span1")
        self._create_simple_span("span2")

        # In sequential execution, spans should be independent (different traces)
        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 2)

        # Each span should be a root span (no parent)
        for span in spans:
            self.assertIsNone(span.parent)

    def test_nested_spans_same_trace(self):
        """Test that nested spans share the same trace."""
        parent_trace_id, child_trace_id = self._create_nested_spans("parent", "child")

        # Nested spans should share the same trace
        self.assertEqual(parent_trace_id, child_trace_id)

        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 2)

        # Find parent and child spans by name
        parent_spans = [s for s in spans if s.name == "parent"]
        child_spans = [s for s in spans if s.name == "child"]

        self.assertEqual(len(parent_spans), 1)
        self.assertEqual(len(child_spans), 1)

        parent_span = parent_spans[0]
        child_span = child_spans[0]

        # Child should have parent as its parent
        self.assertEqual(child_span.parent.span_id, parent_span.context.span_id)

    def test_threadpool_without_context_propagation_creates_separate_traces(self):
        """Test that ThreadPoolExecutor without context propagation creates separate traces."""

        def worker_task(task_id: str) -> dict:
            """Worker task that creates a span without context propagation."""
            with self.tracer.start_as_current_span(f"worker_task_{task_id}") as span:
                time.sleep(0.01)  # Simulate work
                return {
                    "task_id": task_id,
                    "trace_id": span.get_span_context().trace_id,
                    "span_id": span.get_span_context().span_id,
                    "thread_id": threading.get_ident(),
                }

        # Create a parent span
        with self.tracer.start_as_current_span("main_task") as main_span:
            main_trace_id = main_span.get_span_context().trace_id

            # Execute tasks in thread pool WITHOUT context propagation
            with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
                futures = [executor.submit(worker_task, f"task_{i}") for i in range(3)]
                results = [future.result() for future in concurrent.futures.as_completed(futures)]

        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 4)  # 1 main + 3 worker spans

        # Extract trace IDs from results
        worker_trace_ids = [result["trace_id"] for result in results]

        # Each worker should have a different trace ID from the main span
        for worker_trace_id in worker_trace_ids:
            self.assertNotEqual(
                worker_trace_id,
                main_trace_id,
                "Worker span should NOT share trace with main span (no context propagation)",
            )

        # Worker spans should also be different from each other (separate traces)
        unique_trace_ids = set(worker_trace_ids)
        self.assertEqual(len(unique_trace_ids), 3, "Each worker should create a separate trace")

        # Verify that worker spans have no parent (they are root spans)
        worker_spans = [s for s in spans if s.name.startswith("worker_task_")]
        for worker_span in worker_spans:
            self.assertIsNone(worker_span.parent, "Worker spans should be root spans without parent")

    def test_threadpool_with_manual_context_propagation_shares_trace(self):
        """Test that ThreadPoolExecutor with manual context propagation shares the same trace."""

        def worker_task_with_context(task_info: tuple) -> dict:
            """Worker task that restores context before creating spans."""
            task_id, ctx = task_info

            # Restore the context in this thread
            token = context.attach(ctx)
            try:
                with self.tracer.start_as_current_span(f"worker_task_{task_id}") as span:
                    time.sleep(0.01)  # Simulate work
                    return {
                        "task_id": task_id,
                        "trace_id": span.get_span_context().trace_id,
                        "span_id": span.get_span_context().span_id,
                        "thread_id": threading.get_ident(),
                        "parent_span_id": span.parent.span_id if span.parent else None,
                    }
            finally:
                context.detach(token)

        # Create a parent span and capture its context
        with self.tracer.start_as_current_span("main_task") as main_span:
            main_trace_id = main_span.get_span_context().trace_id
            main_span_id = main_span.get_span_context().span_id
            current_context = context.get_current()

            # Execute tasks in thread pool WITH manual context propagation
            with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
                futures = [executor.submit(worker_task_with_context, (f"task_{i}", current_context)) for i in range(3)]
                results = [future.result() for future in concurrent.futures.as_completed(futures)]

        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 4)  # 1 main + 3 worker spans

        # Extract trace IDs from results
        worker_trace_ids = [result["trace_id"] for result in results]

        # All workers should share the same trace ID as the main span
        for result in results:
            self.assertEqual(
                result["trace_id"], main_trace_id, f"Worker task {result['task_id']} should share trace with main span"
            )
            self.assertEqual(
                result["parent_span_id"],
                main_span_id,
                f"Worker task {result['task_id']} should have main span as parent",
            )

        # All worker trace IDs should be the same
        unique_trace_ids = set(worker_trace_ids)
        self.assertEqual(len(unique_trace_ids), 1, "All workers should share the same trace")

    def test_threadpool_with_contextvars_copy_context_shares_trace(self):
        """Test ThreadPoolExecutor with proper context propagation using attach/detach.

        NOTE(review): despite the name, this uses OTel ``context.attach``/
        ``detach`` (not ``contextvars.copy_context``) — same mechanism as the
        previous test, with the context captured before submitting work.
        """

        def worker_task_with_context_management(args) -> dict:
            """Worker task that manages context properly."""
            task_id, ctx = args
            # Use attach/detach for better control over context
            token = context.attach(ctx)
            try:
                with self.tracer.start_as_current_span(f"worker_task_{task_id}") as span:
                    time.sleep(0.01)  # Simulate work
                    return {
                        "task_id": task_id,
                        "trace_id": span.get_span_context().trace_id,
                        "span_id": span.get_span_context().span_id,
                        "thread_id": threading.get_ident(),
                        "parent_span_id": span.parent.span_id if span.parent else None,
                    }
            finally:
                context.detach(token)

        # Create a parent span and capture context properly
        with self.tracer.start_as_current_span("main_task") as main_span:
            main_trace_id = main_span.get_span_context().trace_id
            main_span_id = main_span.get_span_context().span_id

            # Get current context to propagate
            current_context = context.get_current()

            with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
                futures = [
                    executor.submit(worker_task_with_context_management, (f"task_{i}", current_context))
                    for i in range(3)
                ]
                results = [future.result() for future in concurrent.futures.as_completed(futures)]

        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 4)  # 1 main + 3 worker spans

        # All workers should share the same trace ID as the main span
        for result in results:
            self.assertEqual(
                result["trace_id"], main_trace_id, f"Worker task {result['task_id']} should share trace with main span"
            )
            self.assertEqual(
                result["parent_span_id"],
                main_span_id,
                f"Worker task {result['task_id']} should have main span as parent",
            )

    def test_mixed_sequential_and_concurrent_spans(self):
        """Test a complex scenario with both sequential and concurrent spans."""
        results = []

        # Sequential span 1
        trace_id1 = self._create_simple_span("sequential_1")
        results.append(("sequential_1", trace_id1))

        # Concurrent spans with context propagation
        with self.tracer.start_as_current_span("concurrent_parent") as parent_span:
            parent_trace_id = parent_span.get_span_context().trace_id
            results.append(("concurrent_parent", parent_trace_id))

            def worker_task_with_context(args) -> tuple:
                task_id, ctx = args
                token = context.attach(ctx)
                try:
                    with self.tracer.start_as_current_span(f"concurrent_{task_id}") as span:
                        time.sleep(0.01)
                        return (f"concurrent_{task_id}", span.get_span_context().trace_id)
                finally:
                    context.detach(token)

            current_context = context.get_current()
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                futures = [executor.submit(worker_task_with_context, (f"task_{i}", current_context)) for i in range(2)]
                concurrent_results = [future.result() for future in concurrent.futures.as_completed(futures)]
                results.extend(concurrent_results)

        # Sequential span 2
        trace_id2 = self._create_simple_span("sequential_2")
        results.append(("sequential_2", trace_id2))

        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 5)  # 2 sequential + 1 parent + 2 concurrent

        # Verify trace relationships
        sequential_spans = [r for r in results if r[0].startswith("sequential_")]
        concurrent_spans = [r for r in results if r[0].startswith("concurrent_")]

        # Sequential spans should have different traces
        sequential_trace_ids = [r[1] for r in sequential_spans]
        self.assertEqual(len(set(sequential_trace_ids)), 2, "Sequential spans should have different traces")

        # Concurrent spans should share the same trace (includes the parent entry)
        concurrent_trace_ids = [r[1] for r in concurrent_spans]
        unique_concurrent_traces = set(concurrent_trace_ids)
        self.assertEqual(len(unique_concurrent_traces), 1, "All concurrent spans should share the same trace")

    def test_error_handling_in_concurrent_spans(self):
        """Test error handling and span status in concurrent execution."""

        def worker_task_with_error_and_context(args) -> dict:
            """Worker task that may raise an error."""
            task_id, ctx = args
            token = context.attach(ctx)
            try:
                with self.tracer.start_as_current_span(f"worker_task_{task_id}") as span:
                    if task_id == "error_task":
                        span.set_status(trace.Status(trace.StatusCode.ERROR, "Simulated error"))
                        raise ValueError("Simulated error")

                    time.sleep(0.01)
                    return {"task_id": task_id, "trace_id": span.get_span_context().trace_id, "status": "success"}
            finally:
                context.detach(token)

        with self.tracer.start_as_current_span("main_task") as main_span:
            main_trace_id = main_span.get_span_context().trace_id
            current_context = context.get_current()

            with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
                futures = [
                    executor.submit(worker_task_with_error_and_context, ("success_task_1", current_context)),
                    executor.submit(worker_task_with_error_and_context, ("error_task", current_context)),
                    executor.submit(worker_task_with_error_and_context, ("success_task_2", current_context)),
                ]

                results = []
                errors = []
                for future in concurrent.futures.as_completed(futures):
                    try:
                        results.append(future.result())
                    except Exception as e:
                        errors.append(str(e))

        spans = self.tester.get_finished_spans()
        self.assertEqual(len(spans), 4)  # 1 main + 3 worker spans

        # Should have 2 successful results and 1 error
        self.assertEqual(len(results), 2)
        self.assertEqual(len(errors), 1)
        self.assertIn("Simulated error", errors[0])

        # All spans should share the same trace
        for result in results:
            self.assertEqual(result["trace_id"], main_trace_id)

        # Find the error span and verify its status
        error_spans = [s for s in spans if s.name == "worker_task_error_task"]
        self.assertEqual(len(error_spans), 1)

        error_span = error_spans[0]
        self.assertEqual(error_span.status.status_code, trace.StatusCode.ERROR)

    @patch("agentops.sdk.processors.logger")
    def test_internal_span_processor_with_concurrent_spans(self, mock_logger):
        """Test InternalSpanProcessor behavior with concurrent spans."""
        # Create an InternalSpanProcessor to test
        processor = InternalSpanProcessor()

        # Add the processor to the tracer provider
        self.tester.tracer_provider.add_span_processor(processor)

        try:

            def worker_task_with_context(args) -> str:
                task_id, ctx = args
                token = context.attach(ctx)
                try:
                    with self.tracer.start_as_current_span(f"openai.chat.completion_{task_id}"):
                        time.sleep(0.01)
                        return f"result_{task_id}"
                finally:
                    context.detach(token)

            # Execute concurrent tasks
            with self.tracer.start_as_current_span("main_session"):
                current_context = context.get_current()

                with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                    futures = [
                        executor.submit(worker_task_with_context, (f"task_{i}", current_context)) for i in range(2)
                    ]
                    results = [future.result() for future in concurrent.futures.as_completed(futures)]

            # Verify results
            self.assertEqual(len(results), 2)

            # Verify that debug logging would have been called
            # (The processor tracks root spans and logs when they end)
            self.assertTrue(mock_logger.debug.called)

        finally:
            # Clean up the processor to avoid affecting other tests
            try:
                processor.shutdown()
            except Exception:
                pass

    def test_performance_impact_of_context_propagation(self):
        """Test the performance impact of different context propagation methods."""
        import timeit

        def without_context_propagation():
            def worker():
                with self.tracer.start_as_current_span("test_span"):
                    time.sleep(0.001)

            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                futures = [executor.submit(worker) for _ in range(4)]
                [f.result() for f in futures]

        def with_context_propagation():
            def worker_with_context(ctx):
                token = context.attach(ctx)
                try:
                    with self.tracer.start_as_current_span("test_span"):
                        time.sleep(0.001)
                finally:
                    context.detach(token)

            current_context = context.get_current()
            with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
                futures = [executor.submit(worker_with_context, current_context) for _ in range(4)]
                [f.result() for f in futures]

        # Clear spans before performance test
        self.tester.clear_spans()

        # Measure timing (just to ensure context propagation doesn't break anything)
        time_without = timeit.timeit(without_context_propagation, number=1)
        self.tester.clear_spans()

        time_with = timeit.timeit(with_context_propagation, number=1)
        self.tester.clear_spans()

        # Context propagation should not cause significant performance degradation
        # This is a sanity check rather than a strict performance requirement
        self.assertGreater(
            time_with * 10, time_without, "Context propagation should not cause extreme performance degradation"
        )
465
-
466
-
467
- if __name__ == "__main__":
468
- unittest.main()