ioa-observe-sdk 1.0.11__tar.gz → 1.0.13__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/PKG-INFO +2 -51
  2. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/decorators/base.py +62 -70
  3. ioa_observe_sdk-1.0.13/ioa_observe/sdk/decorators/helpers.py +66 -0
  4. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/decorators/util.py +123 -0
  5. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/instrumentations/slim.py +90 -26
  6. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe_sdk.egg-info/PKG-INFO +2 -51
  7. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe_sdk.egg-info/SOURCES.txt +1 -0
  8. ioa_observe_sdk-1.0.13/ioa_observe_sdk.egg-info/requires.txt +28 -0
  9. ioa_observe_sdk-1.0.13/pyproject.toml +45 -0
  10. ioa_observe_sdk-1.0.11/ioa_observe_sdk.egg-info/requires.txt +0 -77
  11. ioa_observe_sdk-1.0.11/pyproject.toml +0 -93
  12. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/LICENSE.md +0 -0
  13. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/README.md +0 -0
  14. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/__init__.py +0 -0
  15. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/__init__.py +0 -0
  16. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/client/__init__.py +0 -0
  17. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/client/client.py +0 -0
  18. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/client/http.py +0 -0
  19. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/config/__init__.py +0 -0
  20. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/connectors/__init__.py +0 -0
  21. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/connectors/slim.py +0 -0
  22. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/decorators/__init__.py +0 -0
  23. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/instrumentations/__init__.py +0 -0
  24. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/instrumentations/a2a.py +0 -0
  25. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/instruments.py +0 -0
  26. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/logging/__init__.py +0 -0
  27. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/logging/logging.py +0 -0
  28. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/__init__.py +0 -0
  29. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agent.py +0 -0
  30. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/__init__.py +0 -0
  31. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/agent_connections.py +0 -0
  32. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/availability.py +0 -0
  33. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/heuristics.py +0 -0
  34. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/recovery_tracker.py +0 -0
  35. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/tool_call_tracker.py +0 -0
  36. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/agents/tracker.py +0 -0
  37. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/metrics/metrics.py +0 -0
  38. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/telemetry.py +0 -0
  39. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/tracing/__init__.py +0 -0
  40. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/tracing/content_allow_list.py +0 -0
  41. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/tracing/context_manager.py +0 -0
  42. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/tracing/context_utils.py +0 -0
  43. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/tracing/manual.py +0 -0
  44. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/tracing/tracing.py +0 -0
  45. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/utils/__init__.py +0 -0
  46. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/utils/const.py +0 -0
  47. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/utils/in_memory_span_exporter.py +0 -0
  48. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/utils/json_encoder.py +0 -0
  49. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/utils/package_check.py +0 -0
  50. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/version.py +0 -0
  51. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe_sdk.egg-info/dependency_links.txt +0 -0
  52. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe_sdk.egg-info/top_level.txt +0 -0
  53. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/setup.cfg +0 -0
  54. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/tests/test_client.py +0 -0
  55. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/tests/test_instrumentor.py +0 -0
  56. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/tests/test_manual_instrumentation.py +0 -0
  57. {ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/tests/test_version.py +0 -0
{ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/PKG-INFO
@@ -1,38 +1,12 @@
  Metadata-Version: 2.4
  Name: ioa-observe-sdk
- Version: 1.0.11
+ Version: 1.0.13
  Summary: IOA Observability SDK
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE.md
- Requires-Dist: aiohappyeyeballs>=2.4.8
- Requires-Dist: aiohttp>=3.11.18
- Requires-Dist: aiosignal>=1.3.2
- Requires-Dist: annotated-types>=0.7.0
- Requires-Dist: anyio>=4.8.0
- Requires-Dist: async-timeout>=4.0.3
- Requires-Dist: attrs>=25.1.0
- Requires-Dist: backoff>=2.2.1
- Requires-Dist: certifi>=2025.1.31
- Requires-Dist: charset-normalizer>=3.4.1
  Requires-Dist: colorama==0.4.6
- Requires-Dist: Deprecated>=1.2.18
- Requires-Dist: distro>=1.9.0
- Requires-Dist: exceptiongroup>=1.2.2
- Requires-Dist: frozenlist>=1.5.0
- Requires-Dist: googleapis-common-protos>=1.69.0
- Requires-Dist: grpcio>=1.70.0
- Requires-Dist: h11>=0.16.0
- Requires-Dist: httpcore>=1.0.9
- Requires-Dist: httpx>=0.28.1
- Requires-Dist: idna>=3.10
- Requires-Dist: importlib_metadata>=8.5.0
- Requires-Dist: Jinja2>=3.1.6
- Requires-Dist: jiter>=0.8.2
- Requires-Dist: MarkupSafe>=3.0.2
- Requires-Dist: monotonic>=1.6
- Requires-Dist: multidict>=6.1.0
- Requires-Dist: openai>=1.75.0
+ Requires-Dist: requests>=2.32.3
  Requires-Dist: opentelemetry-api==1.33.1
  Requires-Dist: opentelemetry-distro
  Requires-Dist: opentelemetry-exporter-otlp==1.33.1
@@ -53,33 +27,10 @@ Requires-Dist: opentelemetry-sdk==1.33.1
  Requires-Dist: opentelemetry-semantic-conventions==0.54b1
  Requires-Dist: opentelemetry-semantic-conventions-ai==0.4.9
  Requires-Dist: opentelemetry-util-http==0.54b1
- Requires-Dist: packaging>=24.2
- Requires-Dist: propcache>=0.3.0
- Requires-Dist: protobuf>=5.29.3
- Requires-Dist: pydantic>=2.10.6
- Requires-Dist: pydantic_core>=2.27.2
- Requires-Dist: python-dateutil>=2.9.0.post0
- Requires-Dist: regex==2024.11.6
- Requires-Dist: requests>=2.32.3
- Requires-Dist: six>=1.17.0
- Requires-Dist: sniffio>=1.3.1
- Requires-Dist: tenacity>=9.0.0
- Requires-Dist: tiktoken>=0.9.0
- Requires-Dist: tqdm>=4.67.1
- Requires-Dist: typing_extensions>=4.12.2
- Requires-Dist: urllib3>=2.3.0
- Requires-Dist: wrapt>=1.17.2
- Requires-Dist: yarl>=1.18.3
- Requires-Dist: zipp>=3.21.0
  Requires-Dist: langgraph>=0.3.2
  Requires-Dist: langchain>=0.3.19
  Requires-Dist: langchain-openai>=0.3.8
- Requires-Dist: langchain-community>=0.3.25
  Requires-Dist: llama-index>=0.12.34
- Requires-Dist: opentelemetry-instrumentation-requests
- Requires-Dist: opentelemetry-instrumentation-transformers>=0.40.8
- Requires-Dist: opentelemetry-instrumentation-crewai>=0.40.8
- Requires-Dist: llama-index-utils-workflow>=0.3.1
  Requires-Dist: pytest
  Requires-Dist: pytest-vcr
  Dynamic: license-file
{ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/decorators/base.py
@@ -10,13 +10,20 @@ import types
  from typing import Optional, TypeVar, Callable, Awaitable, Any, cast, Union
  import inspect

+ from ioa_observe.sdk.decorators.helpers import (
+     _is_async_method,
+     _get_original_function_name,
+     _is_async_generator,
+ )
+
+
  from langgraph.graph.state import CompiledStateGraph
  from opentelemetry import trace
  from opentelemetry import context as context_api
  from pydantic_core import PydanticSerializationError
  from typing_extensions import ParamSpec

- from ioa_observe.sdk.decorators.util import determine_workflow_type
+ from ioa_observe.sdk.decorators.util import determine_workflow_type, _serialize_object
  from ioa_observe.sdk.metrics.agents.availability import agent_availability
  from ioa_observe.sdk.metrics.agents.recovery_tracker import agent_recovery_tracker
  from ioa_observe.sdk.metrics.agents.tool_call_tracker import tool_call_tracker
@@ -79,14 +86,6 @@ def _should_send_prompts():
      ).lower() == "true" or context_api.get_value("override_enable_content_tracing")


- # Unified Decorators : handles both sync and async operations
-
-
- def _is_async_method(fn):
-     # check if co-routine function or async generator( example : using async & yield)
-     return inspect.iscoroutinefunction(fn) or inspect.isasyncgenfunction(fn)
-
-
  def _setup_span(
      entity_name,
      tlp_span_kind: Optional[ObserveSpanKindValues] = None,
@@ -164,24 +163,24 @@ def _handle_span_input(span, args, kwargs, cls=None):
      # Safely convert args
      for arg in args:
          try:
-             # Test if the object can be JSON serialized
+             # Check if the object can be JSON serialized directly
              json.dumps(arg)
              safe_args.append(arg)
          except (TypeError, ValueError, PydanticSerializationError):
-             # If it can't be serialized, use string representation
-             safe_args.append(str(arg))
+             # Use intelligent serialization
+             safe_args.append(_serialize_object(arg))

      # Safely convert kwargs
      for key, value in kwargs.items():
          try:
-             # Test if the object can be JSON serialized
+             # Test if the object can be JSON serialized directly
              json.dumps(value)
              safe_kwargs[key] = value
          except (TypeError, ValueError, PydanticSerializationError):
-             # If it can't be serialized, use string representation
-             safe_kwargs[key] = str(value)
+             # Use intelligent serialization
+             safe_kwargs[key] = _serialize_object(value)

-     # Create the JSON without custom encoder to avoid recursion
+     # Create the JSON
      json_input = json.dumps({"args": safe_args, "kwargs": safe_kwargs})

      if _is_json_size_valid(json_input):
@@ -212,62 +211,55 @@ def _handle_span_output(span, tlp_span_kind, res, cls=None):
          # end_span.end() # end the span immediately
          set_agent_id_event("") # reset the agent id event
          # Add agent interpretation scoring
-         if (
-             tlp_span_kind == ObserveSpanKindValues.AGENT
-             or tlp_span_kind == ObserveSpanKindValues.WORKFLOW
-         ):
-             current_agent = span.attributes.get("agent_id", "unknown")
-
-             # Determine next agent from response (if Command object with goto)
-             next_agent = None
-             if isinstance(res, dict) and "goto" in res:
-                 next_agent = res["goto"]
-                 # Check if there's an error flag in the response
-                 success = not (
-                     res.get("error", False) or res.get("goto") == "__end__"
+         if (
+             tlp_span_kind == ObserveSpanKindValues.AGENT
+             or tlp_span_kind == ObserveSpanKindValues.WORKFLOW
+         ):
+             current_agent = span.attributes.get("agent_id", "unknown")
+
+             # Determine next agent from response (if Command object with goto)
+             next_agent = None
+             if isinstance(res, dict) and "goto" in res:
+                 next_agent = res["goto"]
+                 # Check if there's an error flag in the response
+                 success = not (res.get("error", False) or res.get("goto") == "__end__")
+
+             # If we have a chain of communication, compute interpretation score
+             if next_agent and next_agent != "__end__":
+                 score = compute_agent_interpretation_score(
+                     sender_agent=current_agent,
+                     receiver_agent=next_agent,
+                     data=res,
+                 )
+                 span.set_attribute("gen_ai.ioa.agent.interpretation_score", score)
+                 reliability = connection_tracker.record_connection(
+                     sender=current_agent, receiver=next_agent, success=success
+                 )
+                 span.set_attribute(
+                     "gen_ai.ioa.agent.connection_reliability", reliability
                  )

-             # If we have a chain of communication, compute interpretation score
-             if next_agent and next_agent != "__end__":
-                 score = compute_agent_interpretation_score(
-                     sender_agent=current_agent,
-                     receiver_agent=next_agent,
-                     data=res,
-                 )
-                 span.set_attribute(
-                     "gen_ai.ioa.agent.interpretation_score", score
-                 )
-                 reliability = connection_tracker.record_connection(
-                     sender=current_agent, receiver=next_agent, success=success
-                 )
-                 span.set_attribute(
-                     "gen_ai.ioa.agent.connection_reliability", reliability
-                 )
-
-             if _should_send_prompts():
+         if _should_send_prompts():
+             try:
+                 # Try direct JSON serialization first
+                 json_output = json.dumps(res)
+             except (TypeError, PydanticSerializationError, ValueError):
+                 # Use intelligent serialization for complex objects
                  try:
-                     # Try direct JSON serialization first (without custom encoder)
-                     json_output = json.dumps(res)
-                 except (TypeError, PydanticSerializationError, ValueError):
-                     # Fallback for objects that can't be directly serialized
-                     try:
-                         # Try to serialize a string representation
-                         safe_output = str(res)
-                         json_output = json.dumps(
-                             {"__str_representation__": safe_output}
-                         )
-                     except Exception:
-                         # If all serialization fails, skip output attribute
-                         json_output = None
+                     serialized_res = _serialize_object(res)
+                     json_output = json.dumps(serialized_res)
+                 except Exception:
+                     # If all serialization fails, skip output attribute
+                     json_output = None

-                 if json_output and _is_json_size_valid(json_output):
-                     span.set_attribute(
-                         OBSERVE_ENTITY_OUTPUT,
-                         json_output,
-                     )
-                     TracerWrapper().span_processor_on_ending(
-                         span
-                     ) # record the response latency
+             if json_output and _is_json_size_valid(json_output):
+                 span.set_attribute(
+                     OBSERVE_ENTITY_OUTPUT,
+                     json_output,
+                 )
+                 TracerWrapper().span_processor_on_ending(
+                     span
+                 ) # record the response latency
      except Exception as e:
          print(f"Warning: Failed to serialize output for span: {e}")
          Telemetry().log_exception(e)
@@ -301,9 +293,9 @@ def entity_method(
  ) -> Callable[[F], F]:
      def decorate(fn: F) -> F:
          is_async = _is_async_method(fn)
-         entity_name = name or fn.__qualname__
+         entity_name = name or _get_original_function_name(fn)
          if is_async:
-             if inspect.isasyncgenfunction(fn):
+             if _is_async_generator(fn):

                  @wraps(fn)
                  async def async_gen_wrap(*args: Any, **kwargs: Any) -> Any:
ioa_observe_sdk-1.0.13/ioa_observe/sdk/decorators/helpers.py (new file)
@@ -0,0 +1,66 @@
+ # Copyright AGNTCY Contributors (https://github.com/agntcy)
+ # SPDX-License-Identifier: Apache-2.0
+
+ import inspect
+
+
+ def _is_async_method(fn):
+     # check if co-routine function or async generator( example : using async & yield)
+     if inspect.iscoroutinefunction(fn) or inspect.isasyncgenfunction(fn):
+         return True
+
+     # Check if this is a wrapped function that might hide the original async nature
+     # Look for common wrapper attributes that might contain the original function
+     for attr_name in ["__wrapped__", "func", "_func", "function"]:
+         if hasattr(fn, attr_name):
+             wrapped_fn = getattr(fn, attr_name)
+             if wrapped_fn and callable(wrapped_fn):
+                 if inspect.iscoroutinefunction(
+                     wrapped_fn
+                 ) or inspect.isasyncgenfunction(wrapped_fn):
+                     return True
+                 # Recursively check in case of multiple levels of wrapping
+                 if _is_async_method(wrapped_fn):
+                     return True
+
+     return False
+
+
+ def _is_async_generator(fn):
+     """Check if function is an async generator, looking through wrapped functions"""
+     if inspect.isasyncgenfunction(fn):
+         return True
+
+     # Check if this is a wrapped function that might hide the original async generator nature
+     for attr_name in ["__wrapped__", "func", "_func", "function"]:
+         if hasattr(fn, attr_name):
+             wrapped_fn = getattr(fn, attr_name)
+             if wrapped_fn and callable(wrapped_fn):
+                 if inspect.isasyncgenfunction(wrapped_fn):
+                     return True
+                 # Recursively check in case of multiple levels of wrapping
+                 if _is_async_generator(wrapped_fn):
+                     return True
+
+     return False
+
+
+ def _get_original_function_name(fn):
+     """Extract the original function name from potentially wrapped functions"""
+     if hasattr(fn, "__qualname__") and fn.__qualname__:
+         return fn.__qualname__
+
+     # Look for the original function in common wrapper attributes
+     for attr_name in ["__wrapped__", "func", "_func", "function"]:
+         if hasattr(fn, attr_name):
+             wrapped_fn = getattr(fn, attr_name)
+             if wrapped_fn and callable(wrapped_fn):
+                 if hasattr(wrapped_fn, "__qualname__") and wrapped_fn.__qualname__:
+                     return wrapped_fn.__qualname__
+                 # Recursively check in case of multiple levels of wrapping
+                 result = _get_original_function_name(wrapped_fn)
+                 if result:
+                     return result
+
+     # Fallback to function name if qualname is not available
+     return getattr(fn, "__name__", "unknown_function")
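Note: the module above is self-contained, so its behavior is easy to check in isolation. A minimal usage sketch (not part of this diff; RetryingWrapper and fetch_data are hypothetical stand-ins for a wrapper object that stores the original callable on a func attribute):

# Sketch only: shows how the new helpers see through a wrapper object
# that hides an async callable behind a `func` attribute.
import asyncio
import inspect

from ioa_observe.sdk.decorators.helpers import _is_async_method, _is_async_generator


async def fetch_data(query: str) -> str:
    await asyncio.sleep(0)
    return f"result for {query}"


class RetryingWrapper:
    """Hypothetical wrapper that keeps the original coroutine function on `.func`."""

    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)


wrapped = RetryingWrapper(fetch_data)

# Plain inspect does not recognize the wrapper object as async...
print(inspect.iscoroutinefunction(wrapped))  # False
# ...while the helper recurses into `.func` and finds the coroutine function.
print(_is_async_method(wrapped))             # True
print(_is_async_generator(wrapped))          # False, fetch_data is not an async generator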
{ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/decorators/util.py
@@ -13,6 +13,129 @@ from llama_index.core.workflow.utils import (
  )


+ def _serialize_object(obj, max_depth=5, current_depth=0):
+     """
+     Intelligently serialize an object to a more meaningful representation
+     """
+     if current_depth > max_depth:
+         return f"<{type(obj).__name__}:max_depth_reached>"
+
+     # Handle basic JSON-serializable types
+     if obj is None or isinstance(obj, (bool, int, float, str)):
+         return obj
+
+     # Handle lists and tuples
+     if isinstance(obj, (list, tuple)):
+         try:
+             return [
+                 _serialize_object(item, max_depth, current_depth + 1)
+                 for item in obj[:10]
+             ]  # Limit to first 10 items
+         except Exception:
+             return f"<{type(obj).__name__}:length={len(obj)}>"
+
+     # Handle dictionaries
+     if isinstance(obj, dict):
+         try:
+             serialized = {}
+             for key, value in list(obj.items())[:10]:  # Limit to first 10 items
+                 serialized[str(key)] = _serialize_object(
+                     value, max_depth, current_depth + 1
+                 )
+             return serialized
+         except Exception:
+             return f"<dict:keys={len(obj)}>"
+
+     # Handle common object types with meaningful attributes
+     try:
+         # Check class attributes first
+         class_attrs = {}
+         for attr_name in dir(type(obj)):
+             if (
+                 not attr_name.startswith("_")
+                 and not callable(getattr(type(obj), attr_name, None))
+                 and hasattr(obj, attr_name)
+             ):
+                 try:
+                     attr_value = getattr(obj, attr_name)
+                     if not callable(attr_value):
+                         class_attrs[attr_name] = _serialize_object(
+                             attr_value, max_depth, current_depth + 1
+                         )
+                         if len(class_attrs) >= 5:  # Limit attributes
+                             break
+                 except Exception:
+                     continue
+
+         # Check if object has a __dict__ with interesting attributes
+         instance_attrs = {}
+         if hasattr(obj, "__dict__"):
+             obj_dict = obj.__dict__
+             if obj_dict:
+                 # Extract meaningful attributes (skip private ones and callables)
+                 for key, value in obj_dict.items():
+                     if not key.startswith("_") and not callable(value):
+                         try:
+                             instance_attrs[key] = _serialize_object(
+                                 value, max_depth, current_depth + 1
+                             )
+                             if len(instance_attrs) >= 5:  # Limit attributes
+                                 break
+                         except Exception:
+                             continue
+
+         # Combine class and instance attributes
+         all_attrs = {**class_attrs, **instance_attrs}
+
+         if all_attrs:
+             return {
+                 "__class__": type(obj).__name__,
+                 "__module__": getattr(type(obj), "__module__", "unknown"),
+                 "attributes": all_attrs,
+             }
+
+         # Special handling for specific types
+         if hasattr(obj, "message") and hasattr(obj.message, "parts"):
+             # Handle RequestContext-like objects
+             try:
+                 parts_content = []
+                 for part in obj.message.parts:
+                     if hasattr(part, "root") and hasattr(part.root, "text"):
+                         parts_content.append(part.root.text)
+                 return {
+                     "__class__": type(obj).__name__,
+                     "message_content": parts_content,
+                 }
+             except Exception:
+                 pass
+
+         # Check for common readable attributes
+         for attr in ["name", "id", "type", "value", "content", "text", "data"]:
+             if hasattr(obj, attr):
+                 try:
+                     attr_value = getattr(obj, attr)
+                     if not callable(attr_value):
+                         return {
+                             "__class__": type(obj).__name__,
+                             attr: _serialize_object(
+                                 attr_value, max_depth, current_depth + 1
+                             ),
+                         }
+                 except Exception:
+                     continue
+
+         # Fallback to class information
+         return {
+             "__class__": type(obj).__name__,
+             "__module__": getattr(type(obj), "__module__", "unknown"),
+             "__repr__": str(obj)[:100] + ("..." if len(str(obj)) > 100 else ""),
+         }
+
+     except Exception:
+         # Final fallback
+         return f"<{type(obj).__name__}:serialization_failed>"
+
+
  def determine_workflow_type(workflow_obj: Any) -> Union[None, dict]:
      """Determines the workflow type and generates appropriate topology."""
      # Check if it's a dict mapping agent roles to agent names
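Note: a minimal sketch (not part of this diff) of the fallback path the updated decorators now take: when json.dumps rejects a value, _serialize_object produces a JSON-friendly summary instead of a bare str(). OrderRequest is a hypothetical example class:

import json

from ioa_observe.sdk.decorators.util import _serialize_object


class OrderRequest:
    def __init__(self, order_id, items):
        self.order_id = order_id
        self.items = items
        self._internal_cache = object()  # private and non-serializable, skipped


req = OrderRequest("ord-42", ["widget", "gadget"])

# Direct serialization fails; this is the branch where _handle_span_input/_handle_span_output
# now fall back to _serialize_object instead of str(res).
try:
    json.dumps(req)
except TypeError:
    pass

print(_serialize_object(req))
# {'__class__': 'OrderRequest', '__module__': '__main__',
#  'attributes': {'order_id': 'ord-42', 'items': ['widget', 'gadget']}}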
{ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe/sdk/instrumentations/slim.py
@@ -66,26 +66,71 @@ class SLIMInstrumentor(BaseInstrumentor):
          if traceparent and session_id:
              baggage.set_baggage(f"execution.{traceparent}", session_id)

-         # Process message payload
+         # Process message payload and preserve original structure
          if isinstance(message, bytes):
              try:
                  decoded_message = message.decode("utf-8")
                  try:
-                     json.loads(decoded_message)
-                     payload = decoded_message
+                     # If it's already a JSON structure, preserve it
+                     original_message = json.loads(decoded_message)
+                     if isinstance(original_message, dict):
+                         # Preserve all original fields and merge/update headers
+                         wrapped_message = original_message.copy()
+                         existing_headers = wrapped_message.get("headers", {})
+                         existing_headers.update(headers)
+                         wrapped_message["headers"] = existing_headers
+                     else:
+                         # If it's not a dict, wrap it as payload
+                         wrapped_message = {
+                             "headers": headers,
+                             "payload": original_message,
+                         }
                  except json.JSONDecodeError:
-                     payload = decoded_message
+                     # If it's not JSON, treat as raw payload
+                     wrapped_message = {
+                         "headers": headers,
+                         "payload": decoded_message,
+                     }
              except UnicodeDecodeError:
-                 payload = base64.b64encode(message).decode("utf-8")
+                 # If it can't be decoded, base64 encode it
+                 wrapped_message = {
+                     "headers": headers,
+                     "payload": base64.b64encode(message).decode("utf-8"),
+                 }
          elif isinstance(message, str):
-             payload = message
+             try:
+                 # Try to parse as JSON first
+                 original_message = json.loads(message)
+                 if isinstance(original_message, dict):
+                     # Preserve all original fields and merge/update headers
+                     wrapped_message = original_message.copy()
+                     existing_headers = wrapped_message.get("headers", {})
+                     existing_headers.update(headers)
+                     wrapped_message["headers"] = existing_headers
+                 else:
+                     # If it's not a dict, wrap it as payload
+                     wrapped_message = {
+                         "headers": headers,
+                         "payload": original_message,
+                     }
+             except json.JSONDecodeError:
+                 # If it's not JSON, treat as raw payload
+                 wrapped_message = {
+                     "headers": headers,
+                     "payload": message,
+                 }
+         elif isinstance(message, dict):
+             # If it's already a dict, preserve all fields and merge headers
+             wrapped_message = message.copy()
+             existing_headers = wrapped_message.get("headers", {})
+             existing_headers.update(headers)
+             wrapped_message["headers"] = existing_headers
          else:
-             payload = json.dumps(message)
-
-         wrapped_message = {
-             "headers": headers,
-             "payload": payload,
-         }
+             # For other types, convert to JSON and wrap as payload
+             wrapped_message = {
+                 "headers": headers,
+                 "payload": json.dumps(message),
+             }

          message_to_send = json.dumps(wrapped_message).encode("utf-8")

@@ -152,20 +197,39 @@
              session_id = stored_session_id
              set_session_id(session_id, traceparent=traceparent)

-         # Process payload
-         payload = message_dict.get("payload", raw_message)
-         if isinstance(payload, str):
-             try:
-                 payload_dict = json.loads(payload)
-                 return recv_session, json.dumps(payload_dict).encode("utf-8")
-             except json.JSONDecodeError:
-                 return recv_session, payload.encode("utf-8") if isinstance(
-                     payload, str
-                 ) else payload
-
-         return recv_session, json.dumps(payload).encode("utf-8") if isinstance(
-             payload, (dict, list)
-         ) else payload
+         # Process the complete message structure
+         # Remove tracing headers before returning the message
+         message_to_return = message_dict.copy()
+         if "headers" in message_to_return:
+             headers_copy = message_to_return["headers"].copy()
+             # Remove tracing-specific headers but keep other headers
+             headers_copy.pop("traceparent", None)
+             headers_copy.pop("session_id", None)
+             if headers_copy:
+                 message_to_return["headers"] = headers_copy
+             else:
+                 message_to_return.pop("headers", None)
+
+         # If the message only contains a payload field and no other fields,
+         # return just the payload for backward compatibility
+         if len(message_to_return) == 1 and "payload" in message_to_return:
+             payload = message_to_return["payload"]
+             if isinstance(payload, str):
+                 try:
+                     payload_dict = json.loads(payload)
+                     return recv_session, json.dumps(payload_dict).encode(
+                         "utf-8"
+                     )
+                 except json.JSONDecodeError:
+                     return recv_session, payload.encode("utf-8") if isinstance(
+                         payload, str
+                     ) else payload
+             return recv_session, json.dumps(payload).encode(
+                 "utf-8"
+             ) if isinstance(payload, (dict, list)) else payload
+         else:
+             # Return the complete message structure with all original fields
+             return recv_session, json.dumps(message_to_return).encode("utf-8")

          except Exception as e:
              print(f"Error processing message: {e}")
{ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe_sdk.egg-info/PKG-INFO
@@ -1,38 +1,12 @@
  Metadata-Version: 2.4
  Name: ioa-observe-sdk
- Version: 1.0.11
+ Version: 1.0.13
  Summary: IOA Observability SDK
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE.md
- Requires-Dist: aiohappyeyeballs>=2.4.8
- Requires-Dist: aiohttp>=3.11.18
- Requires-Dist: aiosignal>=1.3.2
- Requires-Dist: annotated-types>=0.7.0
- Requires-Dist: anyio>=4.8.0
- Requires-Dist: async-timeout>=4.0.3
- Requires-Dist: attrs>=25.1.0
- Requires-Dist: backoff>=2.2.1
- Requires-Dist: certifi>=2025.1.31
- Requires-Dist: charset-normalizer>=3.4.1
  Requires-Dist: colorama==0.4.6
- Requires-Dist: Deprecated>=1.2.18
- Requires-Dist: distro>=1.9.0
- Requires-Dist: exceptiongroup>=1.2.2
- Requires-Dist: frozenlist>=1.5.0
- Requires-Dist: googleapis-common-protos>=1.69.0
- Requires-Dist: grpcio>=1.70.0
- Requires-Dist: h11>=0.16.0
- Requires-Dist: httpcore>=1.0.9
- Requires-Dist: httpx>=0.28.1
- Requires-Dist: idna>=3.10
- Requires-Dist: importlib_metadata>=8.5.0
- Requires-Dist: Jinja2>=3.1.6
- Requires-Dist: jiter>=0.8.2
- Requires-Dist: MarkupSafe>=3.0.2
- Requires-Dist: monotonic>=1.6
- Requires-Dist: multidict>=6.1.0
- Requires-Dist: openai>=1.75.0
+ Requires-Dist: requests>=2.32.3
  Requires-Dist: opentelemetry-api==1.33.1
  Requires-Dist: opentelemetry-distro
  Requires-Dist: opentelemetry-exporter-otlp==1.33.1
@@ -53,33 +27,10 @@ Requires-Dist: opentelemetry-sdk==1.33.1
  Requires-Dist: opentelemetry-semantic-conventions==0.54b1
  Requires-Dist: opentelemetry-semantic-conventions-ai==0.4.9
  Requires-Dist: opentelemetry-util-http==0.54b1
- Requires-Dist: packaging>=24.2
- Requires-Dist: propcache>=0.3.0
- Requires-Dist: protobuf>=5.29.3
- Requires-Dist: pydantic>=2.10.6
- Requires-Dist: pydantic_core>=2.27.2
- Requires-Dist: python-dateutil>=2.9.0.post0
- Requires-Dist: regex==2024.11.6
- Requires-Dist: requests>=2.32.3
- Requires-Dist: six>=1.17.0
- Requires-Dist: sniffio>=1.3.1
- Requires-Dist: tenacity>=9.0.0
- Requires-Dist: tiktoken>=0.9.0
- Requires-Dist: tqdm>=4.67.1
- Requires-Dist: typing_extensions>=4.12.2
- Requires-Dist: urllib3>=2.3.0
- Requires-Dist: wrapt>=1.17.2
- Requires-Dist: yarl>=1.18.3
- Requires-Dist: zipp>=3.21.0
  Requires-Dist: langgraph>=0.3.2
  Requires-Dist: langchain>=0.3.19
  Requires-Dist: langchain-openai>=0.3.8
- Requires-Dist: langchain-community>=0.3.25
  Requires-Dist: llama-index>=0.12.34
- Requires-Dist: opentelemetry-instrumentation-requests
- Requires-Dist: opentelemetry-instrumentation-transformers>=0.40.8
- Requires-Dist: opentelemetry-instrumentation-crewai>=0.40.8
- Requires-Dist: llama-index-utils-workflow>=0.3.1
  Requires-Dist: pytest
  Requires-Dist: pytest-vcr
  Dynamic: license-file
{ioa_observe_sdk-1.0.11 → ioa_observe_sdk-1.0.13}/ioa_observe_sdk.egg-info/SOURCES.txt
@@ -14,6 +14,7 @@ ioa_observe/sdk/connectors/__init__.py
  ioa_observe/sdk/connectors/slim.py
  ioa_observe/sdk/decorators/__init__.py
  ioa_observe/sdk/decorators/base.py
+ ioa_observe/sdk/decorators/helpers.py
  ioa_observe/sdk/decorators/util.py
  ioa_observe/sdk/instrumentations/__init__.py
  ioa_observe/sdk/instrumentations/a2a.py
ioa_observe_sdk-1.0.13/ioa_observe_sdk.egg-info/requires.txt (new file)
@@ -0,0 +1,28 @@
+ colorama==0.4.6
+ requests>=2.32.3
+ opentelemetry-api==1.33.1
+ opentelemetry-distro
+ opentelemetry-exporter-otlp==1.33.1
+ opentelemetry-exporter-otlp-proto-common==1.33.1
+ opentelemetry-exporter-otlp-proto-grpc==1.33.1
+ opentelemetry-exporter-otlp-proto-http==1.33.1
+ opentelemetry-instrumentation
+ opentelemetry-instrumentation-logging==0.54b1
+ opentelemetry-instrumentation-openai==0.40.8
+ opentelemetry-instrumentation-llamaindex==0.40.8
+ opentelemetry-instrumentation-ollama==0.40.8
+ opentelemetry-instrumentation-anthropic==0.40.8
+ opentelemetry-instrumentation-langchain==0.40.8
+ opentelemetry-instrumentation-threading==00.54b1
+ opentelemetry-instrumentation-urllib3==0.54b1
+ opentelemetry-proto==1.33.1
+ opentelemetry-sdk==1.33.1
+ opentelemetry-semantic-conventions==0.54b1
+ opentelemetry-semantic-conventions-ai==0.4.9
+ opentelemetry-util-http==0.54b1
+ langgraph>=0.3.2
+ langchain>=0.3.19
+ langchain-openai>=0.3.8
+ llama-index>=0.12.34
+ pytest
+ pytest-vcr
ioa_observe_sdk-1.0.13/pyproject.toml (new file)
@@ -0,0 +1,45 @@
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+
+ [project]
+ name = "ioa-observe-sdk"
+ version = "1.0.13"
+ description = "IOA Observability SDK"
+ readme = "README.md"
+ requires-python = ">=3.10"
+ dependencies = [
+     # Directly used in code or tests
+     "colorama==0.4.6",
+     "requests>=2.32.3",
+     "opentelemetry-api==1.33.1",
+     "opentelemetry-distro",
+     "opentelemetry-exporter-otlp==1.33.1",
+     "opentelemetry-exporter-otlp-proto-common==1.33.1",
+     "opentelemetry-exporter-otlp-proto-grpc==1.33.1",
+     "opentelemetry-exporter-otlp-proto-http==1.33.1",
+     "opentelemetry-instrumentation",
+     "opentelemetry-instrumentation-logging==0.54b1",
+     "opentelemetry-instrumentation-openai==0.40.8",
+     "opentelemetry-instrumentation-llamaindex==0.40.8",
+     "opentelemetry-instrumentation-ollama==0.40.8",
+     "opentelemetry-instrumentation-anthropic==0.40.8",
+     "opentelemetry-instrumentation-langchain==0.40.8",
+     "opentelemetry-instrumentation-threading==00.54b1",
+     "opentelemetry-instrumentation-urllib3==0.54b1",
+     "opentelemetry-proto==1.33.1",
+     "opentelemetry-sdk==1.33.1",
+     "opentelemetry-semantic-conventions==0.54b1",
+     "opentelemetry-semantic-conventions-ai==0.4.9",
+     "opentelemetry-util-http==0.54b1",
+     "langgraph>=0.3.2",
+     "langchain>=0.3.19",
+     "langchain-openai>=0.3.8",
+     "llama-index>=0.12.34",
+     "pytest",
+     "pytest-vcr",
+ ]
+
+ [tool.setuptools.packages.find]
+ include = ["ioa_observe*"]
ioa_observe_sdk-1.0.11/ioa_observe_sdk.egg-info/requires.txt (removed)
@@ -1,77 +0,0 @@
- aiohappyeyeballs>=2.4.8
- aiohttp>=3.11.18
- aiosignal>=1.3.2
- annotated-types>=0.7.0
- anyio>=4.8.0
- async-timeout>=4.0.3
- attrs>=25.1.0
- backoff>=2.2.1
- certifi>=2025.1.31
- charset-normalizer>=3.4.1
- colorama==0.4.6
- Deprecated>=1.2.18
- distro>=1.9.0
- exceptiongroup>=1.2.2
- frozenlist>=1.5.0
- googleapis-common-protos>=1.69.0
- grpcio>=1.70.0
- h11>=0.16.0
- httpcore>=1.0.9
- httpx>=0.28.1
- idna>=3.10
- importlib_metadata>=8.5.0
- Jinja2>=3.1.6
- jiter>=0.8.2
- MarkupSafe>=3.0.2
- monotonic>=1.6
- multidict>=6.1.0
- openai>=1.75.0
- opentelemetry-api==1.33.1
- opentelemetry-distro
- opentelemetry-exporter-otlp==1.33.1
- opentelemetry-exporter-otlp-proto-common==1.33.1
- opentelemetry-exporter-otlp-proto-grpc==1.33.1
- opentelemetry-exporter-otlp-proto-http==1.33.1
- opentelemetry-instrumentation
- opentelemetry-instrumentation-logging==0.54b1
- opentelemetry-instrumentation-openai==0.40.8
- opentelemetry-instrumentation-llamaindex==0.40.8
- opentelemetry-instrumentation-ollama==0.40.8
- opentelemetry-instrumentation-anthropic==0.40.8
- opentelemetry-instrumentation-langchain==0.40.8
- opentelemetry-instrumentation-threading==00.54b1
- opentelemetry-instrumentation-urllib3==0.54b1
- opentelemetry-proto==1.33.1
- opentelemetry-sdk==1.33.1
- opentelemetry-semantic-conventions==0.54b1
- opentelemetry-semantic-conventions-ai==0.4.9
- opentelemetry-util-http==0.54b1
- packaging>=24.2
- propcache>=0.3.0
- protobuf>=5.29.3
- pydantic>=2.10.6
- pydantic_core>=2.27.2
- python-dateutil>=2.9.0.post0
- regex==2024.11.6
- requests>=2.32.3
- six>=1.17.0
- sniffio>=1.3.1
- tenacity>=9.0.0
- tiktoken>=0.9.0
- tqdm>=4.67.1
- typing_extensions>=4.12.2
- urllib3>=2.3.0
- wrapt>=1.17.2
- yarl>=1.18.3
- zipp>=3.21.0
- langgraph>=0.3.2
- langchain>=0.3.19
- langchain-openai>=0.3.8
- langchain-community>=0.3.25
- llama-index>=0.12.34
- opentelemetry-instrumentation-requests
- opentelemetry-instrumentation-transformers>=0.40.8
- opentelemetry-instrumentation-crewai>=0.40.8
- llama-index-utils-workflow>=0.3.1
- pytest
- pytest-vcr
ioa_observe_sdk-1.0.11/pyproject.toml (removed)
@@ -1,93 +0,0 @@
- [build-system]
- requires = ["setuptools"]
- build-backend = "setuptools.build_meta"
-
-
- [project]
- name = "ioa-observe-sdk"
- version = "1.0.11"
- description = "IOA Observability SDK"
- readme = "README.md"
- requires-python = ">=3.10"
- dependencies = [
-     "aiohappyeyeballs>=2.4.8",
-     "aiohttp>=3.11.18",
-     "aiosignal>=1.3.2",
-     "annotated-types>=0.7.0",
-     "anyio>=4.8.0",
-     "async-timeout>=4.0.3",
-     "attrs>=25.1.0",
-     "backoff>=2.2.1",
-     "certifi>=2025.1.31",
-     "charset-normalizer>=3.4.1",
-     "colorama==0.4.6",
-     "Deprecated>=1.2.18",
-     "distro>=1.9.0",
-     "exceptiongroup>=1.2.2",
-     "frozenlist>=1.5.0",
-     "googleapis-common-protos>=1.69.0",
-     "grpcio>=1.70.0",
-     "h11>=0.16.0",
-     "httpcore>=1.0.9",
-     "httpx>=0.28.1",
-     "idna>=3.10",
-     "importlib_metadata>=8.5.0",
-     "Jinja2>=3.1.6",
-     "jiter>=0.8.2",
-     "MarkupSafe>=3.0.2",
-     "monotonic>=1.6",
-     "multidict>=6.1.0",
-     "openai>=1.75.0",
-     "opentelemetry-api==1.33.1",
-     "opentelemetry-distro",
-     "opentelemetry-exporter-otlp==1.33.1",
-     "opentelemetry-exporter-otlp-proto-common==1.33.1",
-     "opentelemetry-exporter-otlp-proto-grpc==1.33.1",
-     "opentelemetry-exporter-otlp-proto-http==1.33.1",
-     "opentelemetry-instrumentation",
-     "opentelemetry-instrumentation-logging==0.54b1",
-     "opentelemetry-instrumentation-openai==0.40.8",
-     "opentelemetry-instrumentation-llamaindex==0.40.8",
-     "opentelemetry-instrumentation-ollama==0.40.8",
-     "opentelemetry-instrumentation-anthropic==0.40.8",
-     "opentelemetry-instrumentation-langchain==0.40.8",
-     "opentelemetry-instrumentation-threading==00.54b1",
-     "opentelemetry-instrumentation-urllib3==0.54b1",
-     "opentelemetry-proto==1.33.1",
-     "opentelemetry-sdk==1.33.1",
-     "opentelemetry-semantic-conventions==0.54b1",
-     "opentelemetry-semantic-conventions-ai==0.4.9",
-     "opentelemetry-util-http==0.54b1",
-     "packaging>=24.2",
-     "propcache>=0.3.0",
-     "protobuf>=5.29.3",
-     "pydantic>=2.10.6",
-     "pydantic_core>=2.27.2",
-     "python-dateutil>=2.9.0.post0",
-     "regex==2024.11.6",
-     "requests>=2.32.3",
-     "six>=1.17.0",
-     "sniffio>=1.3.1",
-     "tenacity>=9.0.0",
-     "tiktoken>=0.9.0",
-     "tqdm>=4.67.1",
-     "typing_extensions>=4.12.2",
-     "urllib3>=2.3.0",
-     "wrapt>=1.17.2",
-     "yarl>=1.18.3",
-     "zipp>=3.21.0",
-     "langgraph>=0.3.2",
-     "langchain>=0.3.19",
-     "langchain-openai>=0.3.8",
-     "langchain-community>=0.3.25",
-     "llama-index>=0.12.34",
-     "opentelemetry-instrumentation-requests",
-     "opentelemetry-instrumentation-transformers>=0.40.8",
-     "opentelemetry-instrumentation-crewai>=0.40.8",
-     "llama-index-utils-workflow>=0.3.1",
-     "pytest",
-     "pytest-vcr"
- ]
-
- [tool.setuptools.packages.find]
- include = ["ioa_observe*"]