openinference-instrumentation-beeai 0.1.7__tar.gz → 0.1.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22):
  1. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/PKG-INFO +5 -5
  2. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/pyproject.toml +4 -4
  3. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/__init__.py +3 -1
  4. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/_span.py +15 -0
  5. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/_utils.py +11 -2
  6. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/base.py +3 -4
  7. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/chat.py +19 -13
  8. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/tool.py +2 -4
  9. openinference_instrumentation_beeai-0.1.9/src/openinference/instrumentation/beeai/version.py +1 -0
  10. openinference_instrumentation_beeai-0.1.7/src/openinference/instrumentation/beeai/version.py +0 -1
  11. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/.gitignore +0 -0
  12. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/README.md +0 -0
  13. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/__init__.py +0 -0
  14. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/agents/__init__.py +0 -0
  15. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/agents/base.py +0 -0
  16. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/agents/react.py +0 -0
  17. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/agents/requirement_agent.py +0 -0
  18. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/agents/tool_calling.py +0 -0
  19. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/embedding.py +0 -0
  20. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/locator.py +0 -0
  21. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/requirement.py +0 -0
  22. {openinference_instrumentation_beeai-0.1.7 → openinference_instrumentation_beeai-0.1.9}/src/openinference/instrumentation/beeai/processors/workflow.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: openinference-instrumentation-beeai
3
- Version: 0.1.7
3
+ Version: 0.1.9
4
4
  Summary: OpenInference BeeAI Instrumentation
5
5
  Project-URL: Homepage, https://github.com/Arize-ai/openinference/tree/main/python/instrumentation/openinference-instrumentation-beeai
6
6
  Author: IBM Corp.
@@ -15,17 +15,17 @@ Classifier: Programming Language :: Python :: 3.11
15
15
  Classifier: Programming Language :: Python :: 3.12
16
16
  Classifier: Programming Language :: Python :: 3.13
17
17
  Requires-Python: <3.14,>=3.11
18
- Requires-Dist: beeai-framework<0.2.0,>=0.1.32
19
- Requires-Dist: openinference-instrumentation>=0.1.36
18
+ Requires-Dist: beeai-framework<0.2.0,>=0.1.36
19
+ Requires-Dist: openinference-instrumentation>=0.1.37
20
20
  Requires-Dist: openinference-semantic-conventions>=0.1.21
21
21
  Requires-Dist: opentelemetry-api>=1.36.0
22
22
  Requires-Dist: opentelemetry-instrumentation>=0.57b0
23
23
  Requires-Dist: opentelemetry-semantic-conventions>=0.57b0
24
24
  Requires-Dist: wrapt>=1.17.2
25
25
  Provides-Extra: instruments
26
- Requires-Dist: beeai-framework>=0.1.32; extra == 'instruments'
26
+ Requires-Dist: beeai-framework>=0.1.36; extra == 'instruments'
27
27
  Provides-Extra: test
28
- Requires-Dist: beeai-framework>=0.1.32; extra == 'test'
28
+ Requires-Dist: beeai-framework>=0.1.36; extra == 'test'
29
29
  Requires-Dist: opentelemetry-exporter-otlp; extra == 'test'
30
30
  Requires-Dist: opentelemetry-sdk; extra == 'test'
31
31
  Description-Content-Type: text/markdown
@@ -22,8 +22,8 @@ classifiers = [
22
22
  "Programming Language :: Python :: 3.13",
23
23
  ]
24
24
  dependencies = [
25
- "beeai-framework (>=0.1.32,<0.2.0)",
26
- "openinference-instrumentation>=0.1.36",
25
+ "beeai-framework (>=0.1.36,<0.2.0)",
26
+ "openinference-instrumentation>=0.1.37",
27
27
  "openinference-semantic-conventions>=0.1.21",
28
28
  "opentelemetry-api>=1.36.0",
29
29
  "opentelemetry-instrumentation>=0.57b0",
@@ -33,10 +33,10 @@ dependencies = [
33
33
 
34
34
  [project.optional-dependencies]
35
35
  instruments = [
36
- "beeai-framework >= 0.1.32",
36
+ "beeai-framework >= 0.1.36",
37
37
  ]
38
38
  test = [
39
- "beeai-framework >= 0.1.32",
39
+ "beeai-framework >= 0.1.36",
40
40
  "opentelemetry-sdk",
41
41
  "opentelemetry-exporter-otlp"
42
42
  ]
@@ -2,6 +2,8 @@ import logging
2
2
  from importlib.metadata import PackageNotFoundError, version
3
3
  from typing import TYPE_CHECKING, Any, Callable, Collection
4
4
 
5
+ from opentelemetry.trace import StatusCode
6
+
5
7
  if TYPE_CHECKING:
6
8
  from beeai_framework.emitter import EventMeta
7
9
 
@@ -82,7 +84,7 @@ class BeeAIInstrumentor(BaseInstrumentor): # type: ignore
82
84
  self._build_tree(children)
83
85
 
84
86
  current_span.set_status(node.status)
85
- if node.error is not None:
87
+ if node.error is not None and node.status == StatusCode.ERROR:
86
88
  current_span.record_exception(node.error)
87
89
 
88
90
  current_span.end(_datetime_to_span_time(node.ended_at) if node.ended_at else None)
@@ -77,5 +77,20 @@ class SpanWrapper:
77
77
  def set_status(self, status: StatusCode) -> None:
78
78
  self.status = status
79
79
 
80
+ def reset_exception(self) -> None:
81
+ self.error = None
82
+ self.set_status(StatusCode.OK)
83
+
80
84
  def record_exception(self, error: Exception) -> None:
85
+ from beeai_framework.errors import FrameworkError
86
+
81
87
  self.error = error
88
+ self.set_status(StatusCode.ERROR)
89
+ self.set_attributes(
90
+ {
91
+ SpanAttributes.OUTPUT_VALUE: error.explain()
92
+ if isinstance(error, FrameworkError)
93
+ else str(error),
94
+ SpanAttributes.OUTPUT_MIME_TYPE: OpenInferenceMimeTypeValues.TEXT.value,
95
+ }
96
+ )
@@ -19,9 +19,11 @@ def _datetime_to_span_time(dt: datetime.datetime) -> int:
19
19
 
20
20
  def _unpack_object(obj: dict[str, Any] | list[Any] | BaseModel, prefix: str = "") -> dict[str, Any]:
21
21
  if not isinstance(obj, dict) and not isinstance(obj, list):
22
+ obj_ref = obj
22
23
  obj = json.loads(stringify(obj))
23
24
  if not isinstance(obj, dict) and not isinstance(obj, list):
24
- raise ValueError(f"Cannot unpack object of type {type(obj)}")
25
+ logger.debug(f"Cannot unpack object of type {type(obj_ref)} (prefix={prefix})")
26
+ return {"value": str(obj)}
25
27
 
26
28
  if prefix and prefix.startswith("."):
27
29
  prefix = prefix[1:]
@@ -32,7 +34,7 @@ def _unpack_object(obj: dict[str, Any] | list[Any] | BaseModel, prefix: str = ""
32
34
  for key, value in obj.items() if isinstance(obj, dict) else enumerate(obj):
33
35
  if value is None:
34
36
  continue
35
- if is_primitive(value):
37
+ if is_primitive(value) or has_custom_str(value):
36
38
  output[f"{prefix}{key}"] = str(value)
37
39
  else:
38
40
  output.update(_unpack_object(value, prefix=f"{prefix}{key}"))
@@ -43,6 +45,13 @@ def is_primitive(value: Any) -> bool:
43
45
  return isinstance(value, str | bool | int | float | type(None))
44
46
 
45
47
 
48
+ def has_custom_str(value: Any) -> bool:
49
+ if value.__class__.__module__ == "builtins":
50
+ return False
51
+
52
+ return value.__class__.__str__ is not object.__str__
53
+
54
+
46
55
  def stringify(value: Any, pretty: bool = False) -> str:
47
56
  if is_primitive(value):
48
57
  return str(value)
@@ -43,6 +43,9 @@ class Processor:
43
43
  pass
44
44
 
45
45
  async def end(self, event: "RunContextFinishEvent", meta: "EventMeta") -> None:
46
+ if event.error is not None:
47
+ self.span.record_exception(event.error)
48
+
46
49
  if event.output is not None:
47
50
  if SpanAttributes.OUTPUT_VALUE not in self.span.attributes:
48
51
  self.span.attributes.update(
@@ -53,8 +56,4 @@ class Processor:
53
56
  )
54
57
  self.span.set_status(StatusCode.OK)
55
58
 
56
- if event.error is not None:
57
- self.span.set_status(StatusCode.ERROR)
58
- self.span.record_exception(event.error)
59
-
60
59
  self.span.ended_at = meta.created_at
@@ -1,4 +1,3 @@
1
- import json
2
1
  from datetime import datetime
3
2
  from typing import Any, ClassVar
4
3
 
@@ -81,7 +80,7 @@ class ChatModelProcessor(Processor):
81
80
  )
82
81
  self.span.set_attributes(
83
82
  {
84
- SpanAttributes.LLM_TOOLS: json.loads(stringify(event.input.tools or [])),
83
+ SpanAttributes.LLM_TOOLS: [t.name for t in (event.input.tools or [])],
85
84
  SpanAttributes.LLM_INVOCATION_PARAMETERS: stringify(
86
85
  meta.creator.parameters.model_dump(
87
86
  exclude_none=True, exclude_unset=True
@@ -103,18 +102,27 @@ class ChatModelProcessor(Processor):
103
102
  self._add_new_messages(event.value.messages)
104
103
 
105
104
  usage = event.value.usage
105
+ if usage:
106
+ self.span.set_attributes(
107
+ {
108
+ SpanAttributes.LLM_TOKEN_COUNT_TOTAL: usage.total_tokens,
109
+ SpanAttributes.LLM_TOKEN_COUNT_PROMPT: usage.prompt_tokens,
110
+ SpanAttributes.LLM_TOKEN_COUNT_COMPLETION: usage.completion_tokens,
111
+ }
112
+ )
113
+
114
+ cost = event.value.cost
115
+ if cost:
116
+ self.span.set_attributes(
117
+ {
118
+ SpanAttributes.LLM_COST_COMPLETION: cost.completion_tokens_cost_usd,
119
+ SpanAttributes.LLM_COST_PROMPT: cost.prompt_tokens_usd,
120
+ SpanAttributes.LLM_COST_TOTAL: cost.total_cost_usd,
121
+ }
122
+ )
106
123
 
107
124
  self.span.set_attributes(
108
125
  {
109
- **(
110
- {
111
- SpanAttributes.LLM_TOKEN_COUNT_TOTAL: usage.total_tokens,
112
- SpanAttributes.LLM_TOKEN_COUNT_PROMPT: usage.prompt_tokens,
113
- SpanAttributes.LLM_TOKEN_COUNT_COMPLETION: usage.completion_tokens,
114
- }
115
- if usage
116
- else {}
117
- ),
118
126
  SpanAttributes.OPENINFERENCE_SPAN_KIND: type(self).kind,
119
127
  SpanAttributes.OUTPUT_VALUE: event.value.get_text_content(),
120
128
  SpanAttributes.OUTPUT_MIME_TYPE: OpenInferenceMimeTypeValues.TEXT.value,
@@ -149,8 +157,6 @@ def _process_tools(tools: list[AnyTool]) -> list[dict[str, str | Any]]:
149
157
  {
150
158
  SpanAttributes.TOOL_NAME: t.name,
151
159
  SpanAttributes.TOOL_DESCRIPTION: t.description,
152
- # TODO: difference between TOOL_PARAMETERS and TOOL_JSON_SCHEMA is not obvious
153
- SpanAttributes.TOOL_PARAMETERS: safe_dump_model_schema(t.input_schema),
154
160
  ToolAttributes.TOOL_JSON_SCHEMA: safe_dump_model_schema(t.input_schema),
155
161
  }
156
162
  for t in tools
@@ -31,10 +31,6 @@ class ToolProcessor(Processor):
31
31
  {
32
32
  SpanAttributes.TOOL_NAME: tool.name,
33
33
  SpanAttributes.TOOL_DESCRIPTION: tool.description,
34
- # TODO: what's the difference?
35
- SpanAttributes.TOOL_PARAMETERS: stringify(
36
- safe_dump_model_schema(tool.input_schema)
37
- ),
38
34
  ToolAttributes.TOOL_JSON_SCHEMA: stringify(
39
35
  safe_dump_model_schema(tool.input_schema)
40
36
  ),
@@ -55,6 +51,7 @@ class ToolProcessor(Processor):
55
51
  case ToolSuccessEvent():
56
52
  output_cls = type(event.output)
57
53
 
54
+ self.span.reset_exception()
58
55
  self.span.set_attributes(
59
56
  {
60
57
  SpanAttributes.OUTPUT_VALUE: event.output.get_text_content(),
@@ -66,6 +63,7 @@ class ToolProcessor(Processor):
66
63
  case ToolErrorEvent():
67
64
  span = self.span.child(meta.name, event=(event, meta))
68
65
  span.record_exception(event.error)
66
+ self.span.record_exception(event.error)
69
67
  case ToolRetryEvent():
70
68
  self.span.child(meta.name, event=(event, meta))
71
69
  case _: