openinference-instrumentation-beeai 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. openinference/instrumentation/beeai/__init__.py +78 -104
  2. openinference/instrumentation/beeai/_span.py +81 -0
  3. openinference/instrumentation/beeai/_utils.py +75 -0
  4. openinference/instrumentation/beeai/processors/__init__.py +0 -0
  5. openinference/instrumentation/beeai/processors/agents/__init__.py +0 -0
  6. openinference/instrumentation/beeai/processors/agents/base.py +34 -0
  7. openinference/instrumentation/beeai/processors/agents/react.py +77 -0
  8. openinference/instrumentation/beeai/processors/agents/requirement_agent.py +71 -0
  9. openinference/instrumentation/beeai/processors/agents/tool_calling.py +34 -0
  10. openinference/instrumentation/beeai/processors/base.py +60 -0
  11. openinference/instrumentation/beeai/processors/chat.py +239 -0
  12. openinference/instrumentation/beeai/processors/embedding.py +71 -0
  13. openinference/instrumentation/beeai/processors/locator.py +106 -0
  14. openinference/instrumentation/beeai/processors/requirement.py +67 -0
  15. openinference/instrumentation/beeai/processors/tool.py +72 -0
  16. openinference/instrumentation/beeai/processors/workflow.py +108 -0
  17. openinference/instrumentation/beeai/version.py +1 -1
  18. {openinference_instrumentation_beeai-0.1.5.dist-info → openinference_instrumentation_beeai-0.1.7.dist-info}/METADATA +12 -9
  19. openinference_instrumentation_beeai-0.1.7.dist-info/RECORD +21 -0
  20. openinference/instrumentation/beeai/middleware.py +0 -291
  21. openinference/instrumentation/beeai/utils/build_trace_tree.py +0 -170
  22. openinference/instrumentation/beeai/utils/create_span.py +0 -80
  23. openinference/instrumentation/beeai/utils/get_serialized_object_safe.py +0 -302
  24. openinference/instrumentation/beeai/utils/id_name_manager.py +0 -58
  25. openinference_instrumentation_beeai-0.1.5.dist-info/RECORD +0 -11
  26. {openinference_instrumentation_beeai-0.1.5.dist-info → openinference_instrumentation_beeai-0.1.7.dist-info}/WHEEL +0 -0
  27. {openinference_instrumentation_beeai-0.1.5.dist-info → openinference_instrumentation_beeai-0.1.7.dist-info}/entry_points.txt +0 -0
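The public entry point is unchanged by this restructuring (entry_points.txt and WHEEL carry +0 -0), so enabling the instrumentation should still look like the sketch below. This is an illustrative sketch only, not taken from the diff: it assumes the wheel keeps exposing a BeeAIInstrumentor class that follows the standard OpenInference/OpenTelemetry instrumentor pattern.

    # Illustrative sketch only -- assumes openinference.instrumentation.beeai
    # exposes BeeAIInstrumentor via the usual OpenTelemetry instrumentor API.
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

    from openinference.instrumentation.beeai import BeeAIInstrumentor

    # Export spans to stdout for a quick smoke test.
    tracer_provider = TracerProvider()
    tracer_provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))

    # Hook beeai-framework events (agents, chat models, tools) into OpenTelemetry spans.
    BeeAIInstrumentor().instrument(tracer_provider=tracer_provider)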
openinference/instrumentation/beeai/utils/get_serialized_object_safe.py (deleted)
@@ -1,302 +0,0 @@
- # Copyright 2025 IBM Corp.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- import json
- import logging
- from typing import Any, Dict, List, cast
-
- from beeai_framework.agents.base import BaseAgent
- from beeai_framework.agents.react.events import (
-     ReActAgentErrorEvent,
-     ReActAgentRetryEvent,
-     ReActAgentStartEvent,
-     ReActAgentSuccessEvent,
-     ReActAgentUpdateEvent,
- )
- from beeai_framework.backend.chat import (
-     ChatModel,
- )
- from beeai_framework.backend.events import (
-     ChatModelErrorEvent,
-     ChatModelStartEvent,
-     ChatModelSuccessEvent,
- )
- from beeai_framework.backend.types import ChatModelUsage
- from beeai_framework.emitter import EventMeta
- from beeai_framework.errors import FrameworkError
- from beeai_framework.memory.base_memory import BaseMemory
- from beeai_framework.tools.events import (
-     ToolErrorEvent,
-     ToolRetryEvent,
-     ToolStartEvent,
-     ToolSuccessEvent,
- )
- from beeai_framework.tools.tool import AnyTool, Tool
- from pydantic import BaseModel, InstanceOf
-
- from openinference.semconv.trace import (
-     MessageAttributes,
-     OpenInferenceMimeTypeValues,
-     OpenInferenceSpanKindValues,
-     SpanAttributes,
- )
-
- logger = logging.getLogger(__name__)
-
-
- def parse_llm_input_messages(messages: List[Any]) -> Dict[str, str]:
-     result = {}
-     for idx, message in enumerate(messages):
-         result[f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_ROLE}"] = (
-             message.role.value
-         )
-         text_content = "".join(
-             part.text
-             for part in getattr(message, "content", [])
-             if getattr(part, "type", "") == "text"
-         )
-         result[f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_CONTENT}"] = (
-             text_content
-         )
-     return result
-
-
- def parse_llm_output_messages(messages: List[Any]) -> Dict[str, str]:
-     result = {}
-     for idx, message in enumerate(messages):
-         result[f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_ROLE}"] = (
-             message.role.value
-         )
-         text_content = "".join(
-             part.text
-             for part in getattr(message, "content", [])
-             if getattr(part, "type", "") == "text"
-         )
-         result[
-             f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_CONTENT}"
-         ] = text_content
-     return result
-
-
- def get_serialized_object_safe(data_object: Any, meta: EventMeta) -> Any:
-     try:
-         # agent events
-         if (
-             meta.name in {"start", "success", "error", "retry"}
-             and hasattr(meta, "creator")
-             and isinstance(meta.creator, BaseAgent)
-         ):
-             agent_event_typed_data = cast(
-                 ReActAgentStartEvent
-                 | ReActAgentRetryEvent
-                 | ReActAgentErrorEvent
-                 | ReActAgentSuccessEvent,
-                 data_object,
-             )
-
-             output = {
-                 SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.AGENT.value,
-                 "iteration": agent_event_typed_data.meta.iteration
-                 if hasattr(agent_event_typed_data, "meta")
-                 else None,
-             }
-
-             if hasattr(agent_event_typed_data, "tools"):
-                 typed_tools = cast(list[InstanceOf[AnyTool]], agent_event_typed_data.tools)
-                 output[SpanAttributes.LLM_TOOLS] = json.dumps(
-                     [
-                         {
-                             SpanAttributes.TOOL_NAME: tool.name,
-                             SpanAttributes.TOOL_DESCRIPTION: tool.description,
-                             "tool.options": json.dumps(tool.options),
-                         }
-                         for tool in typed_tools
-                     ]
-                 )
-
-             if hasattr(agent_event_typed_data, "memory") and hasattr(
-                 agent_event_typed_data.memory, "messages"
-             ):
-                 typed_memory = cast(InstanceOf[BaseMemory], agent_event_typed_data.memory)
-                 output[SpanAttributes.INPUT_MIME_TYPE] = OpenInferenceMimeTypeValues.JSON.value
-                 output[SpanAttributes.INPUT_VALUE] = json.dumps(
-                     [
-                         {
-                             "text": m.text,
-                             "role": m.role.value if hasattr(m.role, "value") else m.role,
-                         }
-                         for m in typed_memory.messages
-                     ]
-                 )
-
-             if hasattr(agent_event_typed_data, "error"):
-                 typed_error = cast(InstanceOf[FrameworkError], agent_event_typed_data.error)
-
-                 output["exception.message"] = typed_error.message
-                 output["exception.stacktrace"] = getattr(typed_error, "stack", "")
-                 output["exception.type"] = typed_error.__class__.__name__
-
-             if hasattr(agent_event_typed_data, "data") and agent_event_typed_data.data is not None:
-                 output[SpanAttributes.OUTPUT_MIME_TYPE] = OpenInferenceMimeTypeValues.JSON.value
-                 output[SpanAttributes.OUTPUT_VALUE] = json.dumps(
-                     agent_event_typed_data.data.to_plain()
-                 )
-
-             return output
-
-         ## update events
-         if meta.name in {"partial_update", "update"}:
-             update_event_typed_data = cast(ReActAgentUpdateEvent, data_object)
-
-             if isinstance(data_object.data, dict) and not data_object.data:
-                 return
-
-             output = {
-                 SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.AGENT.value,
-             }
-
-             if hasattr(update_event_typed_data, "data") and isinstance(
-                 update_event_typed_data.data, dict
-             ):
-                 output[SpanAttributes.OUTPUT_VALUE] = update_event_typed_data.data.get(
-                     "final_answer"
-                 ) or update_event_typed_data.data.get("tool_output")
-                 output[SpanAttributes.OUTPUT_MIME_TYPE] = OpenInferenceMimeTypeValues.JSON.value
-                 output["thought"] = update_event_typed_data.data.get("thought")
-
-                 if update_event_typed_data.data.get("tool_name"):
-                     output[SpanAttributes.TOOL_NAME] = update_event_typed_data.data["tool_name"]
-
-                 if update_event_typed_data.data.get("tool_input"):
-                     output[SpanAttributes.TOOL_PARAMETERS] = json.dumps(
-                         update_event_typed_data.data["tool_input"]
-                     )
-             elif hasattr(update_event_typed_data, "data") and not isinstance(
-                 update_event_typed_data.data, dict
-             ):
-                 output[SpanAttributes.OUTPUT_VALUE] = (
-                     update_event_typed_data.data.final_answer
-                     or update_event_typed_data.data.tool_output
-                 )
-                 output[SpanAttributes.OUTPUT_MIME_TYPE] = OpenInferenceMimeTypeValues.JSON.value
-                 output["thought"] = update_event_typed_data.data.thought
-
-                 if hasattr(update_event_typed_data.data, "tool_name"):
-                     output[SpanAttributes.TOOL_NAME] = update_event_typed_data.data.tool_name
-
-                 if hasattr(update_event_typed_data.data, "tool_input"):
-                     output[SpanAttributes.TOOL_PARAMETERS] = json.dumps(
-                         update_event_typed_data.data.tool_input
-                     )
-
-             return output
-
-         ## Tool events
-         if meta.name in {"start", "success", "error", "retry", "finish"} and isinstance(
-             meta.creator, Tool
-         ):
-             tool_event_typed_data = cast(
-                 ToolSuccessEvent | ToolErrorEvent | ToolRetryEvent | ToolStartEvent, data_object
-             )
-
-             output = {
-                 SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.TOOL.value,
-             }
-
-             if hasattr(tool_event_typed_data, "error"):
-                 typed_error = cast(InstanceOf[FrameworkError], tool_event_typed_data.error)
-
-                 output["exception.message"] = typed_error.message
-                 output["exception.stacktrace"] = getattr(typed_error, "stack", "")
-                 output["exception.type"] = typed_error.__class__.__name__
-
-             if hasattr(tool_event_typed_data, "output"):
-                 output[SpanAttributes.OUTPUT_VALUE] = str(tool_event_typed_data.output)
-
-             if hasattr(tool_event_typed_data, "input"):
-                 output["tool.parameters"] = (
-                     tool_event_typed_data.input.model_dump_json()
-                     if isinstance(tool_event_typed_data.input, BaseModel)
-                     else json.dumps(tool_event_typed_data.input)
-                 )
-
-             return output
-
-         ## llm events
-         if meta.name in {"start", "success", "finish"}:
-             llm_event_typed_data = cast(
-                 ChatModelStartEvent | ChatModelErrorEvent | ChatModelSuccessEvent, data_object
-             )
-             creator = cast(ChatModel, meta.creator)
-             output = {
-                 SpanAttributes.OPENINFERENCE_SPAN_KIND: OpenInferenceSpanKindValues.LLM.value,
-             }
-
-             if (
-                 hasattr(llm_event_typed_data, "value")
-                 and getattr(llm_event_typed_data.value, "usage", None) is not None
-                 and isinstance(llm_event_typed_data.value.usage, ChatModelUsage)
-             ):
-                 usage = llm_event_typed_data.value.usage
-
-                 if usage.completion_tokens is not None:
-                     output[SpanAttributes.LLM_TOKEN_COUNT_COMPLETION] = usage.completion_tokens
-
-                 if usage.prompt_tokens is not None:
-                     output[SpanAttributes.LLM_TOKEN_COUNT_PROMPT] = usage.prompt_tokens
-
-                 if usage.total_tokens is not None:
-                     output[SpanAttributes.LLM_TOKEN_COUNT_TOTAL] = usage.total_tokens
-
-             if hasattr(llm_event_typed_data, "input") and hasattr(
-                 llm_event_typed_data.input, "messages"
-             ):
-                 output[SpanAttributes.INPUT_MIME_TYPE] = OpenInferenceMimeTypeValues.JSON.value
-                 output[SpanAttributes.INPUT_VALUE] = json.dumps(
-                     [msg.to_plain() for msg in llm_event_typed_data.input.messages]
-                 )
-                 output.update(parse_llm_input_messages(llm_event_typed_data.input.messages))
-
-             if hasattr(llm_event_typed_data, "value") and hasattr(
-                 llm_event_typed_data.value, "messages"
-             ):
-                 output[SpanAttributes.OUTPUT_MIME_TYPE] = OpenInferenceMimeTypeValues.JSON.value
-                 output[SpanAttributes.OUTPUT_VALUE] = json.dumps(
-                     [msg.to_plain() for msg in llm_event_typed_data.value.messages]
-                 )
-                 output.update(parse_llm_output_messages(llm_event_typed_data.value.messages))
-
-             if hasattr(llm_event_typed_data, "error"):
-                 output["exception.message"] = llm_event_typed_data.error.message
-                 output["exception.stacktrace"] = getattr(llm_event_typed_data.error, "stack", None)
-                 output["exception.type"] = getattr(
-                     llm_event_typed_data.error, "name", type(llm_event_typed_data.error).__name__
-                 )
-
-             if hasattr(creator, "provider_id"):
-                 output[SpanAttributes.LLM_PROVIDER] = creator.provider_id
-
-             if hasattr(creator, "model_id"):
-                 output[SpanAttributes.LLM_MODEL_NAME] = creator.model_id
-
-             if hasattr(creator, "parameters"):
-                 output[SpanAttributes.LLM_INVOCATION_PARAMETERS] = (
-                     creator.parameters.model_dump_json()
-                 )
-
-             return output
-         return None
-     except Exception as e:
-         logger.error("Failed to parse event data", exc_info=e)
-         return None
openinference/instrumentation/beeai/utils/id_name_manager.py (deleted)
@@ -1,58 +0,0 @@
- # Copyright 2025 IBM Corp.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from typing import Dict, Optional, TypedDict
-
-
- class GetIdsResult(TypedDict):
-     spanId: str
-     parentSpanId: Optional[str]
-
-
- class IdNameManager:
-     def __init__(self) -> None:
-         self._id_names_counter: Dict[str, int] = {}
-         self._id_name_map: Dict[str, str] = {}
-         self._run_id_map: Dict[str, str] = {}
-
-     def _span_id_generator(self, name: str) -> str:
-         count = self._id_names_counter.get(name, 0)
-         self._id_names_counter[name] = count + 1
-         return f"{name}-{self._id_names_counter[name]}"
-
-     def get_ids(
-         self,
-         *,
-         path: str,
-         id: str,
-         run_id: str,
-         parent_run_id: Optional[str] = None,
-         group_id: Optional[str] = None,
-     ) -> GetIdsResult:
-         self._run_id_map[run_id] = id
-
-         span_id = self._span_id_generator(path)
-         self._id_name_map[id] = span_id
-
-         parent_span_id = None
-         if parent_run_id:
-             parent_event_id = self._run_id_map.get(parent_run_id, "")
-             parent_span_id = self._id_name_map.get(parent_event_id)
-         elif group_id:
-             parent_span_id = group_id
-
-         return {
-             "spanId": span_id,
-             "parentSpanId": parent_span_id,
-         }
openinference_instrumentation_beeai-0.1.5.dist-info/RECORD (deleted)
@@ -1,11 +0,0 @@
- openinference/instrumentation/beeai/__init__.py,sha256=Gpt6XMsKEMimjPYz7rxjjM95wrLXjt-1LoTkmilqPwA,6118
- openinference/instrumentation/beeai/middleware.py,sha256=1CHjvbFgBiI6sESkzRRwP9h_t74zwd1ZiUZwta1aqak,12273
- openinference/instrumentation/beeai/version.py,sha256=rPSfWgIeq2YWVPyESOAwCBt8vftsTpIkuLAGDEzyRQc,22
- openinference/instrumentation/beeai/utils/build_trace_tree.py,sha256=DLfHXmr_msCG7l3OVY0ezqJ6DlPflK9OuOwTR8thjcM,6495
- openinference/instrumentation/beeai/utils/create_span.py,sha256=51sOEc1PxD_L49TeDXo31P7VexLJzvnaeZ3EM4g3u08,2114
- openinference/instrumentation/beeai/utils/get_serialized_object_safe.py,sha256=K6q89m53w-vQfvItG28toUBJ4RZVSlAHGczxqDA-9Sw,12422
- openinference/instrumentation/beeai/utils/id_name_manager.py,sha256=lJf0YQ3vpJVsznyP1V1FwhYLk8KZWsWxyybz8yfdiDw,1804
- openinference_instrumentation_beeai-0.1.5.dist-info/METADATA,sha256=jozaICCIndJLrD04q4f_ydTR8DTv_Omjp16yuQwAjy8,5411
- openinference_instrumentation_beeai-0.1.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- openinference_instrumentation_beeai-0.1.5.dist-info/entry_points.txt,sha256=ee7EUhbWv-XK1dxhPXuFVy9qstzj-lc-265Phe2Ml9s,183
- openinference_instrumentation_beeai-0.1.5.dist-info/RECORD,,