openinference-instrumentation-beeai 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff compares the publicly released contents of the two package versions as published to their public registry; it is provided for informational purposes only.

openinference/instrumentation/beeai/__init__.py
@@ -1,9 +1,12 @@
+import contextlib
 import logging
 from importlib.metadata import PackageNotFoundError, version
-from typing import TYPE_CHECKING, Any, Callable, Collection
+from typing import TYPE_CHECKING, Any, Callable, Collection, Generator
 
 from opentelemetry.trace import StatusCode
 
+from openinference.instrumentation._spans import OpenInferenceSpan
+
 if TYPE_CHECKING:
     from beeai_framework.emitter import EventMeta
 
@@ -29,12 +32,13 @@ except PackageNotFoundError:
 
 
 class BeeAIInstrumentor(BaseInstrumentor):  # type: ignore
-    __slots__ = ("_tracer", "_cleanup", "_processes")
+    __slots__ = ("_tracer", "_cleanup", "_processes", "_processes_deps")
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
         self._cleanup: Callable[[], None] = lambda: None
         self._processes: dict[str, Processor] = {}
+        self._processes_deps: dict[str, list[Processor]] = {}
 
     def instrumentation_dependencies(self) -> Collection[str]:
         return _instruments
@@ -66,8 +70,16 @@ class BeeAIInstrumentor(BaseInstrumentor):  # type: ignore
     def _uninstrument(self, **kwargs: Any) -> None:
         self._cleanup()
         self._processes.clear()
+        self._processes_deps.clear()
+
+    def _build_tree(self, processor: Processor) -> None:
+        with self._build_tree_for_span(processor.span):
+            for child in self._processes_deps.pop(processor.run_id):
+                self._build_tree(child)
+        self._processes.pop(processor.run_id)
 
-    def _build_tree(self, node: SpanWrapper) -> None:
+    @contextlib.contextmanager
+    def _build_tree_for_span(self, node: SpanWrapper) -> Generator[OpenInferenceSpan, None, None]:
         with self._tracer.start_as_current_span(
             name=node.name,
             openinference_span_kind=node.kind,
@@ -75,13 +87,16 @@ class BeeAIInstrumentor(BaseInstrumentor):  # type: ignore
             start_time=_datetime_to_span_time(node.started_at) if node.started_at else None,
             end_on_exit=False,  # we do it manually
         ) as current_span:
+            yield current_span
+
             for event in node.events:
                 current_span.add_event(
                     name=event.name, attributes=event.attributes, timestamp=event.timestamp
                 )
 
             for children in node.children:
-                self._build_tree(children)
+                with self._build_tree_for_span(children):
+                    pass
 
             current_span.set_status(node.status)
             if node.error is not None and node.status == StatusCode.ERROR:
@@ -91,7 +106,8 @@ class BeeAIInstrumentor(BaseInstrumentor):  # type: ignore
 
     @exception_handler
    async def _handler(self, data: Any, event: "EventMeta") -> None:
-        assert event.trace is not None, "Event must have a trace"
+        if event.trace is None:
+            return
 
         if event.trace.run_id not in self._processes:
             parent = (
@@ -102,9 +118,10 @@ class BeeAIInstrumentor(BaseInstrumentor):  # type: ignore
             if event.trace.parent_run_id and not parent:
                 raise ValueError(f"Parent run with ID {event.trace.parent_run_id} was not found!")
 
+            self._processes_deps[event.trace.run_id] = []
             node = self._processes[event.trace.run_id] = ProcessorLocator.locate(data, event)
             if parent is not None:
-                parent.span.children.append(node.span)
+                self._processes_deps[parent.run_id].append(node)
         else:
             node = self._processes[event.trace.run_id]
 
@@ -112,8 +129,8 @@ class BeeAIInstrumentor(BaseInstrumentor):  # type: ignore
 
         if isinstance(data, RunContextFinishEvent):
             await node.end(data, event)
-            self._build_tree(node.span)
-            self._processes.pop(event.trace.run_id)
+            if event.trace.parent_run_id is None:
+                self._build_tree(node)
         else:
             if event.context.get("internal"):
                 return
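
The net effect of the BeeAIInstrumentor changes above is that spans are no longer emitted as soon as each run finishes. Every run is registered under its run_id, child runs are queued in _processes_deps under their parent's run_id, and _build_tree walks that queue recursively only once the root run (the one with no parent_run_id) completes, so the whole span tree is opened and closed in a single pass. Below is a minimal, self-contained sketch of that deferred pattern; Node, TreeBuilder, and the print-based _span context manager are hypothetical stand-ins, not the package's classes or the OpenTelemetry API.

import contextlib
from dataclasses import dataclass
from typing import Generator


@dataclass
class Node:
    run_id: str
    name: str


class TreeBuilder:
    def __init__(self) -> None:
        # parent run_id -> children queued until the root run finishes
        self._deps: dict[str, list[Node]] = {}

    def register(self, node: Node, parent: Node | None) -> None:
        self._deps[node.run_id] = []
        if parent is not None:
            self._deps[parent.run_id].append(node)

    def build(self, node: Node, depth: int = 0) -> None:
        # Mirrors _build_tree: open the span, recurse into the queued
        # children, then let the context manager close the span on exit.
        with self._span(node, depth):
            for child in self._deps.pop(node.run_id):
                self.build(child, depth + 1)

    @contextlib.contextmanager
    def _span(self, node: Node, depth: int) -> Generator[None, None, None]:
        print("  " * depth + f"start {node.name}")
        yield
        print("  " * depth + f"end {node.name}")


builder = TreeBuilder()
root = Node("run-1", "agent")
child = Node("run-2", "chat-model")
builder.register(root, None)
builder.register(child, root)
builder.build(root)  # nothing is printed until the root run is built

Running the sketch prints start/end lines nested in tree order, which mirrors how _build_tree_for_span opens a parent span before recursing into the children queued beneath it.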

openinference/instrumentation/beeai/processors/base.py
@@ -24,6 +24,9 @@ class Processor:
         assert isinstance(meta.creator, RunContext)
         target_cls = type(meta.creator.instance)
 
+        assert meta.trace is not None
+        self.run_id = meta.trace.run_id
+
         self.span = SpanWrapper(name=target_cls.__name__, kind=type(self).kind)
         self.span.started_at = meta.created_at
         self.span.attributes.update(

openinference/instrumentation/beeai/processors/chat.py
@@ -99,7 +99,7 @@ class ChatModelProcessor(Processor):
 
             case ChatModelSuccessEvent():
                 if not self._messages:  # only when no streaming
-                    self._add_new_messages(event.value.messages)
+                    self._add_new_messages(event.value.output)
 
                 usage = event.value.usage
                 if usage:
@@ -126,7 +126,7 @@ class ChatModelProcessor(Processor):
                 SpanAttributes.OPENINFERENCE_SPAN_KIND: type(self).kind,
                 SpanAttributes.OUTPUT_VALUE: event.value.get_text_content(),
                 SpanAttributes.OUTPUT_MIME_TYPE: OpenInferenceMimeTypeValues.TEXT.value,
-                f"{SpanAttributes.METADATA}.chunks_count": len(event.value.messages),
+                f"{SpanAttributes.METADATA}.chunks_count": len(event.value.output),
                 **_unpack_object(
                     usage.model_dump(exclude_none=True) if usage else {},
                     prefix=f"{SpanAttributes.METADATA}.usage",

openinference/instrumentation/beeai/processors/tool.py
@@ -2,7 +2,7 @@ from typing import Any, ClassVar
 
 from beeai_framework.context import RunContext, RunContextStartEvent
 from beeai_framework.emitter import EventMeta
-from beeai_framework.tools import ToolErrorEvent, ToolRetryEvent, ToolSuccessEvent
+from beeai_framework.tools import ToolErrorEvent, ToolRetryEvent, ToolStartEvent, ToolSuccessEvent
 from beeai_framework.tools.tool import Tool
 from typing_extensions import override
 
@@ -48,6 +48,10 @@ class ToolProcessor(Processor):
         self.span.add_event(f"{meta.name} ({meta.path})", timestamp=meta.created_at)
 
         match event:
+            case ToolStartEvent():
+                pass
+            case None:  # finish event
+                pass
             case ToolSuccessEvent():
                 output_cls = type(event.output)
 
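The two no-op cases added above make ToolStartEvent and the None finish event explicit, so they never fall through to the branches that record terminal tool results. A small illustrative sketch of the same structural pattern matching idea follows; the event classes and the describe function are hypothetical stand-ins, not the beeai_framework types used in the hunk.

class ToolStartEvent:
    pass


class ToolSuccessEvent:
    def __init__(self, output: str) -> None:
        self.output = output


def describe(event: object) -> str | None:
    match event:
        case ToolStartEvent():
            return None  # nothing to record yet
        case None:  # the finish event carries no payload of its own
            return None
        case ToolSuccessEvent():
            return event.output  # only terminal results reach this branch
        case _:
            raise ValueError(f"unexpected event: {event!r}")


assert describe(ToolStartEvent()) is None
assert describe(None) is None
assert describe(ToolSuccessEvent("search results")) == "search results"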
 
openinference/instrumentation/beeai/version.py
@@ -1 +1 @@
-__version__ = "0.1.9"
+__version__ = "0.1.11"

openinference_instrumentation_beeai-0.1.9.dist-info/METADATA → openinference_instrumentation_beeai-0.1.11.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openinference-instrumentation-beeai
-Version: 0.1.9
+Version: 0.1.11
 Summary: OpenInference BeeAI Instrumentation
 Project-URL: Homepage, https://github.com/Arize-ai/openinference/tree/main/python/instrumentation/openinference-instrumentation-beeai
 Author: IBM Corp.
@@ -15,7 +15,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Python: <3.14,>=3.11
-Requires-Dist: beeai-framework<0.2.0,>=0.1.36
+Requires-Dist: beeai-framework<0.2.0,>=0.1.49
 Requires-Dist: openinference-instrumentation>=0.1.37
 Requires-Dist: openinference-semantic-conventions>=0.1.21
 Requires-Dist: opentelemetry-api>=1.36.0
@@ -23,9 +23,9 @@ Requires-Dist: opentelemetry-instrumentation>=0.57b0
 Requires-Dist: opentelemetry-semantic-conventions>=0.57b0
 Requires-Dist: wrapt>=1.17.2
 Provides-Extra: instruments
-Requires-Dist: beeai-framework>=0.1.36; extra == 'instruments'
+Requires-Dist: beeai-framework>=0.1.49; extra == 'instruments'
 Provides-Extra: test
-Requires-Dist: beeai-framework>=0.1.36; extra == 'test'
+Requires-Dist: beeai-framework>=0.1.49; extra == 'test'
 Requires-Dist: opentelemetry-exporter-otlp; extra == 'test'
 Requires-Dist: opentelemetry-sdk; extra == 'test'
 Description-Content-Type: text/markdown

openinference_instrumentation_beeai-0.1.9.dist-info/RECORD → openinference_instrumentation_beeai-0.1.11.dist-info/RECORD
@@ -1,21 +1,21 @@
-openinference/instrumentation/beeai/__init__.py,sha256=tEM7-1EhCKIFsoLeMEa7GcRYPFfjuGeXi2w4CEKA1R4,4547
+openinference/instrumentation/beeai/__init__.py,sha256=yTygxcKT4VHgegEQndDrUrhjKZ9ifLsJtwZjF6A3R2M,5229
 openinference/instrumentation/beeai/_span.py,sha256=iVlYou4vnNKtDxpypMdZuD2AKeaDiG1Cu5PXVzgQ8w4,3259
 openinference/instrumentation/beeai/_utils.py,sha256=tfQsQEcevyLJno8WmLTOe936GVTIS2etnAFVbAPyztc,2521
-openinference/instrumentation/beeai/version.py,sha256=XIaxbMbyiP-L3kguR1GhxirFblTXiHR1lMfDVITvHUI,22
+openinference/instrumentation/beeai/version.py,sha256=nllDrH0jyChMuuYrK0CC55iTBKUNTUjejtcwxyUF2EQ,23
 openinference/instrumentation/beeai/processors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-openinference/instrumentation/beeai/processors/base.py,sha256=eAcKq6ip23sp1pjCa6efjDEcfoPJ_Jc3GwaWk5oZU7k,2125
-openinference/instrumentation/beeai/processors/chat.py,sha256=a4Ps6opRb2POrQ8Nla1JHUhkdVFZF5rmht8Xca3F_xA,9285
+openinference/instrumentation/beeai/processors/base.py,sha256=-h8tx1moKVi-1t3RpN-c7klGRJyHwoYe16rKWcZ_bhs,2204
+openinference/instrumentation/beeai/processors/chat.py,sha256=Nl0JWThA7f7Oeet6136fARdpNmakMH0SYym3T9kCGFg,9281
 openinference/instrumentation/beeai/processors/embedding.py,sha256=T9fZs2M7qEs4SnLYbSXRbhe3P7rCNch-snRQBDSC9Es,2598
 openinference/instrumentation/beeai/processors/locator.py,sha256=G9TFW_HgXM1TrOVdvtRU2Eq3D-atHLAET4fSo4F02X8,3635
 openinference/instrumentation/beeai/processors/requirement.py,sha256=Q9DgHDd-5rmP88Fe00d7cNKQg5zQ7ILuRzVAej5xfmk,2666
-openinference/instrumentation/beeai/processors/tool.py,sha256=ddkJP0Y7-pZFQNlKSB-SOOwquAbWL9ram8VS46SU8-Q,2633
+openinference/instrumentation/beeai/processors/tool.py,sha256=o5aKAqEHZIk9bmK2rWUqtACiyMltIFCnzaunCKxRtu8,2765
 openinference/instrumentation/beeai/processors/workflow.py,sha256=OMwFFHv3mp4M4hFvH7utYd_fiSkTnBcl2oUVaMEdy-A,3815
 openinference/instrumentation/beeai/processors/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 openinference/instrumentation/beeai/processors/agents/base.py,sha256=3fidrUoU9pVixq9YN_y1jkNMozNMX-YG2CMuh855cLk,1244
 openinference/instrumentation/beeai/processors/agents/react.py,sha256=rS3xlvgyZ5G6MyDMeSh4xFLT_66h7GVAYEYlwZCpIdY,3022
 openinference/instrumentation/beeai/processors/agents/requirement_agent.py,sha256=HpleY8pNWojuUqcae2PZgat7Xq2edU9C-uz0YjZQUyc,2774
 openinference/instrumentation/beeai/processors/agents/tool_calling.py,sha256=yaWP5JmGuvZIha9iUSKgv0MJgI0QSbuiJLLQFnbqUZw,1223
-openinference_instrumentation_beeai-0.1.9.dist-info/METADATA,sha256=q2qwIddIyMKySNHBL6DRFSCCQi5NqoK9xHuSn1fkEm0,5491
-openinference_instrumentation_beeai-0.1.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-openinference_instrumentation_beeai-0.1.9.dist-info/entry_points.txt,sha256=ee7EUhbWv-XK1dxhPXuFVy9qstzj-lc-265Phe2Ml9s,183
-openinference_instrumentation_beeai-0.1.9.dist-info/RECORD,,
+openinference_instrumentation_beeai-0.1.11.dist-info/METADATA,sha256=TS92bnYQ0L02tUQBftmfxYhZl_8UUWmiKfAnZ_i83JU,5492
+openinference_instrumentation_beeai-0.1.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+openinference_instrumentation_beeai-0.1.11.dist-info/entry_points.txt,sha256=ee7EUhbWv-XK1dxhPXuFVy9qstzj-lc-265Phe2Ml9s,183
+openinference_instrumentation_beeai-0.1.11.dist-info/RECORD,,