openinference-instrumentation-beeai 0.1.13__tar.gz → 0.1.15__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (22)
  1. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/PKG-INFO +7 -2
  2. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/pyproject.toml +7 -2
  3. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/base.py +4 -1
  4. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/chat.py +21 -2
  5. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/embedding.py +20 -2
  6. openinference_instrumentation_beeai-0.1.15/src/openinference/instrumentation/beeai/version.py +1 -0
  7. openinference_instrumentation_beeai-0.1.13/src/openinference/instrumentation/beeai/version.py +0 -1
  8. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/.gitignore +0 -0
  9. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/README.md +0 -0
  10. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/__init__.py +0 -0
  11. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/_span.py +0 -0
  12. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/_utils.py +0 -0
  13. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/__init__.py +0 -0
  14. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/agents/__init__.py +0 -0
  15. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/agents/base.py +0 -0
  16. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/agents/react.py +0 -0
  17. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/agents/requirement_agent.py +0 -0
  18. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/agents/tool_calling.py +0 -0
  19. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/locator.py +0 -0
  20. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/requirement.py +0 -0
  21. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/tool.py +0 -0
  22. {openinference_instrumentation_beeai-0.1.13 → openinference_instrumentation_beeai-0.1.15}/src/openinference/instrumentation/beeai/processors/workflow.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openinference-instrumentation-beeai
-Version: 0.1.13
+Version: 0.1.15
 Summary: OpenInference BeeAI Instrumentation
 Project-URL: Homepage, https://github.com/Arize-ai/openinference/tree/main/python/instrumentation/openinference-instrumentation-beeai
 Author: IBM Corp.
@@ -18,7 +18,7 @@ Classifier: Programming Language :: Python :: 3.14
 Requires-Python: <3.15,>=3.11
 Requires-Dist: beeai-framework<0.2.0,>=0.1.51
 Requires-Dist: openinference-instrumentation>=0.1.37
-Requires-Dist: openinference-semantic-conventions>=0.1.21
+Requires-Dist: openinference-semantic-conventions>=0.1.25
 Requires-Dist: opentelemetry-api>=1.36.0
 Requires-Dist: opentelemetry-instrumentation>=0.57b0
 Requires-Dist: opentelemetry-semantic-conventions>=0.57b0
@@ -27,8 +27,13 @@ Provides-Extra: instruments
 Requires-Dist: beeai-framework>=0.1.51; extra == 'instruments'
 Provides-Extra: test
 Requires-Dist: beeai-framework>=0.1.51; extra == 'test'
+Requires-Dist: beeai-framework[duckduckgo]; extra == 'test'
 Requires-Dist: opentelemetry-exporter-otlp; extra == 'test'
 Requires-Dist: opentelemetry-sdk; extra == 'test'
+Requires-Dist: pytest; extra == 'test'
+Requires-Dist: pytest-asyncio; extra == 'test'
+Requires-Dist: pytest-recording; extra == 'test'
+Requires-Dist: vcrpy<8.0.0,>=5.0.0; extra == 'test'
 Description-Content-Type: text/markdown

 # OpenInference Instrumentation for BeeAI
pyproject.toml
@@ -25,7 +25,7 @@ classifiers = [
 dependencies = [
   "beeai-framework (>=0.1.51,<0.2.0)",
   "openinference-instrumentation>=0.1.37",
-  "openinference-semantic-conventions>=0.1.21",
+  "openinference-semantic-conventions>=0.1.25",
   "opentelemetry-api>=1.36.0",
   "opentelemetry-instrumentation>=0.57b0",
   "opentelemetry-semantic-conventions>=0.57b0",
@@ -39,7 +39,12 @@ instruments = [
 test = [
   "beeai-framework >= 0.1.51",
   "opentelemetry-sdk",
-  "opentelemetry-exporter-otlp"
+  "opentelemetry-exporter-otlp",
+  "pytest-recording",
+  "vcrpy>=5.0.0,<8.0.0",
+  "beeai-framework[duckduckgo]",
+  "pytest-asyncio",
+  "pytest",
 ]

 [project.entry-points.opentelemetry_instrumentor]
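The [project.entry-points.opentelemetry_instrumentor] table shown as context above is what lets OpenTelemetry tooling auto-enable the instrumentation; it can also be activated explicitly in application code. A minimal usage sketch, assuming the instrumentor class is exported as BeeAIInstrumentor (the usual OpenInference naming; confirm against the package's __init__.py):

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

# Assumed export name; check openinference/instrumentation/beeai/__init__.py.
from openinference.instrumentation.beeai import BeeAIInstrumentor

# Register a tracer provider, then patch BeeAI so spans are emitted.
trace.set_tracer_provider(TracerProvider())
BeeAIInstrumentor().instrument()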
src/openinference/instrumentation/beeai/processors/base.py
@@ -18,6 +18,9 @@ from openinference.semconv.trace import (
 class Processor:
     kind: ClassVar[OpenInferenceSpanKindValues] = OpenInferenceSpanKindValues.UNKNOWN

+    def get_span_name(self, target_cls: type) -> str:
+        return target_cls.__name__
+
     def __init__(self, event: "RunContextStartEvent", meta: "EventMeta"):
         from beeai_framework.context import RunContext

@@ -27,7 +30,7 @@ class Processor:
         assert meta.trace is not None
         self.run_id = meta.trace.run_id

-        self.span = SpanWrapper(name=target_cls.__name__, kind=type(self).kind)
+        self.span = SpanWrapper(name=self.get_span_name(target_cls), kind=type(self).kind)
         self.span.started_at = meta.created_at
         self.span.attributes.update(
             {
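The new get_span_name hook lets processor subclasses choose their own span name instead of always using the target class name. A minimal sketch of a hypothetical subclass (not part of the package) overriding it, using the base-class import path seen in the other processors:

from openinference.instrumentation.beeai.processors.base import Processor


class MyProcessor(Processor):
    # Hypothetical override: prefix every span name instead of using the
    # raw target class name returned by the default implementation.
    def get_span_name(self, target_cls: type) -> str:
        return f"BeeAI:{target_cls.__name__}"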
src/openinference/instrumentation/beeai/processors/chat.py
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import Any, ClassVar
+from typing import Any, ClassVar, Dict, Generator, Tuple

 from beeai_framework.backend import (
     AnyMessage,
@@ -19,6 +19,7 @@ from beeai_framework.tools import AnyTool
 from beeai_framework.utils.lists import remove_falsy
 from typing_extensions import override

+from openinference.instrumentation import safe_json_dumps
 from openinference.instrumentation.beeai._utils import (
     _unpack_object,
     safe_dump_model_schema,
@@ -36,6 +37,24 @@ from openinference.semconv.trace import (
 )


+def get_tool_parameters(tool: AnyTool) -> Dict[str, Any]:
+    tool_dict = tool.to_json_safe()
+    if "input_schema" in tool_dict:
+        input_schema = tool_dict.pop("input_schema")
+        tool_dict["parameters"] = input_schema or {}
+        tool_dict["parameters"]["type"] = "object"
+    return tool_dict
+
+
+def get_tools(tools: list[AnyTool]) -> Generator[Tuple[str, str], None, None]:
+    for index, tool in enumerate(tools):
+        function = {"type": "function", "function": get_tool_parameters(tool)}
+        yield (
+            f"{SpanAttributes.LLM_TOOLS}.{index}.{ToolAttributes.TOOL_JSON_SCHEMA}",
+            safe_json_dumps(function),
+        )
+
+
 class ChatModelProcessor(Processor):
     kind: ClassVar[OpenInferenceSpanKindValues] = OpenInferenceSpanKindValues.LLM

@@ -80,7 +99,7 @@ class ChatModelProcessor(Processor):
         )
         self.span.set_attributes(
             {
-                SpanAttributes.LLM_TOOLS: [t.name for t in (event.input.tools or [])],
+                **dict(get_tools(event.input.tools or [])),
                 SpanAttributes.LLM_INVOCATION_PARAMETERS: stringify(
                     meta.creator.parameters.model_dump(
                         exclude_none=True, exclude_unset=True
                     )
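get_tools replaces the flat list of tool names used in 0.1.13 with one indexed span attribute per tool, whose value is an OpenAI-style function schema. A hypothetical illustration of the key/value shape it yields, using a stand-in tool dict rather than a real beeai-framework tool:

import json

from openinference.semconv.trace import SpanAttributes, ToolAttributes

# Stand-in for tool.to_json_safe() on a hypothetical web-search tool.
tool_dict = {
    "name": "duckduckgo_search",
    "description": "Search the web for a query.",
    "input_schema": {"properties": {"query": {"type": "string"}}},
}

# Mirror get_tool_parameters(): rename input_schema to parameters, force type=object.
tool_dict["parameters"] = tool_dict.pop("input_schema") or {}
tool_dict["parameters"]["type"] = "object"

function = {"type": "function", "function": tool_dict}
key = f"{SpanAttributes.LLM_TOOLS}.0.{ToolAttributes.TOOL_JSON_SCHEMA}"
print(key)                   # e.g. llm.tools.0.tool.json_schema
print(json.dumps(function))  # the JSON schema stored on the span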
src/openinference/instrumentation/beeai/processors/embedding.py
@@ -12,6 +12,7 @@ from beeai_framework.backend.events import (
 from beeai_framework.context import RunContext
 from typing_extensions import override

+from openinference.instrumentation import safe_json_dumps
 from openinference.instrumentation.beeai.processors.base import Processor
 from openinference.semconv.trace import (
     EmbeddingAttributes,
@@ -23,6 +24,10 @@ from openinference.semconv.trace import (
 class EmbeddingModelProcessor(Processor):
     kind: ClassVar[OpenInferenceSpanKindValues] = OpenInferenceSpanKindValues.EMBEDDING

+    @override
+    def get_span_name(self, target_cls: type) -> str:
+        return "CreateEmbeddings"
+
     def __init__(self, event: "RunContextStartEvent", meta: "EventMeta"):
         super().__init__(event, meta)

@@ -33,10 +38,22 @@ class EmbeddingModelProcessor(Processor):
         self.span.set_attributes(
             {
                 SpanAttributes.EMBEDDING_MODEL_NAME: llm.model_id,
-                SpanAttributes.LLM_PROVIDER: llm.provider_id,
             }
         )

+        # Extract invocation parameters (exclude input values)
+        if hasattr(event, "input") and hasattr(event.input, "__dict__"):
+            invocation_params = {
+                k: v
+                for k, v in event.input.__dict__.items()
+                if k not in {"values", "api_key", "token"} and not k.startswith("_")
+            }
+            if invocation_params:
+                self.span.set_attribute(
+                    SpanAttributes.EMBEDDING_INVOCATION_PARAMETERS,
+                    safe_json_dumps(invocation_params),
+                )
+
     @override
     async def update(
         self,
@@ -56,9 +73,10 @@ class EmbeddingModelProcessor(Processor):
             )
         elif isinstance(event, EmbeddingModelSuccessEvent):
             for idx, embedding in enumerate(event.value.embeddings):
+                vector = list(embedding) if not isinstance(embedding, list) else embedding
                 self.span.set_attribute(
                     f"{SpanAttributes.EMBEDDING_EMBEDDINGS}.{idx}.{EmbeddingAttributes.EMBEDDING_VECTOR}",
-                    embedding,
+                    vector,
                 )

         if event.value.usage:
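Two behavioral points from the embedding changes above: non-list embedding vectors are now coerced to plain lists before being set as span attributes, and everything on event.input except the raw input values (and credential-like fields) is serialized as invocation parameters. A small hypothetical illustration of both, using stand-in objects rather than the package's event types:

from openinference.instrumentation import safe_json_dumps


class FakeInput:
    # Hypothetical stand-in for event.input on an embedding call.
    def __init__(self) -> None:
        self.values = ["some text to embed"]  # excluded from the attributes
        self.api_key = "secret"               # excluded from the attributes
        self.max_retries = 3                  # kept as an invocation parameter


event_input = FakeInput()
invocation_params = {
    k: v
    for k, v in event_input.__dict__.items()
    if k not in {"values", "api_key", "token"} and not k.startswith("_")
}
print(safe_json_dumps(invocation_params))  # {"max_retries": 3}

# Non-list vectors (tuples, numpy arrays, ...) are converted before export.
embedding = (0.1, 0.2, 0.3)
vector = list(embedding) if not isinstance(embedding, list) else embedding
assert vector == [0.1, 0.2, 0.3]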