openinference-instrumentation-beeai 0.1.10__tar.gz → 0.1.11__tar.gz

This diff shows the changes between two publicly released versions of the package as they appear in its public registry. It is provided for informational purposes only.
Files changed (22)
  1. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/PKG-INFO +4 -4
  2. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/pyproject.toml +3 -3
  3. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/chat.py +2 -2
  4. openinference_instrumentation_beeai-0.1.11/src/openinference/instrumentation/beeai/version.py +1 -0
  5. openinference_instrumentation_beeai-0.1.10/src/openinference/instrumentation/beeai/version.py +0 -1
  6. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/.gitignore +0 -0
  7. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/README.md +0 -0
  8. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/__init__.py +0 -0
  9. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/_span.py +0 -0
  10. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/_utils.py +0 -0
  11. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/__init__.py +0 -0
  12. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/agents/__init__.py +0 -0
  13. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/agents/base.py +0 -0
  14. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/agents/react.py +0 -0
  15. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/agents/requirement_agent.py +0 -0
  16. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/agents/tool_calling.py +0 -0
  17. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/base.py +0 -0
  18. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/embedding.py +0 -0
  19. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/locator.py +0 -0
  20. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/requirement.py +0 -0
  21. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/tool.py +0 -0
  22. {openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/workflow.py +0 -0
{openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openinference-instrumentation-beeai
-Version: 0.1.10
+Version: 0.1.11
 Summary: OpenInference BeeAI Instrumentation
 Project-URL: Homepage, https://github.com/Arize-ai/openinference/tree/main/python/instrumentation/openinference-instrumentation-beeai
 Author: IBM Corp.
@@ -15,7 +15,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Python: <3.14,>=3.11
-Requires-Dist: beeai-framework<0.2.0,>=0.1.36
+Requires-Dist: beeai-framework<0.2.0,>=0.1.49
 Requires-Dist: openinference-instrumentation>=0.1.37
 Requires-Dist: openinference-semantic-conventions>=0.1.21
 Requires-Dist: opentelemetry-api>=1.36.0
@@ -23,9 +23,9 @@ Requires-Dist: opentelemetry-instrumentation>=0.57b0
 Requires-Dist: opentelemetry-semantic-conventions>=0.57b0
 Requires-Dist: wrapt>=1.17.2
 Provides-Extra: instruments
-Requires-Dist: beeai-framework>=0.1.36; extra == 'instruments'
+Requires-Dist: beeai-framework>=0.1.49; extra == 'instruments'
 Provides-Extra: test
-Requires-Dist: beeai-framework>=0.1.36; extra == 'test'
+Requires-Dist: beeai-framework>=0.1.49; extra == 'test'
 Requires-Dist: opentelemetry-exporter-otlp; extra == 'test'
 Requires-Dist: opentelemetry-sdk; extra == 'test'
 Description-Content-Type: text/markdown
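
Both metadata hunks are the same change seen from the sdist side: the distribution version moves to 0.1.11 and the beeai-framework floor moves from 0.1.36 to 0.1.49, while the <0.2.0 ceiling and the Python <3.14,>=3.11 window stay the same. A quick way to confirm an environment picked up both bumps, as a minimal sketch using only the standard library (the expected values come from the metadata above):

```python
from importlib.metadata import version

# Expected values taken from the PKG-INFO above; adjust if you pin differently.
print(version("openinference-instrumentation-beeai"))  # expected: 0.1.11
print(version("beeai-framework"))                       # must satisfy >=0.1.49,<0.2.0
```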
{openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/pyproject.toml

@@ -22,7 +22,7 @@ classifiers = [
   "Programming Language :: Python :: 3.13",
 ]
 dependencies = [
-  "beeai-framework (>=0.1.36,<0.2.0)",
+  "beeai-framework (>=0.1.49,<0.2.0)",
   "openinference-instrumentation>=0.1.37",
   "openinference-semantic-conventions>=0.1.21",
   "opentelemetry-api>=1.36.0",
@@ -33,10 +33,10 @@ dependencies = [
 
 [project.optional-dependencies]
 instruments = [
-  "beeai-framework >= 0.1.36",
+  "beeai-framework >= 0.1.49",
 ]
 test = [
-  "beeai-framework >= 0.1.36",
+  "beeai-framework >= 0.1.49",
   "opentelemetry-sdk",
   "opentelemetry-exporter-otlp"
 ]
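
The pyproject changes mirror the metadata: the core dependency and both extras now require beeai-framework >= 0.1.49, and the test extra keeps opentelemetry-sdk plus opentelemetry-exporter-otlp, which is everything needed to export the instrumentation's spans over OTLP. A minimal wiring sketch, assuming the instrumentor is exported as BeeAIInstrumentor (the naming used by other OpenInference packages) and that a collector is listening on a local OTLP/HTTP endpoint:

```python
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

from openinference.instrumentation.beeai import BeeAIInstrumentor

# Send every finished span to a local OTLP/HTTP collector
# (the endpoint is illustrative; point it at your own collector).
tracer_provider = TracerProvider()
tracer_provider.add_span_processor(
    SimpleSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces"))
)

# Patch beeai-framework so chat, tool, and agent events become OpenInference spans.
BeeAIInstrumentor().instrument(tracer_provider=tracer_provider)
```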
{openinference_instrumentation_beeai-0.1.10 → openinference_instrumentation_beeai-0.1.11}/src/openinference/instrumentation/beeai/processors/chat.py

@@ -99,7 +99,7 @@ class ChatModelProcessor(Processor):
 
             case ChatModelSuccessEvent():
                 if not self._messages:  # only when no streaming
-                    self._add_new_messages(event.value.messages)
+                    self._add_new_messages(event.value.output)
 
                 usage = event.value.usage
                 if usage:
@@ -126,7 +126,7 @@ class ChatModelProcessor(Processor):
                     SpanAttributes.OPENINFERENCE_SPAN_KIND: type(self).kind,
                     SpanAttributes.OUTPUT_VALUE: event.value.get_text_content(),
                     SpanAttributes.OUTPUT_MIME_TYPE: OpenInferenceMimeTypeValues.TEXT.value,
-                    f"{SpanAttributes.METADATA}.chunks_count": len(event.value.messages),
+                    f"{SpanAttributes.METADATA}.chunks_count": len(event.value.output),
                     **_unpack_object(
                         usage.model_dump(exclude_none=True) if usage else {},
                         prefix=f"{SpanAttributes.METADATA}.usage",
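
Both chat.py hunks track the same upstream rename: in newer beeai-framework releases the result object carried by ChatModelSuccessEvent exposes its generated messages as .output rather than .messages, which is why the dependency floor moves to 0.1.49. The processor still reads the same three things off the event value: the message list (for metadata.chunks_count and for recording messages when nothing was streamed), the concatenated text (for output.value), and the token usage (unpacked under metadata.usage). The sketch below uses hand-rolled stand-ins — StubUsage and StubChatModelOutput are illustrative, not the real framework types, and their field shapes are assumptions — only to show the attribute surface the processor now depends on:

```python
from dataclasses import dataclass, field
from typing import Any, Optional


@dataclass
class StubUsage:
    """Stand-in for the usage object; model_dump() mimics the pydantic call in the diff."""

    prompt_tokens: int = 12
    completion_tokens: int = 5
    total_tokens: Optional[int] = None

    def model_dump(self, exclude_none: bool = False) -> dict[str, Any]:
        data = {
            "prompt_tokens": self.prompt_tokens,
            "completion_tokens": self.completion_tokens,
            "total_tokens": self.total_tokens,
        }
        return {k: v for k, v in data.items() if not (exclude_none and v is None)}


@dataclass
class StubChatModelOutput:
    """Stand-in for event.value of a ChatModelSuccessEvent (0.1.49+ surface)."""

    output: list[str] = field(default_factory=lambda: ["Hello", " world"])  # was `.messages`
    usage: Optional[StubUsage] = field(default_factory=StubUsage)

    def get_text_content(self) -> str:
        return "".join(self.output)


value = StubChatModelOutput()
print(len(value.output))                          # -> metadata.chunks_count
print(value.get_text_content())                   # -> output.value
print(value.usage.model_dump(exclude_none=True))  # -> unpacked under metadata.usage.*
```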