opentelemetry-instrumentation-llamaindex 0.27.0__tar.gz → 0.28.1__tar.gz
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release: this version of opentelemetry-instrumentation-llamaindex has been flagged by the registry as possibly problematic.
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/PKG-INFO +1 -1
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py +59 -7
- opentelemetry_instrumentation_llamaindex-0.28.1/opentelemetry/instrumentation/llamaindex/version.py +1 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/pyproject.toml +6 -5
- opentelemetry_instrumentation_llamaindex-0.27.0/opentelemetry/instrumentation/llamaindex/version.py +0 -1
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/README.md +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/__init__.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/base_agent_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/base_embedding_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/base_retriever_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/base_synthesizer_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/base_tool_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/config.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/custom_llm_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/query_pipeline_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/retriever_query_engine_instrumentor.py +0 -0
- {opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/utils.py +0 -0
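Before the file-by-file diff, a quick orientation: all of the modules listed above hook LlamaIndex calls and emit OpenTelemetry spans once the package's instrumentor is enabled. The following is a minimal sketch of that setup, assuming the instrumentor class is named LlamaIndexInstrumentor (the openllmetry convention) and using a console exporter purely for illustration:

# Minimal sketch (not part of this diff): enabling the instrumentation with the
# OpenTelemetry SDK. The console exporter is an illustrative assumption.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor
from opentelemetry.instrumentation.llamaindex import LlamaIndexInstrumentor

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

# Patches LlamaIndex so agent, embedding, LLM, rerank and tool activity is traced.
LlamaIndexInstrumentor().instrument()

With that in place, the changes below extend what ends up on those spans.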
{opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentelemetry-instrumentation-llamaindex
-Version: 0.27.0
+Version: 0.28.1
 Summary: OpenTelemetry LlamaIndex instrumentation
 Home-page: https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-llamaindex
 License: Apache-2.0
{opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py

@@ -1,18 +1,21 @@
 import inspect
 import json
 import re
-from typing import Any, Optional
+from typing import Any, Dict, Optional
 from dataclasses import dataclass
 
 from llama_index.core.bridge.pydantic import PrivateAttr
 from llama_index.core.base.llms.types import MessageRole
 from llama_index.core.instrumentation import get_dispatcher
 from llama_index.core.instrumentation.events import BaseEvent
+from llama_index.core.instrumentation.events.agent import AgentToolCallEvent
+from llama_index.core.instrumentation.events.embedding import EmbeddingStartEvent
 from llama_index.core.instrumentation.events.llm import (
     LLMChatEndEvent,
     LLMChatStartEvent,
     LLMPredictEndEvent,
 )
+from llama_index.core.instrumentation.events.rerank import ReRankStartEvent
 from llama_index.core.instrumentation.event_handlers import BaseEventHandler
 from llama_index.core.instrumentation.span_handlers import BaseSpanHandler
 from opentelemetry import context as context_api
@@ -23,6 +26,7 @@ from opentelemetry.instrumentation.llamaindex.utils import (
 )
 from opentelemetry.semconv_ai import (
     SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY,
+    LLMRequestTypeValues,
     SpanAttributes,
     TraceloopSpanKindValues,
 )
@@ -59,6 +63,7 @@ def _set_llm_chat_request(event, span) -> None:
 
 @dont_throw
 def _set_llm_chat_response(event, span) -> None:
+    response = event.response
     if should_send_prompts():
         for idx, message in enumerate(event.messages):
             span.set_attribute(
@@ -67,7 +72,6 @@ def _set_llm_chat_response(event, span) -> None:
             span.set_attribute(
                 f"{SpanAttributes.LLM_PROMPTS}.{idx}.content", message.content
             )
-        response = event.response
         span.set_attribute(
             f"{SpanAttributes.LLM_COMPLETIONS}.0.role",
             response.message.role.value,
@@ -76,12 +80,11 @@ def _set_llm_chat_response(event, span) -> None:
             f"{SpanAttributes.LLM_COMPLETIONS}.0.content",
             response.message.content,
         )
-
-
-    # raw can be Any, not just ChatCompletion
+    if not (raw := response.raw):
+        return
     span.set_attribute(
         SpanAttributes.LLM_RESPONSE_MODEL,
-        raw.get("model") if "model" in raw else raw.model,
+        raw.get("model") if "model" in raw else raw.model,  # raw can be Any, not just ChatCompletion
     )
     if usage := raw.get("usage") if "usage" in raw else raw.usage:
         span.set_attribute(
@@ -93,6 +96,10 @@ def _set_llm_chat_response(event, span) -> None:
         span.set_attribute(
             SpanAttributes.LLM_USAGE_TOTAL_TOKENS, usage.total_tokens
         )
+    if choices := raw.choices:
+        span.set_attribute(
+            SpanAttributes.LLM_RESPONSE_FINISH_REASON, choices[0].finish_reason
+        )
 
 
 @dont_throw
@@ -108,6 +115,39 @@ def _set_llm_predict_response(event, span) -> None:
         )
 
 
+@dont_throw
+def _set_embedding(event, span) -> None:
+    model_dict = event.model_dict
+    span.set_attribute(
+        f"{LLMRequestTypeValues.EMBEDDING.value}.model_name",
+        model_dict.get("model_name"),
+    )
+
+
+@dont_throw
+def _set_rerank(event, span) -> None:
+    span.set_attribute(
+        f"{LLMRequestTypeValues.RERANK.value}.model_name",
+        event.model_name,
+    )
+    span.set_attribute(
+        f"{LLMRequestTypeValues.RERANK.value}.top_n",
+        event.top_n,
+    )
+    if should_send_prompts():
+        span.set_attribute(
+            f"{LLMRequestTypeValues.RERANK.value}.query",
+            event.query.query_str,
+        )
+
+
+@dont_throw
+def _set_tool(event, span) -> None:
+    span.set_attribute("tool.name", event.tool.name)
+    span.set_attribute("tool.description", event.tool.description)
+    span.set_attribute("tool.arguments", event.arguments)
+
+
 @dataclass
 class SpanHolder:
     span: Span
@@ -123,7 +163,13 @@ class OpenLLSpanHandler(BaseSpanHandler[SpanHolder]):
         self._tracer = tracer
 
     def new_span(
-        self,
+        self,
+        id_: str,
+        bound_args: inspect.BoundArguments,
+        instance: Optional[Any] = None,
+        parent_span_id: Optional[str] = None,
+        tags: Optional[Dict[str, Any]] = None,
+        **kwargs: Any,
     ) -> Optional[SpanHolder]:
         """Create a span."""
         parent = self.open_spans.get(parent_span_id)
@@ -217,3 +263,9 @@ class OpenLLEventHandler(BaseEventHandler):
             _set_llm_chat_response(event, span)
         elif isinstance(event, LLMPredictEndEvent):
             _set_llm_predict_response(event, span)
+        elif isinstance(event, EmbeddingStartEvent):
+            _set_embedding(event, span)
+        elif isinstance(event, ReRankStartEvent):
+            _set_rerank(event, span)
+        elif isinstance(event, AgentToolCallEvent):
+            _set_tool(event, span)
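The new handlers above ride on LlamaIndex's instrumentation dispatcher: dispatcher_wrapper.py registers a span handler and an event handler, and the added branches map EmbeddingStartEvent, ReRankStartEvent, and AgentToolCallEvent onto span attributes. A standalone sketch of that mechanism follows; the handler name, the attribute keys, and the print-based output are illustrative assumptions, not the package's actual OpenLLEventHandler or its semantic-convention keys:

# Sketch of the dispatcher pattern used by dispatcher_wrapper.py: a custom
# BaseEventHandler subscribed to LlamaIndex's root dispatcher. Everything named
# "Example..." is hypothetical; the real handler writes onto OpenTelemetry spans.
from typing import Any, Dict

from llama_index.core.instrumentation import get_dispatcher
from llama_index.core.instrumentation.event_handlers import BaseEventHandler
from llama_index.core.instrumentation.events import BaseEvent
from llama_index.core.instrumentation.events.agent import AgentToolCallEvent
from llama_index.core.instrumentation.events.embedding import EmbeddingStartEvent
from llama_index.core.instrumentation.events.rerank import ReRankStartEvent


class ExampleEventHandler(BaseEventHandler):
    @classmethod
    def class_name(cls) -> str:
        return "ExampleEventHandler"

    def handle(self, event: BaseEvent, **kwargs: Any) -> None:
        attrs: Dict[str, Any] = {}
        if isinstance(event, EmbeddingStartEvent):
            attrs["embedding.model_name"] = event.model_dict.get("model_name")
        elif isinstance(event, ReRankStartEvent):
            attrs["rerank.model_name"] = event.model_name
            attrs["rerank.top_n"] = event.top_n
        elif isinstance(event, AgentToolCallEvent):
            attrs["tool.name"] = event.tool.name
        print(event.class_name(), attrs)


# The root dispatcher fans each event out to every registered handler.
get_dispatcher().add_event_handler(ExampleEventHandler())

This mirrors how the OpenLLEventHandler in the diff decides which _set_* helper to call for each event type.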
opentelemetry_instrumentation_llamaindex-0.28.1/opentelemetry/instrumentation/llamaindex/version.py ADDED

@@ -0,0 +1 @@
+__version__ = "0.28.1"
{opentelemetry_instrumentation_llamaindex-0.27.0 → opentelemetry_instrumentation_llamaindex-0.28.1}/pyproject.toml

@@ -8,7 +8,7 @@ show_missing = true
 
 [tool.poetry]
 name = "opentelemetry-instrumentation-llamaindex"
-version = "0.27.0"
+version = "0.28.1"
 description = "OpenTelemetry LlamaIndex instrumentation"
 authors = [
     "Gal Kleinman <gal@traceloop.com>",
@@ -41,14 +41,15 @@ pytest-asyncio = "^0.23.7"
 chromadb = ">=0.4.22,<0.6.0"
 openai = "^1.35.0"
 opentelemetry-sdk = "^1.23.0"
-llama-index = "^0.10.
+llama-index = "^0.10.59"
 llama-index-postprocessor-cohere-rerank = "^0.1.7"
-opentelemetry-instrumentation-openai = "==0.
-opentelemetry-instrumentation-cohere = "==0.
-opentelemetry-instrumentation-chromadb = "==0.
+opentelemetry-instrumentation-openai = "==0.28.1"
+opentelemetry-instrumentation-cohere = "==0.28.1"
+opentelemetry-instrumentation-chromadb = "==0.28.1"
 sqlalchemy = "^2.0.31"
 llama-index-agent-openai = ">=0.2.7,<0.3.0"
 llama-index-vector-stores-chroma = "^0.1.9"
+llama-index-llms-cohere = "^0.2.0"
 
 [build-system]
 requires = ["poetry-core"]
opentelemetry_instrumentation_llamaindex-0.27.0/opentelemetry/instrumentation/llamaindex/version.py DELETED

@@ -1 +0,0 @@
-__version__ = "0.27.0"