opentelemetry-instrumentation-llamaindex 0.33.5__py3-none-any.whl → 0.33.6__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package. The information in this diff is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.

Potentially problematic release.


This version of opentelemetry-instrumentation-llamaindex has been flagged as potentially problematic; consult the registry's advisory page for details.

@@ -1,18 +1,24 @@
1
+ from functools import singledispatchmethod
1
2
  import inspect
2
3
  import json
3
4
  import re
4
- from typing import Any, Dict, Optional
5
- from dataclasses import dataclass
5
+ from typing import Any, AsyncGenerator, Dict, Generator, List, Optional
6
+ from dataclasses import dataclass, field
6
7
 
7
8
  from llama_index.core.bridge.pydantic import PrivateAttr
8
9
  from llama_index.core.base.llms.types import MessageRole
10
+ from llama_index.core.base.response.schema import StreamingResponse
9
11
  from llama_index.core.instrumentation import get_dispatcher
10
12
  from llama_index.core.instrumentation.events import BaseEvent
11
13
  from llama_index.core.instrumentation.events.agent import AgentToolCallEvent
12
14
  from llama_index.core.instrumentation.events.embedding import EmbeddingStartEvent
15
+ from llama_index.core.instrumentation.events.chat_engine import (
16
+ StreamChatEndEvent,
17
+ )
13
18
  from llama_index.core.instrumentation.events.llm import (
14
19
  LLMChatEndEvent,
15
20
  LLMChatStartEvent,
21
+ LLMCompletionEndEvent,
16
22
  LLMPredictEndEvent,
17
23
  )
18
24
  from llama_index.core.instrumentation.events.rerank import ReRankStartEvent
@@ -30,18 +36,27 @@ from opentelemetry.semconv_ai import (
30
36
  SpanAttributes,
31
37
  TraceloopSpanKindValues,
32
38
  )
33
- from opentelemetry.trace import get_current_span, set_span_in_context, Tracer
39
+ from opentelemetry.trace import set_span_in_context, Tracer
34
40
  from opentelemetry.trace.span import Span
35
41
 
36
42
 
37
- LLAMA_INDEX_REGEX = re.compile(r"^([a-zA-Z]+)\.")
43
+ # For these spans, instead of creating a span using data from LlamaIndex,
44
+ # we use the regular OpenLLMetry instrumentations
45
+ AVAILABLE_OPENLLMETRY_INSTRUMENTATIONS = ["OpenAI"]
46
+
47
+ CLASS_NAME_FROM_ID_REGEX = re.compile(r"^([a-zA-Z]+)\.")
48
+ STREAMING_END_EVENTS = (
49
+ LLMChatEndEvent,
50
+ LLMCompletionEndEvent,
51
+ StreamChatEndEvent,
52
+ )
38
53
 
39
54
 
40
55
  def instrument_with_dispatcher(tracer: Tracer):
41
56
  dispatcher = get_dispatcher()
42
- openll_span_handler = OpenLLSpanHandler(tracer)
43
- dispatcher.add_span_handler(openll_span_handler)
44
- dispatcher.add_event_handler(OpenLLEventHandler(openll_span_handler))
57
+ openllmetry_span_handler = OpenLLMetrySpanHandler(tracer)
58
+ dispatcher.add_span_handler(openllmetry_span_handler)
59
+ dispatcher.add_event_handler(OpenLLMetryEventHandler(openllmetry_span_handler))
45
60
 
46
61
 
47
62
  @dont_throw
@@ -85,18 +100,16 @@ def _set_llm_chat_response(event, span) -> None:
85
100
  return
86
101
  span.set_attribute(
87
102
  SpanAttributes.LLM_RESPONSE_MODEL,
88
- raw.get("model") if "model" in raw else raw.model, # raw can be Any, not just ChatCompletion
103
+ (
104
+ raw.get("model") if "model" in raw else raw.model
105
+ ), # raw can be Any, not just ChatCompletion
89
106
  )
90
107
  if usage := raw.get("usage") if "usage" in raw else raw.usage:
91
108
  span.set_attribute(
92
109
  SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, usage.completion_tokens
93
110
  )
94
- span.set_attribute(
95
- SpanAttributes.LLM_USAGE_PROMPT_TOKENS, usage.prompt_tokens
96
- )
97
- span.set_attribute(
98
- SpanAttributes.LLM_USAGE_TOTAL_TOKENS, usage.total_tokens
99
- )
111
+ span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, usage.prompt_tokens)
112
+ span.set_attribute(SpanAttributes.LLM_USAGE_TOTAL_TOKENS, usage.total_tokens)
100
113
  if choices := raw.choices:
101
114
  span.set_attribute(
102
115
  SpanAttributes.LLM_RESPONSE_FINISH_REASON, choices[0].finish_reason
@@ -151,12 +164,71 @@ def _set_tool(event, span) -> None:
151
164
 
152
165
  @dataclass
153
166
  class SpanHolder:
154
- span: Span
155
- token: Any
156
- context: context_api.context.Context
167
+ span_id: str
168
+ parent: Optional["SpanHolder"] = None
169
+ otel_span: Optional[Span] = None
170
+ token: Optional[Any] = None
171
+ context: Optional[context_api.context.Context] = None
172
+ waiting_for_streaming: bool = field(init=False, default=False)
173
+
174
+ _active: bool = field(init=False, default=True)
175
+
176
+ def process_event(self, event: BaseEvent) -> List["SpanHolder"]:
177
+ self.update_span_for_event(event)
178
+
179
+ if self.waiting_for_streaming and isinstance(event, STREAMING_END_EVENTS):
180
+ self.end()
181
+ return [self] + self.notify_parent()
182
+
183
+ return []
184
+
185
+ def notify_parent(self) -> List["SpanHolder"]:
186
+ if self.parent:
187
+ self.parent.end()
188
+ return [self.parent] + self.parent.notify_parent()
189
+ return []
190
+
191
+ def end(self):
192
+ if not self._active:
193
+ return
194
+
195
+ self._active = False
196
+ if self.otel_span:
197
+ self.otel_span.end()
198
+ if self.token:
199
+ context_api.detach(self.token)
200
+
201
+ @singledispatchmethod
202
+ def update_span_for_event(self, event: BaseEvent):
203
+ pass
204
+
205
+ @update_span_for_event.register
206
+ def _(self, event: LLMChatStartEvent):
207
+ _set_llm_chat_request(event, self.otel_span)
157
208
 
209
+ @update_span_for_event.register
210
+ def _(self, event: LLMChatEndEvent):
211
+ _set_llm_chat_response(event, self.otel_span)
158
212
 
159
- class OpenLLSpanHandler(BaseSpanHandler[SpanHolder]):
213
+ @update_span_for_event.register
214
+ def _(self, event: LLMPredictEndEvent):
215
+ _set_llm_predict_response(event, self.otel_span)
216
+
217
+ @update_span_for_event.register
218
+ def _(self, event: EmbeddingStartEvent):
219
+ _set_embedding(event, self.otel_span)
220
+
221
+ @update_span_for_event.register
222
+ def _(self, event: ReRankStartEvent):
223
+ _set_rerank(event, self.otel_span)
224
+
225
+ @update_span_for_event.register
226
+ def _(self, event: AgentToolCallEvent):
227
+ _set_tool(event, self.otel_span)
228
+
229
+
230
+ class OpenLLMetrySpanHandler(BaseSpanHandler[SpanHolder]):
231
+ waiting_for_streaming_spans: Dict[str, SpanHolder] = {}
160
232
  _tracer: Tracer = PrivateAttr()
161
233
 
162
234
  def __init__(self, tracer: Tracer):
@@ -181,7 +253,15 @@ class OpenLLSpanHandler(BaseSpanHandler[SpanHolder]):
181
253
  )
182
254
  # Take the class name from id_ where id_ is e.g.
183
255
  # 'SentenceSplitter.split_text_metadata_aware-a2f2a780-2fa6-4682-a88e-80dc1f1ebe6a'
184
- class_name = LLAMA_INDEX_REGEX.match(id_).groups()[0]
256
+ class_name = CLASS_NAME_FROM_ID_REGEX.match(id_).groups()[0]
257
+ if class_name in AVAILABLE_OPENLLMETRY_INSTRUMENTATIONS:
258
+ context_api.attach(
259
+ context_api.set_value(
260
+ SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY, False
261
+ )
262
+ )
263
+ return SpanHolder(id_, parent)
264
+
185
265
  span_name = f"{class_name}.{kind}"
186
266
  span = self._tracer.start_span(
187
267
  span_name,
@@ -196,21 +276,26 @@ class OpenLLSpanHandler(BaseSpanHandler[SpanHolder]):
196
276
  current_context,
197
277
  )
198
278
  token = context_api.attach(current_context)
279
+
199
280
  span.set_attribute(SpanAttributes.TRACELOOP_SPAN_KIND, kind)
200
281
  span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_NAME, span_name)
201
282
  try:
202
283
  if should_send_prompts():
203
284
  span.set_attribute(
204
285
  SpanAttributes.TRACELOOP_ENTITY_INPUT,
205
- json.dumps(bound_args.arguments, cls=JSONEncoder)
286
+ json.dumps(bound_args.arguments, cls=JSONEncoder),
206
287
  )
207
288
  except Exception:
208
289
  pass
209
290
 
210
- return SpanHolder(span, token, current_context)
291
+ return SpanHolder(id_, parent, span, token, current_context)
211
292
 
212
293
  def prepare_to_exit_span(
213
- self, id_: str, result: Optional[Any] = None, **kwargs
294
+ self,
295
+ id_: str,
296
+ instance: Optional[Any] = None,
297
+ result: Optional[Any] = None,
298
+ **kwargs,
214
299
  ) -> SpanHolder:
215
300
  """Logic for preparing to drop a span."""
216
301
  span_holder = self.open_spans[id_]
@@ -223,18 +308,22 @@ class OpenLLSpanHandler(BaseSpanHandler[SpanHolder]):
223
308
  if "source_nodes" in output:
224
309
  del output["source_nodes"]
225
310
  if should_send_prompts():
226
- span_holder.span.set_attribute(
311
+ span_holder.otel_span.set_attribute(
227
312
  SpanAttributes.TRACELOOP_ENTITY_OUTPUT,
228
313
  json.dumps(output, cls=JSONEncoder),
229
314
  )
230
315
  except Exception:
231
316
  pass
232
317
 
233
- span_holder.span.end()
234
- context_api.detach(span_holder.token)
235
- with self.lock:
236
- self.completed_spans += [span_holder]
237
- return span_holder
318
+ if isinstance(result, (Generator, AsyncGenerator, StreamingResponse)):
319
+ # This is a streaming response, we want to wait for the streaming end event before ending the span
320
+ span_holder.waiting_for_streaming = True
321
+ with self.lock:
322
+ self.waiting_for_streaming_spans[id_] = span_holder
323
+ return span_holder
324
+ else:
325
+ span_holder.end()
326
+ return span_holder
238
327
 
239
328
  def prepare_to_drop_span(
240
329
  self, id_: str, err: Optional[Exception], **kwargs
@@ -243,30 +332,27 @@ class OpenLLSpanHandler(BaseSpanHandler[SpanHolder]):
243
332
  if id_ in self.open_spans:
244
333
  with self.lock:
245
334
  span_holder = self.open_spans[id_]
246
- self.dropped_spans += [span_holder]
247
335
  return span_holder
248
336
  return None
249
337
 
250
338
 
251
- class OpenLLEventHandler(BaseEventHandler):
252
- _span_handler: OpenLLSpanHandler = PrivateAttr()
339
+ class OpenLLMetryEventHandler(BaseEventHandler):
340
+ _span_handler: OpenLLMetrySpanHandler = PrivateAttr()
253
341
 
254
- def __init__(self, span_handler: OpenLLSpanHandler):
342
+ def __init__(self, span_handler: OpenLLMetrySpanHandler):
255
343
  super().__init__()
256
344
  self._span_handler = span_handler
257
345
 
258
346
  def handle(self, event: BaseEvent, **kwargs) -> Any:
259
- span = get_current_span()
260
- # use case with class_pattern if support for 3.9 is dropped
261
- if isinstance(event, LLMChatStartEvent):
262
- _set_llm_chat_request(event, span)
263
- elif isinstance(event, LLMChatEndEvent):
264
- _set_llm_chat_response(event, span)
265
- elif isinstance(event, LLMPredictEndEvent):
266
- _set_llm_predict_response(event, span)
267
- elif isinstance(event, EmbeddingStartEvent):
268
- _set_embedding(event, span)
269
- elif isinstance(event, ReRankStartEvent):
270
- _set_rerank(event, span)
271
- elif isinstance(event, AgentToolCallEvent):
272
- _set_tool(event, span)
347
+ span = self._span_handler.open_spans.get(event.span_id)
348
+ if not span:
349
+ span = self._span_handler.waiting_for_streaming_spans.get(event.span_id)
350
+ if not span:
351
+ print(f"No span found for event {event}")
352
+ return
353
+
354
+ finished_spans = span.process_event(event)
355
+
356
+ with self._span_handler.lock:
357
+ for span in finished_spans:
358
+ self._span_handler.waiting_for_streaming_spans.pop(span.span_id)
@@ -1 +1 @@
1
- __version__ = "0.33.5"
1
+ __version__ = "0.33.6"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: opentelemetry-instrumentation-llamaindex
3
- Version: 0.33.5
3
+ Version: 0.33.6
4
4
  Summary: OpenTelemetry LlamaIndex instrumentation
5
5
  Home-page: https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-llamaindex
6
6
  License: Apache-2.0
@@ -6,12 +6,12 @@ opentelemetry/instrumentation/llamaindex/base_synthesizer_instrumentor.py,sha256
6
6
  opentelemetry/instrumentation/llamaindex/base_tool_instrumentor.py,sha256=mdPai098XOqra-BnfdN3amn9WFX06FEf7N9mVqZcJ_c,2758
7
7
  opentelemetry/instrumentation/llamaindex/config.py,sha256=CtypZov_ytI9nSrfN9lWnjcufbAR9sfkXRA0OstDEUw,42
8
8
  opentelemetry/instrumentation/llamaindex/custom_llm_instrumentor.py,sha256=3YIcVdVGrWOYpPzZCyXnXyi-g1AyAQQDhD1XGLgyc_Q,5950
9
- opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py,sha256=4AJkwZdZMf6_VSgTT30GSkGzFIzu7e6JuhnJhnq97Zk,9484
9
+ opentelemetry/instrumentation/llamaindex/dispatcher_wrapper.py,sha256=eWV4CDECxMd_8hdW_CmS7rmgz-NOZwVUb9_SD38XpxY,12308
10
10
  opentelemetry/instrumentation/llamaindex/query_pipeline_instrumentor.py,sha256=PfUens1GisvbU98TLXEJ8_ALWGhnbOdsQkMwhFom8ZA,2496
11
11
  opentelemetry/instrumentation/llamaindex/retriever_query_engine_instrumentor.py,sha256=OtQ7uZckFtzq9mzqSlKDhvO-Uffl99axuZ2TJXCqDRQ,2627
12
12
  opentelemetry/instrumentation/llamaindex/utils.py,sha256=7NfuSbIf5Uohxo79AUM_gB-8RQtxgUO5glCWzXHeueQ,2349
13
- opentelemetry/instrumentation/llamaindex/version.py,sha256=fqSeLkelQdyV4dkWnpTs0itWA0EN5xr2NivqnqZLzoI,23
14
- opentelemetry_instrumentation_llamaindex-0.33.5.dist-info/METADATA,sha256=7kDFHkAkJpkcT35ZUyRz77od7lArwqu7Ebae2urMhMc,2336
15
- opentelemetry_instrumentation_llamaindex-0.33.5.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
16
- opentelemetry_instrumentation_llamaindex-0.33.5.dist-info/entry_points.txt,sha256=gtV40W4oFCp6VNvgowTKa0zQjfIrvfdlYflgGdSsA5A,106
17
- opentelemetry_instrumentation_llamaindex-0.33.5.dist-info/RECORD,,
13
+ opentelemetry/instrumentation/llamaindex/version.py,sha256=5Y0NtoRrd9Mt0esLjATqHKGOX_Jre-ex7yMzoNdSwtY,23
14
+ opentelemetry_instrumentation_llamaindex-0.33.6.dist-info/METADATA,sha256=BfmejQ1tt6PJYUxAeNpYlAWaH_t5ZmnfjEN5smND7ZI,2336
15
+ opentelemetry_instrumentation_llamaindex-0.33.6.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
16
+ opentelemetry_instrumentation_llamaindex-0.33.6.dist-info/entry_points.txt,sha256=gtV40W4oFCp6VNvgowTKa0zQjfIrvfdlYflgGdSsA5A,106
17
+ opentelemetry_instrumentation_llamaindex-0.33.6.dist-info/RECORD,,