opentelemetry-instrumentation-openai 0.5.1__py3-none-any.whl → 0.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of opentelemetry-instrumentation-openai has been flagged as possibly problematic.

@@ -131,6 +131,7 @@ def _set_response_attributes(span, response):
 
 
 def is_streaming_response(response):
-    return isinstance(response, types.GeneratorType) or (
-        is_openai_v1() and isinstance(response, openai.Stream)
-    )
+    if is_openai_v1():
+        return isinstance(response, openai.Stream)
+
+    return isinstance(response, types.GeneratorType) or isinstance(response, types.AsyncGeneratorType)
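The practical difference in the non-v1 branch is that async generators are now treated as streaming responses too. A minimal standalone sketch of that branch (the openai.Stream check is omitted so the snippet runs without the openai package, and the fake stream helpers are invented for illustration):

import types


def _fake_sync_stream():
    # Stand-in for a pre-v1 streaming response (a plain generator).
    yield {"choices": []}


async def _fake_async_stream():
    # Stand-in for a pre-v1 async streaming response (an async generator).
    yield {"choices": []}


# The old expression only matched plain generators outside of openai v1.
print(isinstance(_fake_sync_stream(), types.GeneratorType))         # True
# The new expression also recognizes async generators as streaming.
print(isinstance(_fake_async_stream(), types.AsyncGeneratorType))   # True
print(isinstance(_fake_async_stream(), types.GeneratorType))        # False, which is why the old check missed them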
@@ -7,8 +7,7 @@ from opentelemetry.semconv.ai import SpanAttributes, LLMRequestTypeValues
 
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
 from opentelemetry.instrumentation.openai.utils import (
-    _with_tracer_wrapper,
-    start_as_current_span_async,
+    _with_tracer_wrapper
 )
 from opentelemetry.instrumentation.openai.shared import (
     _set_request_attributes,
@@ -43,9 +42,8 @@ def chat_wrapper(tracer, wrapped, instance, args, kwargs):
     if is_streaming_response(response):
         # span will be closed after the generator is done
        return _build_from_streaming_response(span, response)
-    else:
-        _handle_response(response, span)
 
+    _handle_response(response, span)
     span.end()
 
     return response
@@ -56,14 +54,18 @@ async def achat_wrapper(tracer, wrapped, instance, args, kwargs):
     if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
         return wrapped(*args, **kwargs)
 
-    async with start_as_current_span_async(
-        tracer=tracer, name=SPAN_NAME, kind=SpanKind.CLIENT
-    ) as span:
-        _handle_request(span, kwargs)
-        response = await wrapped(*args, **kwargs)
-        _handle_response(response, span)
+    span = tracer.start_span(SPAN_NAME, kind=SpanKind.CLIENT)
+    _handle_request(span, kwargs)
+    response = await wrapped(*args, **kwargs)
+
+    if is_streaming_response(response):
+        # span will be closed after the generator is done
+        return _abuild_from_streaming_response(span, response)
+
+    _handle_response(response, span)
+    span.end()
 
-        return response
+    return response
 
 
 def _handle_request(span, kwargs):
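The shape of the new async flow (in chat_wrappers.py, judging by the hunk context and the RECORD changes further down), reduced to a sketch: the span is opened eagerly rather than via start_as_current_span_async, and for a streaming response it is handed to an async generator and ended only once the stream is drained. Everything below except the opentelemetry API calls (trace.get_tracer, tracer.start_span, span.end) is invented for illustration; this is not the packaged wrapper itself.

import asyncio

from opentelemetry import trace


async def fake_streaming_create(**kwargs):
    # Stand-in for an async streaming chat completion call.
    for token in ("Hel", "lo"):
        yield {"choices": [{"index": 0, "delta": {"content": token}}]}


async def traced_chat(**kwargs):
    tracer = trace.get_tracer(__name__)
    span = tracer.start_span("openai.chat")   # started manually, not as a context manager

    response = fake_streaming_create(**kwargs)

    async def drain(span, response):
        async for chunk in response:          # the span outlives the wrapped call...
            yield chunk
        span.end()                            # ...and is ended once the stream is exhausted

    return drain(span, response)


async def main():
    async for chunk in await traced_chat(model="gpt-3.5-turbo", stream=True):
        print(chunk["choices"][0]["delta"]["content"], end="")
    print()


asyncio.run(main())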
@@ -139,25 +141,24 @@ def _build_from_streaming_response(span, response):
     complete_response = {"choices": [], "model": ""}
     for item in response:
         item_to_yield = item
-        if is_openai_v1():
-            item = item.model_dump()
-
-        for choice in item.get("choices"):
-            index = choice.get("index")
-            if len(complete_response.get("choices")) <= index:
-                complete_response["choices"].append(
-                    {"index": index, "message": {"content": "", "role": ""}}
-                )
-            complete_choice = complete_response.get("choices")[index]
-            if choice.get("finish_reason"):
-                complete_choice["finish_reason"] = choice.get("finish_reason")
-
-            delta = choice.get("delta")
-
-            if delta.get("content"):
-                complete_choice["message"]["content"] += delta.get("content")
-            if delta.get("role"):
-                complete_choice["message"]["role"] = delta.get("role")
+        _accumulate_stream_items(item, complete_response)
+
+        yield item_to_yield
+
+    _set_response_attributes(span, complete_response)
+
+    if should_send_prompts():
+        _set_completions(span, complete_response.get("choices"))
+
+    span.set_status(Status(StatusCode.OK))
+    span.end()
+
+
+async def _abuild_from_streaming_response(span, response):
+    complete_response = {"choices": [], "model": ""}
+    async for item in response:
+        item_to_yield = item
+        _accumulate_stream_items(item, complete_response)
 
         yield item_to_yield
 
@@ -168,3 +169,25 @@ def _build_from_streaming_response(span, response):
 
     span.set_status(Status(StatusCode.OK))
     span.end()
+
+
+def _accumulate_stream_items(item, complete_response):
+    if is_openai_v1():
+        item = item.model_dump()
+
+    for choice in item.get("choices"):
+        index = choice.get("index")
+        if len(complete_response.get("choices")) <= index:
+            complete_response["choices"].append(
+                {"index": index, "message": {"content": "", "role": ""}}
+            )
+        complete_choice = complete_response.get("choices")[index]
+        if choice.get("finish_reason"):
+            complete_choice["finish_reason"] = choice.get("finish_reason")
+
+        delta = choice.get("delta")
+
+        if delta.get("content"):
+            complete_choice["message"]["content"] += delta.get("content")
+        if delta.get("role"):
+            complete_choice["message"]["role"] = delta.get("role")
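To make the accumulation step concrete, here is the same merging logic exercised standalone on plain dict chunks; the is_openai_v1()/model_dump() branch is dropped so it runs without the openai package, and the chunk payloads are made up for illustration.

def accumulate(item, complete_response):
    # Same merging rules as the _accumulate_stream_items added above, minus the v1 model_dump step.
    for choice in item.get("choices"):
        index = choice.get("index")
        if len(complete_response.get("choices")) <= index:
            complete_response["choices"].append(
                {"index": index, "message": {"content": "", "role": ""}}
            )
        complete_choice = complete_response.get("choices")[index]
        if choice.get("finish_reason"):
            complete_choice["finish_reason"] = choice.get("finish_reason")
        delta = choice.get("delta")
        if delta.get("content"):
            complete_choice["message"]["content"] += delta.get("content")
        if delta.get("role"):
            complete_choice["message"]["role"] = delta.get("role")


chunks = [
    {"choices": [{"index": 0, "delta": {"role": "assistant"}}]},
    {"choices": [{"index": 0, "delta": {"content": "Hello"}}]},
    {"choices": [{"index": 0, "delta": {"content": " world"}, "finish_reason": "stop"}]},
]

complete_response = {"choices": [], "model": ""}
for chunk in chunks:
    accumulate(chunk, complete_response)

print(complete_response)
# {'choices': [{'index': 0, 'message': {'content': 'Hello world', 'role': 'assistant'},
#               'finish_reason': 'stop'}], 'model': ''}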
@@ -1 +1 @@
-__version__ = "0.5.1"
+__version__ = "0.5.3"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentelemetry-instrumentation-openai
-Version: 0.5.1
+Version: 0.5.3
 Summary: OpenTelemetry OpenAI instrumentation
 License: Apache-2.0
 Author: Gal Kleinman
@@ -1,11 +1,11 @@
 opentelemetry/instrumentation/openai/__init__.py,sha256=iiuJICVwzv0K2OeskonjZIYpPnxooSEsIkXOS7uZEf8,937
-opentelemetry/instrumentation/openai/shared/__init__.py,sha256=I3mHPmUPjb22ZsbgmbPVtuIxo8_FM6F87dyqBxfxTnA,4285
-opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=EcER4tO6Zwu4eC-Z-L2dmYbgTnsKmZG8hmVIhpEOURA,5354
+opentelemetry/instrumentation/openai/shared/__init__.py,sha256=eZ-baHO8ocPb-vmfBG-20ilrffBtfBa6kyut0dDnua4,4336
+opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=3l9TMZ6FC7eWb8gSFk7Y9Be5a7hjB_YARJazs_YBuqM,5934
 opentelemetry/instrumentation/openai/shared/completion_wrappers.py,sha256=M3K4IkQOS2bnBehLI_Mh_ApvD1trSMX2_PjSxbZO0yc,4449
 opentelemetry/instrumentation/openai/utils.py,sha256=m405jldmW6dsgQz_qF75WNM0fMTJUMD_IVMluu7s-nY,546
 opentelemetry/instrumentation/openai/v0/__init__.py,sha256=0olHqFUCAfVtD-TnOzIVDwK5YaieZU36RewZPLX8Cos,1236
 opentelemetry/instrumentation/openai/v1/__init__.py,sha256=FnDPr5UQcKnTBLt1dAbr_GYtE8pOCtR7U2PtY0thnFo,1320
-opentelemetry/instrumentation/openai/version.py,sha256=eZ1bOun1DDVV0YLOBW4wj2FP1ajReLjbIrGmzN7ASBw,22
-opentelemetry_instrumentation_openai-0.5.1.dist-info/METADATA,sha256=hbb4z20Hf0DJhbpyla--bVKQnwcqWTY6Sh4GFApfD9Q,1767
-opentelemetry_instrumentation_openai-0.5.1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-opentelemetry_instrumentation_openai-0.5.1.dist-info/RECORD,,
+opentelemetry/instrumentation/openai/version.py,sha256=tgzuqHKcEdKBaP57F5oXxq4XlW2n9J4Fj8ZGu7nGOZg,22
+opentelemetry_instrumentation_openai-0.5.3.dist-info/METADATA,sha256=_xfm3o1qSjz5ZRI0wxolsW6R3d7MolQC6kaUe8l7-TQ,1767
+opentelemetry_instrumentation_openai-0.5.3.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+opentelemetry_instrumentation_openai-0.5.3.dist-info/RECORD,,