opentelemetry-instrumentation-openai 0.34.1__py3-none-any.whl → 0.49.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of opentelemetry-instrumentation-openai might be problematic. See the registry's advisory page for more details.

Files changed (22):
  1. opentelemetry/instrumentation/openai/__init__.py +11 -6
  2. opentelemetry/instrumentation/openai/shared/__init__.py +167 -68
  3. opentelemetry/instrumentation/openai/shared/chat_wrappers.py +544 -231
  4. opentelemetry/instrumentation/openai/shared/completion_wrappers.py +143 -81
  5. opentelemetry/instrumentation/openai/shared/config.py +8 -3
  6. opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +91 -30
  7. opentelemetry/instrumentation/openai/shared/event_emitter.py +108 -0
  8. opentelemetry/instrumentation/openai/shared/event_models.py +41 -0
  9. opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +1 -1
  10. opentelemetry/instrumentation/openai/shared/span_utils.py +0 -0
  11. opentelemetry/instrumentation/openai/utils.py +42 -9
  12. opentelemetry/instrumentation/openai/v0/__init__.py +32 -11
  13. opentelemetry/instrumentation/openai/v1/__init__.py +177 -69
  14. opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +208 -109
  15. opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +41 -19
  16. opentelemetry/instrumentation/openai/v1/responses_wrappers.py +1073 -0
  17. opentelemetry/instrumentation/openai/version.py +1 -1
  18. {opentelemetry_instrumentation_openai-0.34.1.dist-info → opentelemetry_instrumentation_openai-0.49.3.dist-info}/METADATA +7 -8
  19. opentelemetry_instrumentation_openai-0.49.3.dist-info/RECORD +21 -0
  20. {opentelemetry_instrumentation_openai-0.34.1.dist-info → opentelemetry_instrumentation_openai-0.49.3.dist-info}/WHEEL +1 -1
  21. opentelemetry_instrumentation_openai-0.34.1.dist-info/RECORD +0 -17
  22. {opentelemetry_instrumentation_openai-0.34.1.dist-info → opentelemetry_instrumentation_openai-0.49.3.dist-info}/entry_points.txt +0 -0
@@ -47,7 +47,7 @@ def image_gen_metrics_wrapper(
47
47
  if exception_counter:
48
48
  exception_counter.add(1, attributes=attributes)
49
49
 
50
- raise e
50
+ raise
51
51
 
52
52
  if is_openai_v1():
53
53
  response_dict = model_as_dict(response)
@@ -1,17 +1,33 @@
1
1
  import asyncio
2
- from importlib.metadata import version
3
- from contextlib import asynccontextmanager
4
2
  import logging
5
3
  import os
6
4
  import threading
7
5
  import traceback
6
+ from contextlib import asynccontextmanager
7
+ from importlib.metadata import version
8
+ from packaging import version as pkg_version
8
9
 
9
- import openai
10
+ from opentelemetry import context as context_api
11
+ from opentelemetry._logs import Logger
10
12
  from opentelemetry.instrumentation.openai.shared.config import Config
11
13
 
14
+ import openai
15
+
16
+ _OPENAI_VERSION = version("openai")
17
+
18
+ TRACELOOP_TRACE_CONTENT = "TRACELOOP_TRACE_CONTENT"
19
+
12
20
 
13
21
  def is_openai_v1():
14
- return version("openai") >= "1.0.0"
22
+ return pkg_version.parse(_OPENAI_VERSION) >= pkg_version.parse("1.0.0")
23
+
24
+
25
+ def is_reasoning_supported():
26
+ # Reasoning has been introduced in OpenAI API on Dec 17, 2024
27
+ # as per https://platform.openai.com/docs/changelog.
28
+ # The updated OpenAI library version is 1.58.0
29
+ # as per https://pypi.org/project/openai/.
30
+ return pkg_version.parse(_OPENAI_VERSION) >= pkg_version.parse("1.58.0")
15
31
 
16
32
 
17
33
  def is_azure_openai(instance):
@@ -24,15 +40,16 @@ def is_metrics_enabled() -> bool:
24
40
  return (os.getenv("TRACELOOP_METRICS_ENABLED") or "true").lower() == "true"
25
41
 
26
42
 
27
- def should_record_stream_token_usage():
28
- return Config.enrich_token_usage
29
-
30
-
31
43
  def _with_image_gen_metric_wrapper(func):
32
44
  def _with_metric(duration_histogram, exception_counter):
33
45
  def wrapper(wrapped, instance, args, kwargs):
34
46
  return func(
35
- duration_histogram, exception_counter, wrapped, instance, args, kwargs
47
+ duration_histogram,
48
+ exception_counter,
49
+ wrapped,
50
+ instance,
51
+ args,
52
+ kwargs,
36
53
  )
37
54
 
38
55
  return wrapper
@@ -155,3 +172,19 @@ def run_async(method):
155
172
  thread.join()
156
173
  else:
157
174
  asyncio.run(method)
175
+
176
+
177
+ def should_send_prompts():
178
+ return (
179
+ os.getenv(TRACELOOP_TRACE_CONTENT) or "true"
180
+ ).lower() == "true" or context_api.get_value("override_enable_content_tracing")
181
+
182
+
183
+ def should_emit_events() -> bool:
184
+ """
185
+ Checks if the instrumentation isn't using the legacy attributes
186
+ and if the event logger is not None.
187
+ """
188
+ return not Config.use_legacy_attributes and isinstance(
189
+ Config.event_logger, Logger
190
+ )
@@ -1,25 +1,28 @@
1
1
  from typing import Collection
2
2
 
3
+ from opentelemetry._logs import get_logger
3
4
  from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
4
- from opentelemetry.trace import get_tracer
5
- from opentelemetry.metrics import get_meter
6
- from wrapt import wrap_function_wrapper
7
-
8
5
  from opentelemetry.instrumentation.openai.shared.chat_wrappers import (
9
- chat_wrapper,
10
6
  achat_wrapper,
7
+ chat_wrapper,
11
8
  )
12
9
  from opentelemetry.instrumentation.openai.shared.completion_wrappers import (
13
- completion_wrapper,
14
10
  acompletion_wrapper,
11
+ completion_wrapper,
15
12
  )
13
+ from opentelemetry.instrumentation.openai.shared.config import Config
16
14
  from opentelemetry.instrumentation.openai.shared.embeddings_wrappers import (
17
- embeddings_wrapper,
18
15
  aembeddings_wrapper,
16
+ embeddings_wrapper,
19
17
  )
20
18
  from opentelemetry.instrumentation.openai.utils import is_metrics_enabled
21
19
  from opentelemetry.instrumentation.openai.version import __version__
20
+ from opentelemetry.instrumentation.utils import unwrap
21
+ from opentelemetry.metrics import get_meter
22
+ from opentelemetry.semconv._incubating.metrics import gen_ai_metrics as GenAIMetrics
22
23
  from opentelemetry.semconv_ai import Meters
24
+ from opentelemetry.trace import get_tracer
25
+ from wrapt import wrap_function_wrapper
23
26
 
24
27
  _instruments = ("openai >= 0.27.0", "openai < 1.0.0")
25
28
 
@@ -35,6 +38,12 @@ class OpenAIV0Instrumentor(BaseInstrumentor):
35
38
  meter_provider = kwargs.get("meter_provider")
36
39
  meter = get_meter(__name__, __version__, meter_provider)
37
40
 
41
+ if not Config.use_legacy_attributes:
42
+ logger_provider = kwargs.get("logger_provider")
43
+ Config.event_logger = get_logger(
44
+ __name__, __version__, logger_provider=logger_provider
45
+ )
46
+
38
47
  if is_metrics_enabled():
39
48
  tokens_histogram = meter.create_histogram(
40
49
  name=Meters.LLM_TOKEN_USAGE,
@@ -61,7 +70,7 @@ class OpenAIV0Instrumentor(BaseInstrumentor):
61
70
  )
62
71
 
63
72
  streaming_time_to_first_token = meter.create_histogram(
64
- name=Meters.LLM_STREAMING_TIME_TO_FIRST_TOKEN,
73
+ name=GenAIMetrics.GEN_AI_SERVER_TIME_TO_FIRST_TOKEN,
65
74
  unit="s",
66
75
  description="Time to first token in streaming chat completions",
67
76
  )
@@ -98,9 +107,16 @@ class OpenAIV0Instrumentor(BaseInstrumentor):
98
107
  embeddings_exception_counter,
99
108
  ) = (None, None, None)
100
109
 
101
- wrap_function_wrapper("openai", "Completion.create", completion_wrapper(tracer))
102
110
  wrap_function_wrapper(
103
- "openai", "Completion.acreate", acompletion_wrapper(tracer)
111
+ "openai",
112
+ "Completion.create",
113
+ completion_wrapper(tracer),
114
+ )
115
+
116
+ wrap_function_wrapper(
117
+ "openai",
118
+ "Completion.acreate",
119
+ acompletion_wrapper(tracer),
104
120
  )
105
121
  wrap_function_wrapper(
106
122
  "openai",
@@ -152,4 +168,9 @@ class OpenAIV0Instrumentor(BaseInstrumentor):
152
168
  )
153
169
 
154
170
  def _uninstrument(self, **kwargs):
155
- pass
171
+ unwrap("openai", "Completion.create")
172
+ unwrap("openai", "Completion.acreate")
173
+ unwrap("openai", "ChatCompletion.create")
174
+ unwrap("openai", "ChatCompletion.acreate")
175
+ unwrap("openai", "Embedding.create")
176
+ unwrap("openai", "Embedding.acreate")
@@ -1,39 +1,47 @@
1
1
  from typing import Collection
2
2
 
3
+ from opentelemetry._logs import get_logger
3
4
  from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
4
- from opentelemetry.trace import get_tracer
5
-
6
- from opentelemetry.metrics import get_meter
7
-
8
- from wrapt import wrap_function_wrapper
9
-
10
5
  from opentelemetry.instrumentation.openai.shared.chat_wrappers import (
11
- chat_wrapper,
12
6
  achat_wrapper,
7
+ chat_wrapper,
13
8
  )
14
9
  from opentelemetry.instrumentation.openai.shared.completion_wrappers import (
15
- completion_wrapper,
16
10
  acompletion_wrapper,
11
+ completion_wrapper,
17
12
  )
13
+ from opentelemetry.instrumentation.openai.shared.config import Config
18
14
  from opentelemetry.instrumentation.openai.shared.embeddings_wrappers import (
19
- embeddings_wrapper,
20
15
  aembeddings_wrapper,
16
+ embeddings_wrapper,
21
17
  )
22
18
  from opentelemetry.instrumentation.openai.shared.image_gen_wrappers import (
23
19
  image_gen_metrics_wrapper,
24
20
  )
21
+ from opentelemetry.instrumentation.openai.utils import is_metrics_enabled
25
22
  from opentelemetry.instrumentation.openai.v1.assistant_wrappers import (
26
23
  assistants_create_wrapper,
24
+ messages_list_wrapper,
25
+ runs_create_and_stream_wrapper,
27
26
  runs_create_wrapper,
28
27
  runs_retrieve_wrapper,
29
- runs_create_and_stream_wrapper,
30
- messages_list_wrapper,
31
28
  )
32
29
 
33
- from opentelemetry.instrumentation.openai.utils import is_metrics_enabled
34
- from opentelemetry.instrumentation.openai.version import __version__
30
+ from opentelemetry.instrumentation.openai.v1.responses_wrappers import (
31
+ async_responses_cancel_wrapper,
32
+ async_responses_get_or_create_wrapper,
33
+ responses_cancel_wrapper,
34
+ responses_get_or_create_wrapper,
35
+ )
35
36
 
37
+ from opentelemetry.instrumentation.openai.version import __version__
38
+ from opentelemetry.instrumentation.utils import unwrap
39
+ from opentelemetry.metrics import get_meter
40
+ from opentelemetry.semconv._incubating.metrics import gen_ai_metrics as GenAIMetrics
36
41
  from opentelemetry.semconv_ai import Meters
42
+ from opentelemetry.trace import get_tracer
43
+ from wrapt import wrap_function_wrapper
44
+
37
45
 
38
46
  _instruments = ("openai >= 1.0.0",)
39
47
 
@@ -42,6 +50,22 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
42
50
  def instrumentation_dependencies(self) -> Collection[str]:
43
51
  return _instruments
44
52
 
53
+ def _try_wrap(self, module, function, wrapper):
54
+ """
55
+ Wrap a function if it exists, otherwise do nothing.
56
+ This is useful for handling cases where the function is not available in
57
+ the older versions of the library.
58
+
59
+ Args:
60
+ module (str): The module to wrap, e.g. "openai.resources.chat.completions"
61
+ function (str): "Object.function" to wrap, e.g. "Completions.parse"
62
+ wrapper (callable): The wrapper to apply to the function.
63
+ """
64
+ try:
65
+ wrap_function_wrapper(module, function, wrapper)
66
+ except (AttributeError, ModuleNotFoundError):
67
+ pass
68
+
45
69
  def _instrument(self, **kwargs):
46
70
  tracer_provider = kwargs.get("tracer_provider")
47
71
  tracer = get_tracer(__name__, __version__, tracer_provider)
@@ -50,6 +74,12 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
50
74
  meter_provider = kwargs.get("meter_provider")
51
75
  meter = get_meter(__name__, __version__, meter_provider)
52
76
 
77
+ if not Config.use_legacy_attributes:
78
+ logger_provider = kwargs.get("logger_provider")
79
+ Config.event_logger = get_logger(
80
+ __name__, __version__, logger_provider=logger_provider
81
+ )
82
+
53
83
  if is_metrics_enabled():
54
84
  tokens_histogram = meter.create_histogram(
55
85
  name=Meters.LLM_TOKEN_USAGE,
@@ -76,7 +106,7 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
76
106
  )
77
107
 
78
108
  streaming_time_to_first_token = meter.create_histogram(
79
- name=Meters.LLM_STREAMING_TIME_TO_FIRST_TOKEN,
109
+ name=GenAIMetrics.GEN_AI_SERVER_TIME_TO_FIRST_TOKEN,
80
110
  unit="s",
81
111
  description="Time to first token in streaming chat completions",
82
112
  )
@@ -174,6 +204,33 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
174
204
  embeddings_exception_counter,
175
205
  ),
176
206
  )
207
+ # in newer versions, Completions.parse are out of beta
208
+ self._try_wrap(
209
+ "openai.resources.chat.completions",
210
+ "Completions.parse",
211
+ chat_wrapper(
212
+ tracer,
213
+ tokens_histogram,
214
+ chat_choice_counter,
215
+ duration_histogram,
216
+ chat_exception_counter,
217
+ streaming_time_to_first_token,
218
+ streaming_time_to_generate,
219
+ ),
220
+ )
221
+ self._try_wrap(
222
+ "openai.resources.chat.completions",
223
+ "AsyncCompletions.parse",
224
+ achat_wrapper(
225
+ tracer,
226
+ tokens_histogram,
227
+ chat_choice_counter,
228
+ duration_histogram,
229
+ chat_exception_counter,
230
+ streaming_time_to_first_token,
231
+ streaming_time_to_generate,
232
+ ),
233
+ )
177
234
 
178
235
  if is_metrics_enabled():
179
236
  image_gen_exception_counter = meter.create_counter(
@@ -191,60 +248,111 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
191
248
  )
192
249
 
193
250
  # Beta APIs may not be available consistently in all versions
194
- try:
195
- wrap_function_wrapper(
196
- "openai.resources.beta.assistants",
197
- "Assistants.create",
198
- assistants_create_wrapper(tracer),
199
- )
200
- wrap_function_wrapper(
201
- "openai.resources.beta.chat.completions",
202
- "Completions.parse",
203
- chat_wrapper(
204
- tracer,
205
- tokens_histogram,
206
- chat_choice_counter,
207
- duration_histogram,
208
- chat_exception_counter,
209
- streaming_time_to_first_token,
210
- streaming_time_to_generate,
211
- ),
212
- )
213
- wrap_function_wrapper(
214
- "openai.resources.beta.chat.completions",
215
- "AsyncCompletions.parse",
216
- achat_wrapper(
217
- tracer,
218
- tokens_histogram,
219
- chat_choice_counter,
220
- duration_histogram,
221
- chat_exception_counter,
222
- streaming_time_to_first_token,
223
- streaming_time_to_generate,
224
- ),
225
- )
226
- wrap_function_wrapper(
227
- "openai.resources.beta.threads.runs",
228
- "Runs.create",
229
- runs_create_wrapper(tracer),
230
- )
231
- wrap_function_wrapper(
232
- "openai.resources.beta.threads.runs",
233
- "Runs.retrieve",
234
- runs_retrieve_wrapper(tracer),
235
- )
236
- wrap_function_wrapper(
237
- "openai.resources.beta.threads.runs",
238
- "Runs.create_and_stream",
239
- runs_create_and_stream_wrapper(tracer),
240
- )
241
- wrap_function_wrapper(
242
- "openai.resources.beta.threads.messages",
243
- "Messages.list",
244
- messages_list_wrapper(tracer),
245
- )
246
- except (AttributeError, ModuleNotFoundError):
247
- pass
251
+ self._try_wrap(
252
+ "openai.resources.beta.assistants",
253
+ "Assistants.create",
254
+ assistants_create_wrapper(tracer),
255
+ )
256
+ self._try_wrap(
257
+ "openai.resources.beta.chat.completions",
258
+ "Completions.parse",
259
+ chat_wrapper(
260
+ tracer,
261
+ tokens_histogram,
262
+ chat_choice_counter,
263
+ duration_histogram,
264
+ chat_exception_counter,
265
+ streaming_time_to_first_token,
266
+ streaming_time_to_generate,
267
+ ),
268
+ )
269
+ self._try_wrap(
270
+ "openai.resources.beta.chat.completions",
271
+ "AsyncCompletions.parse",
272
+ achat_wrapper(
273
+ tracer,
274
+ tokens_histogram,
275
+ chat_choice_counter,
276
+ duration_histogram,
277
+ chat_exception_counter,
278
+ streaming_time_to_first_token,
279
+ streaming_time_to_generate,
280
+ ),
281
+ )
282
+ self._try_wrap(
283
+ "openai.resources.beta.threads.runs",
284
+ "Runs.create",
285
+ runs_create_wrapper(tracer),
286
+ )
287
+ self._try_wrap(
288
+ "openai.resources.beta.threads.runs",
289
+ "Runs.retrieve",
290
+ runs_retrieve_wrapper(tracer),
291
+ )
292
+ self._try_wrap(
293
+ "openai.resources.beta.threads.runs",
294
+ "Runs.create_and_stream",
295
+ runs_create_and_stream_wrapper(tracer),
296
+ )
297
+ self._try_wrap(
298
+ "openai.resources.beta.threads.messages",
299
+ "Messages.list",
300
+ messages_list_wrapper(tracer),
301
+ )
302
+ self._try_wrap(
303
+ "openai.resources.responses",
304
+ "Responses.create",
305
+ responses_get_or_create_wrapper(tracer),
306
+ )
307
+ self._try_wrap(
308
+ "openai.resources.responses",
309
+ "Responses.retrieve",
310
+ responses_get_or_create_wrapper(tracer),
311
+ )
312
+ self._try_wrap(
313
+ "openai.resources.responses",
314
+ "Responses.cancel",
315
+ responses_cancel_wrapper(tracer),
316
+ )
317
+ self._try_wrap(
318
+ "openai.resources.responses",
319
+ "AsyncResponses.create",
320
+ async_responses_get_or_create_wrapper(tracer),
321
+ )
322
+ self._try_wrap(
323
+ "openai.resources.responses",
324
+ "AsyncResponses.retrieve",
325
+ async_responses_get_or_create_wrapper(tracer),
326
+ )
327
+ self._try_wrap(
328
+ "openai.resources.responses",
329
+ "AsyncResponses.cancel",
330
+ async_responses_cancel_wrapper(tracer),
331
+ )
248
332
 
249
333
  def _uninstrument(self, **kwargs):
250
- pass
334
+ unwrap("openai.resources.chat.completions", "Completions.create")
335
+ unwrap("openai.resources.completions", "Completions.create")
336
+ unwrap("openai.resources.embeddings", "Embeddings.create")
337
+ unwrap("openai.resources.chat.completions", "AsyncCompletions.create")
338
+ unwrap("openai.resources.completions", "AsyncCompletions.create")
339
+ unwrap("openai.resources.embeddings", "AsyncEmbeddings.create")
340
+ unwrap("openai.resources.images", "Images.generate")
341
+
342
+ # Beta APIs may not be available consistently in all versions
343
+ try:
344
+ unwrap("openai.resources.beta.assistants", "Assistants.create")
345
+ unwrap("openai.resources.beta.chat.completions", "Completions.parse")
346
+ unwrap("openai.resources.beta.chat.completions", "AsyncCompletions.parse")
347
+ unwrap("openai.resources.beta.threads.runs", "Runs.create")
348
+ unwrap("openai.resources.beta.threads.runs", "Runs.retrieve")
349
+ unwrap("openai.resources.beta.threads.runs", "Runs.create_and_stream")
350
+ unwrap("openai.resources.beta.threads.messages", "Messages.list")
351
+ unwrap("openai.resources.responses", "Responses.create")
352
+ unwrap("openai.resources.responses", "Responses.retrieve")
353
+ unwrap("openai.resources.responses", "Responses.cancel")
354
+ unwrap("openai.resources.responses", "AsyncResponses.create")
355
+ unwrap("openai.resources.responses", "AsyncResponses.retrieve")
356
+ unwrap("openai.resources.responses", "AsyncResponses.cancel")
357
+ except ImportError:
358
+ pass