opentelemetry-instrumentation-openai 0.16.4__tar.gz → 0.16.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: note that the new async `aembeddings_wrapper` calls `response = wrapped(*args, **kwargs)` without `await` (the removed line in the diff below did await it), so the coroutine result may be returned unawaited.


This version of opentelemetry-instrumentation-openai might be problematic; review the file-by-file changes below for details.

Files changed (17):
  1. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/PKG-INFO +1 -1
  2. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +96 -22
  3. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/v1/__init__.py +7 -1
  4. opentelemetry_instrumentation_openai-0.16.5/opentelemetry/instrumentation/openai/version.py +1 -0
  5. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/pyproject.toml +1 -1
  6. opentelemetry_instrumentation_openai-0.16.4/opentelemetry/instrumentation/openai/version.py +0 -1
  7. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/README.md +0 -0
  8. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/__init__.py +0 -0
  9. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/shared/__init__.py +0 -0
  10. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/shared/chat_wrappers.py +0 -0
  11. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/shared/completion_wrappers.py +0 -0
  12. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/shared/config.py +0 -0
  13. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +0 -0
  14. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/utils.py +0 -0
  15. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/v0/__init__.py +0 -0
  16. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +0 -0
  17. {opentelemetry_instrumentation_openai-0.16.4 → opentelemetry_instrumentation_openai-0.16.5}/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: opentelemetry-instrumentation-openai
3
- Version: 0.16.4
3
+ Version: 0.16.5
4
4
  Summary: OpenTelemetry OpenAI instrumentation
5
5
  Home-page: https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-openai
6
6
  License: Apache-2.0
@@ -7,7 +7,6 @@ from opentelemetry.semconv.ai import SpanAttributes, LLMRequestTypeValues
7
7
 
8
8
  from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
9
9
  from opentelemetry.instrumentation.openai.utils import (
10
- _with_tracer_wrapper,
11
10
  start_as_current_span_async,
12
11
  _with_embeddings_telemetry_wrapper,
13
12
  )
@@ -33,12 +32,17 @@ logger = logging.getLogger(__name__)
33
32
 
34
33
 
35
34
  @_with_embeddings_telemetry_wrapper
36
- def embeddings_wrapper(tracer,
37
- token_counter: Counter,
38
- vector_size_counter: Counter,
39
- duration_histogram: Histogram,
40
- exception_counter: Counter,
41
- wrapped, instance, args, kwargs):
35
+ def embeddings_wrapper(
36
+ tracer,
37
+ token_counter: Counter,
38
+ vector_size_counter: Counter,
39
+ duration_histogram: Histogram,
40
+ exception_counter: Counter,
41
+ wrapped,
42
+ instance,
43
+ args,
44
+ kwargs,
45
+ ):
42
46
  if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
43
47
  return wrapped(*args, **kwargs)
44
48
 
@@ -56,7 +60,7 @@ def embeddings_wrapper(tracer,
56
60
  end_time = time.time()
57
61
  except Exception as e: # pylint: disable=broad-except
58
62
  end_time = time.time()
59
- duration = end_time - start_time if 'start_time' in locals() else 0
63
+ duration = end_time - start_time if "start_time" in locals() else 0
60
64
  attributes = {
61
65
  "error.type": e.__class__.__name__,
62
66
  }
@@ -71,25 +75,71 @@ def embeddings_wrapper(tracer,
71
75
 
72
76
  duration = end_time - start_time
73
77
 
74
- _handle_response(response, span, instance, token_counter, vector_size_counter, duration_histogram, duration)
78
+ _handle_response(
79
+ response,
80
+ span,
81
+ instance,
82
+ token_counter,
83
+ vector_size_counter,
84
+ duration_histogram,
85
+ duration,
86
+ )
75
87
 
76
88
  return response
77
89
 
78
90
 
79
- @_with_tracer_wrapper
80
- async def aembeddings_wrapper(tracer, wrapped, instance, args, kwargs):
91
+ @_with_embeddings_telemetry_wrapper
92
+ async def aembeddings_wrapper(
93
+ tracer,
94
+ token_counter: Counter,
95
+ vector_size_counter: Counter,
96
+ duration_histogram: Histogram,
97
+ exception_counter: Counter,
98
+ wrapped,
99
+ instance,
100
+ args,
101
+ kwargs,
102
+ ):
81
103
  if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
82
104
  return wrapped(*args, **kwargs)
83
105
 
84
106
  async with start_as_current_span_async(
85
- tracer=tracer,
86
- name=SPAN_NAME,
87
- kind=SpanKind.CLIENT,
88
- attributes={SpanAttributes.LLM_REQUEST_TYPE: LLM_REQUEST_TYPE.value},
107
+ tracer=tracer,
108
+ name=SPAN_NAME,
109
+ kind=SpanKind.CLIENT,
110
+ attributes={SpanAttributes.LLM_REQUEST_TYPE: LLM_REQUEST_TYPE.value},
89
111
  ) as span:
90
112
  _handle_request(span, kwargs, instance)
91
- response = await wrapped(*args, **kwargs)
92
- _handle_response(response, span)
113
+ try:
114
+ # record time for duration
115
+ start_time = time.time()
116
+ response = wrapped(*args, **kwargs)
117
+ end_time = time.time()
118
+ except Exception as e: # pylint: disable=broad-except
119
+ end_time = time.time()
120
+ duration = end_time - start_time if "start_time" in locals() else 0
121
+ attributes = {
122
+ "error.type": e.__class__.__name__,
123
+ }
124
+
125
+ # if there are legal duration, record it
126
+ if duration > 0 and duration_histogram:
127
+ duration_histogram.record(duration, attributes=attributes)
128
+ if exception_counter:
129
+ exception_counter.add(1, attributes=attributes)
130
+
131
+ raise e
132
+
133
+ duration = end_time - start_time
134
+ _handle_response(
135
+ response,
136
+ span,
137
+ instance,
138
+ token_counter,
139
+ vector_size_counter,
140
+ duration_histogram,
141
+ duration,
142
+ )
93
143
 
94
144
  return response
95
145
 
@@ -101,19 +151,40 @@ def _handle_request(span, kwargs, instance):
101
151
  _set_client_attributes(span, instance)
102
152
 
103
153
 
104
- def _handle_response(response, span, instance=None, token_counter=None, vector_size_counter=None,
105
- duration_histogram=None, duration=None):
154
+ def _handle_response(
155
+ response,
156
+ span,
157
+ instance=None,
158
+ token_counter=None,
159
+ vector_size_counter=None,
160
+ duration_histogram=None,
161
+ duration=None,
162
+ ):
106
163
  if is_openai_v1():
107
164
  response_dict = model_as_dict(response)
108
165
  else:
109
166
  response_dict = response
110
167
  # metrics record
111
- _set_embeddings_metrics(instance, token_counter, vector_size_counter, duration_histogram, response_dict, duration)
168
+ _set_embeddings_metrics(
169
+ instance,
170
+ token_counter,
171
+ vector_size_counter,
172
+ duration_histogram,
173
+ response_dict,
174
+ duration,
175
+ )
112
176
  # span attributes
113
177
  _set_response_attributes(span, response_dict)
114
178
 
115
179
 
116
- def _set_embeddings_metrics(instance, token_counter, vector_size_counter, duration_histogram, response_dict, duration):
180
+ def _set_embeddings_metrics(
181
+ instance,
182
+ token_counter,
183
+ vector_size_counter,
184
+ duration_histogram,
185
+ response_dict,
186
+ duration,
187
+ ):
117
188
  shared_attributes = {
118
189
  "llm.response.model": response_dict.get("model") or None,
119
190
  "server.address": _get_openai_base_url(instance),
@@ -124,7 +195,10 @@ def _set_embeddings_metrics(instance, token_counter, vector_size_counter, durati
124
195
  if usage and token_counter:
125
196
  for name, val in usage.items():
126
197
  if name in OPENAI_LLM_USAGE_TOKEN_TYPES:
127
- attributes_with_token_type = {**shared_attributes, "llm.usage.token_type": name.split('_')[0]}
198
+ attributes_with_token_type = {
199
+ **shared_attributes,
200
+ "llm.usage.token_type": name.split("_")[0],
201
+ }
128
202
  token_counter.add(val, attributes=attributes_with_token_type)
129
203
 
130
204
  # vec size metrics
@@ -178,7 +178,13 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
178
178
  wrap_function_wrapper(
179
179
  "openai.resources.embeddings",
180
180
  "AsyncEmbeddings.create",
181
- aembeddings_wrapper(tracer),
181
+ aembeddings_wrapper(
182
+ tracer,
183
+ embeddings_token_counter,
184
+ embeddings_vector_size_counter,
185
+ embeddings_duration_histogram,
186
+ embeddings_exception_counter,
187
+ ),
182
188
  )
183
189
 
184
190
  if is_metrics_enabled():
@@ -8,7 +8,7 @@ show_missing = true
8
8
 
9
9
  [tool.poetry]
10
10
  name = "opentelemetry-instrumentation-openai"
11
- version = "0.16.4"
11
+ version = "0.16.5"
12
12
  description = "OpenTelemetry OpenAI instrumentation"
13
13
  authors = [
14
14
  "Gal Kleinman <gal@traceloop.com>",