opentelemetry-instrumentation-openai 0.21.3__py3-none-any.whl → 0.21.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of opentelemetry-instrumentation-openai has been flagged as potentially problematic.
- opentelemetry/instrumentation/openai/shared/__init__.py +21 -0
- opentelemetry/instrumentation/openai/shared/chat_wrappers.py +30 -19
- opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +8 -5
- opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py +7 -5
- opentelemetry/instrumentation/openai/v0/__init__.py +2 -2
- opentelemetry/instrumentation/openai/v1/__init__.py +2 -2
- opentelemetry/instrumentation/openai/version.py +1 -1
- {opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/METADATA +1 -1
- {opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/RECORD +11 -11
- {opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/WHEEL +0 -0
- {opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/entry_points.txt +0 -0
opentelemetry/instrumentation/openai/shared/__init__.py

@@ -245,3 +245,24 @@ def get_token_count_from_string(string: str, model_name: str):
 
     token_count = len(encoding.encode(string))
     return token_count
+
+
+def _token_type(token_type: str):
+    if token_type == "prompt_tokens":
+        return "input"
+    elif token_type == "completion_tokens":
+        return "output"
+
+    return None
+
+
+def _metric_shared_attributes(
+    response_model: str, operation: str, server_address: str, is_streaming: bool = False
+):
+    return {
+        "gen_ai.system": "openai",
+        "gen_ai.response.model": response_model,
+        "gen_ai.operation.name": operation,
+        "server.address": server_address,
+        "stream": is_streaming,
+    }
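Together, these two new helpers define the attribute set that every metric emitted by this instrumentation now shares. The snippet below is illustrative only and is not part of the wheel: it copies the helpers from the hunk above and prints the attributes they would produce for a hypothetical chat call (model name and base URL are example values).

def _token_type(token_type: str):
    # Map OpenAI usage keys onto gen_ai.token.type values.
    if token_type == "prompt_tokens":
        return "input"
    elif token_type == "completion_tokens":
        return "output"
    return None


def _metric_shared_attributes(
    response_model: str, operation: str, server_address: str, is_streaming: bool = False
):
    # Attribute set shared by every metric data point from the instrumentation.
    return {
        "gen_ai.system": "openai",
        "gen_ai.response.model": response_model,
        "gen_ai.operation.name": operation,
        "server.address": server_address,
        "stream": is_streaming,
    }


# Hypothetical values, for illustration only.
attrs = _metric_shared_attributes(
    response_model="gpt-3.5-turbo",
    operation="chat",
    server_address="https://api.openai.com/v1/",
)
print({**attrs, "gen_ai.token.type": _token_type("prompt_tokens")})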
opentelemetry/instrumentation/openai/shared/chat_wrappers.py

@@ -14,10 +14,12 @@ from opentelemetry.instrumentation.openai.utils import (
     dont_throw,
 )
 from opentelemetry.instrumentation.openai.shared import (
+    _metric_shared_attributes,
     _set_client_attributes,
     _set_request_attributes,
     _set_span_attribute,
     _set_functions_attributes,
+    _token_type,
     set_tools_attributes,
     _set_response_attributes,
     is_streaming_response,
@@ -267,11 +269,12 @@ def _handle_response(
 def _set_chat_metrics(
     instance, token_counter, choice_counter, duration_histogram, response_dict, duration
 ):
-    shared_attributes =
-
-    "
-
-
+    shared_attributes = _metric_shared_attributes(
+        response_model=response_dict.get("model") or None,
+        operation="chat",
+        server_address=_get_openai_base_url(instance),
+        is_streaming=False,
+    )
 
     # token metrics
     usage = response_dict.get("usage")  # type: dict
@@ -303,7 +306,7 @@ def _set_token_counter_metrics(token_counter, usage, shared_attributes):
         if name in OPENAI_LLM_USAGE_TOKEN_TYPES:
             attributes_with_token_type = {
                 **shared_attributes,
-                "
+                "gen_ai.token.type": _token_type(name),
             }
             token_counter.record(val, attributes=attributes_with_token_type)
 
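The usage-to-metric loop above now tags each recorded value with its token type. The stand-alone sketch below mirrors that pattern with a hypothetical usage payload and a hypothetical helper name (record_usage); the contents of OPENAI_LLM_USAGE_TOKEN_TYPES are assumed here, and the meter is a no-op unless an OpenTelemetry SDK is configured.

from opentelemetry import metrics

# Assumed contents of the package constant: only prompt and completion counts are recorded.
OPENAI_LLM_USAGE_TOKEN_TYPES = ["prompt_tokens", "completion_tokens"]


def record_usage(token_counter, usage, shared_attributes):
    # One data point per usage key, each tagged with gen_ai.token.type
    # ("input" for prompt tokens, "output" for completion tokens).
    for name, val in usage.items():
        if name in OPENAI_LLM_USAGE_TOKEN_TYPES:
            token_counter.record(
                val,
                attributes={
                    **shared_attributes,
                    "gen_ai.token.type": "input" if name == "prompt_tokens" else "output",
                },
            )


# Hypothetical usage dict and attributes, for illustration only.
meter = metrics.get_meter(__name__)
tokens_histogram = meter.create_histogram(name="gen_ai.client.token.usage", unit="token")
record_usage(
    tokens_histogram,
    {"prompt_tokens": 12, "completion_tokens": 30, "total_tokens": 42},
    {"gen_ai.system": "openai", "gen_ai.operation.name": "chat"},
)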
@@ -420,14 +423,14 @@ def _set_streaming_token_metrics(
     if type(prompt_usage) is int and prompt_usage >= 0:
         attributes_with_token_type = {
             **shared_attributes,
-            "
+            "gen_ai.token.type": "input",
         }
         token_counter.record(prompt_usage, attributes=attributes_with_token_type)
 
     if type(completion_usage) is int and completion_usage >= 0:
         attributes_with_token_type = {
             **shared_attributes,
-            "
+            "gen_ai.token.type": "output",
         }
         token_counter.record(
             completion_usage, attributes=attributes_with_token_type
@@ -523,7 +526,8 @@ class ChatStream(ObjectProxy):
         if self._first_token and self._streaming_time_to_first_token:
             self._time_of_first_token = time.time()
             self._streaming_time_to_first_token.record(
-                self._time_of_first_token - self._start_time
+                self._time_of_first_token - self._start_time,
+                attributes=self._shared_attributes(),
             )
             self._first_token = False
 
@@ -549,20 +553,24 @@ class ChatStream(ObjectProxy):
         if delta and delta.get("role"):
             complete_choice["message"]["role"] = delta.get("role")
 
-    def
-
-
-
-
-
+    def _shared_attributes(self):
+        return _metric_shared_attributes(
+            response_model=self._complete_response.get("model")
+            or self._request_kwargs.get("model")
+            or None,
+            operation="chat",
+            server_address=_get_openai_base_url(self._instance),
+            is_streaming=True,
+        )
 
+    def _close_span(self):
         if not is_azure_openai(self._instance):
             _set_streaming_token_metrics(
                 self._request_kwargs,
                 self._complete_response,
                 self._span,
                 self._token_counter,
-
+                self._shared_attributes(),
             )
 
         # choice metrics
@@ -570,7 +578,7 @@ class ChatStream(ObjectProxy):
             _set_choice_counter_metrics(
                 self._choice_counter,
                 self._complete_response.get("choices"),
-
+                self._shared_attributes(),
             )
 
         # duration metrics
@@ -579,10 +587,13 @@ class ChatStream(ObjectProxy):
         else:
             duration = None
         if duration and isinstance(duration, (float, int)) and self._duration_histogram:
-            self._duration_histogram.record(
+            self._duration_histogram.record(
+                duration, attributes=self._shared_attributes()
+            )
         if self._streaming_time_to_generate and self._time_of_first_token:
             self._streaming_time_to_generate.record(
-                time.time() - self._time_of_first_token
+                time.time() - self._time_of_first_token,
+                attributes=self._shared_attributes(),
             )
 
         _set_response_attributes(self._span, self._complete_response)
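In the streaming path, every histogram and counter recording inside ChatStream now passes attributes=self._shared_attributes(). The sketch below is a simplified, hypothetical stand-in (not the ObjectProxy-based class in the wheel) showing the timing pattern: time to first streamed chunk, then total operation duration, both tagged with the shared attribute set. The duration instrument name comes from the instrumentor hunks further down; the time-to-first-token instrument name and all attribute values here are placeholders.

import time

from opentelemetry import metrics

meter = metrics.get_meter(__name__)
duration_histogram = meter.create_histogram(name="gen_ai.client.operation.duration", unit="s")
time_to_first_token = meter.create_histogram(name="example.streaming.time_to_first_token", unit="s")

shared_attributes = {
    "gen_ai.system": "openai",
    "gen_ai.response.model": "gpt-3.5-turbo",          # hypothetical
    "gen_ai.operation.name": "chat",
    "server.address": "https://api.openai.com/v1/",    # hypothetical
    "stream": True,
}

start_time = time.time()
first_token_seen = False
for chunk in ["Hello", ", ", "world"]:  # stand-in for streamed response chunks
    if not first_token_seen:
        # Record latency to the first streamed chunk, tagged with the shared attributes.
        time_to_first_token.record(time.time() - start_time, attributes=shared_attributes)
        first_token_seen = True

# Record the total operation duration with the same attribute set when the stream closes.
duration_histogram.record(time.time() - start_time, attributes=shared_attributes)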
opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py

@@ -12,10 +12,12 @@ from opentelemetry.instrumentation.openai.utils import (
     _with_embeddings_telemetry_wrapper,
 )
 from opentelemetry.instrumentation.openai.shared import (
+    _metric_shared_attributes,
     _set_client_attributes,
     _set_request_attributes,
     _set_span_attribute,
     _set_response_attributes,
+    _token_type,
     should_send_prompts,
     model_as_dict,
     _get_openai_base_url,
@@ -188,10 +190,11 @@ def _set_embeddings_metrics(
     response_dict,
     duration,
 ):
-    shared_attributes =
-
-    "
-
+    shared_attributes = _metric_shared_attributes(
+        response_model=response_dict.get("model") or None,
+        operation="embeddings",
+        server_address=_get_openai_base_url(instance),
+    )
 
     # token count metrics
     usage = response_dict.get("usage")
@@ -200,7 +203,7 @@ def _set_embeddings_metrics(
         if name in OPENAI_LLM_USAGE_TOKEN_TYPES:
             attributes_with_token_type = {
                 **shared_attributes,
-                "
+                "gen_ai.token.type": _token_type(name),
             }
             token_counter.record(val, attributes=attributes_with_token_type)
 
opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py

@@ -8,6 +8,7 @@ from opentelemetry.metrics import Counter, Histogram
 from opentelemetry.instrumentation.openai import is_openai_v1
 from opentelemetry.instrumentation.openai.shared import (
     _get_openai_base_url,
+    _metric_shared_attributes,
     model_as_dict,
 )
 from opentelemetry.instrumentation.openai.utils import (
@@ -52,11 +53,12 @@ def image_gen_metrics_wrapper(
     else:
         response_dict = response
 
-
-
-
-    "
-
+    # not provide response.model in ImagesResponse response, use model in request kwargs
+    shared_attributes = _metric_shared_attributes(
+        response_model=kwargs.get("model") or None,
+        operation="image_gen",
+        server_address=_get_openai_base_url(instance),
+    )
 
     duration = end_time - start_time
     if duration_histogram:
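Because the images response carries no model field, the shared attributes for image generation fall back to the model passed in the request kwargs. A minimal, hedged sketch of that fallback follows; the helper name and the request kwargs are hypothetical, and "dall-e-3" is only an example value.

def shared_attributes_for_image_gen(kwargs: dict, base_url: str) -> dict:
    # ImagesResponse has no response.model, so fall back to the requested model (may be None).
    return {
        "gen_ai.system": "openai",
        "gen_ai.response.model": kwargs.get("model") or None,
        "gen_ai.operation.name": "image_gen",
        "server.address": base_url,
        "stream": False,
    }


# Hypothetical request kwargs, for illustration only.
print(
    shared_attributes_for_image_gen(
        {"model": "dall-e-3", "prompt": "a red bicycle"},
        "https://api.openai.com/v1/",
    )
)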
opentelemetry/instrumentation/openai/v0/__init__.py

@@ -38,7 +38,7 @@ class OpenAIV0Instrumentor(BaseInstrumentor):
         tokens_histogram = meter.create_histogram(
             name="gen_ai.client.token.usage",
             unit="token",
-            description="
+            description="Measures number of input and output tokens used",
         )
 
         chat_choice_counter = meter.create_counter(
@@ -50,7 +50,7 @@ class OpenAIV0Instrumentor(BaseInstrumentor):
         chat_duration_histogram = meter.create_histogram(
             name="gen_ai.client.operation.duration",
             unit="s",
-            description="
+            description="GenAI operation duration",
         )
 
         chat_exception_counter = meter.create_counter(
opentelemetry/instrumentation/openai/v1/__init__.py

@@ -52,7 +52,7 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
         tokens_histogram = meter.create_histogram(
             name="gen_ai.client.token.usage",
             unit="token",
-            description="
+            description="Measures number of input and output tokens used",
         )
 
         chat_choice_counter = meter.create_counter(
@@ -64,7 +64,7 @@ class OpenAIV1Instrumentor(BaseInstrumentor):
         chat_duration_histogram = meter.create_histogram(
             name="gen_ai.client.operation.duration",
             unit="s",
-            description="
+            description="GenAI operation duration",
         )
 
         chat_exception_counter = meter.create_counter(
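Both instrumentors now ship non-empty descriptions for the token-usage and duration instruments. A minimal sketch of creating those two instruments outside the instrumentor class, assuming only the opentelemetry-api package and using an illustrative meter name:

from opentelemetry import metrics

meter = metrics.get_meter("opentelemetry.instrumentation.openai")  # illustrative meter name

# Instrument names and descriptions taken from the hunks above.
tokens_histogram = meter.create_histogram(
    name="gen_ai.client.token.usage",
    unit="token",
    description="Measures number of input and output tokens used",
)
chat_duration_histogram = meter.create_histogram(
    name="gen_ai.client.operation.duration",
    unit="s",
    description="GenAI operation duration",
)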
opentelemetry/instrumentation/openai/version.py

@@ -1 +1 @@
-__version__ = "0.21.3"
+__version__ = "0.21.4"
{opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentelemetry-instrumentation-openai
-Version: 0.21.3
+Version: 0.21.4
 Summary: OpenTelemetry OpenAI instrumentation
 Home-page: https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-openai
 License: Apache-2.0
{opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/RECORD

@@ -1,17 +1,17 @@
 opentelemetry/instrumentation/openai/__init__.py,sha256=xl3Kvqry9glVhu8VtdknfUE9FpXQ7KWAFqtVlpjE-40,1344
-opentelemetry/instrumentation/openai/shared/__init__.py,sha256=
-opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=
+opentelemetry/instrumentation/openai/shared/__init__.py,sha256=rn8AqGI0s0RAPlJGFgOtKE4ONPUF9N8rBPkmPErqB9o,8018
+opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=0Wjb95q-7nzHKE8N6OCSQinvKZiKHdeIZO7J_NBTxsQ,24093
 opentelemetry/instrumentation/openai/shared/completion_wrappers.py,sha256=-JHfgyxic5I3Wr3Uc_L-U7ztDVFcyovtF37tNLtaW3s,6604
 opentelemetry/instrumentation/openai/shared/config.py,sha256=5uekQEnmYo1o6tsTD2IGc-cVmHUo5KmUC4pOVdrFFNk,102
-opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py,sha256=
-opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py,sha256=
+opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py,sha256=7aVXEaDw0Lzb_iAmVaUSqNTXLnXxAmWURM1iGHdXI18,6686
+opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py,sha256=uN9eK-EFWZNn7OKgAEMSxju_5b_MGRePynn3ov3Exgw,1961
 opentelemetry/instrumentation/openai/utils.py,sha256=c7y4iO4C-81PQQFXCuZ7smFz9bsuw9AH1_RHFUr7SZA,3398
-opentelemetry/instrumentation/openai/v0/__init__.py,sha256=
-opentelemetry/instrumentation/openai/v1/__init__.py,sha256=
+opentelemetry/instrumentation/openai/v0/__init__.py,sha256=ZpnAQW0vJYVeNC--RYR21hI3nQFEo8mRjHPELfTBdlk,5819
+opentelemetry/instrumentation/openai/v1/__init__.py,sha256=sAA10BU4nsviHpqp0SLJdsKKLOyPS-xi0bk5N5uM2fE,8355
 opentelemetry/instrumentation/openai/v1/assistant_wrappers.py,sha256=T6Vtdp1fAZdcYjGiTMZwkn4F4DgsltD4p4xLEFW-GhI,5874
 opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py,sha256=SAzYoun2yyOloofyOWtxpm8E2M9TL3Nm8TgKdNyXHuY,2779
-opentelemetry/instrumentation/openai/version.py,sha256=
-opentelemetry_instrumentation_openai-0.21.
-opentelemetry_instrumentation_openai-0.21.
-opentelemetry_instrumentation_openai-0.21.
-opentelemetry_instrumentation_openai-0.21.
+opentelemetry/instrumentation/openai/version.py,sha256=GyaC3hhx285KqPcqukJ07gRSFkuES5T27aUtAwvYYRw,23
+opentelemetry_instrumentation_openai-0.21.4.dist-info/METADATA,sha256=gXXHbnllbvRwhKU-sykZ3e-5CK12ceUvuS60M-o06rI,2255
+opentelemetry_instrumentation_openai-0.21.4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+opentelemetry_instrumentation_openai-0.21.4.dist-info/entry_points.txt,sha256=vTBfiX5yXji5YHikuJHEOoBZ1TFdPQ1EI4ctd2pZSeE,93
+opentelemetry_instrumentation_openai-0.21.4.dist-info/RECORD,,
{opentelemetry_instrumentation_openai-0.21.3.dist-info → opentelemetry_instrumentation_openai-0.21.4.dist-info}/WHEEL and /entry_points.txt: files without changes.