opentelemetry-instrumentation-openai 0.33.12__py3-none-any.whl → 0.34.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of opentelemetry-instrumentation-openai might be problematic.
- opentelemetry/instrumentation/openai/__init__.py +2 -0
- opentelemetry/instrumentation/openai/shared/__init__.py +9 -0
- opentelemetry/instrumentation/openai/shared/chat_wrappers.py +3 -0
- opentelemetry/instrumentation/openai/shared/completion_wrappers.py +5 -0
- opentelemetry/instrumentation/openai/shared/config.py +1 -0
- opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py +5 -0
- opentelemetry/instrumentation/openai/v1/assistant_wrappers.py +29 -2
- opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py +15 -0
- opentelemetry/instrumentation/openai/version.py +1 -1
- {opentelemetry_instrumentation_openai-0.33.12.dist-info → opentelemetry_instrumentation_openai-0.34.0.dist-info}/METADATA +3 -3
- opentelemetry_instrumentation_openai-0.34.0.dist-info/RECORD +17 -0
- opentelemetry_instrumentation_openai-0.33.12.dist-info/RECORD +0 -17
- {opentelemetry_instrumentation_openai-0.33.12.dist-info → opentelemetry_instrumentation_openai-0.34.0.dist-info}/WHEEL +0 -0
- {opentelemetry_instrumentation_openai-0.33.12.dist-info → opentelemetry_instrumentation_openai-0.34.0.dist-info}/entry_points.txt +0 -0
opentelemetry/instrumentation/openai/__init__.py

@@ -23,6 +23,7 @@ class OpenAIInstrumentor(BaseInstrumentor):
         upload_base64_image: Optional[
             Callable[[str, str, str, str], Coroutine[None, None, str]]
         ] = lambda *args: "",
+        enable_trace_context_propagation: bool = True,
     ):
         super().__init__()
         Config.enrich_assistant = enrich_assistant
@@ -30,6 +31,7 @@ class OpenAIInstrumentor(BaseInstrumentor):
         Config.exception_logger = exception_logger
         Config.get_common_metrics_attributes = get_common_metrics_attributes
         Config.upload_base64_image = upload_base64_image
+        Config.enable_trace_context_propagation = enable_trace_context_propagation

     def instrumentation_dependencies(self) -> Collection[str]:
         return _instruments
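The new enable_trace_context_propagation parameter defaults to True, so upgrading to 0.34.0 turns header propagation on without any code change. A minimal opt-out sketch, assuming the usual instrumentor setup (only the keyword argument comes from this diff):

```python
# Hedged sketch: disable the trace-context propagation added in 0.34.0.
# The keyword matches the new __init__ parameter; everything else is the
# standard BaseInstrumentor usage pattern.
from opentelemetry.instrumentation.openai import OpenAIInstrumentor

OpenAIInstrumentor(enable_trace_context_propagation=False).instrument()
```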
opentelemetry/instrumentation/openai/shared/__init__.py

@@ -7,6 +7,8 @@ import logging
 from importlib.metadata import version

 from opentelemetry import context as context_api
+from opentelemetry.trace.propagation import set_span_in_context
+from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

 from opentelemetry.instrumentation.openai.shared.config import Config
 from opentelemetry.semconv_ai import SpanAttributes
@@ -289,3 +291,10 @@ def metric_shared_attributes(
         "server.address": server_address,
         "stream": is_streaming,
     }
+
+
+def propagate_trace_context(span, kwargs):
+    extra_headers = kwargs.get("extra_headers", {})
+    ctx = set_span_in_context(span)
+    TraceContextTextMapPropagator().inject(extra_headers, context=ctx)
+    kwargs["extra_headers"] = extra_headers
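The new propagate_trace_context helper relies on the stock W3C propagator, so its net effect is a traceparent header merged into the request's extra_headers. A self-contained sketch of that mechanism, assuming an SDK tracer is configured (the span name and kwargs below are illustrative, not part of the instrumentation):

```python
# Hedged sketch of what propagate_trace_context does to a request's kwargs.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.trace.propagation import set_span_in_context
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer(__name__)

kwargs = {"model": "gpt-4o", "messages": [{"role": "user", "content": "hi"}]}

with tracer.start_as_current_span("openai.chat") as span:
    extra_headers = kwargs.get("extra_headers", {})
    # Inject W3C trace context (traceparent/tracestate) into the header carrier.
    TraceContextTextMapPropagator().inject(extra_headers, context=set_span_in_context(span))
    kwargs["extra_headers"] = extra_headers

print(kwargs["extra_headers"])  # e.g. {'traceparent': '00-<trace-id>-<span-id>-01'}
```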
opentelemetry/instrumentation/openai/shared/chat_wrappers.py

@@ -37,6 +37,7 @@ from opentelemetry.instrumentation.openai.shared import (
     should_record_stream_token_usage,
     get_token_count_from_string,
     _set_span_stream_usage,
+    propagate_trace_context,
 )
 from opentelemetry.trace import SpanKind, Tracer
 from opentelemetry.trace.status import Status, StatusCode
@@ -246,6 +247,8 @@ async def _handle_request(span, kwargs, instance):
         _set_functions_attributes(span, kwargs.get("functions"))
     elif kwargs.get("tools"):
         set_tools_attributes(span, kwargs.get("tools"))
+    if Config.enable_trace_context_propagation:
+        propagate_trace_context(span, kwargs)


 @dont_throw
opentelemetry/instrumentation/openai/shared/completion_wrappers.py

@@ -22,6 +22,7 @@ from opentelemetry.instrumentation.openai.shared import (
     should_record_stream_token_usage,
     get_token_count_from_string,
     _set_span_stream_usage,
+    propagate_trace_context,
 )

 from opentelemetry.instrumentation.openai.utils import is_openai_v1
@@ -29,6 +30,8 @@ from opentelemetry.instrumentation.openai.utils import is_openai_v1
 from opentelemetry.trace import SpanKind
 from opentelemetry.trace.status import Status, StatusCode

+from opentelemetry.instrumentation.openai.shared.config import Config
+
 SPAN_NAME = "openai.completion"
 LLM_REQUEST_TYPE = LLMRequestTypeValues.COMPLETION

@@ -95,6 +98,8 @@ def _handle_request(span, kwargs, instance):
         _set_prompts(span, kwargs.get("prompt"))
     _set_functions_attributes(span, kwargs.get("functions"))
     _set_client_attributes(span, instance)
+    if Config.enable_trace_context_propagation:
+        propagate_trace_context(span, kwargs)


 @dont_throw
opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py

@@ -26,8 +26,11 @@ from opentelemetry.instrumentation.openai.shared import (
     model_as_dict,
     _get_openai_base_url,
     OPENAI_LLM_USAGE_TOKEN_TYPES,
+    propagate_trace_context,
 )

+from opentelemetry.instrumentation.openai.shared.config import Config
+
 from opentelemetry.instrumentation.openai.utils import is_openai_v1

 from opentelemetry.trace import SpanKind
@@ -161,6 +164,8 @@ def _handle_request(span, kwargs, instance):
     if should_send_prompts():
         _set_prompts(span, kwargs.get("input"))
     _set_client_attributes(span, instance)
+    if Config.enable_trace_context_propagation:
+        propagate_trace_context(span, kwargs)


 @dont_throw
opentelemetry/instrumentation/openai/v1/assistant_wrappers.py

@@ -46,11 +46,13 @@ def runs_create_wrapper(tracer, wrapped, instance, args, kwargs):
     instructions = kwargs.get("instructions")

     response = wrapped(*args, **kwargs)
+    response_dict = model_as_dict(response)

     runs[thread_id] = {
         "start_time": time.time_ns(),
         "assistant_id": kwargs.get("assistant_id"),
         "instructions": instructions,
+        "run_id": response_dict.get("id"),
     }

     return response
@@ -66,13 +68,15 @@ def runs_retrieve_wrapper(tracer, wrapped, instance, args, kwargs):
             parsed_response = response
         assert type(parsed_response) is Run

-        if parsed_response.
+        if parsed_response.thread_id in runs:
+            thread_id = parsed_response.thread_id
             runs[thread_id]["end_time"] = time.time_ns()
+            if parsed_response.usage:
+                runs[thread_id]["usage"] = parsed_response.usage

     if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
         return wrapped(*args, **kwargs)

-    thread_id = kwargs.get("thread_id")
     response = wrapped(*args, **kwargs)
     process_response(response)

@@ -112,6 +116,11 @@ def messages_list_wrapper(tracer, wrapped, instance, args, kwargs):
     else:
         assistant = assistants[run["assistant_id"]]

+    _set_span_attribute(
+        span,
+        SpanAttributes.LLM_SYSTEM,
+        "openai",
+    )
     _set_span_attribute(
         span,
         SpanAttributes.LLM_REQUEST_MODEL,
@@ -143,6 +152,19 @@ def messages_list_wrapper(tracer, wrapped, instance, args, kwargs):
                 span, f"{prefix}.content", content[0].get("text").get("value")
             )

+    if run.get("usage"):
+        usage_dict = model_as_dict(run.get("usage"))
+        _set_span_attribute(
+            span,
+            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+            usage_dict.get("completion_tokens"),
+        )
+        _set_span_attribute(
+            span,
+            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+            usage_dict.get("prompt_tokens"),
+        )
+
     span.end(run.get("end_time"))

     return response
@@ -175,6 +197,11 @@ def runs_create_and_stream_wrapper(tracer, wrapped, instance, args, kwargs):
     _set_span_attribute(
         span, SpanAttributes.LLM_REQUEST_MODEL, assistants[assistant_id]["model"]
     )
+    _set_span_attribute(
+        span,
+        SpanAttributes.LLM_SYSTEM,
+        "openai",
+    )
     _set_span_attribute(
         span,
         SpanAttributes.LLM_RESPONSE_MODEL,
opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py

@@ -9,6 +9,8 @@ from typing_extensions import override
 class EventHandleWrapper(AssistantEventHandler):

     _current_text_index = 0
+    _prompt_tokens = 0
+    _completion_tokens = 0

     def __init__(self, original_handler, span):
         super().__init__()
@@ -17,6 +19,16 @@ class EventHandleWrapper(AssistantEventHandler):

     @override
     def on_end(self):
+        _set_span_attribute(
+            self._span,
+            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+            self._prompt_tokens,
+        )
+        _set_span_attribute(
+            self._span,
+            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+            self._completion_tokens,
+        )
         self._original_handler.on_end()
         self._span.end()

@@ -34,6 +46,9 @@ class EventHandleWrapper(AssistantEventHandler):

     @override
     def on_run_step_done(self, run_step):
+        if run_step.usage:
+            self._prompt_tokens += run_step.usage.prompt_tokens
+            self._completion_tokens += run_step.usage.completion_tokens
         self._original_handler.on_run_step_done(run_step)

     @override
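The event-handler change above accumulates usage across run steps and only writes the totals to the span when the stream ends. A simplified standalone sketch of that accumulation, with illustrative class and field names that are not the instrumentation's own:

```python
# Hedged sketch: aggregate per-step token usage the way the wrapped
# AssistantEventHandler does, then report the totals once at the end.
from dataclasses import dataclass
from typing import Optional


@dataclass
class StepUsage:
    prompt_tokens: int
    completion_tokens: int


class UsageAccumulator:
    def __init__(self) -> None:
        self.prompt_tokens = 0
        self.completion_tokens = 0

    def on_run_step_done(self, usage: Optional[StepUsage]) -> None:
        # Steps that report no usage are simply skipped.
        if usage:
            self.prompt_tokens += usage.prompt_tokens
            self.completion_tokens += usage.completion_tokens

    def on_end(self) -> tuple:
        # In the real wrapper these totals become LLM_USAGE_* span attributes.
        return self.prompt_tokens, self.completion_tokens


acc = UsageAccumulator()
acc.on_run_step_done(StepUsage(prompt_tokens=12, completion_tokens=34))
acc.on_run_step_done(None)
acc.on_run_step_done(StepUsage(prompt_tokens=7, completion_tokens=21))
print(acc.on_end())  # (19, 55)
```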
opentelemetry/instrumentation/openai/version.py

@@ -1 +1 @@
-__version__ = "0.33.12"
+__version__ = "0.34.0"
{opentelemetry_instrumentation_openai-0.33.12.dist-info → opentelemetry_instrumentation_openai-0.34.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: opentelemetry-instrumentation-openai
-Version: 0.33.12
+Version: 0.34.0
 Summary: OpenTelemetry OpenAI instrumentation
 Home-page: https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-openai
 License: Apache-2.0
@@ -16,8 +16,8 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: instruments
 Requires-Dist: opentelemetry-api (>=1.28.0,<2.0.0)
-Requires-Dist: opentelemetry-instrumentation (>=0.
-Requires-Dist: opentelemetry-semantic-conventions (>=0.
+Requires-Dist: opentelemetry-instrumentation (>=0.50b0)
+Requires-Dist: opentelemetry-semantic-conventions (>=0.50b0)
 Requires-Dist: opentelemetry-semantic-conventions-ai (==0.4.2)
 Requires-Dist: tiktoken (>=0.6.0,<1)
 Project-URL: Repository, https://github.com/traceloop/openllmetry/tree/main/packages/opentelemetry-instrumentation-openai
opentelemetry_instrumentation_openai-0.34.0.dist-info/RECORD

@@ -0,0 +1,17 @@
+opentelemetry/instrumentation/openai/__init__.py,sha256=ly0ZPoOTAVRo1f0EeKfyTWj8_HLna3kZfIo9GWgUJk4,1888
+opentelemetry/instrumentation/openai/shared/__init__.py,sha256=HQ2qOn8fiinyUbbN55EO3aP8c33cM5IaWJ886bGiJac,9291
+opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=FiVCibvVkU1f6602HOaHodU9N95EDFOAKxmND3zhC2s,28241
+opentelemetry/instrumentation/openai/shared/completion_wrappers.py,sha256=P066vEEjc3tKNrnIPXi7Inc56mVhmJl5xEz1fG_vW-I,7037
+opentelemetry/instrumentation/openai/shared/config.py,sha256=dCQviJ1a5cpFlrP0HcKgE7lpiXB98ssnumLy_CIMibQ,355
+opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py,sha256=ROnTujsXOrO3YVcXiV5Z-IifeuXbPOBrdqa3Ym6IDwI,7263
+opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py,sha256=A4qdJIeJdA45SfiLaFj3Vo0Ndcqqfuew1BDsGdnJU3E,2122
+opentelemetry/instrumentation/openai/utils.py,sha256=KCxCpos2-rmIucUdwzbqbJINhs65I4TKo1DGRN-3kGU,4051
+opentelemetry/instrumentation/openai/v0/__init__.py,sha256=02-bXv0aZbscMYO2W3jsHpjU521vkVK5RzdfSeGXBzg,5475
+opentelemetry/instrumentation/openai/v1/__init__.py,sha256=B2Ut0X14KQS5in_0jLBMCvXQK73SqBy2rBFIDk5ZRkc,8688
+opentelemetry/instrumentation/openai/v1/assistant_wrappers.py,sha256=PxMeM5ikB0iedev95nDA5PunNLVme8oFsFhoKu3ipQc,7115
+opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py,sha256=nEdavNRw5mY__PnarGoaszG24BjRC8BqDYNrtLFc6cs,3296
+opentelemetry/instrumentation/openai/version.py,sha256=7wgjZxPxspP87CI4mDkFQuldkKgENXO5FaPiS8EXM88,23
+opentelemetry_instrumentation_openai-0.34.0.dist-info/METADATA,sha256=0vlL4CsOqFWRuKEfgyX0JVIclMyWcs4PQ-rRfcYoKnI,2294
+opentelemetry_instrumentation_openai-0.34.0.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+opentelemetry_instrumentation_openai-0.34.0.dist-info/entry_points.txt,sha256=vTBfiX5yXji5YHikuJHEOoBZ1TFdPQ1EI4ctd2pZSeE,93
+opentelemetry_instrumentation_openai-0.34.0.dist-info/RECORD,,
opentelemetry_instrumentation_openai-0.33.12.dist-info/RECORD

@@ -1,17 +0,0 @@
-opentelemetry/instrumentation/openai/__init__.py,sha256=ZNytfy9PeGr-O4qkyCQLtANukfPlnOEynsXJ_5L75oU,1750
-opentelemetry/instrumentation/openai/shared/__init__.py,sha256=YEkV9inSBiQlPCjpkcJJdyPdDZlgNgEEs5ZPzx_boNI,8892
-opentelemetry/instrumentation/openai/shared/chat_wrappers.py,sha256=YR-yz2T9QiINfVYb0w-aftrOCoVXwKFD_fkPtcsaG8U,28118
-opentelemetry/instrumentation/openai/shared/completion_wrappers.py,sha256=CZngvHkGtLUh-Ne4uzM6VH8LiajZsKVOV1glhmSwij0,6843
-opentelemetry/instrumentation/openai/shared/config.py,sha256=86mItW37lxSAyD8LAx4RbYWTR_-XKLeGxQgMozD5eNY,305
-opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py,sha256=YI0lFGulaE8N2F1hdTGHj52kHwUy6KQPIt7Lw9AWJHc,7069
-opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py,sha256=A4qdJIeJdA45SfiLaFj3Vo0Ndcqqfuew1BDsGdnJU3E,2122
-opentelemetry/instrumentation/openai/utils.py,sha256=KCxCpos2-rmIucUdwzbqbJINhs65I4TKo1DGRN-3kGU,4051
-opentelemetry/instrumentation/openai/v0/__init__.py,sha256=02-bXv0aZbscMYO2W3jsHpjU521vkVK5RzdfSeGXBzg,5475
-opentelemetry/instrumentation/openai/v1/__init__.py,sha256=B2Ut0X14KQS5in_0jLBMCvXQK73SqBy2rBFIDk5ZRkc,8688
-opentelemetry/instrumentation/openai/v1/assistant_wrappers.py,sha256=BMiZykQ3u2xrWpo2g3qZOxTJqy317JXRGO4zCVu4bZ8,6277
-opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py,sha256=kXOuDAt7If23EhWpH2r6L0eUVFvVWQBYrWGu3qJNKR0,2779
-opentelemetry/instrumentation/openai/version.py,sha256=DwXc1CSXuMVWYoH7D0uXZo6zdiqQEvGMIXRuPrODlkk,24
-opentelemetry_instrumentation_openai-0.33.12.dist-info/METADATA,sha256=JUgQp20k68mmsCmubIG6GB2Fu9ixBuPFvBhNYBZAt6E,2307
-opentelemetry_instrumentation_openai-0.33.12.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-opentelemetry_instrumentation_openai-0.33.12.dist-info/entry_points.txt,sha256=vTBfiX5yXji5YHikuJHEOoBZ1TFdPQ1EI4ctd2pZSeE,93
-opentelemetry_instrumentation_openai-0.33.12.dist-info/RECORD,,