langtrace-python-sdk 2.1.28__py3-none-any.whl → 2.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- examples/cohere_example/chat.py +1 -0
- examples/cohere_example/chat_stream.py +3 -0
- examples/gemini_example/__init__.py +6 -0
- examples/gemini_example/function_tools.py +62 -0
- examples/gemini_example/main.py +91 -0
- examples/langchain_example/__init__.py +8 -0
- examples/langchain_example/groq_example.py +28 -15
- examples/ollama_example/basic.py +1 -0
- examples/openai_example/__init__.py +1 -0
- examples/openai_example/async_tool_calling_nonstreaming.py +1 -1
- examples/openai_example/chat_completion.py +1 -1
- examples/openai_example/embeddings_create.py +1 -0
- examples/openai_example/images_edit.py +2 -2
- examples/vertexai_example/__init__.py +6 -0
- examples/vertexai_example/main.py +214 -0
- langtrace_python_sdk/constants/instrumentation/common.py +2 -0
- langtrace_python_sdk/constants/instrumentation/gemini.py +12 -0
- langtrace_python_sdk/constants/instrumentation/vertexai.py +42 -0
- langtrace_python_sdk/instrumentation/__init__.py +4 -0
- langtrace_python_sdk/instrumentation/anthropic/patch.py +68 -96
- langtrace_python_sdk/instrumentation/chroma/patch.py +29 -29
- langtrace_python_sdk/instrumentation/cohere/patch.py +143 -242
- langtrace_python_sdk/instrumentation/gemini/__init__.py +3 -0
- langtrace_python_sdk/instrumentation/gemini/instrumentation.py +36 -0
- langtrace_python_sdk/instrumentation/gemini/patch.py +186 -0
- langtrace_python_sdk/instrumentation/groq/patch.py +82 -125
- langtrace_python_sdk/instrumentation/ollama/patch.py +62 -65
- langtrace_python_sdk/instrumentation/openai/patch.py +190 -494
- langtrace_python_sdk/instrumentation/qdrant/patch.py +6 -6
- langtrace_python_sdk/instrumentation/vertexai/__init__.py +3 -0
- langtrace_python_sdk/instrumentation/vertexai/instrumentation.py +33 -0
- langtrace_python_sdk/instrumentation/vertexai/patch.py +131 -0
- langtrace_python_sdk/langtrace.py +7 -1
- langtrace_python_sdk/utils/__init__.py +14 -3
- langtrace_python_sdk/utils/llm.py +311 -6
- langtrace_python_sdk/version.py +1 -1
- {langtrace_python_sdk-2.1.28.dist-info → langtrace_python_sdk-2.2.1.dist-info}/METADATA +26 -19
- {langtrace_python_sdk-2.1.28.dist-info → langtrace_python_sdk-2.2.1.dist-info}/RECORD +55 -36
- tests/anthropic/test_anthropic.py +28 -27
- tests/cohere/test_cohere_chat.py +36 -36
- tests/cohere/test_cohere_embed.py +12 -9
- tests/cohere/test_cohere_rerank.py +18 -11
- tests/groq/cassettes/test_async_chat_completion.yaml +113 -0
- tests/groq/cassettes/test_async_chat_completion_streaming.yaml +2232 -0
- tests/groq/cassettes/test_chat_completion.yaml +114 -0
- tests/groq/cassettes/test_chat_completion_streaming.yaml +2512 -0
- tests/groq/conftest.py +33 -0
- tests/groq/test_groq.py +142 -0
- tests/openai/cassettes/test_async_chat_completion_streaming.yaml +28 -28
- tests/openai/test_chat_completion.py +53 -67
- tests/openai/test_image_generation.py +47 -24
- tests/utils.py +40 -5
- {langtrace_python_sdk-2.1.28.dist-info → langtrace_python_sdk-2.2.1.dist-info}/WHEEL +0 -0
- {langtrace_python_sdk-2.1.28.dist-info → langtrace_python_sdk-2.2.1.dist-info}/entry_points.txt +0 -0
- {langtrace_python_sdk-2.1.28.dist-info → langtrace_python_sdk-2.2.1.dist-info}/licenses/LICENSE +0 -0
langtrace_python_sdk/instrumentation/anthropic/patch.py

@@ -17,120 +17,59 @@ limitations under the License.
 import json
 
 from langtrace.trace_attributes import Event, LLMSpanAttributes
-from
+from langtrace_python_sdk.utils import set_span_attribute, silently_fail
+from langtrace_python_sdk.utils.llm import (
+    get_extra_attributes,
+    get_langtrace_attributes,
+    get_llm_request_attributes,
+    get_llm_url,
+    is_streaming,
+    set_event_completion,
+    set_usage_attributes,
+)
 from opentelemetry.trace import SpanKind
 from opentelemetry.trace.status import Status, StatusCode
+from langtrace.trace_attributes import SpanAttributes
 
 from langtrace_python_sdk.constants.instrumentation.anthropic import APIS
 from langtrace_python_sdk.constants.instrumentation.common import (
-    LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
     SERVICE_PROVIDERS,
 )
-from importlib_metadata import version as v
-
-from langtrace_python_sdk.constants import LANGTRACE_SDK_NAME
 
 
 def messages_create(original_method, version, tracer):
     """Wrap the `messages_create` method."""
 
     def traced_method(wrapped, instance, args, kwargs):
-        base_url = (
-            str(instance._client._base_url)
-            if hasattr(instance, "_client") and hasattr(instance._client, "_base_url")
-            else ""
-        )
         service_provider = SERVICE_PROVIDERS["ANTHROPIC"]
 
         # extract system from kwargs and attach as a role to the prompts
         # we do this to keep it consistent with the openai
-        prompts =
+        prompts = kwargs.get("messages", [])
         system = kwargs.get("system")
         if system:
-            prompts =
-                [{"role": "system", "content": system}] + kwargs.get("messages", [])
-            )
-        extra_attributes = baggage.get_baggage(LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY)
+            prompts = [{"role": "system", "content": system}] + kwargs.get("messages", [])
 
         span_attributes = {
-
-
-
-
-
-            "url.full": base_url,
-            "llm.api": APIS["MESSAGES_CREATE"]["ENDPOINT"],
-            "llm.model": kwargs.get("model"),
-            "llm.prompts": prompts,
-            "llm.stream": kwargs.get("stream"),
-            **(extra_attributes if extra_attributes is not None else {}),
+            **get_langtrace_attributes(version, service_provider),
+            **get_llm_request_attributes(kwargs, prompts=prompts),
+            **get_llm_url(instance),
+            SpanAttributes.LLM_PATH: APIS["MESSAGES_CREATE"]["ENDPOINT"],
+            **get_extra_attributes(),
         }
 
         attributes = LLMSpanAttributes(**span_attributes)
 
-        if kwargs.get("temperature") is not None:
-            attributes.llm_temperature = kwargs.get("temperature")
-        if kwargs.get("top_p") is not None:
-            attributes.llm_top_p = kwargs.get("top_p")
-        if kwargs.get("top_k") is not None:
-            attributes.llm_top_p = kwargs.get("top_k")
-        if kwargs.get("user") is not None:
-            attributes.llm_user = kwargs.get("user")
-        if kwargs.get("max_tokens") is not None:
-            attributes.llm_max_tokens = str(kwargs.get("max_tokens"))
-
         span = tracer.start_span(
             APIS["MESSAGES_CREATE"]["METHOD"], kind=SpanKind.CLIENT
         )
         for field, value in attributes.model_dump(by_alias=True).items():
-
-            span.set_attribute(field, value)
+            set_span_attribute(span, field, value)
         try:
             # Attempt to call the original method
             result = wrapped(*args, **kwargs)
-
-
-                span.set_attribute(
-                    "llm.model",
-                    result.model if result.model else kwargs.get("model"),
-                )
-                span.set_attribute(
-                    "llm.responses",
-                    json.dumps(
-                        [
-                            {
-                                "role": result.role if result.role else "assistant",
-                                "content": result.content[0].text,
-                                "type": result.content[0].type,
-                            }
-                        ]
-                    ),
-                )
-            else:
-                responses = []
-                span.set_attribute("llm.responses", json.dumps(responses))
-            if (
-                hasattr(result, "system_fingerprint")
-                and result.system_fingerprint is not None
-            ):
-                span.set_attribute(
-                    "llm.system.fingerprint", result.system_fingerprint
-                )
-            # Get the usage
-            if hasattr(result, "usage") and result.usage is not None:
-                usage = result.usage
-                if usage is not None:
-                    usage_dict = {
-                        "input_tokens": usage.input_tokens,
-                        "output_tokens": usage.output_tokens,
-                        "total_tokens": usage.input_tokens + usage.output_tokens,
-                    }
-                    span.set_attribute("llm.token.counts", json.dumps(usage_dict))
-                span.set_status(StatusCode.OK)
-                span.end()
-                return result
-            else:
-                return handle_streaming_response(result, span)
+            return set_response_attributes(result, span, kwargs)
+
         except Exception as err:
             # Record the exception in the span
             span.record_exception(err)
@@ -154,7 +93,9 @@ def messages_create(original_method, version, tracer):
                 and hasattr(chunk.message, "model")
                 and chunk.message.model is not None
             ):
-                span.set_attribute(
+                span.set_attribute(
+                    SpanAttributes.LLM_RESPONSE_MODEL, chunk.message.model
+                )
             content = ""
             if hasattr(chunk, "delta") and chunk.delta is not None:
                 content = chunk.delta.text if hasattr(chunk.delta, "text") else ""
@@ -177,7 +118,8 @@ def messages_create(original_method, version, tracer):
             # Add event for each chunk of content
             if content:
                 span.add_event(
-                    Event.STREAM_OUTPUT.value,
+                    Event.STREAM_OUTPUT.value,
+                    {SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
                 )
 
             # Assuming this is part of a generator, yield chunk or aggregated content
@@ -186,22 +128,52 @@ def messages_create(original_method, version, tracer):
 
         # Finalize span after processing all chunks
         span.add_event(Event.STREAM_END.value)
-
-        "
-
+        set_usage_attributes(
+            span, {"input_tokens": input_tokens, "output_tokens": output_tokens}
+        )
+        completion = [{"role": "assistant", "content": "".join(result_content)}]
+        set_event_completion(span, completion)
+
+        span.set_status(StatusCode.OK)
+        span.end()
+
+    def set_response_attributes(result, span, kwargs):
+        if not is_streaming(kwargs):
+            if hasattr(result, "content") and result.content is not None:
+                set_span_attribute(
+                    span, SpanAttributes.LLM_RESPONSE_MODEL, result.model
+                )
+                completion = [
                     {
-                        "
-                        "
-                        "
+                        "role": result.role if result.role else "assistant",
+                        "content": result.content[0].text,
+                        "type": result.content[0].type,
                     }
-
-
-
-
-
-
+                ]
+                set_event_completion(span, completion)
+
+            else:
+                responses = []
+                set_event_completion(span, responses)
+
+            if (
+                hasattr(result, "system_fingerprint")
+                and result.system_fingerprint is not None
+            ):
+                span.set_attribute(
+                    SpanAttributes.LLM_SYSTEM_FINGERPRINT,
+                    result.system_fingerprint,
+                )
+            # Get the usage
+            if hasattr(result, "usage") and result.usage is not None:
+                usage = result.usage
+                set_usage_attributes(span, dict(usage))
+
             span.set_status(StatusCode.OK)
             span.end()
+            return result
+        else:
+            return handle_streaming_response(result, span)
 
     # return the wrapped method
     return traced_method
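The anthropic patch above drops its hand-rolled attribute plumbing in favour of shared helpers from langtrace_python_sdk/utils/llm.py that each return a dict of span attributes, so the wrapper only merges and applies them. A minimal sketch of that composition pattern follows; the helper names and attribute keys here are illustrative stand-ins, not the SDK's real API.

# Sketch of the attribute-composition pattern used by the refactor; the helper
# names and attribute keys below are hypothetical, not the SDK's own.


def provider_attributes(version, provider):
    return {"langtrace.sdk.version": version, "langtrace.service.name": provider}


def request_attributes(kwargs, prompts):
    attrs = {
        "llm.request.model": kwargs.get("model"),
        "llm.request.temperature": kwargs.get("temperature"),
        "llm.request.prompts": str(prompts),
    }
    # Drop unset values so nothing None-valued ever reaches the span.
    return {key: value for key, value in attrs.items() if value is not None}


def build_span_attributes(version, provider, kwargs, prompts):
    # Each helper contributes one dict; the call site just merges them,
    # mirroring the `**get_*_attributes(...)` spreads in the new patch.
    return {
        **provider_attributes(version, provider),
        **request_attributes(kwargs, prompts),
    }


if __name__ == "__main__":
    print(
        build_span_attributes(
            "2.2.1",
            "Anthropic",
            {"model": "claude-3-opus-20240229", "max_tokens": 1024},
            [{"role": "user", "content": "Hello"}],
        )
    )
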
langtrace_python_sdk/instrumentation/chroma/patch.py

@@ -15,7 +15,7 @@ limitations under the License.
 """
 
 from langtrace.trace_attributes import DatabaseSpanAttributes
-from langtrace_python_sdk.utils
+from langtrace_python_sdk.utils import set_span_attribute
 from langtrace_python_sdk.utils.silently_fail import silently_fail
 from opentelemetry import baggage, trace
 from opentelemetry.trace import SpanKind
@@ -119,20 +119,20 @@ def handle_null_params(param):
 
 @silently_fail
 def _set_chroma_add_attributes(span, kwargs):
-
+    set_span_attribute(
         span, "db.chroma.add.ids_count", get_count_or_none(kwargs.get("ids"))
     )
-
+    set_span_attribute(
         span,
         "db.chroma.add.embeddings_count",
         get_count_or_none(kwargs.get("embeddings")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.add.metadatas_count",
         get_count_or_none(kwargs.get("metadatas")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.add.documents_count",
         get_count_or_none(kwargs.get("documents")),
@@ -141,71 +141,71 @@ def _set_chroma_add_attributes(span, kwargs):
 
 @silently_fail
 def _set_chroma_get_attributes(span, kwargs):
-
+    set_span_attribute(
         span, "db.chroma.get.ids_count", get_count_or_none(kwargs.get("ids"))
     )
-
+    set_span_attribute(
         span, "db.chroma.get.where", handle_null_params(kwargs.get("where"))
     )
-
-
-
+    set_span_attribute(span, "db.chroma.get.limit", kwargs.get("limit"))
+    set_span_attribute(span, "db.chroma.get.offset", kwargs.get("offset"))
+    set_span_attribute(
         span,
         "db.chroma.get.where_document",
         handle_null_params(kwargs.get("where_document")),
     )
-
+    set_span_attribute(
         span, "db.chroma.get.include", handle_null_params(kwargs.get("include"))
     )
 
 
 @silently_fail
 def _set_chroma_query_attributes(span, kwargs):
-
+    set_span_attribute(
         span,
         "db.chroma.query.query_embeddings_count",
         get_count_or_none(kwargs.get("query_embeddings")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.query.query_texts_count",
         get_count_or_none(kwargs.get("query_texts")),
     )
-
-
+    set_span_attribute(span, "db.chroma.query.n_results", kwargs.get("n_results"))
+    set_span_attribute(
         span, "db.chroma.query.where", handle_null_params(kwargs.get("where"))
     )
-
+    set_span_attribute(
         span,
         "db.chroma.query.where_document",
         handle_null_params(kwargs.get("where_document")),
     )
-
+    set_span_attribute(
         span, "db.chroma.query.include", handle_null_params(kwargs.get("include"))
     )
 
 
 @silently_fail
 def _set_chroma_peek_attributes(span, kwargs):
-
+    set_span_attribute(span, "db.chroma.peek.limit", kwargs.get("limit"))
 
 
 @silently_fail
 def _set_chroma_update_attributes(span, kwargs):
-
+    set_span_attribute(
         span, "db.chroma.update.ids_count", get_count_or_none(kwargs.get("ids"))
     )
-
+    set_span_attribute(
         span,
         "db.chroma.update.embeddings_count",
         get_count_or_none(kwargs.get("embeddings")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.update.metadatas_count",
         get_count_or_none(kwargs.get("metadatas")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.update.documents_count",
         get_count_or_none(kwargs.get("documents")),
@@ -214,23 +214,23 @@ def _set_chroma_update_attributes(span, kwargs):
 
 @silently_fail
 def _set_chroma_modify_attributes(span, kwargs):
-
+    set_span_attribute(span, "db.chroma.modify.name", kwargs.get("name"))
     # TODO: Add metadata attribute
 
 
 @silently_fail
 def _set_chroma_upsert_attributes(span, kwargs):
-
+    set_span_attribute(
         span,
         "db.chroma.upsert.embeddings_count",
         get_count_or_none(kwargs.get("embeddings")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.upsert.metadatas_count",
         get_count_or_none(kwargs.get("metadatas")),
     )
-
+    set_span_attribute(
         span,
         "db.chroma.upsert.documents_count",
         get_count_or_none(kwargs.get("documents")),
@@ -239,13 +239,13 @@ def _set_chroma_upsert_attributes(span, kwargs):
 
 @silently_fail
 def _set_chroma_delete_attributes(span, kwargs):
-
+    set_span_attribute(
         span, "db.chroma.delete.ids_count", get_count_or_none(kwargs.get("ids"))
     )
-
+    set_span_attribute(
         span, "db.chroma.delete.where", handle_null_params(kwargs.get("where"))
    )
-
+    set_span_attribute(
         span,
         "db.chroma.delete.where_document",
         handle_null_params(kwargs.get("where_document")),