langtrace-python-sdk 3.3.14-py3-none-any.whl → 3.3.16-py3-none-any.whl
- examples/cohere_example/__init__.py +6 -0
- examples/cohere_example/chat_streamv2.py +17 -0
- examples/cohere_example/chatv2.py +21 -0
- examples/cohere_example/rerankv2.py +23 -0
- langtrace_python_sdk/constants/exporter/langtrace_exporter.py +2 -1
- langtrace_python_sdk/constants/instrumentation/cohere.py +21 -1
- langtrace_python_sdk/extensions/langtrace_exporter.py +8 -0
- langtrace_python_sdk/instrumentation/cohere/instrumentation.py +25 -0
- langtrace_python_sdk/instrumentation/cohere/patch.py +106 -8
- langtrace_python_sdk/instrumentation/dspy/patch.py +1 -6
- langtrace_python_sdk/instrumentation/openai/patch.py +43 -4
- langtrace_python_sdk/langtrace.py +26 -9
- langtrace_python_sdk/utils/llm.py +25 -3
- langtrace_python_sdk/utils/with_root_span.py +11 -0
- langtrace_python_sdk/version.py +1 -1
- {langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/METADATA +4 -3
- {langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/RECORD +23 -18
- {langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/WHEEL +1 -1
- tests/openai/cassettes/test_embeddings_base_url.yaml +208 -0
- tests/openai/test_embeddings.py +88 -0
- tests/test_session_id.py +59 -0
- {langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/entry_points.txt +0 -0
- {langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/licenses/LICENSE +0 -0
examples/cohere_example/__init__.py
CHANGED
@@ -1,8 +1,11 @@
 from examples.cohere_example.chat import chat_comp
+from examples.cohere_example.chatv2 import chat_v2
+from examples.cohere_example.chat_streamv2 import chat_stream_v2
 from examples.cohere_example.chat_stream import chat_stream
 from examples.cohere_example.tools import tool_calling
 from examples.cohere_example.embed import embed
 from examples.cohere_example.rerank import rerank
+from examples.cohere_example.rerankv2 import rerank_v2
 from langtrace_python_sdk import with_langtrace_root_span
 
 
@@ -10,8 +13,11 @@ class CohereRunner:
 
     @with_langtrace_root_span("Cohere")
     def run(self):
+        chat_v2()
+        chat_stream_v2()
         chat_comp()
         chat_stream()
         tool_calling()
         embed()
         rerank()
+        rerank_v2()
examples/cohere_example/chat_streamv2.py
ADDED
@@ -0,0 +1,17 @@
+import os
+from langtrace_python_sdk import langtrace
+import cohere
+
+langtrace.init(api_key=os.getenv("LANGTRACE_API_KEY"))
+co = cohere.ClientV2(api_key=os.getenv("COHERE_API_KEY"))
+
+def chat_stream_v2():
+    res = co.chat_stream(
+        model="command-r-plus-08-2024",
+        messages=[{"role": "user", "content": "Write a title for a blog post about API design. Only output the title text"}],
+    )
+
+    for event in res:
+        if event:
+            if event.type == "content-delta":
+                print(event.delta.message.content.text)
examples/cohere_example/chatv2.py
ADDED
@@ -0,0 +1,21 @@
+import os
+from langtrace_python_sdk import langtrace
+import cohere
+
+langtrace.init(api_key=os.getenv("LANGTRACE_API_KEY"))
+
+
+def chat_v2():
+    co = cohere.ClientV2(api_key=os.getenv("COHERE_API_KEY"))
+
+    res = co.chat(
+        model="command-r-plus-08-2024",
+        messages=[
+            {
+                "role": "user",
+                "content": "Write a title for a blog post about API design. Only output the title text.",
+            }
+        ],
+    )
+
+    print(res.message.content[0].text)
examples/cohere_example/rerankv2.py
ADDED
@@ -0,0 +1,23 @@
+import os
+from langtrace_python_sdk import langtrace
+import cohere
+
+langtrace.init(api_key=os.getenv("LANGTRACE_API_KEY"))
+co = cohere.ClientV2(api_key=os.getenv("COHERE_API_KEY"))
+
+docs = [
+    "Carson City is the capital city of the American state of Nevada.",
+    "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean. Its capital is Saipan.",
+    "Capitalization or capitalisation in English grammar is the use of a capital letter at the start of a word. English usage varies from capitalization in other languages.",
+    "Washington, D.C. (also known as simply Washington or D.C., and officially as the District of Columbia) is the capital of the United States. It is a federal district.",
+    "Capital punishment has existed in the United States since beforethe United States was a country. As of 2017, capital punishment is legal in 30 of the 50 states.",
+]
+
+def rerank_v2():
+    response = co.rerank(
+        model="rerank-v3.5",
+        query="What is the capital of the United States?",
+        documents=docs,
+        top_n=3,
+    )
+    print(response)
langtrace_python_sdk/constants/exporter/langtrace_exporter.py
CHANGED
@@ -1 +1,2 @@
-LANGTRACE_REMOTE_URL = "https://app.langtrace.ai"
+LANGTRACE_REMOTE_URL = "https://app.langtrace.ai"
+LANGTRACE_SESSION_ID_HEADER = "x-langtrace-session-id"
langtrace_python_sdk/constants/instrumentation/cohere.py
CHANGED
@@ -4,19 +4,39 @@ APIS = {
         "METHOD": "cohere.client.chat",
         "ENDPOINT": "/v1/chat",
     },
+    "CHAT_CREATE_V2": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client_v2.chat",
+        "ENDPOINT": "/v2/chat",
+    },
     "EMBED": {
         "URL": "https://api.cohere.ai",
         "METHOD": "cohere.client.embed",
         "ENDPOINT": "/v1/embed",
     },
+    "EMBED_V2": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client_v2.embed",
+        "ENDPOINT": "/v2/embed",
+    },
     "CHAT_STREAM": {
         "URL": "https://api.cohere.ai",
         "METHOD": "cohere.client.chat_stream",
-        "ENDPOINT": "/v1/
+        "ENDPOINT": "/v1/chat",
+    },
+    "CHAT_STREAM_V2": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client_v2.chat_stream",
+        "ENDPOINT": "/v2/chat",
     },
     "RERANK": {
         "URL": "https://api.cohere.ai",
         "METHOD": "cohere.client.rerank",
         "ENDPOINT": "/v1/rerank",
     },
+    "RERANK_V2": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client_v2.rerank",
+        "ENDPOINT": "/v2/rerank",
+    },
 }
langtrace_python_sdk/extensions/langtrace_exporter.py
CHANGED
@@ -9,6 +9,7 @@ from opentelemetry.trace.span import format_trace_id
 
 from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
     LANGTRACE_REMOTE_URL,
+    LANGTRACE_SESSION_ID_HEADER,
 )
 from colorama import Fore
 from requests.exceptions import RequestException
@@ -51,12 +52,14 @@ class LangTraceExporter(SpanExporter):
     api_key: str
     api_host: str
     disable_logging: bool
+    session_id: str
 
     def __init__(
         self,
         api_host,
         api_key: str = None,
         disable_logging: bool = False,
+        session_id: str = None,
     ) -> None:
         self.api_key = api_key or os.environ.get("LANGTRACE_API_KEY")
         self.api_host = (
@@ -65,6 +68,7 @@ class LangTraceExporter(SpanExporter):
             else api_host
         )
         self.disable_logging = disable_logging
+        self.session_id = session_id or os.environ.get("LANGTRACE_SESSION_ID")
 
     def export(self, spans: typing.Sequence[ReadableSpan]) -> SpanExportResult:
         """
@@ -82,6 +86,10 @@ class LangTraceExporter(SpanExporter):
             "User-Agent": "LangtraceExporter",
         }
 
+        # Add session ID if available
+        if self.session_id:
+            headers[LANGTRACE_SESSION_ID_HEADER] = self.session_id
+
         # Check if the OTEL_EXPORTER_OTLP_HEADERS environment variable is set
         otel_headers = os.getenv("OTEL_EXPORTER_OTLP_HEADERS", None)
         if otel_headers:
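A note on the change above: the exporter can now carry a session identifier, passed explicitly or read from the LANGTRACE_SESSION_ID environment variable, and export() attaches it as the x-langtrace-session-id header. A minimal sketch based only on the constructor shown in this diff (the host, key, and session values are placeholders):

from langtrace_python_sdk.extensions.langtrace_exporter import LangTraceExporter

# Every export() call made by this instance will include the
# x-langtrace-session-id header alongside x-api-key.
exporter = LangTraceExporter(
    api_host="https://app.langtrace.ai",
    api_key="your-langtrace-api-key",
    session_id="session-123",  # falls back to LANGTRACE_SESSION_ID when omitted
)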
langtrace_python_sdk/instrumentation/cohere/instrumentation.py
CHANGED
@@ -23,6 +23,7 @@ from wrapt import wrap_function_wrapper
 
 from langtrace_python_sdk.instrumentation.cohere.patch import (
     chat_create,
+    chat_create_v2,
     chat_stream,
     embed,
     rerank,
@@ -48,6 +49,18 @@ class CohereInstrumentation(BaseInstrumentor):
             chat_create("cohere.client.chat", version, tracer),
         )
 
+        wrap_function_wrapper(
+            "cohere.client_v2",
+            "ClientV2.chat",
+            chat_create_v2("cohere.client_v2.chat", version, tracer),
+        )
+
+        wrap_function_wrapper(
+            "cohere.client_v2",
+            "ClientV2.chat_stream",
+            chat_create_v2("cohere.client_v2.chat", version, tracer, stream=True),
+        )
+
         wrap_function_wrapper(
             "cohere.client",
             "Client.chat_stream",
@@ -60,12 +73,24 @@ class CohereInstrumentation(BaseInstrumentor):
             embed("cohere.client.embed", version, tracer),
         )
 
+        wrap_function_wrapper(
+            "cohere.client_v2",
+            "ClientV2.embed",
+            embed("cohere.client.embed", version, tracer, v2=True),
+        )
+
         wrap_function_wrapper(
             "cohere.client",
             "Client.rerank",
             rerank("cohere.client.rerank", version, tracer),
         )
 
+        wrap_function_wrapper(
+            "cohere.client_v2",
+            "ClientV2.rerank",
+            rerank("cohere.client.rerank", version, tracer, v2=True),
+        )
+
     def _instrument_module(self, module_name):
         pass
 
langtrace_python_sdk/instrumentation/cohere/patch.py
CHANGED
@@ -24,6 +24,7 @@ from langtrace_python_sdk.utils.llm import (
     get_span_name,
     set_event_completion,
     set_usage_attributes,
+    StreamWrapper
 )
 from langtrace.trace_attributes import Event, LLMSpanAttributes
 from langtrace_python_sdk.utils import set_span_attribute
@@ -38,7 +39,7 @@ from langtrace_python_sdk.constants.instrumentation.common import (
 from langtrace.trace_attributes import SpanAttributes
 
 
-def rerank(original_method, version, tracer):
+def rerank(original_method, version, tracer, v2=False):
     """Wrap the `rerank` method."""
 
     def traced_method(wrapped, instance, args, kwargs):
@@ -49,8 +50,8 @@ def rerank(original_method, version, tracer):
             **get_llm_request_attributes(kwargs, operation_name="rerank"),
             **get_llm_url(instance),
             SpanAttributes.LLM_REQUEST_MODEL: kwargs.get("model") or "command-r-plus",
-            SpanAttributes.LLM_URL: APIS["RERANK"]["URL"],
-            SpanAttributes.LLM_PATH: APIS["RERANK"]["ENDPOINT"],
+            SpanAttributes.LLM_URL: APIS["RERANK" if not v2 else "RERANK_V2"]["URL"],
+            SpanAttributes.LLM_PATH: APIS["RERANK" if not v2 else "RERANK_V2"]["ENDPOINT"],
             SpanAttributes.LLM_REQUEST_DOCUMENTS: json.dumps(
                 kwargs.get("documents"), cls=datetime_encoder
             ),
@@ -61,7 +62,7 @@ def rerank(original_method, version, tracer):
         attributes = LLMSpanAttributes(**span_attributes)
 
         span = tracer.start_span(
-            name=get_span_name(APIS["RERANK"]["METHOD"]), kind=SpanKind.CLIENT
+            name=get_span_name(APIS["RERANK" if not v2 else "RERANK_V2"]["METHOD"]), kind=SpanKind.CLIENT
         )
         for field, value in attributes.model_dump(by_alias=True).items():
             set_span_attribute(span, field, value)
@@ -119,7 +120,7 @@ def rerank(original_method, version, tracer):
     return traced_method
 
 
-def embed(original_method, version, tracer):
+def embed(original_method, version, tracer, v2=False):
     """Wrap the `embed` method."""
 
     def traced_method(wrapped, instance, args, kwargs):
@@ -129,8 +130,8 @@ def embed(original_method, version, tracer):
             **get_langtrace_attributes(version, service_provider),
             **get_llm_request_attributes(kwargs, operation_name="embed"),
             **get_llm_url(instance),
-            SpanAttributes.LLM_URL: APIS["EMBED"]["URL"],
-            SpanAttributes.LLM_PATH: APIS["EMBED"]["ENDPOINT"],
+            SpanAttributes.LLM_URL: APIS["EMBED" if not v2 else "EMBED_V2"]["URL"],
+            SpanAttributes.LLM_PATH: APIS["EMBED" if not v2 else "EMBED_V2"]["ENDPOINT"],
             SpanAttributes.LLM_REQUEST_EMBEDDING_INPUTS: json.dumps(
                 kwargs.get("texts")
             ),
@@ -143,7 +144,7 @@ def embed(original_method, version, tracer):
         attributes = LLMSpanAttributes(**span_attributes)
 
         span = tracer.start_span(
-            name=get_span_name(APIS["EMBED"]["METHOD"]),
+            name=get_span_name(APIS["EMBED" if not v2 else "EMBED_V2"]["METHOD"]),
             kind=SpanKind.CLIENT,
         )
         for field, value in attributes.model_dump(by_alias=True).items():
@@ -343,6 +344,103 @@ def chat_create(original_method, version, tracer):
     return traced_method
 
 
+def chat_create_v2(original_method, version, tracer, stream=False):
+    """Wrap the `chat_create` method for Cohere API v2."""
+
+    def traced_method(wrapped, instance, args, kwargs):
+        service_provider = SERVICE_PROVIDERS["COHERE"]
+
+        messages = kwargs.get("messages", [])
+        if kwargs.get("preamble"):
+            messages = [{"role": "system", "content": kwargs["preamble"]}] + messages
+
+        span_attributes = {
+            **get_langtrace_attributes(version, service_provider),
+            **get_llm_request_attributes(kwargs, prompts=messages),
+            **get_llm_url(instance),
+            SpanAttributes.LLM_REQUEST_MODEL: kwargs.get("model") or "command-r-plus",
+            SpanAttributes.LLM_URL: APIS["CHAT_CREATE_V2"]["URL"],
+            SpanAttributes.LLM_PATH: APIS["CHAT_CREATE_V2"]["ENDPOINT"],
+            **get_extra_attributes(),
+        }
+
+        attributes = LLMSpanAttributes(**span_attributes)
+
+        for attr_name in ["max_input_tokens", "conversation_id", "connectors", "tools", "tool_results"]:
+            value = kwargs.get(attr_name)
+            if value is not None:
+                if attr_name == "max_input_tokens":
+                    attributes.llm_max_input_tokens = str(value)
+                elif attr_name == "conversation_id":
+                    attributes.conversation_id = value
+                else:
+                    setattr(attributes, f"llm_{attr_name}", json.dumps(value))
+
+        span = tracer.start_span(
+            name=get_span_name(APIS["CHAT_CREATE_V2"]["METHOD"]),
+            kind=SpanKind.CLIENT
+        )
+
+        for field, value in attributes.model_dump(by_alias=True).items():
+            set_span_attribute(span, field, value)
+
+        try:
+            result = wrapped(*args, **kwargs)
+
+            if stream:
+                return StreamWrapper(
+                    result,
+                    span,
+                    tool_calls=kwargs.get("tools") is not None,
+                )
+            else:
+                if hasattr(result, "id") and result.id is not None:
+                    span.set_attribute(SpanAttributes.LLM_GENERATION_ID, result.id)
+                    span.set_attribute(SpanAttributes.LLM_RESPONSE_ID, result.id)
+
+                if (hasattr(result, "message") and
+                    hasattr(result.message, "content") and
+                    len(result.message.content) > 0 and
+                    hasattr(result.message.content[0], "text") and
+                    result.message.content[0].text is not None and
+                    result.message.content[0].text != ""):
+                    responses = [{
+                        "role": result.message.role,
+                        "content": result.message.content[0].text
+                    }]
+                    set_event_completion(span, responses)
+                if hasattr(result, "tool_calls") and result.tool_calls is not None:
+                    tool_calls = [tool_call.json() for tool_call in result.tool_calls]
+                    span.set_attribute(
+                        SpanAttributes.LLM_TOOL_RESULTS,
+                        json.dumps(tool_calls)
+                    )
+                if hasattr(result, "usage") and result.usage is not None:
+                    if (hasattr(result.usage, "billed_units") and
+                        result.usage.billed_units is not None):
+                        usage = result.usage.billed_units
+                        for metric, value in {
+                            "input": usage.input_tokens or 0,
+                            "output": usage.output_tokens or 0,
+                            "total": (usage.input_tokens or 0) + (usage.output_tokens or 0),
+                        }.items():
+                            span.set_attribute(
+                                f"gen_ai.usage.{metric}_tokens",
+                                int(value)
+                            )
+                span.set_status(StatusCode.OK)
+                span.end()
+                return result
+
+        except Exception as error:
+            span.record_exception(error)
+            span.set_status(Status(StatusCode.ERROR, str(error)))
+            span.end()
+            raise
+
+    return traced_method
+
+
 def chat_stream(original_method, version, tracer):
     """Wrap the `messages_stream` method."""
 
langtrace_python_sdk/instrumentation/dspy/patch.py
CHANGED
@@ -50,7 +50,7 @@ def patch_bootstrapfewshot_optimizer(operation_name, version, tracer):
             ),
         }
         span_attributes["dspy.optimizer.module.prog"] = json.dumps(prog)
-        if hasattr(instance, "metric"):
+        if hasattr(instance, "metric") and getattr(instance, "metric") is not None:
             span_attributes["dspy.optimizer.metric"] = getattr(
                 instance, "metric"
             ).__name__
@@ -120,8 +120,6 @@ def patch_signature(operation_name, version, tracer):
             **get_extra_attributes(),
         }
 
-        trace_checkpoint = os.environ.get("TRACE_DSPY_CHECKPOINT", "true").lower()
-
         if instance.__class__.__name__:
             span_attributes["dspy.signature.name"] = instance.__class__.__name__
             span_attributes["dspy.signature"] = str(instance.signature)
@@ -143,9 +141,6 @@ def patch_signature(operation_name, version, tracer):
                 "dspy.signature.result",
                 json.dumps(result.toDict()),
             )
-            if trace_checkpoint == "true":
-                print(Fore.RED + "Note: DSPy checkpoint tracing is enabled in Langtrace. To disable it, set the env var, TRACE_DSPY_CHECKPOINT to false" + Fore.RESET)
-                set_span_attribute(span, "dspy.checkpoint", ujson.dumps(instance.dump_state(False), indent=2))
             span.set_status(Status(StatusCode.OK))
 
         span.end()
langtrace_python_sdk/instrumentation/openai/patch.py
CHANGED
@@ -342,8 +342,12 @@ def async_chat_completions_create(version: str, tracer: Tracer) -> Callable:
             service_provider = SERVICE_PROVIDERS["PPLX"]
         elif "azure" in get_base_url(instance):
             service_provider = SERVICE_PROVIDERS["AZURE"]
+        elif "groq" in get_base_url(instance):
+            service_provider = SERVICE_PROVIDERS["GROQ"]
         elif "x.ai" in get_base_url(instance):
             service_provider = SERVICE_PROVIDERS["XAI"]
+        elif "deepseek" in get_base_url(instance):
+            service_provider = SERVICE_PROVIDERS["DEEPSEEK"]
         llm_prompts = []
         for item in kwargs.get("messages", []):
             tools = get_tool_calls(item)
@@ -431,6 +435,18 @@ def embeddings_create(version: str, tracer: Tracer) -> Callable:
         kwargs: EmbeddingsCreateKwargs,
     ) -> Any:
         service_provider = SERVICE_PROVIDERS["OPENAI"]
+        base_url = get_base_url(instance)
+
+        if "perplexity" in base_url:
+            service_provider = SERVICE_PROVIDERS["PPLX"]
+        elif "azure" in base_url:
+            service_provider = SERVICE_PROVIDERS["AZURE"]
+        elif "groq" in base_url:
+            service_provider = SERVICE_PROVIDERS["GROQ"]
+        elif "x.ai" in base_url:
+            service_provider = SERVICE_PROVIDERS["XAI"]
+        elif "deepseek" in base_url:
+            service_provider = SERVICE_PROVIDERS["DEEPSEEK"]
 
         span_attributes = {
             **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
@@ -469,7 +485,6 @@ def embeddings_create(version: str, tracer: Tracer) -> Callable:
             kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
-
             set_span_attributes(span, attributes)
             try:
                 # Attempt to call the original method
@@ -507,17 +522,27 @@ def async_embeddings_create(version: str, tracer: Tracer) -> Callable:
     ) -> Awaitable[Any]:
 
         service_provider = SERVICE_PROVIDERS["OPENAI"]
+        base_url = get_base_url(instance)
+        if "perplexity" in base_url:
+            service_provider = SERVICE_PROVIDERS["PPLX"]
+        elif "azure" in base_url:
+            service_provider = SERVICE_PROVIDERS["AZURE"]
+        elif "groq" in base_url:
+            service_provider = SERVICE_PROVIDERS["GROQ"]
+        elif "x.ai" in base_url:
+            service_provider = SERVICE_PROVIDERS["XAI"]
+        elif "deepseek" in base_url:
+            service_provider = SERVICE_PROVIDERS["DEEPSEEK"]
 
         span_attributes = {
             **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
             **get_llm_request_attributes(kwargs, operation_name="embed"),
+            **get_llm_url(instance),
             SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"],
             SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"),
             **get_extra_attributes(),  # type: ignore
         }
 
-        attributes = LLMSpanAttributes(**filter_valid_attributes(span_attributes))
-
         encoding_format = kwargs.get("encoding_format")
         if encoding_format is not None:
             if not isinstance(encoding_format, list):
@@ -530,17 +555,31 @@ def async_embeddings_create(version: str, tracer: Tracer) -> Callable:
         span_attributes[SpanAttributes.LLM_REQUEST_EMBEDDING_INPUTS] = json.dumps(
            [kwargs.get("input", "")]
        )
+        span_attributes[SpanAttributes.LLM_PROMPTS] = json.dumps(
+            [
+                {
+                    "role": "user",
+                    "content": kwargs.get("input"),
+                }
+            ]
+        )
+
+        attributes = LLMSpanAttributes(**filter_valid_attributes(span_attributes))
 
         with tracer.start_as_current_span(
             name=get_span_name(APIS["EMBEDDINGS_CREATE"]["METHOD"]),
             kind=SpanKind.CLIENT,
             context=set_span_in_context(trace.get_current_span()),
         ) as span:
-
             set_span_attributes(span, attributes)
             try:
                 # Attempt to call the original method
                 result = await wrapped(*args, **kwargs)
+                usage = getattr(result, "usage", None)
+                if usage:
+                    set_usage_attributes(
+                        span, {"prompt_tokens": getattr(usage, "prompt_tokens", 0)}
+                    )
                 span.set_status(StatusCode.OK)
                 return result
             except Exception as err:
langtrace_python_sdk/langtrace.py
CHANGED
@@ -39,6 +39,7 @@ from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
 )
 from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
     LANGTRACE_REMOTE_URL,
+    LANGTRACE_SESSION_ID_HEADER,
 )
 from langtrace_python_sdk.instrumentation import (
     AnthropicInstrumentation,
@@ -98,6 +99,9 @@ class LangtraceConfig:
             or os.environ.get("LANGTRACE_HEADERS")
             or os.environ.get("OTEL_EXPORTER_OTLP_HEADERS")
         )
+        self.session_id = kwargs.get("session_id") or os.environ.get(
+            "LANGTRACE_SESSION_ID"
+        )
 
 
 def get_host(config: LangtraceConfig) -> str:
@@ -134,15 +138,19 @@ def setup_tracer_provider(config: LangtraceConfig, host: str) -> TracerProvider:
 
 
 def get_headers(config: LangtraceConfig):
-
-
-
-
+    headers = {
+        "x-api-key": config.api_key,
+    }
+
+    if config.session_id:
+        headers[LANGTRACE_SESSION_ID_HEADER] = config.session_id
 
     if isinstance(config.headers, str):
-
+        headers.update(parse_env_headers(config.headers, liberal=True))
+    elif config.headers:
+        headers.update(config.headers)
 
-    return
+    return headers
 
 
 def get_exporter(config: LangtraceConfig, host: str):
@@ -150,9 +158,16 @@ def get_exporter(config: LangtraceConfig, host: str):
         return config.custom_remote_exporter
 
     headers = get_headers(config)
-
-    if "http" in
-        return HTTPExporter(
+    exporter_protocol = os.environ.get("OTEL_EXPORTER_OTLP_PROTOCOL", "http")
+    if "http" in exporter_protocol.lower():
+        return HTTPExporter(
+            endpoint=(
+                f"{host}/api/trace"
+                if host == LANGTRACE_REMOTE_URL
+                else f"{host}/v1/traces"
+            ),
+            headers=headers,
+        )
     else:
         return GRPCExporter(endpoint=host, headers=headers)
 
@@ -215,6 +230,7 @@ def init(
     service_name: Optional[str] = None,
     disable_logging: bool = False,
    headers: Dict[str, str] = {},
+    session_id: Optional[str] = None,
 ):
 
     check_if_sdk_is_outdated()
@@ -229,6 +245,7 @@ def init(
         service_name=service_name,
         disable_logging=disable_logging,
         headers=headers,
+        session_id=session_id,
     )
 
     if config.disable_logging:
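The init() signature above now accepts a session_id. A minimal usage sketch, assuming only what this diff shows (the API key and session values are placeholders; when session_id is omitted, LangtraceConfig falls back to the LANGTRACE_SESSION_ID environment variable):

from langtrace_python_sdk import langtrace

# session_id is stored on LangtraceConfig and forwarded to the exporter
# headers by get_headers() as x-langtrace-session-id.
langtrace.init(
    api_key="your-langtrace-api-key",
    session_id="session-123",
)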
langtrace_python_sdk/utils/llm.py
CHANGED
@@ -162,11 +162,12 @@ def get_llm_url(instance):
 
 
 def get_base_url(instance):
-
+    base_url = (
         str(instance._client._base_url)
         if hasattr(instance, "_client") and hasattr(instance._client, "_base_url")
         else ""
     )
+    return base_url
 
 
 def is_streaming(kwargs):
@@ -393,8 +394,19 @@ class StreamWrapper:
         if hasattr(chunk, "text") and chunk.text is not None:
             content = [chunk.text]
 
+        # CohereV2
+        if (hasattr(chunk, "delta") and
+            chunk.delta is not None and
+            hasattr(chunk.delta, "message") and
+            chunk.delta.message is not None and
+            hasattr(chunk.delta.message, "content") and
+            chunk.delta.message.content is not None and
+            hasattr(chunk.delta.message.content, "text") and
+            chunk.delta.message.content.text is not None):
+            content = [chunk.delta.message.content.text]
+
         # Anthropic
-        if hasattr(chunk, "delta") and chunk.delta is not None:
+        if hasattr(chunk, "delta") and chunk.delta is not None and not hasattr(chunk.delta, "message"):
             content = [chunk.delta.text] if hasattr(chunk.delta, "text") else []
 
         if isinstance(chunk, dict):
@@ -408,7 +420,17 @@ class StreamWrapper:
 
         # Anthropic & OpenAI
         if hasattr(chunk, "type") and chunk.type == "message_start":
-
+            if hasattr(chunk.message, "usage") and chunk.message.usage is not None:
+                self.prompt_tokens = chunk.message.usage.input_tokens
+
+        # CohereV2
+        if hasattr(chunk, "type") and chunk.type == "message-end":
+            if (hasattr(chunk, "delta") and chunk.delta is not None and
+                hasattr(chunk.delta, "usage") and chunk.delta.usage is not None and
+                hasattr(chunk.delta.usage, "billed_units") and chunk.delta.usage.billed_units is not None):
+                usage = chunk.delta.usage.billed_units
+                self.completion_tokens = int(usage.output_tokens)
+                self.prompt_tokens = int(usage.input_tokens)
 
         if hasattr(chunk, "usage") and chunk.usage is not None:
             if hasattr(chunk.usage, "output_tokens"):
langtrace_python_sdk/utils/with_root_span.py
CHANGED
@@ -61,6 +61,11 @@ def with_langtrace_root_span(
             span_id = str(span.get_span_context().span_id)
             trace_id = str(span.get_span_context().trace_id)
 
+            # Attach session ID if available
+            session_id = os.environ.get("LANGTRACE_SESSION_ID")
+            if session_id:
+                span.set_attribute("session.id", session_id)
+
             if (
                 "span_id" in func.__code__.co_varnames
                 and "trace_id" in func.__code__.co_varnames
@@ -82,6 +87,12 @@ def with_langtrace_root_span(
         ) as span:
             span_id = span.get_span_context().span_id
             trace_id = span.get_span_context().trace_id
+
+            # Attach session ID if available
+            session_id = os.environ.get("LANGTRACE_SESSION_ID")
+            if session_id:
+                span.set_attribute("session.id", session_id)
+
             if (
                 "span_id" in func.__code__.co_varnames
                 and "trace_id" in func.__code__.co_varnames
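With the change above, a root span picks up a session.id attribute from the environment. A minimal sketch mirroring tests/test_session_id.py further down (the workflow name and session value are placeholders):

import os
from langtrace_python_sdk import with_langtrace_root_span

os.environ["LANGTRACE_SESSION_ID"] = "session-123"

@with_langtrace_root_span("MyWorkflow")
def run_workflow():
    # the root span created by the decorator carries session.id == "session-123"
    ...

run_workflow()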
langtrace_python_sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "3.3.14"
+__version__ = "3.3.16"
{langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/METADATA
RENAMED
@@ -1,10 +1,11 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: langtrace-python-sdk
-Version: 3.3.14
+Version: 3.3.16
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>
-License: Apache-2.0
+License-Expression: Apache-2.0
+License-File: LICENSE
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
{langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/RECORD
RENAMED
@@ -11,11 +11,14 @@ examples/cerebras_example/__init__.py,sha256=ydfNi0DjFMGVcfo79XVG3VEbzIrHo5wYBgS
 examples/cerebras_example/main.py,sha256=QrzQLTEr0dkrrPrlOPqwXkeeGU4dwc8tPR4LhHPOQ3k,6573
 examples/chroma_example/__init__.py,sha256=Mrf8KptW1hhzu6WDdRRTxbaB-0kM7x5u-Goc_zR7G5c,203
 examples/chroma_example/basic.py,sha256=oO7-__8HptnFXLVKbnPgoz02yM-CAPi721xsbUb_gYg,868
-examples/cohere_example/__init__.py,sha256=
+examples/cohere_example/__init__.py,sha256=7Sa0MEQrRU5gBZg31Ao6kXGDC9raHoLU0BxqVWAX8b8,736
 examples/cohere_example/chat.py,sha256=A1ZSkkPPOj3h27VSSa_o_Thabz08ZUzUgTVgAG0pgcA,901
 examples/cohere_example/chat_stream.py,sha256=BvhUgBEuyMhyzRZ_2i_SBvO9Ndf0b7-aRDyO399RyFE,664
+examples/cohere_example/chat_streamv2.py,sha256=mOzTmBoCm3e8aKzNWDaTxfeSwl_dG-aCExEMA67oTb4,544
+examples/cohere_example/chatv2.py,sha256=dEiCxdMLHKe6hjAZW_h3cmDgxHcIn31gMhs0B-3q8_k,504
 examples/cohere_example/embed.py,sha256=p9BJvOg09JVb8BfTCb63v3uh_wOsi_OyrCAJdXXrE6E,496
 examples/cohere_example/rerank.py,sha256=9XEG90sTa6djcHqikSqZDnffLhxEZub76e7-l2LPYdI,1445
+examples/cohere_example/rerankv2.py,sha256=ihWpv_eez6U9cjv0MwfFVBbCJ7DZLIGegZV6kNrJ1hs,1103
 examples/cohere_example/tools.py,sha256=a5uvS058tcwU6PJbF9EDO6LPVmPj2LoW4Vn8Web3Iq8,1656
 examples/crewai_example/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 examples/crewai_example/basic.py,sha256=PBu4f8yQfZO1L_22UDm_ReU9lnEcycjZcGuy5UpgDJM,1948
@@ -105,15 +108,15 @@ examples/vertexai_example/main.py,sha256=gndId5X5ksD-ycxnAWMdEqIDbLc3kz5Vt8vm4YP
 examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56snk-Bbg2Kw,618
 examples/weaviate_example/query_text.py,sha256=wPHQTc_58kPoKTZMygVjTj-2ZcdrIuaausJfMxNQnQc,127162
 langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
-langtrace_python_sdk/langtrace.py,sha256=
-langtrace_python_sdk/version.py,sha256=
+langtrace_python_sdk/langtrace.py,sha256=BxuXc2nFp_P0UYyuTV5DLNPWGsuaSMjZ8mqcqCWpxI4,13099
+langtrace_python_sdk/version.py,sha256=2by_AinXADvzUFKp--fRsB9P_I_LD3NXs1PvFwaL31c,23
 langtrace_python_sdk/constants/__init__.py,sha256=3CNYkWMdd1DrkGqzLUgNZXjdAlM6UFMlf_F-odAToyc,146
-langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=
+langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=EVCrouYCpY98f0KSaKr4PzNxPULTZZO6dSA_crEOyJU,106
 langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langtrace_python_sdk/constants/instrumentation/anthropic.py,sha256=YX3llt3zwDY6XrYk3CB8WEVqgrzRXEw_ffyk56JoF3k,126
 langtrace_python_sdk/constants/instrumentation/aws_bedrock.py,sha256=f9eukqoxrPgPeaBJX2gpBUz1uu0dZIPahOpvoudfbH8,310
 langtrace_python_sdk/constants/instrumentation/chroma.py,sha256=hiPGYdHS0Yj4Kh3eaYBbuCAl_swqIygu80yFqkOgdak,955
-langtrace_python_sdk/constants/instrumentation/cohere.py,sha256=
+langtrace_python_sdk/constants/instrumentation/cohere.py,sha256=9yD133VdrYZ5BoJR4nJHlj67gHEImB9-KsD-NkzHW1I,1159
 langtrace_python_sdk/constants/instrumentation/common.py,sha256=DPDX8icb0Tj3OrgpbL9WeiIaMG7Si2IKiSL8YRwwor4,1203
 langtrace_python_sdk/constants/instrumentation/embedchain.py,sha256=HodCJvaFjILoOG50OwFObxfVxt_8VUaIAIqvgoN3tzo,278
 langtrace_python_sdk/constants/instrumentation/gemini.py,sha256=UAmfgg9FM7uNeOCdPfWlir6OIH-8BoxFGPRpdBd9ZZs,358
@@ -129,7 +132,7 @@ langtrace_python_sdk/constants/instrumentation/qdrant.py,sha256=yL7BopNQTXW7L7Z-
 langtrace_python_sdk/constants/instrumentation/vertexai.py,sha256=0s2vX3Y0iwjOPkUg5lAKi-7o3LaNivDSBBbF-o695Ok,1266
 langtrace_python_sdk/constants/instrumentation/weaviate.py,sha256=gtv-JBxvNGClEMxClmRKzjJ1khgOonsli4D_k9IagSE,2601
 langtrace_python_sdk/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=
+langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=ckd8dMmY6h2oxE04p1JFLwUB5PSJX_Cy4eDFEM6aj4Y,6605
 langtrace_python_sdk/extensions/langtrace_filesystem.py,sha256=34fZutG28EJ66l67OvTGsydAH3ZpXgikdE7hVLqBpG4,7863
 langtrace_python_sdk/instrumentation/__init__.py,sha256=ftQa0rnss2V-HLZC2KDkhTEsLHhvRzdzabQTVV0omZQ,2002
 langtrace_python_sdk/instrumentation/anthropic/__init__.py,sha256=donrurJAGYlxrSRA3BIf76jGeUcAx9Tq8CVpah68S0Y,101
@@ -149,14 +152,14 @@ langtrace_python_sdk/instrumentation/chroma/__init__.py,sha256=pNZ5UO8Q-d5VkXSob
 langtrace_python_sdk/instrumentation/chroma/instrumentation.py,sha256=nT6PS6bsrIOO9kLV5GuUeRjMe6THHHAZGvqWBP1dYog,1807
 langtrace_python_sdk/instrumentation/chroma/patch.py,sha256=jYcqBeu-0cYA29PO880oXYRwYh-R1oseXmzfK6UDBps,9074
 langtrace_python_sdk/instrumentation/cohere/__init__.py,sha256=sGUSLdTUyYf36Tm6L5jQflhzCqvmWrhnBOMYHjvp6Hs,95
-langtrace_python_sdk/instrumentation/cohere/instrumentation.py,sha256=
-langtrace_python_sdk/instrumentation/cohere/patch.py,sha256=
+langtrace_python_sdk/instrumentation/cohere/instrumentation.py,sha256=1wxMhWMfsvKprdV52BIfCZhZS1beRYBW9rUzUDDkyCk,2854
+langtrace_python_sdk/instrumentation/cohere/patch.py,sha256=q29gJnik8bnJ7fnwaJ8PArHTQbJkWhhwBfDdQOTRBng,25422
 langtrace_python_sdk/instrumentation/crewai/__init__.py,sha256=_UBKfvQv7l0g2_wnmA5F6CdSAFH0atNOVPd49zsN3aM,88
 langtrace_python_sdk/instrumentation/crewai/instrumentation.py,sha256=5Umzq8zjEnMEtjZZiMB4DQOPkxZ-1vts7RKC6JWpn24,2969
 langtrace_python_sdk/instrumentation/crewai/patch.py,sha256=VoyOtGKYzaOIu7UnVNTemZeB3LrCIodrrYwmXLdxRw8,9133
 langtrace_python_sdk/instrumentation/dspy/__init__.py,sha256=tM1srfi_QgyCzrde4izojMrRq2Wm7Dj5QUvVQXIJzkk,84
 langtrace_python_sdk/instrumentation/dspy/instrumentation.py,sha256=qx2vBeuODI7rubf-0bkuNzDWu4bLI-E5uabrWTEuH6k,2923
-langtrace_python_sdk/instrumentation/dspy/patch.py,sha256=
+langtrace_python_sdk/instrumentation/dspy/patch.py,sha256=BpesWxlNJAibzu06xNU9mSH82Ufw5h3p5jxM5d3QXqE,10073
 langtrace_python_sdk/instrumentation/embedchain/__init__.py,sha256=5L6n8-brMnRWZ0CMmHEuN1mrhIxrYLNtxRy0Ujc-hOY,103
 langtrace_python_sdk/instrumentation/embedchain/instrumentation.py,sha256=dShwm0duy25IvL7g9I_v-2oYuyh2fadeiJqXtXBay-8,1987
 langtrace_python_sdk/instrumentation/embedchain/patch.py,sha256=ovvBrtqUDwGSmSgK_S3pOOrDa4gkPSFG-HvmsxqmJE8,3627
@@ -196,7 +199,7 @@ langtrace_python_sdk/instrumentation/ollama/instrumentation.py,sha256=jdsvkqUJAA
 langtrace_python_sdk/instrumentation/ollama/patch.py,sha256=w99r9wCCVDdJnZQEezYE2lW_iNFEtrldt9vq3ISAsag,5375
 langtrace_python_sdk/instrumentation/openai/__init__.py,sha256=VPHRNCQEdkizIVP2d0Uw_a7t8XOTSTprEIB8oboJFbs,95
 langtrace_python_sdk/instrumentation/openai/instrumentation.py,sha256=PZxI0qfoud1VsKGmJu49YDp0Z9z9TzCR8qxR3uznOMA,2810
-langtrace_python_sdk/instrumentation/openai/patch.py,sha256=
+langtrace_python_sdk/instrumentation/openai/patch.py,sha256=u8qlCooN-JmGVWLe7jwAeBMHTIi35ktVvL9gWVBfDko,26809
 langtrace_python_sdk/instrumentation/openai/types.py,sha256=aVkoa7tmAbYfyOhnyMrDaVjQuwhmRNLMthlNtKMtWX8,4311
 langtrace_python_sdk/instrumentation/pinecone/__init__.py,sha256=DzXyGh9_MGWveJvXULkFwdkf7PbG2s3bAWtT1Dmz7Ok,99
 langtrace_python_sdk/instrumentation/pinecone/instrumentation.py,sha256=HDXkRITrVPwdQEoOYJOfMzZE_2-vDDvuqHTlD8W1lQw,1845
@@ -216,15 +219,16 @@ langtrace_python_sdk/instrumentation/weaviate/patch.py,sha256=aWLDbNGz35V6XQUv4l
 langtrace_python_sdk/types/__init__.py,sha256=2VykM6fNHRlkOaIEUCdK3VyaaVgk2rTIr9jMmCVj2Ag,4676
 langtrace_python_sdk/utils/__init__.py,sha256=VVDOG-QLd59ZvSHp0avjof0sbxlZ1QQOf0KoOF7ofhQ,3310
 langtrace_python_sdk/utils/langtrace_sampler.py,sha256=BupNndHbU9IL_wGleKetz8FdcveqHMBVz1bfKTTW80w,1753
-langtrace_python_sdk/utils/llm.py,sha256=
+langtrace_python_sdk/utils/llm.py,sha256=qX-4aMCq_7wetlPqhsd4aqr8e76ePdTNBJM_PwvPcAg,16160
 langtrace_python_sdk/utils/misc.py,sha256=LaQr5LOmZMiuwVdjYh7aIu6o2C_Xb1wgpQGNOVmRzfE,1918
 langtrace_python_sdk/utils/prompt_registry.py,sha256=n5dQMVLBw8aJZY8Utvf67bncc25ELf6AH9BYw8_hSzo,2619
 langtrace_python_sdk/utils/sdk_version_checker.py,sha256=F-VVVH7Fmhr5LcY0IIe-34zIi5RQcx26uuxFpPzZesM,1782
 langtrace_python_sdk/utils/silently_fail.py,sha256=wzmvRDZppaRZgVP8C1xpq2GlWXYCwubhaeWvEbQP1SI,1196
 langtrace_python_sdk/utils/types.py,sha256=l-N6o7cnWUyrD6dBvW7W3Pf5CkPo5QaoT__k1XLbrQg,383
-langtrace_python_sdk/utils/with_root_span.py,sha256=
+langtrace_python_sdk/utils/with_root_span.py,sha256=9WtZP6kbixKxdwyWLKERShxyrHk6NEyCDAc_BF4CEL4,8720
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/conftest.py,sha256=0Jo6iCZTXbdvyJVhG9UpYGkLabL75378oauCzmt-Sa8,603
+tests/test_session_id.py,sha256=ncdkzsDdUVolAq8W6WOq8mlBoAGatDfmwaB9prZxPIY,1862
 tests/utils.py,sha256=8ZBYvxBH6PynipT1sqenfyjTGLhEV7SORQH1NJjnpsM,2500
 tests/anthropic/conftest.py,sha256=ht1zF3q4Xb1UiZ3BL_L-CQ3YWrd52Dqb4WTZ3f-GdG4,739
 tests/anthropic/test_anthropic.py,sha256=ODeGDGSZCopR7QVA8Y-Vvv3wBLs-LOlQ58LneU-aTng,2864
@@ -252,12 +256,13 @@ tests/langchain/test_langchain.py,sha256=BYAQY3ShJIVnLS1b-TkJ4wMKhbiPV-E4-ISTjGy
 tests/langchain/cassettes/test_langchain.yaml,sha256=KPBTVIYMUPFaSNpwrTDgWzsu4p3hHj_yNDoudDa-Jis,3755
 tests/openai/conftest.py,sha256=BkehS6heg-O91Nzoc4546OSiAzy8KgSgk7VCO3A11zM,700
 tests/openai/test_chat_completion.py,sha256=vkUtyF01U41J2TR8O8ygvoRXPniXI734QuQ79DgImOg,4863
-tests/openai/test_embeddings.py,sha256=
+tests/openai/test_embeddings.py,sha256=CGBDPH6cF7a83dhcNrKdVoWZSVViJ7rmC4FH3vuTR1k,3200
 tests/openai/test_image_generation.py,sha256=u6lJCDPs2kZ_MfoaEXEszU_VTOQr7xGK7ki93E2e-wY,4902
 tests/openai/cassettes/test_async_chat_completion_streaming.yaml,sha256=bna_jKychb6v7gwAd8IrVjzIluUw7NoI27nvlKVYXkI,6847
 tests/openai/cassettes/test_async_image_generation.yaml,sha256=OwWDdzameaStU4jScuDK7SvpN8Ziuu4tkbKTaBMlXas,3701
 tests/openai/cassettes/test_chat_completion.yaml,sha256=BoI0Wn4_jNQ8fesIiTQkmn92y0p_1JczGMS4fCmXnzQ,2930
 tests/openai/cassettes/test_chat_completion_streaming.yaml,sha256=urfMUkDkUveT3t18k9KuiEE3SMZSwu7EFYTWbTPRIbA,2592397
+tests/openai/cassettes/test_embeddings_base_url.yaml,sha256=K039Y6iP_xkifySWBR4JTaXoXnLaeJEtHFxLG-xeRZ4,11779
 tests/openai/cassettes/test_image_generation.yaml,sha256=GL-AqzMOjL_t3m7WbgXlK06piXiUEH--4c-p8D4vabU,3629
 tests/pinecone/conftest.py,sha256=EY1m5M6MKSOktbVEe__wYNjCW9A7H0xdC-aXhU8JEOg,916
 tests/pinecone/test_pinecone.py,sha256=o1NnY4QirQU4JgMAi21kl9ejuXsGhF9EjyS9C2HuWtU,3599
@@ -265,8 +270,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
 tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
 tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
 tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
-langtrace_python_sdk-3.3.
-langtrace_python_sdk-3.3.
-langtrace_python_sdk-3.3.
-langtrace_python_sdk-3.3.
-langtrace_python_sdk-3.3.
+langtrace_python_sdk-3.3.16.dist-info/METADATA,sha256=J5e-JB6fDqEMbeXWLm6CjtbUEU322tgGic0XYoKo2XM,15676
+langtrace_python_sdk-3.3.16.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langtrace_python_sdk-3.3.16.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+langtrace_python_sdk-3.3.16.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+langtrace_python_sdk-3.3.16.dist-info/RECORD,,
tests/openai/cassettes/test_embeddings_base_url.yaml
ADDED
@@ -0,0 +1,208 @@
+interactions:
+- request:
+    body: '{"input": "Test input", "model": "text-embedding-ada-002", "encoding_format":
+      "base64"}'
+    headers:
+      accept:
+      - application/json
+      accept-encoding:
+      - gzip, deflate
+      connection:
+      - keep-alive
+      content-length:
+      - '87'
+      content-type:
+      - application/json
+      host:
+      - api.openai.com
+      user-agent:
+      - OpenAI/Python 1.54.4
+      x-stainless-arch:
+      - x64
+      x-stainless-async:
+      - 'false'
+      x-stainless-lang:
+      - python
+      x-stainless-os:
+      - Linux
+      x-stainless-package-version:
+      - 1.54.4
+      x-stainless-retry-count:
+      - '0'
+      x-stainless-runtime:
+      - CPython
+      x-stainless-runtime-version:
+      - 3.12.5
+    method: POST
+    uri: https://api.openai.com/v1/embeddings
+  response:
+    body:
+      string: !!binary |
+        H4sIAAAAAAAAA1R6Ww+6Orvn/fspVtYt8wYEpGXdcZaTLYIiTiYTQEUQRA4th5393Sf439kzc6MJ
+        VrHl6e/09D/+9ddff7dZ9cjHv//56++6HMa//8d27Z6O6d///PU///XXX3/99R+/1/9v5KPJHvd7
+        +Sl+w38flp/7Y/77n7+4/77yfwf989fflTcNiI3kjzfmWOQk+XjXsco4dT9Zp1crD3LZULsPODBd
+        X5kEj2onUQtkz2qO/HqC56TOqcaUSbYwQXkGRThfsHt6FtVKrwwBIbreMdb6TzYx82zB6MLxFEU7
+        4BGeYX3gPv0zdQ7euV/aUipgGp09eimNGNA+PknwGWgU207Y9+t5HErYe09EvUsWZdNL93hplR4l
+        tUWWAxO3YxSY+ZZL/WjOo9FK+hzeMn5Bsx9TsL6Rn8I44Tk0OUTSaXzlShicFJEeD59sneVamODt
+        +r1h/Bo+3jLuWw3eUmWh+ulurNv8YsBdlJIe0Zit811NeJDTxKHJAnt9TB8BL5sZw2BdehF9iMye
+        AcWnPGDjPRveclp2LmCUY4dm2cLRTHhWBG/hFBJQqfm6dmf/DLPGmKnLDIU3c5+pgUQ4+mjnDFd9
+        Zgv5DAvtCOmhEzRvqq/9BL1xfmFFArdqag0nBlXFRtjro282nDk+ho9ilLF5YqqKwsa3QRaNCfaa
+        A1zbafE6wIyORqZEDyJenK4ItjftSw9WtwPLV72msFt5lR74K+uR19tt4NgEDT3yiu2RVzpqYBjY
+        I6pfn3jtRLdL4WM+xWivkldE4NON4Uc6BVidhZs+67sql569cKGu/HXWtaxgKrJLpGFdeBvRYk4p
+        B99Pq6f2bs+ty+n7fAOnyS80jF9mttsVbQOPPC6oYyBP51+caEN1FD7YDMAY0YdtPOCzaGx8KJCW
+        8Wod5LKraheq9mjvkf0yTGD7Plbv2j7rcVm6sPW7LxIQ1CLhe2Im0GjQoOoqm9Xa6Y8Fvk3DxGiK
+        T9k0p5YBgXozsUVBWfWsGBfwHp8drOvkDQZgZm8A6/BO1vc30Ze8CHzgz4cXqYKU9KvrcS0gee9j
+        vxzadVqNhwWukPhI0F7PfmaB/QB9gFTs+ns345ogmmCM+CNiquOnWmonsWFmvhJShoyZzYEUxaC/
+        Cohae2OI5usMAqjqUYvVG7xlQ87eUsj35gGj0+MAvo5/U+DjE1LqFUen4vPj3ZLc5Fti5yo8vFm6
+        aww7xdeFWteqXAlE+QLl207Hp7u2j1ZXexowPipHahke30+ex+dAZ9oEH8Zrv86dZROoJlpKpN67
+        Z6Towwby/pUgfscM2VKBawpkzOX47N/e0bcrrxDsnI9NuN19rObDKXuAu8M4RCw+O4+S/YmXS8UE
+        aDexejTuMNAge6UaEm52mU0fdZ3AbrnYOJoeRj+TJLLgOJ2OGK37kzcQARlQoUFK1fpcg4H/xAq8
+        PXwPG7JTgd98gGF3LLbwKc6G+zSncIbvijTc5QmWNHyWcA8jk3BLoYPFbHYBZET2SyYkNdXsViMP
+        XhUF2J2KIprDdxXDvEcTdrD08kbHdDTY4+5DfZ8L18ks1A6e+wLh0PBxNk2vhwXPX3uHj3vL1pfJ
+        eUswpfZMFfFtRbueEVtYVo2FfYeEYI6aSZTgyzSwWXajV1yaRyw1QtlRYz4L2Vg/xjc0Yp6lqhVZ
+        3iQUSAHax1Eokp/pOgE027IpxgzG8B30Q2a9cjgX/ht5zSFfZyV3DHDK3QDrFmesvMEs/K/+qcnU
+        /rqqQxrCLrjw+LjcbX2O93oAO+uKkdjuX9Xs8S2E/eW+o8fxlPWTInEWJCCtSc+aC1hqajBQ7pKc
+        ZjN0omXN2HA/vOwPka63FUw5e0rkVxeM2KZ8r89FuiL4WM0Ym0w9gG9xunSQp2FBj+Et1L80Sjop
+        pe5MWA19InozQkPeZcyL6muG9IUGnARn6ehibM6fbKpfzhkykZ1TNbNrsCiVJkHsXZ5Ui191Nkrv
+        EMLCul0o+vKffmboPQZqgVWsXR5Yn2Tl5MrLeXUJg5ID4BTNcEFXtyrVuoiASdKRAiztWlNjlsZq
+        LEHdwBd/OBMj3I/ruhh5ILXCpFGHAAWsP7yJGD1Hq2aK0cZPCWCCWafXDb/Wo5i6MNWWBb2iPa7W
+        azCUQLOzDwF+pni7IT4WoHnGOoGtA7PFr2sEhXdS4ENu+t6MJ3mCi3GcscnYwzox1xH+8AUR3Zn7
+        jf8K+B6DHq33StQ/GSk12DcqIvLGb72YOROkKl6xeUyO0XTmmDMwMiXBDvdyKz44J2dQecuA5Ji1
+        s/GqtRLgqG7/8AOM32NC4JcZVKwP4F1NTBsV8LHLKT2Zh32/PpJFgXbkxkgKV7maczzx8LN0OpIL
+        e/FIt7BnwGgfnRDueajWouU5mDGnK/X3We6R2ZNyKXyIPHamhIsmZt4bMOVklkjnuujJfXfNIfHN
+        FSviu8kGr3He8HTIHlj/Ju9qCdxlgUdDifC1mkudLsY5FLQ+vuHjy7MjYb/EDYRWZWBbKNVoyS3T
+        gBt/bes7eEvKKz7YObX9469sKs1bCmytkdEOtJU+HXkrBSHNMRH88VkNc8gbcM2og40TL1SrEXcB
+        PLVFQLb7e+MyAAUKpych4jFT1/ExVQju8WUgfF7x2dr7XAydJYD0hxfzT99QxvMwWiNpHV+a3ILv
+        58OjeUjNdXUiowOxdc8oLr4TWPYv1ofo5obYkruymrNjFcJnmdrUGvFzHZVcNWBSP3zCFnraE//M
+        dtKeHEzCGQHxxlat3/A2pB4+etOiD/wEGtDzzYf0BnXAnMFdC69SvG7zZfuxnVgG2qMLsb2yr2xy
+        0vgBLVu6Um9cV50a8GkBZX9SsSk8/XUSEl4D401q8EFsSn2Z8laB+zJ80uMFhvoyTloOVf3UYkN5
+        C2CS96olf4qwo8Y4C1H3rW0DHrokxsFRmLKmgIoLpVo1qWWiAXwRe19AdhRd+mwvH33B9ugD2fvq
+        2H99w2rDkxaAwH5Q5+6/9Cn0AQNkHuskNrnKW5K5NsCUS28kfM6lvlxkoYBQu/bYMVCvd2udc7DZ
+        eSci92heO/Yj2HDHly/s5I4LZmkUS6kf3gq+dRe3f3dZbEMntS5IOO19fcdBlpMUGqYb3i8racul
+        hM1Y6oiVjAaMu6JoQO8cduhVn02w3CrTgNt6EWaKT9FSGbImnfcDou4TKRkx71cCDFavqfJsJp3y
+        +NGB773G2HpRDXBCwmiAecU5RdlrzmbzCR5A+3gKEfgrq/cP23/86h1FD0X0lrxIEBxL80j16Jp4
+        i8+KMTRijqX6Dghg7X0Yw1o/7NF+FN1qNW0t2PNKZWOt6hAgh6VNxanZe1ibbwishgkJbLGmYWUS
+        A4/eC1cD5dW38EPxW31mWjOGj3tgY3Rc5oqEO5eA/dl6opOgmz1hNS2W27gP0E54+mC5Ot2w3/QL
+        Wpvw3S/mFHIQiFeD1G44Zgtowhi04VNAAju8wPJo2BxsfEMPUnHJJkXzXchJO4pkIrX6fIKFJE8l
+        XVDYx7W39PAswg0fiFivr6r+7VfMP0R6jMs2W7+DI4kDwhxV7eO+WmLlaEsXfdhR2xgu0frqjRQW
+        GoZIMCKtnw77QQGHBslEptdWH8EzKWE3jDtqnhi931Xy6kJxkUeqBympxpAoIrx+55SIftL3K7zE
+        b0jfdk0VvQ+ytdA1Ddapb9Bzar+9UVvvImyeZ52eteM7Wq6REkBTZRKqeSH1FqVyRek3PvyOYj8r
+        jsnAqEkxxvZp1qek+uYwIfsbTge5yKZ5VH0wD6qLmOx1yqYIqyVUcqAi8Zi9wBK40gJLcIwwni9S
+        NCYvc4CpHM8YO+dD9n25xw5ownDDP3+zrLyWQnY5afjQEA90G96Au8DKZPK4pl9N4dxC+Xrv6OGU
+        +14T7tzhtx/osbAXfQxWPQYkFXwkGsMuW+ire4D2vQuo1semvoh7bpF+8/XvMsyWIrk+YBdSTCrd
+        Rv1ykxP/p1/pQT/s1pERJVHyJnLGVqU/wWwIsybxNCgQ2GvfjOafUNl3t0Cl7m0vVwThHQLg9ulR
+        ITB0nWMliqFe5DdsumLVTz4ytD/8l3H56m164C2nRrPgTS+sP/6Qc/95RSx8T/1Ce1sB1+MtpQfu
+        3oMl8aZA3h2OFLtXtvMW0S1T2N59Dce+VOkLq7nnPdU/PsWJnG78KXLQqDUWzTFL+4W0Ug4GnuWo
+        mbtVNdlg3/z8IlrXjHhLd3RLoJysjOpP/10tS3cNYRhZAbagyusE3cIFlnVqYXWV654CRh2k4KCd
+        8BHcs6gb94UiP2FKyAj10zrTtG1BsD8+qGWffTB9wtcZKCcj2/QEWOdKBi4M+4D/41c6x3c5WN3g
+        gPWXlegTe1tLmLY1S/a378tbLu0UwiDkROx8nYe3+ZsQmOR8pcqUBd606XnwMUlHf/6MvtKYg9XD
+        NumRu2b9KipMC4RSPWB8r6Z1sZUrAc6BQHqQ76w+RRCdIWtev3TT63p3kdkCVk08bPyjeRPz0gsI
+        6+BOnWpwq8G/ft8//qQIJZ+1+/nhysQSmcl80wfjc+qg6d3qH394kyY7HVjszEWLsE+i0SBZDvgB
+        HFDyGF/95FRQA1t9UCv9auskP2QI8kVgyNyjmz463Y6Dy1pW1FzjuJ8AoxdSzS0GmsSDlU3tl3GB
+        uLAjVRh1349tJhJwXwYHuyetqNaofzDw58/PvVCtAy/qDaSHqcYa2mFvei0PBDrWRPSmEjWaLAVa
+        sP2+NarzDNG/mx+A36LUMOZfDaDf7BxD521BIjmrm3F1e4vhPbEdAjNlqQaTITl43vsLml8t7VfW
+        8W24uFOFphFY0XQRPAken/yeqm9C+9qnAQNf4mMk7M+PcFbfwfp5Fzc/c43WQncV8NwJEIFKhSs9
+        7DskLZCdqOdnhTcvfPCGafth0Y4rq+j7uisLPHVxQN1DpPST2lkFLPOnRFEXffoBfwEDr9J5xcfO
+        y6rO0cYAZCNl6KHuLxVpPhwD43OfUceqH9FsR18FbH4DFd2uieZAymKpFRaNfNlPl41YNCf4OF9k
+        uq13Pz8+ZgfOY4yozT97b6bKpYTivnhib9PrvGILk0Tfbk0PnOKB6XzYF9C6hFdsZZa2Lq+01oBa
+        qiq1iOhny/Nrar+8DYmM98qmk9sksHbantqhePGGnPQcHOm40t/+GJJUl8D9OcfYuNyu/Ur2Jw7W
+        r12I3/umAms6nB9//Oet+XLRmFtZALDUnbD1lNmV2Mpz+OkffBjlUl9sRw7A5pepNSe8Pmz+R9rW
+        E/svPc/m+cZK8OzecsKpLvImQnkDumNypqoPG4+oQ/uQNr7DasjfsuWnZ8orsqiLR7la3getAanF
+        eFRthRrQyuO5/fdT81gJOMmb7wLwwRUOPjb95er9eZ6b/iNCbvr6eo+HB9wdMEXiHDH9UJqnFATK
+        /Ynmeq7Xr6i3DXRzHmz7c69P4qlRQBeLLemubKcvgXr3QfVwTWytD62fWqkfoA1iSE0FVyv548f3
+        XoKskIujWYRJCblXUuL43vj9EqdJAKMmwTRzv7JHIDpPsLGPIjXgoIPJqThNGkLmQzg4VCu9F5om
+        jxd6/LPfOUIZAxa1TXA4xWK1ytfBhZ/7VcCKO08Z/Q6qJF8OEqIH88P2E/4CCJLKWBFXeCOgTrWg
+        n/6kjpTdwaizjgZxG4hUiZWmanlRf8OHwNvULefiv/KhxxvEpHtoEZie5SmRlzqu0Bjelu353lz4
+        NVyN1MmnqkZBcv/kM9g4dEK/RKNE4FsjA7Uj0Ge0IfwCkjr3kbjLuZW0Q2JBlckUammnSn8L0smV
+        urpTsbM8FLAE0tGFL4m3MHqomU7W4c1D9kW/1H86LRjsls/hYz3E1Le8cB19mjC/fBgfjtEFrEko
+        Gftw/+DJB8W+Pty8XgJhZATUWR4FmK7dPoebfqbO/lCsxDUkfm/Z4hUJQnrJfv735zfp4XPWvIX9
+        CC58iscL3eofDPv31/3lN2QfKaousI7hwpdXnelxFmYw29FLk77ZisliLGbPO77Lgw88BNg5yR2Y
+        VS8roavxF6pfZZQBNvwOMOLeGIeNK/ZEk54EGJXPEiFa2eiXf8LafIZoAbkYTcbn1sKM79w/31/P
+        Oc/8ydeUT2hFdKq+HNBhyKJZHsxKWC4qJ3Ns88LOWeL6UVF3DISEdkiUWjWbNn8mcTK2t/VQ1t3L
+        NVsQVMmJmlcxBos4PdG+9+6IOqPYVcMzUxPoHZgzWfI31acqb9B+y5fItOHfNCbfAMrcTccx/AzR
+        uOEj3PiNTDu1if7sB1t7y2hlunc069bJgmt7cLDXBau35ncuB4UkA2pqjaCvLzBD+FtPFs1pv3b3
+        rwik9WgiyIMwI1f6LOEv77JfRy9aHG0MwWBDhUgxjMBwETwR7k/Uom59cvXJzd8aFJamoDa9efp8
+        vGapdMdHGeutn3jrzQgt+Uvc9k8+Rza+B5fYQNTdyVG14z7TG/786EEqdlHb+4wNVPOrIi56Gvqf
+        /Grzt9QzqLNywurYYMsDqX48o34uUuBDryzP+HAYfW/VXMHe6ypVsEUvfjYnryP58S9VEn3K1qmb
+        ecAFgoP1q0yi9UKXP/5708eKLrx6PwGKJmV/8H1dLioPH1+LYqtfhohYX5eDvl9KW94G+ulNXzEw
+        wr5BPBb8it/4BKqlrhKp5Tp903cBkJKviXHafDyh8hj+T//APwqOzpMlUYCTBBec9ON53fLls2x+
+        1hoxKcj01Wc7F/aaolK7DNR+x4dKA3tNU6kzJefoh/cwmhKe7D+4q0ZtvYiwjb8Btbb8d63yIQHb
+        86D2Aj1vbRwHgsQVfZoeoqJayrG14dYPoTY5Mv1wZCsGbv0UxG157fRIVg7y/oUgCd7lbN3wB54V
+        iHHGliFYyBIof/IaVzucven+yfhfPeKDkO4y8tvvp9wOcGaHDlhMoePhl9gtdbWxi963i/OArd9+
+        iXTb36slYopANofyiu1X10cj5K6LdG7WnBq5JvRL0afvfedZKtWOi7gSPRZzuOUlWM90lPGZzpbg
+        udtBNGsuBuv2BmuwE9H+dOX6jnv43M9fb/mstNL39cXI+YOf6GHLixbXWHiIVhISRquy7E+/wLcv
+        L2rP0aMi7Id1wZOZDvTnr5Ye5hI4Em1PzS7Wo01v2+DQKvKW7zPeuuUtcFfLNva4PNKnazc/gBjX
+        I3aPsRuNl5Mt/cnP+M55Ah5Z1xZWixeR/TtUPS4b4lDCgRXi7XlGf/xOAPQHWmKjjtYX2DNw4zc0
+        gjvI5sm3O9Ck9WnLV82MBvZTgUxef7Ez7bhobLqEgJo5nv7ww9QNHoHF7XzAiDFnb0WfoQEbXpPl
+        BKi+9DixYHz+ZkQa5CJa2y8XQzv/VFRnOiPin9+jBlBdhPTn3+ic3zgIr9YdH+3V7n96BZy4WKCH
+        wzh45c0oQnnT0xRd1M86vhV7+dNfgA4J19Xn/bf0SEVKxAQ8fn4NQQDLO7WlVo0ms3A6uP0eYjV0
+        yNaiZTi4GHgmYrtXKy6/8yE88FNMDyge9Gk9rt2fvKfcm6Y3Q9RBiK/0SaTmrGSCHQgWdFEHCFOP
+        p2i+hfUAN32P/e9ir4PaWSW897X8q8ese+qBLw+XWEEs5qZ1TTSjlE3cMOTGqpw+VWM0AQsc3lQx
+        TtY638mllbb/j4TNX/7yGeCC9xm7y+kezQlIePDzm7pNkuxrCLPy0z/Y5fQimn/9GtaTMsSh6VrN
+        gis2EM3nPcX8ywKjA/oUhoYY0OR17DPylgcIh8d6xPbbvegDnnVm7+6XMzYwfG/4yNoQP4ITTdXX
+        Hfzpl2z+Ffvu2fGW4BzEsLOHFt8G/PCmdkb2b73QfvcuVhpkqQvzk+9uevYc0aw2HlIxFwTb7JPJ
+        Rut70uTIPN2pw7zPHqGvLgf3d+lga+vv/OpDntzKw26Qu9mEHOcNCUhqrB0o9YbDZOXwh4cx/y77
+        lb7OIbyQkMNb/1X/+jSA0GPOEnWXk5wtxWunQXV5p5u/ANXWr5Fge0catsJrHC1hKrx/ef6v3lfB
+        iLsQDjRukLDp7T/5d8oOLj2c9oO39YcMactvsP2e+myuW8+Fp7YM8DHkF4+iIn9Lm76n2t6s9Ymc
+        biG89x8ZCeaHrZbPWljypn/+9JPXc3MuYX5CLjW/0dHrDn2SgL9/pwL+819//fW/ficMmvb+qLeD
+        AeNjHv/930cF/p3e039zHP/nGAIZ0uLx9z//dQLh72/fNt/xf4/t+/EZ/v7nL/7PUYO/x3ZM6//n
+        8r+2G/3nv/4PAAAA//8DAEF/1EXeIAAA
+    headers:
+      CF-Cache-Status:
+      - DYNAMIC
+      CF-RAY:
+      - 8ef92722e9cb8389-SEA
+      Connection:
+      - keep-alive
+      Content-Encoding:
+      - gzip
+      Content-Type:
+      - application/json
+      Date:
+      - Tue, 10 Dec 2024 00:35:05 GMT
+      Server:
+      - cloudflare
+      Set-Cookie:
+      - __cf_bm=Bc46FmH66u.HvNfo3T4q3vwT9_TGhtm3wFFMbxQcZ0U-1733790905-1.0.1.1-mXshj7zBxHMWpnIh3c8EmaqnQsDr8FCF6kOqoQvhFoytI6b7MXLbqkZNRfXRP0pd_OraJnyZhUzOl8KaZdq9qg;
+        path=/; expires=Tue, 10-Dec-24 01:05:05 GMT; domain=.api.openai.com; HttpOnly;
+        Secure; SameSite=None
+      - _cfuvid=pdqpFgJ05BGtT4Cs8llmvgLkp1kOJXZNJgkv3mk5xnA-1733790905081-0.0.1.1-604800000;
+        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
+      Transfer-Encoding:
+      - chunked
+      X-Content-Type-Options:
+      - nosniff
+      access-control-allow-origin:
+      - '*'
+      access-control-expose-headers:
+      - X-Request-ID
+      alt-svc:
+      - h3=":443"; ma=86400
+      openai-model:
+      - text-embedding-ada-002
+      openai-organization:
+      - scale3-1
+      openai-processing-ms:
+      - '49'
+      openai-version:
+      - '2020-10-01'
+      strict-transport-security:
+      - max-age=31536000; includeSubDomains; preload
+      x-ratelimit-limit-requests:
+      - '10000'
+      x-ratelimit-limit-tokens:
+      - '10000000'
+      x-ratelimit-remaining-requests:
+      - '9999'
+      x-ratelimit-remaining-tokens:
+      - '9999998'
+      x-ratelimit-reset-requests:
+      - 6ms
+      x-ratelimit-reset-tokens:
+      - 0s
+      x-request-id:
+      - req_0ce98b0d1723b6734062d2cd711aa85e
+    status:
+      code: 200
+      message: OK
+version: 1
tests/openai/test_embeddings.py
CHANGED
@@ -0,0 +1,88 @@
+import pytest
+import httpx
+from openai import OpenAI
+from opentelemetry.trace import SpanKind, StatusCode
+from langtrace.trace_attributes import SpanAttributes
+from langtrace_python_sdk.constants.instrumentation.openai import APIS
+from langtrace_python_sdk.instrumentation.openai import OpenAIInstrumentation
+from tests.utils import assert_token_count
+from importlib_metadata import version as v
+
+# Initialize OpenAI instrumentation
+instrumentor = OpenAIInstrumentation()
+instrumentor.instrument()
+
+@pytest.mark.vcr()
+def test_embeddings_base_url(exporter, openai_client):
+    input_value = "Test input"
+    kwargs = {
+        "input": input_value,
+        "model": "text-embedding-ada-002",
+    }
+
+    openai_client.embeddings.create(**kwargs)
+    spans = exporter.get_finished_spans()
+    embedding_span = spans[-1]
+
+    attributes = embedding_span.attributes
+    assert attributes.get(SpanAttributes.LLM_URL) == "https://api.openai.com/v1/"
+    assert attributes.get(SpanAttributes.LANGTRACE_SERVICE_NAME) == "OpenAI"
+
+
+def test_embeddings_azure_provider(exporter, monkeypatch):
+    # Mock response data
+    mock_response = {
+        "data": [{"embedding": [0.1] * 1536, "index": 0, "object": "embedding"}],
+        "model": "text-embedding-ada-002",
+        "object": "list",
+        "usage": {"prompt_tokens": 5, "total_tokens": 5}
+    }
+
+    # Create a mock send method for the HTTP client
+    def mock_send(self, request, **kwargs):
+        # Create a proper request with headers
+        headers = {
+            "authorization": "Bearer test_api_key",
+            "content-type": "application/json",
+        }
+        request = httpx.Request(
+            method="POST",
+            url="https://your-resource.azure.openai.com/v1/embeddings",
+            headers=headers,
+        )
+
+        # Create response with proper context
+        return httpx.Response(
+            status_code=200,
+            content=b'{"data": [{"embedding": [0.1, 0.1], "index": 0, "object": "embedding"}], "model": "text-embedding-ada-002", "object": "list", "usage": {"prompt_tokens": 5, "total_tokens": 5}}',
+            request=request,
+            headers={"content-type": "application/json"}
+        )
+
+    # Create Azure client
+    azure_client = OpenAI(
+        api_key="test_api_key",
+        base_url="https://your-resource.azure.openai.com/v1",
+    )
+
+    # Debug prints
+    print(f"Debug - Azure client type: {type(azure_client)}")
+    print(f"Debug - Azure client base_url: {azure_client.base_url}")
+    print(f"Debug - Azure client _client._base_url: {azure_client._client._base_url if hasattr(azure_client, '_client') else 'No _client'}")
+
+    # Patch the HTTP client's send method
+    monkeypatch.setattr(httpx.Client, "send", mock_send)
+
+    input_value = "Test input"
+    kwargs = {
+        "input": input_value,
+        "model": "text-embedding-ada-002",
+    }
+
+    azure_client.embeddings.create(**kwargs)
+    spans = exporter.get_finished_spans()
+    embedding_span = spans[-1]
+
+    attributes = embedding_span.attributes
+    assert attributes.get(SpanAttributes.LLM_URL) == "https://your-resource.azure.openai.com/v1/"
+    assert attributes.get(SpanAttributes.LANGTRACE_SERVICE_NAME) == "Azure"
tests/test_session_id.py
ADDED
@@ -0,0 +1,59 @@
+import os
+import pytest
+from opentelemetry.trace import SpanKind
+from langtrace_python_sdk.langtrace import LangtraceConfig
+from langtrace_python_sdk.extensions.langtrace_exporter import LangTraceExporter
+from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+from langtrace_python_sdk.constants.exporter.langtrace_exporter import LANGTRACE_SESSION_ID_HEADER
+
+def test_session_id_from_env(exporter):
+    # Test session ID from environment variable
+    test_session_id = "test-session-123"
+    os.environ["LANGTRACE_SESSION_ID"] = test_session_id
+
+    @with_langtrace_root_span()
+    def test_function():
+        pass
+
+    test_function()
+
+    spans = exporter.get_finished_spans()
+    assert len(spans) == 1
+    span = spans[0]
+    assert span.attributes.get("session.id") == test_session_id
+
+    # Cleanup
+    del os.environ["LANGTRACE_SESSION_ID"]
+
+def test_session_id_in_config():
+    # Test session ID through LangtraceConfig
+    test_session_id = "config-session-456"
+    config = LangtraceConfig(session_id=test_session_id)
+    exporter = LangTraceExporter(
+        api_host="http://test",
+        api_key="test-key",
+        session_id=config.session_id
+    )
+
+    assert exporter.session_id == test_session_id
+
+def test_session_id_in_headers():
+    # Test session ID in HTTP headers
+    test_session_id = "header-session-789"
+    exporter = LangTraceExporter(
+        api_host="http://test",
+        api_key="test-key",
+        session_id=test_session_id
+    )
+
+    # Export method adds headers, so we'll check the headers directly
+    headers = {
+        "Content-Type": "application/json",
+        "x-api-key": "test-key",
+        "User-Agent": "LangtraceExporter",
+    }
+
+    if test_session_id:
+        headers[LANGTRACE_SESSION_ID_HEADER] = test_session_id
+
+    assert headers[LANGTRACE_SESSION_ID_HEADER] == test_session_id
{langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/entry_points.txt
RENAMED
File without changes

{langtrace_python_sdk-3.3.14.dist-info → langtrace_python_sdk-3.3.16.dist-info}/licenses/LICENSE
RENAMED
File without changes