langtrace-python-sdk 3.2.0__py3-none-any.whl → 3.3.1__py3-none-any.whl

examples/cerebras_example/__init__.py (new file)
@@ -0,0 +1,11 @@
+class CerebrasRunner:
+    def run(self):
+        from .main import (
+            completion_example,
+            completion_with_tools_example,
+            openai_cerebras_example,
+        )
+
+        completion_with_tools_example()
+        completion_example()
+        openai_cerebras_example()
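This runner class appears to follow the pattern used by the SDK's other example packages. A direct invocation, assuming the repository's examples directory is importable from your working directory, would look like:

from examples.cerebras_example import CerebrasRunner

CerebrasRunner().run()  # exercises the tool-calling, plain, and OpenAI-compatible demos in turn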
examples/cerebras_example/main.py (new file)
@@ -0,0 +1,195 @@
+from langtrace_python_sdk import langtrace
+from cerebras.cloud.sdk import Cerebras
+from dotenv import load_dotenv
+import re
+import json
+from openai import OpenAI
+import os
+
+load_dotenv()
+
+langtrace.init()
+openai_client = OpenAI(
+    base_url="https://api.cerebras.ai/v1",
+    api_key=os.getenv("CEREBRAS_API_KEY"),
+)
+client = Cerebras()
+
+
+def openai_cerebras_example(stream=False):
+    completion = openai_client.chat.completions.create(
+        messages=[
+            {
+                "role": "user",
+                "content": "Why is fast inference important?",
+            }
+        ],
+        model="llama3.1-8b",
+        stream=stream,
+    )
+
+    if stream:
+        for chunk in completion:
+            print(chunk)
+    else:
+        return completion
+
+
+def completion_example(stream=False):
+    completion = client.chat.completions.create(
+        messages=[
+            {
+                "role": "user",
+                "content": "Why is fast inference important?",
+            }
+        ],
+        model="llama3.1-8b",
+        stream=stream,
+    )
+
+    if stream:
+        for chunk in completion:
+            print(chunk)
+    else:
+        return completion
+
+
+def completion_with_tools_example(stream=False):
+    messages = [
+        {
+            "role": "system",
+            "content": "You are a helpful assistant with access to a calculator. Use the calculator tool to compute mathematical expressions when needed.",
+        },
+        {"role": "user", "content": "What's the result of 15 multiplied by 7?"},
+    ]
+
+    response = client.chat.completions.create(
+        model="llama3.1-8b",
+        messages=messages,
+        tools=tools,
+        stream=stream,
+    )
+
+    if stream:
+        # Handle streaming response
+        full_content = ""
+        for chunk in response:
+            if chunk.choices[0].delta.tool_calls:
+                tool_call = chunk.choices[0].delta.tool_calls[0]
+                if hasattr(tool_call, "function"):
+                    if tool_call.function.name == "calculate":
+                        arguments = json.loads(tool_call.function.arguments)
+                        result = calculate(arguments["expression"])
+                        print(f"Calculation result: {result}")
+
+                        # Get final response with calculation result
+                        messages.append(
+                            {
+                                "role": "assistant",
+                                "content": None,
+                                "tool_calls": [
+                                    {
+                                        "function": {
+                                            "name": "calculate",
+                                            "arguments": tool_call.function.arguments,
+                                        },
+                                        "id": tool_call.id,
+                                        "type": "function",
+                                    }
+                                ],
+                            }
+                        )
+                        messages.append(
+                            {
+                                "role": "tool",
+                                "content": str(result),
+                                "tool_call_id": tool_call.id,
+                            }
+                        )
+
+                        final_response = client.chat.completions.create(
+                            model="llama3.1-70b", messages=messages, stream=True
+                        )
+
+                        for final_chunk in final_response:
+                            if final_chunk.choices[0].delta.content:
+                                print(final_chunk.choices[0].delta.content, end="")
+            elif chunk.choices[0].delta.content:
+                print(chunk.choices[0].delta.content, end="")
+                full_content += chunk.choices[0].delta.content
+    else:
+        # Handle non-streaming response
+        choice = response.choices[0].message
+        if choice.tool_calls:
+            function_call = choice.tool_calls[0].function
+            if function_call.name == "calculate":
+                arguments = json.loads(function_call.arguments)
+                result = calculate(arguments["expression"])
+                print(f"Calculation result: {result}")
+
+                messages.append(
+                    {
+                        "role": "assistant",
+                        "content": None,
+                        "tool_calls": [
+                            {
+                                "function": {
+                                    "name": "calculate",
+                                    "arguments": function_call.arguments,
+                                },
+                                "id": choice.tool_calls[0].id,
+                                "type": "function",
+                            }
+                        ],
+                    }
+                )
+                messages.append(
+                    {
+                        "role": "tool",
+                        "content": str(result),
+                        "tool_call_id": choice.tool_calls[0].id,
+                    }
+                )
+
+                final_response = client.chat.completions.create(
+                    model="llama3.1-70b",
+                    messages=messages,
+                )
+
+                if final_response:
+                    print(final_response.choices[0].message.content)
+                else:
+                    print("No final response received")
+        else:
+            print("Unexpected response from the model")
+
+
+def calculate(expression):
+    expression = re.sub(r"[^0-9+\-*/().]", "", expression)
+
+    try:
+        result = eval(expression)
+        return str(result)
+    except (SyntaxError, ZeroDivisionError, NameError, TypeError, OverflowError):
+        return "Error: Invalid expression"
+
+
+tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "calculate",
+            "description": "A calculator tool that can perform basic arithmetic operations. Use this when you need to compute mathematical expressions or solve numerical problems.",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "expression": {
+                        "type": "string",
+                        "description": "The mathematical expression to evaluate",
+                    }
+                },
+                "required": ["expression"],
+            },
+        },
+    }
+]
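The detail this example hinges on is ordering: langtrace.init() runs before any completion request is issued, so the Cerebras instrumentation added in this release can wrap the call. A trimmed sketch of that flow, assuming CEREBRAS_API_KEY is set in the environment (the example relies on Cerebras() picking it up):

from langtrace_python_sdk import langtrace
from cerebras.cloud.sdk import Cerebras

langtrace.init()     # initialize tracing before issuing requests
client = Cerebras()  # reads CEREBRAS_API_KEY from the environment

response = client.chat.completions.create(
    model="llama3.1-8b",
    messages=[{"role": "user", "content": "Why is fast inference important?"}],
)
print(response.choices[0].message.content)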
langtrace_python_sdk/constants/instrumentation/common.py
@@ -35,6 +35,7 @@ SERVICE_PROVIDERS = {
     "AUTOGEN": "Autogen",
     "XAI": "XAI",
     "AWS_BEDROCK": "AWS Bedrock",
+    "CEREBRAS": "Cerebras",
 }
 
 LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY = "langtrace_additional_attributes"
langtrace_python_sdk/instrumentation/__init__.py
@@ -21,6 +21,7 @@ from .mistral import MistralInstrumentation
 from .aws_bedrock import AWSBedrockInstrumentation
 from .embedchain import EmbedchainInstrumentation
 from .litellm import LiteLLMInstrumentation
+from .cerebras import CerebrasInstrumentation
 
 __all__ = [
     "AnthropicInstrumentation",
@@ -46,4 +47,5 @@ __all__ = [
     "GeminiInstrumentation",
     "MistralInstrumentation",
     "AWSBedrockInstrumentation",
+    "CerebrasInstrumentation",
 ]
langtrace_python_sdk/instrumentation/cerebras/__init__.py (new file)
@@ -0,0 +1,3 @@
+from .instrumentation import CerebrasInstrumentation
+
+__all__ = ["CerebrasInstrumentation"]
langtrace_python_sdk/instrumentation/cerebras/instrumentation.py (new file)
@@ -0,0 +1,54 @@
+"""
+Copyright (c) 2024 Scale3 Labs
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from typing import Collection
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.trace import get_tracer
+from opentelemetry.semconv.schemas import Schemas
+from wrapt import wrap_function_wrapper
+from importlib_metadata import version as v
+from .patch import chat_completions_create, async_chat_completions_create
+
+
+class CerebrasInstrumentation(BaseInstrumentor):
+    """
+    The CerebrasInstrumentation class represents the Cerebras instrumentation
+    """
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return ["cerebras-cloud-sdk >= 1.0.0"]
+
+    def _instrument(self, **kwargs):
+        tracer_provider = kwargs.get("tracer_provider")
+        tracer = get_tracer(
+            __name__, "", tracer_provider, schema_url=Schemas.V1_27_0.value
+        )
+        version = v("cerebras-cloud-sdk")
+
+        wrap_function_wrapper(
+            module="cerebras.cloud.sdk",
+            name="resources.chat.completions.CompletionsResource.create",
+            wrapper=chat_completions_create(version, tracer),
+        )
+
+        wrap_function_wrapper(
+            module="cerebras.cloud.sdk",
+            name="resources.chat.completions.AsyncCompletionsResource.create",
+            wrapper=async_chat_completions_create(version, tracer),
+        )
+
+    def _uninstrument(self, **kwargs):
+        pass
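Normally langtrace.init() activates this instrumentor automatically (see the langtrace.py hunks below), but because it subclasses OpenTelemetry's BaseInstrumentor it can also be enabled by hand. A rough sketch, assuming cerebras-cloud-sdk >= 1.0.0 is installed:

from opentelemetry.sdk.trace import TracerProvider
from langtrace_python_sdk.instrumentation.cerebras import CerebrasInstrumentation

provider = TracerProvider()  # or reuse an existing tracer provider
CerebrasInstrumentation().instrument(tracer_provider=provider)
# Both CompletionsResource.create and AsyncCompletionsResource.create are now wrapped.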
langtrace_python_sdk/instrumentation/cerebras/patch.py (new file)
@@ -0,0 +1,138 @@
+from langtrace_python_sdk.instrumentation.groq.patch import extract_content
+from opentelemetry.trace import SpanKind
+from langtrace_python_sdk.utils.llm import (
+    get_llm_request_attributes,
+    get_langtrace_attributes,
+    get_extra_attributes,
+    get_llm_url,
+    is_streaming,
+    set_event_completion,
+    set_span_attributes,
+    StreamWrapper,
+)
+from langtrace_python_sdk.utils.silently_fail import silently_fail
+from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
+from langtrace.trace_attributes import SpanAttributes
+from langtrace_python_sdk.utils import handle_span_error, set_span_attribute
+
+
+def chat_completions_create(version: str, tracer):
+    def traced_method(wrapped, instance, args, kwargs):
+        llm_prompts = []
+        for message in kwargs.get("messages", []):
+            llm_prompts.append(message)
+
+        span_attributes = {
+            **get_langtrace_attributes(version, SERVICE_PROVIDERS["CEREBRAS"]),
+            **get_llm_request_attributes(kwargs, prompts=llm_prompts),
+            **get_llm_url(instance),
+            **get_extra_attributes(),
+        }
+
+        span_name = f"{span_attributes[SpanAttributes.LLM_OPERATION_NAME]} {span_attributes[SpanAttributes.LLM_REQUEST_MODEL]}"
+        with tracer.start_as_current_span(
+            name=span_name,
+            kind=SpanKind.CLIENT,
+            attributes=span_attributes,
+            end_on_exit=False,
+        ) as span:
+
+            try:
+                _set_input_attributes(span, kwargs, span_attributes)
+                result = wrapped(*args, **kwargs)
+                if is_streaming(kwargs):
+                    return StreamWrapper(result, span)
+
+                if span.is_recording():
+                    _set_response_attributes(span, result)
+                span.end()
+                return result
+
+            except Exception as error:
+                handle_span_error(span, error)
+                raise
+
+    return traced_method
+
+
+def async_chat_completions_create(version: str, tracer):
+    async def traced_method(wrapped, instance, args, kwargs):
+        llm_prompts = []
+        for message in kwargs.get("messages", []):
+            llm_prompts.append(message)
+
+        span_attributes = {
+            **get_langtrace_attributes(version, SERVICE_PROVIDERS["CEREBRAS"]),
+            **get_llm_request_attributes(kwargs, prompts=llm_prompts),
+            **get_llm_url(instance),
+            **get_extra_attributes(),
+        }
+
+        span_name = f"{span_attributes[SpanAttributes.LLM_OPERATION_NAME]} {span_attributes[SpanAttributes.LLM_REQUEST_MODEL]}"
+        with tracer.start_as_current_span(
+            name=span_name,
+            kind=SpanKind.CLIENT,
+            attributes=span_attributes,
+            end_on_exit=False,
+        ) as span:
+
+            try:
+                _set_input_attributes(span, kwargs, span_attributes)
+                result = await wrapped(*args, **kwargs)
+                if is_streaming(kwargs):
+                    return StreamWrapper(result, span)
+
+                if span.is_recording():
+                    _set_response_attributes(span, result)
+                span.end()
+                return result
+
+            except Exception as error:
+                handle_span_error(span, error)
+                raise
+
+    return traced_method
+
+
+@silently_fail
+def _set_response_attributes(span, result):
+    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, result.model)
+
+    if getattr(result, "id", None):
+        set_span_attribute(span, SpanAttributes.LLM_RESPONSE_ID, result.id)
+
+    if getattr(result, "choices", None):
+        responses = [
+            {
+                "role": (
+                    choice.message.role
+                    if choice.message and choice.message.role
+                    else "assistant"
+                ),
+                "content": extract_content(choice),
+                **(
+                    {"content_filter_results": choice.content_filter_results}
+                    if hasattr(choice, "content_filter_results")
+                    else {}
+                ),
+            }
+            for choice in result.choices
+        ]
+        set_event_completion(span, responses)
+    # Get the usage
+    if getattr(result, "usage", None):
+        set_span_attribute(
+            span,
+            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+            result.usage.prompt_tokens,
+        )
+        set_span_attribute(
+            span,
+            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+            result.usage.completion_tokens,
+        )
+
+
+@silently_fail
+def _set_input_attributes(span, kwargs, attributes):
+    set_span_attributes(span, attributes)
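Both traced methods open their span with end_on_exit=False: non-streaming calls set the response attributes and end the span explicitly, while streaming calls return the result wrapped in StreamWrapper, which is presumably what closes the span once the stream is consumed (mirroring the SDK's other integrations). Illustrative consumption of a traced streaming call, reusing the client from the example file above:

stream = client.chat.completions.create(
    model="llama3.1-8b",
    messages=[{"role": "user", "content": "ping"}],
    stream=True,
)
for chunk in stream:
    pass  # the span is assumed to stay open until the wrapped stream is exhausted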
langtrace_python_sdk/langtrace.py
@@ -64,6 +64,7 @@ from langtrace_python_sdk.instrumentation import (
     AutogenInstrumentation,
     VertexAIInstrumentation,
     WeaviateInstrumentation,
+    CerebrasInstrumentation,
 )
 from opentelemetry.util.re import parse_env_headers
 
@@ -151,7 +152,7 @@ def get_exporter(config: LangtraceConfig, host: str):
     headers = get_headers(config)
     host = f"{host}/api/trace" if host == LANGTRACE_REMOTE_URL else host
     if "http" in host.lower() or "https" in host.lower():
-        return LangTraceExporter(host, config.api_key, config.disable_logging)
+        return HTTPExporter(endpoint=host, headers=headers)
     else:
         return GRPCExporter(endpoint=host, headers=headers)
 
@@ -281,6 +282,7 @@ def init(
         "mistralai": MistralInstrumentation(),
         "boto3": AWSBedrockInstrumentation(),
         "autogen": AutogenInstrumentation(),
+        "cerebras-cloud-sdk": CerebrasInstrumentation(),
     }
 
     init_instrumentations(config.disable_instrumentations, all_instrumentations)
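Two notes on the langtrace.py changes above: HTTP(S) hosts now receive an OTLP HTTP exporter built from the configured headers instead of the SDK's LangTraceExporter, and the Cerebras instrumentor is keyed on the cerebras-cloud-sdk package name, so it is presumably only enabled when that SDK is installed. Assuming HTTPExporter aliases OpenTelemetry's OTLP/HTTP span exporter (as the GRPCExporter naming suggests), the new branch is roughly equivalent to this standalone sketch with hypothetical endpoint and header values:

from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter

exporter = OTLPSpanExporter(
    endpoint="https://collector.example.com/api/trace",  # hypothetical host
    headers={"x-api-key": "<your key>"},                 # hypothetical headers
)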
langtrace_python_sdk/utils/__init__.py
@@ -1,6 +1,11 @@
 from langtrace_python_sdk.types import NOT_GIVEN, InstrumentationType
 from .sdk_version_checker import SDKVersionChecker
 from opentelemetry.trace import Span
+from opentelemetry.semconv.attributes import (
+    error_attributes as ErrorAttributes,
+)
+from opentelemetry.trace.status import Status, StatusCode
+
 from langtrace.trace_attributes import SpanAttributes
 import inspect
 import os
@@ -90,3 +95,10 @@ def is_package_installed(package_name):
 
     installed_packages = {p.key for p in pkg_resources.working_set}
     return package_name in installed_packages
+
+
+def handle_span_error(span: Span, error):
+    span.set_status(Status(StatusCode.ERROR, str(error)))
+    if span.is_recording():
+        span.set_attribute(ErrorAttributes.ERROR_TYPE, type(error).__qualname__)
+    span.end()
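handle_span_error is what the Cerebras patch calls from its except blocks: it marks the span as errored, records the exception type, and ends the span manually (the patch opens spans with end_on_exit=False). A small usage sketch:

from opentelemetry import trace
from langtrace_python_sdk.utils import handle_span_error

span = trace.get_tracer(__name__).start_span("chat llama3.1-8b")
try:
    raise RuntimeError("upstream failure")  # stand-in for a failing SDK call
except Exception as err:
    handle_span_error(span, err)  # sets ERROR status and error.type, then ends the span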
langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "3.2.0"
+__version__ = "3.3.1"
langtrace_python_sdk-3.3.1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: langtrace-python-sdk
-Version: 3.2.0
+Version: 3.3.1
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -315,6 +315,7 @@ Langtrace automatically captures traces from the following vendors:
 | Langchain | Framework | :x: | :white_check_mark: |
 | Langgraph | Framework | :x: | :white_check_mark: |
 | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
+| AWS Bedrock | Framework | :white_check_mark: | :white_check_mark: |
 | LiteLLM | Framework | :x: | :white_check_mark: |
 | DSPy | Framework | :x: | :white_check_mark: |
 | CrewAI | Framework | :x: | :white_check_mark: |
langtrace_python_sdk-3.3.1.dist-info/RECORD
@@ -7,6 +7,8 @@ examples/awsbedrock_examples/__init__.py,sha256=MMaW1756Hqv8rRX6do_O_-SIfauLzoYx
 examples/awsbedrock_examples/converse.py,sha256=vra4yfXYynWyFenoO8wdUnksPx_o481BQlpuWkddLZY,1024
 examples/azureopenai_example/__init__.py,sha256=PaZM90r6VN4eSOXxb6wGsyhf9-RJCNqBypzk1Xa2GJI,271
 examples/azureopenai_example/completion.py,sha256=K_GeU0TfJ9lLDfW5VI0Lmm8_I0JXf1x9Qi83ImJ350c,668
+examples/cerebras_example/__init__.py,sha256=ydfNi0DjFMGVcfo79XVG3VEbzIrHo5wYBgSJzl_asNA,295
+examples/cerebras_example/main.py,sha256=QrzQLTEr0dkrrPrlOPqwXkeeGU4dwc8tPR4LhHPOQ3k,6573
 examples/chroma_example/__init__.py,sha256=Mrf8KptW1hhzu6WDdRRTxbaB-0kM7x5u-Goc_zR7G5c,203
 examples/chroma_example/basic.py,sha256=oO7-__8HptnFXLVKbnPgoz02yM-CAPi721xsbUb_gYg,868
 examples/cohere_example/__init__.py,sha256=oYpsnS-dKOlWLkPEUWhXxi1vfxa77bt_DOdkJHg__7g,502
@@ -100,8 +102,8 @@ examples/vertexai_example/main.py,sha256=gndId5X5ksD-ycxnAWMdEqIDbLc3kz5Vt8vm4YP
 examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56snk-Bbg2Kw,618
 examples/weaviate_example/query_text.py,sha256=wPHQTc_58kPoKTZMygVjTj-2ZcdrIuaausJfMxNQnQc,127162
 langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
-langtrace_python_sdk/langtrace.py,sha256=tqdGqEeVoAq0QhzY0l_BWXGwU25hUQkGIoh00gvFi3c,12421
-langtrace_python_sdk/version.py,sha256=OUX37Yd6ZO82d0GJL2dmK0gZTtc_xvlTvGQIl2I-D8k,22
+langtrace_python_sdk/langtrace.py,sha256=IBl7tapb5Mig5362WdIoZUmYWE-ZuqFe2Bk4IJxk-Xs,12488
+langtrace_python_sdk/version.py,sha256=XErusAMGUPwBUpdA6BLyq8CjU-6n6gLlBRymgSC8Y-0,22
 langtrace_python_sdk/constants/__init__.py,sha256=3CNYkWMdd1DrkGqzLUgNZXjdAlM6UFMlf_F-odAToyc,146
 langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=d-3Qn5C_NTy1NkmdavZvy-6vePwTC5curN6QMy2haHc,50
 langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -109,7 +111,7 @@ langtrace_python_sdk/constants/instrumentation/anthropic.py,sha256=YX3llt3zwDY6X
 langtrace_python_sdk/constants/instrumentation/aws_bedrock.py,sha256=f9eukqoxrPgPeaBJX2gpBUz1uu0dZIPahOpvoudfbH8,310
 langtrace_python_sdk/constants/instrumentation/chroma.py,sha256=hiPGYdHS0Yj4Kh3eaYBbuCAl_swqIygu80yFqkOgdak,955
 langtrace_python_sdk/constants/instrumentation/cohere.py,sha256=tf9sDfb5K3qOAHChEE5o8eYWPZ1io58VsOjZDCZPxfw,577
-langtrace_python_sdk/constants/instrumentation/common.py,sha256=yOtmk9R_u1G2RP5edodmJ5O4I1ebl7HuHKDzd0XSwPw,1097
+langtrace_python_sdk/constants/instrumentation/common.py,sha256=gs7xupRbUW_4UjXFnmqiLGRktA8lllXHRcOLeQFKek4,1125
 langtrace_python_sdk/constants/instrumentation/embedchain.py,sha256=HodCJvaFjILoOG50OwFObxfVxt_8VUaIAIqvgoN3tzo,278
 langtrace_python_sdk/constants/instrumentation/gemini.py,sha256=UAmfgg9FM7uNeOCdPfWlir6OIH-8BoxFGPRpdBd9ZZs,358
 langtrace_python_sdk/constants/instrumentation/groq.py,sha256=VFXmIl4aqGY_fS0PAmjPj_Qm7Tibxbx7Ur_e7rQpqXc,134
@@ -124,7 +126,7 @@ langtrace_python_sdk/constants/instrumentation/weaviate.py,sha256=gtv-JBxvNGClEM
 langtrace_python_sdk/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=UFupNL03zklVd5penpsfXjbWSb5qB39mEv2BY2wczSs,6307
 langtrace_python_sdk/extensions/langtrace_filesystem.py,sha256=34fZutG28EJ66l67OvTGsydAH3ZpXgikdE7hVLqBpG4,7863
-langtrace_python_sdk/instrumentation/__init__.py,sha256=MUMbmAQ7YcnmhtitJT8QLVMqDdDjI4WtloctWf_jvJs,1780
+langtrace_python_sdk/instrumentation/__init__.py,sha256=0wSimp9RM2Z8OpGW-h-ftDCWjXDcer0UAWg-BYXyZ0s,1857
 langtrace_python_sdk/instrumentation/anthropic/__init__.py,sha256=donrurJAGYlxrSRA3BIf76jGeUcAx9Tq8CVpah68S0Y,101
 langtrace_python_sdk/instrumentation/anthropic/instrumentation.py,sha256=ndXdruI0BG7n75rsuEpKjfzePxrZxg40gZ39ONmD_v4,1845
 langtrace_python_sdk/instrumentation/anthropic/patch.py,sha256=ztPN4VZujoxYOKhTbFnup7Ibms9NAzYCPAJY43NUgKw,4935
@@ -135,6 +137,9 @@ langtrace_python_sdk/instrumentation/autogen/patch.py,sha256=mp6WxHYVqTXvqZOi6Cn
 langtrace_python_sdk/instrumentation/aws_bedrock/__init__.py,sha256=IHqPgR1kdDvcoV1nUb-B21PaJ_qbQB0jc011Udi1ioU,96
 langtrace_python_sdk/instrumentation/aws_bedrock/instrumentation.py,sha256=2l-WiyWYUEoGre92rmylq2jPZ5w4jcxTXmCTuQNC1RU,1911
 langtrace_python_sdk/instrumentation/aws_bedrock/patch.py,sha256=VAroMezSGKT2jQ5tggbdiMRIPr9mtLItGJJgZ-xoGls,6296
+langtrace_python_sdk/instrumentation/cerebras/__init__.py,sha256=9rHNg7PWcZ7a9jExQZlqwWPkvLGcPT-DGWot0_6Bx9k,92
+langtrace_python_sdk/instrumentation/cerebras/instrumentation.py,sha256=WPsaYxHanYnoxGjDk7fILGJSnSRUs_zoQ30JCyPBMII,1927
+langtrace_python_sdk/instrumentation/cerebras/patch.py,sha256=HR4slOrE3pMp0ABafnlYeTK61G-EnGhOgq3pd9A_G88,4697
 langtrace_python_sdk/instrumentation/chroma/__init__.py,sha256=pNZ5UO8Q-d5VkXSobBf79reB6AmEl_usnnTp5Itv818,95
 langtrace_python_sdk/instrumentation/chroma/instrumentation.py,sha256=nT6PS6bsrIOO9kLV5GuUeRjMe6THHHAZGvqWBP1dYog,1807
 langtrace_python_sdk/instrumentation/chroma/patch.py,sha256=jYcqBeu-0cYA29PO880oXYRwYh-R1oseXmzfK6UDBps,9074
@@ -198,7 +203,7 @@ langtrace_python_sdk/instrumentation/weaviate/__init__.py,sha256=Mc-Je6evPo-kKQz
 langtrace_python_sdk/instrumentation/weaviate/instrumentation.py,sha256=Kwq5QQTUQNRHrWrMnNe9X0TcqtXGiNpBidsuToRTqG0,2417
 langtrace_python_sdk/instrumentation/weaviate/patch.py,sha256=aWLDbNGz35V6XQUv4lkMD0O689suqh6KdTa33VDtUkE,6905
 langtrace_python_sdk/types/__init__.py,sha256=2VykM6fNHRlkOaIEUCdK3VyaaVgk2rTIr9jMmCVj2Ag,4676
-langtrace_python_sdk/utils/__init__.py,sha256=O-Ra9IDd1MnxihdQUC8HW_wYFhk7KbTCK2BIl02yacQ,2935
+langtrace_python_sdk/utils/__init__.py,sha256=VVDOG-QLd59ZvSHp0avjof0sbxlZ1QQOf0KoOF7ofhQ,3310
 langtrace_python_sdk/utils/langtrace_sampler.py,sha256=BupNndHbU9IL_wGleKetz8FdcveqHMBVz1bfKTTW80w,1753
 langtrace_python_sdk/utils/llm.py,sha256=mA7nEpndjKwPY3LfYV8hv-83xrDlD8MSTW8mItp5tXI,14953
 langtrace_python_sdk/utils/misc.py,sha256=LaQr5LOmZMiuwVdjYh7aIu6o2C_Xb1wgpQGNOVmRzfE,1918
@@ -249,8 +254,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
 tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
 tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
 tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
-langtrace_python_sdk-3.2.0.dist-info/METADATA,sha256=_I75AdE1KxO_pccerUpV0G1A2gpAN2hv85Sc_m0VfNg,15905
-langtrace_python_sdk-3.2.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-langtrace_python_sdk-3.2.0.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
-langtrace_python_sdk-3.2.0.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-langtrace_python_sdk-3.2.0.dist-info/RECORD,,
+langtrace_python_sdk-3.3.1.dist-info/METADATA,sha256=S13-jNowYfopmvPCqaqKnazad-r6iOp_DNop-VrgAps,15996
+langtrace_python_sdk-3.3.1.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+langtrace_python_sdk-3.3.1.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+langtrace_python_sdk-3.3.1.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+langtrace_python_sdk-3.3.1.dist-info/RECORD,,