langtrace-python-sdk 2.2.6__py3-none-any.whl → 2.2.8__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in those registries.
@@ -1,10 +1,12 @@
  import fsspec
+ from dotenv import find_dotenv, load_dotenv
  from inspect_ai import Task, task
  from inspect_ai.dataset import csv_dataset, Sample
  from inspect_ai.scorer import model_graded_qa
  from inspect_ai.solver import chain_of_thought, self_critique
  from langtrace_python_sdk.extensions.langtrace_filesystem import LangTraceFileSystem

+ _ = load_dotenv(find_dotenv())

  # Manually register the filesystem with fsspec
  # Note: This is only necessary because the filesystem is not registered.
@@ -24,9 +26,9 @@ def hydrate_with_question(record):


  @task
- def pricing_question():
+ def basic_eval():
      return Task(
-         dataset=csv_dataset("langtracefs://clyythmcs0001145cuvi426zi", hydrate_with_question),
+         dataset=csv_dataset("langtracefs://clz0p4i1t000fwv0xjtlvkxyx"),
          plan=[chain_of_thought(), self_critique()],
          scorer=model_graded_qa(),
      )
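The comment about manual registration in the hunk above refers to wiring LangTraceFileSystem into fsspec before csv_dataset can resolve a langtracefs:// URL. A minimal sketch of that wiring, using a placeholder dataset ID rather than the real one from the diff:

import fsspec
from inspect_ai.dataset import csv_dataset
from langtrace_python_sdk.extensions.langtrace_filesystem import LangTraceFileSystem

# Register the custom filesystem under its protocol name ("langtracefs")
fsspec.register_implementation(LangTraceFileSystem.protocol, LangTraceFileSystem)

# Placeholder ID; use a dataset ID from your own Langtrace project
dataset = csv_dataset("langtracefs://<your-dataset-id>")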
@@ -0,0 +1,40 @@
+ from dotenv import find_dotenv, load_dotenv
+ from openai import OpenAI
+ from langtrace_python_sdk import langtrace, with_langtrace_root_span, SendUserFeedback
+
+ _ = load_dotenv(find_dotenv())
+
+ # Initialize Langtrace SDK
+ langtrace.init()
+ client = OpenAI()
+
+
+ def api(span_id, trace_id):
+     response = client.chat.completions.create(
+         model="gpt-4o-mini",
+         messages=[
+             {"role": "user", "content": "What is the best place to live in the US?"},
+         ],
+         stream=False,
+     )
+
+     # Collect user feedback and send it to Langtrace
+     user_score = 1  # Example user score
+     user_id = 'user_1234'  # Example user ID
+     data = {
+         "userScore": user_score,
+         "userId": user_id,
+         "spanId": span_id,
+         "traceId": trace_id
+     }
+     SendUserFeedback().evaluate(data=data)
+
+     # Return the response
+     return response.choices[0].message.content
+
+
+ # wrap the API call with the Langtrace root span
+ wrapped_api = with_langtrace_root_span()(api)
+
+ # Call the wrapped API
+ wrapped_api()
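The last two lines above rely on with_langtrace_root_span passing the root span's span_id and trace_id into the wrapped function, which is how api() gets the IDs it forwards to SendUserFeedback. A sketch of the equivalent decorator form, assuming the wrapper keeps injecting the IDs the same way it does for wrapped_api:

@with_langtrace_root_span("api")
def api(span_id=None, trace_id=None):
    # same body as above: call the model, then send
    # {"spanId": span_id, "traceId": trace_id, ...} via SendUserFeedback().evaluate()
    ...

api()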
@@ -0,0 +1,59 @@
+ # Instructions
+ # 1. Run the OpenTelemetry Collector with the OTLP receiver enabled
+ # Create otel-config.yaml with the following content:
+ # receivers:
+ #   otlp:
+ #     protocols:
+ #       grpc:
+ #         endpoint: "0.0.0.0:4317"
+ #       http:
+ #         endpoint: "0.0.0.0:4318"
+
+ # exporters:
+ #   logging:
+ #     loglevel: debug
+
+ # service:
+ #   pipelines:
+ #     traces:
+ #       receivers: [otlp]
+ #       exporters: [logging]
+ # docker pull otel/opentelemetry-collector:latest
+ # docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml
+ # 2. Run the following code
+
+ from langtrace_python_sdk import langtrace
+ from openai import OpenAI
+ from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+
+
+ # Configure the OTLP exporter to use the correct endpoint and API key
+ otlp_endpoint = "http://localhost:4318/v1/traces"
+ otlp_exporter = OTLPSpanExporter(
+     endpoint=otlp_endpoint,
+     headers=(("Content-Type", "application/json"),))
+ langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)
+
+
+ def chat_with_openai():
+     client = OpenAI()
+     messages = [
+         {
+             "role": "user",
+             "content": "Hello, I'm a human.",
+         },
+     ]
+     chat_completion = client.chat.completions.create(
+         messages=messages,
+         stream=False,
+         model="gpt-3.5-turbo",
+     )
+     print(chat_completion.choices[0].message.content)
+
+
+ def main():
+     chat_with_openai()
+
+
+ if __name__ == "__main__":
+     main()
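The collector config in the comments also opens the gRPC port (4317), so the same example can be pointed at it with the gRPC flavour of the exporter. A minimal sketch, assuming the OTLP gRPC exporter package is installed:

from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from langtrace_python_sdk import langtrace

# Plain-text gRPC to the local collector started by the docker command above
otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317", insecure=True)
langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)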
@@ -0,0 +1,41 @@
+ import sys
+
+ sys.path.insert(0, "/Users/karthikkalyanaraman/work/langtrace/langtrace-python-sdk/src")
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+ from routellm.controller import Controller
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+ langtrace.init()
+
+ # litellm.set_verbose=True
+ client = Controller(
+     routers=["mf"],
+     strong_model="claude-3-opus-20240229",
+     weak_model="claude-3-opus-20240229",
+ )
+
+
+ @with_langtrace_root_span("Routellm")
+ def Routellm(prompt):
+     try:
+
+         response = client.chat.completions.create(
+             model="router-mf-0.11593", messages=[{"role": "user", "content": prompt}]
+         )
+
+         for chunk in response:
+             if hasattr(chunk, "choices"):
+                 print(chunk.choices[0].delta.content or "", end="")
+             else:
+                 print(chunk)
+
+     except Exception as e:
+         print(f"An error occurred: {e}")
+
+
+ Routellm("what is the square root of 12182382932.99")
+ Routellm("Write me a short story")
@@ -27,13 +27,25 @@ class OpenMode(str):


  class LangTraceFile(io.BytesIO):
-     _host: str = os.environ.get("LANGTRACE_API_HOST", None) or LANGTRACE_REMOTE_URL

      def __init__(self, fs: "LangTraceFileSystem", path: str, mode: OpenMode):
          super().__init__()
          self.fs = fs
          self.path = path
          self.mode = mode
+         self._host: str = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL)
+         self._api_key: str = os.environ.get("LANGTRACE_API_KEY", None)
+         if self._host.endswith("/api/trace"):
+             self._host = self._host.replace("/api/trace", "")
+
+         if self._api_key is None:
+             print(Fore.RED)
+             print(
+                 f"Missing Langtrace API key, proceed to {self._host} to create one"
+             )
+             print("Set the API key as an environment variable LANGTRACE_API_KEY")
+             print(Fore.RESET)
+             return

      def close(self) -> None:
          if not self.closed:
@@ -71,7 +83,7 @@ class LangTraceFile(io.BytesIO):
              data=json.dumps(data),
              headers={
                  "Content-Type": "application/json",
-                 "x-api-key": os.environ.get("LANGTRACE_API_KEY"),
+                 "x-api-key": self._api_key,
              },
              timeout=20,
          )
@@ -82,7 +94,6 @@ class LangTraceFile(io.BytesIO):


  class LangTraceFileSystem(AbstractFileSystem):
-     _host: str = os.environ.get("LANGTRACE_API_HOST", None) or LANGTRACE_REMOTE_URL
      protocol = "langtracefs"
      sep = "/"

@@ -90,6 +101,19 @@ class LangTraceFileSystem(AbstractFileSystem):
          super().__init__(*args, **kwargs)
          self.files = {}
          self.dirs = set()
+         self._host: str = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL)
+         self._api_key: str = os.environ.get("LANGTRACE_API_KEY", None)
+         if self._host.endswith("/api/trace"):
+             self._host = self._host.replace("/api/trace", "")
+
+         if self._api_key is None:
+             print(Fore.RED)
+             print(
+                 f"Missing Langtrace API key, proceed to {self._host} to create one"
+             )
+             print("Set the API key as an environment variable LANGTRACE_API_KEY")
+             print(Fore.RESET)
+             return

      def open(
          self,
@@ -118,7 +142,7 @@ class LangTraceFileSystem(AbstractFileSystem):
              url=f"{self._host}/api/dataset/download?id={dataset_id}",
              headers={
                  "Content-Type": "application/json",
-                 "x-api-key": os.environ.get("LANGTRACE_API_KEY"),
+                 "x-api-key": self._api_key,
              },
              timeout=20,
          )
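Both constructors now resolve the host and API key themselves instead of reading a class-level _host, so a caller only needs the two environment variables in place before touching langtracefs:// paths. A minimal sketch with placeholder values; the trailing "/api/trace" is stripped by the endswith branch shown above:

import os

os.environ.setdefault("LANGTRACE_API_HOST", "https://langtrace.example.com/api/trace")  # placeholder host
os.environ.setdefault("LANGTRACE_API_KEY", "<your-langtrace-api-key>")  # placeholder key

import fsspec
from langtrace_python_sdk.extensions.langtrace_filesystem import LangTraceFileSystem

fsspec.register_implementation(LangTraceFileSystem.protocol, LangTraceFileSystem)
fs = LangTraceFileSystem()  # warns in red and returns early if the API key is missing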
@@ -44,7 +44,7 @@ def rerank(original_method, version, tracer):

          span_attributes = {
              **get_langtrace_attributes(version, service_provider),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="rerank"),
              **get_llm_url(instance),
              SpanAttributes.LLM_REQUEST_MODEL: kwargs.get("model") or "command-r-plus",
              SpanAttributes.LLM_URL: APIS["RERANK"]["URL"],
@@ -121,7 +121,7 @@ def embed(original_method, version, tracer):

          span_attributes = {
              **get_langtrace_attributes(version, service_provider),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="embed"),
              **get_llm_url(instance),
              SpanAttributes.LLM_URL: APIS["EMBED"]["URL"],
              SpanAttributes.LLM_PATH: APIS["EMBED"]["ENDPOINT"],
@@ -55,7 +55,7 @@ def images_generate(original_method, version, tracer):
          service_provider = SERVICE_PROVIDERS["OPENAI"]
          span_attributes = {
              **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="images_generate"),
              **get_llm_url(instance),
              SpanAttributes.LLM_PATH: APIS["IMAGES_GENERATION"]["ENDPOINT"],
              **get_extra_attributes(),
@@ -118,7 +118,7 @@ def async_images_generate(original_method, version, tracer):

          span_attributes = {
              **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="images_generate"),
              **get_llm_url(instance),
              SpanAttributes.LLM_PATH: APIS["IMAGES_GENERATION"]["ENDPOINT"],
              **get_extra_attributes(),
@@ -181,7 +181,7 @@ def images_edit(original_method, version, tracer):

          span_attributes = {
              **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="images_edit"),
              **get_llm_url(instance),
              SpanAttributes.LLM_PATH: APIS["IMAGES_EDIT"]["ENDPOINT"],
              SpanAttributes.LLM_RESPONSE_FORMAT: kwargs.get("response_format"),
@@ -432,7 +432,7 @@ def embeddings_create(original_method, version, tracer):

          span_attributes = {
              **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="embed"),
              **get_llm_url(instance),
              SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"],
              SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"),
@@ -490,7 +490,7 @@ def async_embeddings_create(original_method, version, tracer):

          span_attributes = {
              **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
-             **get_llm_request_attributes(kwargs),
+             **get_llm_request_attributes(kwargs, operation_name="embed"),
              SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"],
              SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"),
              **get_extra_attributes(),
@@ -92,7 +92,7 @@ def get_langtrace_attributes(version, service_provider, vendor_type="llm"):
      }


- def get_llm_request_attributes(kwargs, prompts=None, model=None):
+ def get_llm_request_attributes(kwargs, prompts=None, model=None, operation_name="chat"):

      user = kwargs.get("user", None)
      if prompts is None:
@@ -111,6 +111,7 @@ def get_llm_request_attributes(kwargs, prompts=None, model=None):
      top_p = kwargs.get("p", None) or kwargs.get("top_p", None)
      tools = kwargs.get("tools", None)
      return {
+         SpanAttributes.LLM_OPERATION_NAME: operation_name,
          SpanAttributes.LLM_REQUEST_MODEL: model or kwargs.get("model"),
          SpanAttributes.LLM_IS_STREAMING: kwargs.get("stream"),
          SpanAttributes.LLM_REQUEST_TEMPERATURE: kwargs.get("temperature"),
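Taken together, the instrumentation hunks above now label every span with an operation name, defaulting to "chat" when a call site does not pass one. A quick sketch of the effect, assuming the helper is imported from langtrace_python_sdk/utils/llm.py as listed in the RECORD below:

from langtrace_python_sdk.utils.llm import get_llm_request_attributes

# Explicit operation name, as the Cohere rerank patch now passes
rerank_attrs = get_llm_request_attributes({"model": "command-r-plus"}, operation_name="rerank")

# No operation name given: the new default "chat" is recorded under
# SpanAttributes.LLM_OPERATION_NAME
chat_attrs = get_llm_request_attributes({"model": "gpt-4o-mini"})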
@@ -25,6 +25,9 @@ from opentelemetry import baggage, context, trace
  from opentelemetry.trace import SpanKind
  from opentelemetry.trace.propagation import set_span_in_context

+ from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
+     LANGTRACE_REMOTE_URL,
+ )
  from langtrace_python_sdk.constants.instrumentation.common import (
      LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
  )
@@ -142,7 +145,10 @@ class SendUserFeedback:
      _langtrace_api_key: str

      def __init__(self):
-         self._langtrace_host = os.environ["LANGTRACE_API_HOST"]
+         self._langtrace_host = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL)
+         # When the host is set to /api/trace, remove the /api/trace
+         if self._langtrace_host.endswith("/api/trace"):
+             self._langtrace_host = self._langtrace_host.replace("/api/trace", "")
          self._langtrace_api_key = os.environ.get("LANGTRACE_API_KEY", None)

      def evaluate(self, data: EvaluationAPIData) -> None:
@@ -155,6 +161,16 @@ class SendUserFeedback:
              print("Set the API key as an environment variable LANGTRACE_API_KEY")
              print(Fore.RESET)
              return
+
+         # convert spanId and traceId to hexadecimals
+         span_hex_number = hex(int(data["spanId"], 10))[2:]  # Convert to hex and remove the '0x' prefix
+         formatted_span_hex_number = span_hex_number.zfill(16)  # Pad with zeros to 16 characters
+         data["spanId"] = f"0x{formatted_span_hex_number}"
+
+         trace_hex_number = hex(int(data["traceId"], 10))[2:]  # Convert to hex and remove the '0x' prefix
+         formatted_trace_hex_number = trace_hex_number.zfill(32)  # Pad with zeros to 32 characters
+         data["traceId"] = f"0x{formatted_trace_hex_number}"
+
          evaluation = self.get_evaluation(data["spanId"])
          headers = {"x-api-key": self._langtrace_api_key}
          if evaluation is not None:
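The conversion above turns the decimal spanId and traceId strings in the feedback payload into the zero-padded hexadecimal form the Langtrace API expects: 16 hex characters for a span ID, 32 for a trace ID, each prefixed with "0x". A worked sketch with hypothetical decimal IDs:

raw_span_id = "1234567890"   # hypothetical decimal span ID
raw_trace_id = "9876543210"  # hypothetical decimal trace ID

span_hex = hex(int(raw_span_id, 10))[2:].zfill(16)    # '00000000499602d2'
trace_hex = hex(int(raw_trace_id, 10))[2:].zfill(32)  # 32 chars, left-padded with zeros

data = {"spanId": f"0x{span_hex}", "traceId": f"0x{trace_hex}"}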
@@ -1 +1 @@
- __version__ = "2.2.6"
+ __version__ = "2.2.8"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: langtrace-python-sdk
- Version: 2.2.6
+ Version: 2.2.8
  Summary: Python SDK for LangTrace
  Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
  Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -20,7 +20,7 @@ Requires-Dist: opentelemetry-instrumentation>=0.46b0
  Requires-Dist: opentelemetry-sdk>=1.25.0
  Requires-Dist: sqlalchemy
  Requires-Dist: tiktoken>=0.1.1
- Requires-Dist: trace-attributes<7.0.0,>=6.0.3
+ Requires-Dist: trace-attributes==7.0.0
  Provides-Extra: dev
  Requires-Dist: anthropic; extra == 'dev'
  Requires-Dist: chromadb; extra == 'dev'
@@ -239,7 +239,7 @@ def main():
  from langtrace_python_sdk import with_langtrace_root_span, with_additional_attributes


- @with_additional_attributes({"user.id": "1234", "user.feedback.rating": 1})
+ @with_additional_attributes({"user.id": "1234"})
  def api_call1():
      response = client.chat.completions.create(
          model="gpt-4",
@@ -249,7 +249,7 @@ def api_call1():
      return response


- @with_additional_attributes({"user.id": "5678", "user.feedback.rating": -1})
+ @with_additional_attributes({"user.id": "5678"})
  def api_call2():
      response = client.chat.completions.create(
          model="gpt-4",
@@ -293,6 +293,7 @@ Langtrace automatically captures traces from the following vendors:
  | CrewAI | Framework | :x: | :white_check_mark: |
  | Ollama | Framework | :x: | :white_check_mark: |
  | VertexAI | Framework | :x: | :white_check_mark: |
+ | Vercel AI SDK| Framework | :white_check_mark: | :x: |
  | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
  | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
  | QDrant | Vector Database | :white_check_mark: | :white_check_mark: |
@@ -21,7 +21,7 @@ examples/gemini_example/__init__.py,sha256=omVgLyIiLc3c0zwy3vTtYKdeenYEXzEbLZsYi
  examples/gemini_example/function_tools.py,sha256=ZOBrdPy_8s3NDfsF5A4RXIoUi2VXlD8og4UsWz_8AuQ,2700
  examples/gemini_example/main.py,sha256=cTXqgOa6lEMwgX56uneM-1TbIY_QZtDRkByW5z0LpNk,2470
  examples/hiveagent_example/basic.py,sha256=Sd7I5w8w5Xx7ODaydTY30yiq9HwJDMKHQywrZjgehP0,441
- examples/inspect_ai_example/basic_eval.py,sha256=zNrAyyhFEHnDcl8AUBMyCToYxcQ_Easp5eYaRV5hCYs,994
+ examples/inspect_ai_example/basic_eval.py,sha256=hDg2BB9ONNpOGRVH08HsghnS1373sOnq6dyDmUQd9gY,1040
  examples/langchain_example/__init__.py,sha256=xAys_K5AbVqaJ8d5wCcE6w2tCiTXPkSGMyY9paBXitI,410
  examples/langchain_example/basic.py,sha256=hrwMHOUv78-su5DP9i5krkQnMGHq0svEXsBa40Jkggg,2981
  examples/langchain_example/groq_example.py,sha256=egrg3FHCnSJ-kV22Z2_t9ElJfKilddfcO5bwcKCfc5M,1060
@@ -33,7 +33,6 @@ examples/llamaindex_example/basic.py,sha256=aFZngkye95sjUr4wc2Uo_Je0iEexXpNcdlV0
  examples/llamaindex_example/data/abramov.txt,sha256=Ou-GyWZm5AjHLgxviBoRE9ikNv5MScsF0cd--0vVVhI,32667
  examples/ollama_example/__init__.py,sha256=qOx0jGCPuSpRCPiqtDVm7F0z8hIZ8C75hDZ_C8Apz-s,399
  examples/ollama_example/basic.py,sha256=EPbsigOF4xBDBgLgAD0EzPo737ycVm7aXZr7F5Xt-A4,1062
- examples/ollama_example/basic_example_2.py,sha256=h6hLX4Mot6H8ezg970zrh5XFVyI4zMVHeYLhvAMTQlQ,953
  examples/openai_example/__init__.py,sha256=MU4CELvhe2EU6d4Okg-bTfjvfGxQO7PNzqMw1yrVeCA,828
  examples/openai_example/async_tool_calling_nonstreaming.py,sha256=H1-CrNfNDfqAkB5wEipITXlW2OsYL7XD5uQb6k3C6ps,3865
  examples/openai_example/async_tool_calling_streaming.py,sha256=LaSKmn_Unv55eTHXYdEmKjo39eNuB3ASOBV-m8U1HfU,7136
@@ -43,24 +42,27 @@ examples/openai_example/embeddings_create.py,sha256=kcOZpl5nhHo_NC-3n2yKX5W8mAzN
  examples/openai_example/function_calling.py,sha256=zz-JdCcpP7uCXG21EYXF1Y39IKj6gYt2fOP5N_ywpnc,2338
  examples/openai_example/images_edit.py,sha256=6dSKA40V39swSs1mWdWXSa0reK4tyNBkK9MM7V3IEPw,939
  examples/openai_example/images_generate.py,sha256=SZNY8Visk7JUpx5QhNxTNINHmPAGdCUayF-Q7_iCr50,470
+ examples/openai_example/send_user_feedback.py,sha256=iPeKPYlO4nt0bbyeY4wCAjQJHFDouCalOEVHpd467LY,1028
  examples/openai_example/tool_calling.py,sha256=_IV7KoSI_37u1TTZWdVa58BYjkDfhSurvM86xwaNNhY,2316
  examples/openai_example/tool_calling_nonstreaming.py,sha256=Yc848IooZRXNynHL6z0kOgJ4qbmL_NOufcb2VmWRukI,3847
  examples/openai_example/tool_calling_streaming.py,sha256=mV1RbyAoVhumGRPpqPWQ6PMhnJyeifrlELd2-K1qJ_w,7015
  examples/openai_example/resources/lounge_flamingo.png,sha256=aspniTtmWqwLp3YUhYqAe2ze8nJaq-bTSW7uUJudtd0,2416234
  examples/openai_example/resources/mask.png,sha256=mUE9Dfp-x8jI0Nh4WGr0P9pueUqEZfpjwxR-6Rxzxz4,2483660
  examples/otlp_example/otlp_basic.py,sha256=Ykbzu6EpO-V1wQsPePgC16eLFVym91r-ZR-SDj2mIT0,1346
+ examples/otlp_example/otlp_with_langtrace.py,sha256=0x5UOrisqaOSkBP8nbaNqL83_gB-jNM0Uq5FXZ9B4Q0,1535
  examples/perplexity_example/basic.py,sha256=bp7n27gaugJkaFVyt8pjaEfi66lYcqP6eFFjPewUShY,668
  examples/pinecone_example/__init__.py,sha256=_rvn7Ygt_QWMQoa5wB2GB0S9gZVrlJrPrEhXqU3hPKw,427
  examples/pinecone_example/basic.py,sha256=5MoHZMBxHMdC61oj-CP19gj9SxSvIcDrQL934JPZoQs,1549
  examples/qdrant_example/__init__.py,sha256=Ze9xEzW8FiHUO58YBa8JeHNOwcmo3dpYH77AkdyglKU,197
  examples/qdrant_example/basic.py,sha256=DCMjHSuBZKkhEjCkwy5d5La9WMyW0lCWqtcZWiFCEm4,1425
+ examples/routellm_example/basic.py,sha256=6XYfs9Wu2ty-Kv8yiC5U49qJuTNv728xWKQsImu3-ag,1054
  examples/vertexai_example/__init__.py,sha256=sEKULUwHdn-CJnbYs_jt4QPAUnM_fqwMBI3HJ1RBZco,83
  examples/vertexai_example/main.py,sha256=gndId5X5ksD-ycxnAWMdEqIDbLc3kz5Vt8vm4YPIk7I,5849
  examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56snk-Bbg2Kw,618
  examples/weaviate_example/query_text.py,sha256=sG8O-bXQpflBAiYpgE_M2X7GcHUlZNgl_wJW8_h-W6Q,127024
  langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
  langtrace_python_sdk/langtrace.py,sha256=1L0IjME-pzEYht92QfwByPZr3H1MClTrqQdoN1KyKJY,7689
- langtrace_python_sdk/version.py,sha256=qzqxcwWCwWgKw_eJA2nZPycPzwfpaSjAKO3MwNvDqgw,22
+ langtrace_python_sdk/version.py,sha256=YZuLtr55Kuq9kEWnoXMkj5a8XMpLccTIPbg7OO2S8KM,22
  langtrace_python_sdk/constants/__init__.py,sha256=P8QvYwt5czUNDZsKS64vxm9Dc41ptGbuF1TFtAF6nv4,44
  langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=5MNjnAOg-4am78J3gVMH6FSwq5N8TOj72ugkhsw4vi0,46
  langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -78,7 +80,7 @@ langtrace_python_sdk/constants/instrumentation/vertexai.py,sha256=0s2vX3Y0iwjOPk
  langtrace_python_sdk/constants/instrumentation/weaviate.py,sha256=gtv-JBxvNGClEMxClmRKzjJ1khgOonsli4D_k9IagSE,2601
  langtrace_python_sdk/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=gWVRU2DlB4xjZ4ww7M63DaLiAN5zQ2k1HPrythmjEdo,4202
- langtrace_python_sdk/extensions/langtrace_filesystem.py,sha256=qpnkpkuTZ2yhGgpBK64QJLt0T1iL-1zpEMPz4quJ_ng,6925
+ langtrace_python_sdk/extensions/langtrace_filesystem.py,sha256=34fZutG28EJ66l67OvTGsydAH3ZpXgikdE7hVLqBpG4,7863
  langtrace_python_sdk/instrumentation/__init__.py,sha256=yJd3aGu4kPfm2h6oe6kiCWvzTF9awpC1UztjXF9WSO4,1391
  langtrace_python_sdk/instrumentation/anthropic/__init__.py,sha256=donrurJAGYlxrSRA3BIf76jGeUcAx9Tq8CVpah68S0Y,101
  langtrace_python_sdk/instrumentation/anthropic/instrumentation.py,sha256=-srgE8qumAn0ulQYZxMa8ch-9IBH0XgBW_rfEnGk6LI,1684
@@ -88,7 +90,7 @@ langtrace_python_sdk/instrumentation/chroma/instrumentation.py,sha256=nT6PS6bsrI
  langtrace_python_sdk/instrumentation/chroma/patch.py,sha256=JfFc8SDfwkEyIwTd1yM6jwa1vu5hZH6IXyxAEcQQQOs,9010
  langtrace_python_sdk/instrumentation/cohere/__init__.py,sha256=sGUSLdTUyYf36Tm6L5jQflhzCqvmWrhnBOMYHjvp6Hs,95
  langtrace_python_sdk/instrumentation/cohere/instrumentation.py,sha256=YQFHZIBd7SSPD4b6Va-ZR0thf_AuBCqj5yzHLHJVWnM,2121
- langtrace_python_sdk/instrumentation/cohere/patch.py,sha256=zSPhROZsCU3ayfq25073zedCf9C1niJQ6oX8vGQKi4E,21126
+ langtrace_python_sdk/instrumentation/cohere/patch.py,sha256=_0mmJ9eCjokkJTQTXWYK2hSd5XY02_iu5PW6HB6DvQI,21175
  langtrace_python_sdk/instrumentation/crewai/__init__.py,sha256=_UBKfvQv7l0g2_wnmA5F6CdSAFH0atNOVPd49zsN3aM,88
  langtrace_python_sdk/instrumentation/crewai/instrumentation.py,sha256=q07x6nnig9JPxDT6ZylyIShfXWjNafKBetnNcA1UdEU,1836
  langtrace_python_sdk/instrumentation/crewai/patch.py,sha256=Vnpip9Pbk4UFbTFHoUrHtAnDgsaihwSvZBgtUeOtLr8,6109
@@ -121,7 +123,7 @@ langtrace_python_sdk/instrumentation/ollama/instrumentation.py,sha256=jdsvkqUJAA
  langtrace_python_sdk/instrumentation/ollama/patch.py,sha256=Twi3yeGgBj0DadBmZ0X0DsMPx71iSdL4R3OjOw3-p_E,8132
  langtrace_python_sdk/instrumentation/openai/__init__.py,sha256=VPHRNCQEdkizIVP2d0Uw_a7t8XOTSTprEIB8oboJFbs,95
  langtrace_python_sdk/instrumentation/openai/instrumentation.py,sha256=A0BJHRLcZ74TNVg6I0I9M5YWvSpAtXwMmME6N5CEQ_M,2945
- langtrace_python_sdk/instrumentation/openai/patch.py,sha256=CBZ_f1os7LMCkikh6Gvv-eqEA83aWs5R3lbSrtrroJY,23929
+ langtrace_python_sdk/instrumentation/openai/patch.py,sha256=T0g9BbUw5JWSupZbWCF6sQxO2Auj_oPpAFw0RdVkKLg,24075
  langtrace_python_sdk/instrumentation/pinecone/__init__.py,sha256=DzXyGh9_MGWveJvXULkFwdkf7PbG2s3bAWtT1Dmz7Ok,99
  langtrace_python_sdk/instrumentation/pinecone/instrumentation.py,sha256=HDXkRITrVPwdQEoOYJOfMzZE_2-vDDvuqHTlD8W1lQw,1845
  langtrace_python_sdk/instrumentation/pinecone/patch.py,sha256=KiIRRz8kk47FllFT746Cb_w6F6M60AN_pcsguD979E4,5172
@@ -137,13 +139,13 @@ langtrace_python_sdk/instrumentation/weaviate/patch.py,sha256=rRD6WfQcNGYpw9teoC
  langtrace_python_sdk/types/__init__.py,sha256=KDW6S74FDxpeBa9xoH5zVEYfmRjccCCHzlW7lTJg1TA,3194
  langtrace_python_sdk/utils/__init__.py,sha256=QPF7SMuiz_003fLCHkRrgNb9NjqErDQ5cQr6pkJReKc,724
  langtrace_python_sdk/utils/langtrace_sampler.py,sha256=BupNndHbU9IL_wGleKetz8FdcveqHMBVz1bfKTTW80w,1753
- langtrace_python_sdk/utils/llm.py,sha256=VIxYr8QJvTAvgnHHSQ_rj9b9ZPvoLq5WQND-3e7MLww,12901
+ langtrace_python_sdk/utils/llm.py,sha256=bvJkU3IGt_ssx5taY_Dx5GkgxGJ9Jqc1CrKElMVJ5to,12983
  langtrace_python_sdk/utils/misc.py,sha256=CD9NWRLxLpFd0YwlHJqzlpFNedXVWtAKGOjQWnDCo8k,838
  langtrace_python_sdk/utils/prompt_registry.py,sha256=n5dQMVLBw8aJZY8Utvf67bncc25ELf6AH9BYw8_hSzo,2619
  langtrace_python_sdk/utils/sdk_version_checker.py,sha256=FzjIWZjn53cX0LEVPdipQd1fO9lG8iGVUEVUs9Hyk6M,1713
  langtrace_python_sdk/utils/silently_fail.py,sha256=F_9EteXCO9Cyq-8MA1OT2Zy_dx8n06nt31I7t7ui24E,478
  langtrace_python_sdk/utils/types.py,sha256=l-N6o7cnWUyrD6dBvW7W3Pf5CkPo5QaoT__k1XLbrQg,383
- langtrace_python_sdk/utils/with_root_span.py,sha256=RO-MhRUNSWdSo7KKOdAnTBfwj973KMTEKD-e54fSr0c,7539
+ langtrace_python_sdk/utils/with_root_span.py,sha256=2iWu8XD1NOFqSFgDZDJiMHZ1JB4HzmYPLr_F3Ugul2k,8480
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/conftest.py,sha256=0Jo6iCZTXbdvyJVhG9UpYGkLabL75378oauCzmt-Sa8,603
  tests/utils.py,sha256=8ZBYvxBH6PynipT1sqenfyjTGLhEV7SORQH1NJjnpsM,2500
@@ -186,8 +188,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
  tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
  tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
  tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
- langtrace_python_sdk-2.2.6.dist-info/METADATA,sha256=DftMFXvtlAqbFrztv22v4zmGwTKDmcfWO1gpy9nMU_s,14471
- langtrace_python_sdk-2.2.6.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
- langtrace_python_sdk-2.2.6.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
- langtrace_python_sdk-2.2.6.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
- langtrace_python_sdk-2.2.6.dist-info/RECORD,,
+ langtrace_python_sdk-2.2.8.dist-info/METADATA,sha256=YwWKH01P-qaU1otk19smm_jTfFxCWECL9bvMzumEEOU,14499
+ langtrace_python_sdk-2.2.8.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+ langtrace_python_sdk-2.2.8.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+ langtrace_python_sdk-2.2.8.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+ langtrace_python_sdk-2.2.8.dist-info/RECORD,,
@@ -1,34 +0,0 @@
- from langtrace_python_sdk import langtrace
- from openai import OpenAI
- from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-
- service_name = "langtrace-python-ollama"
- otlp_endpoint = "http://localhost:4318/v1/traces"
- otlp_exporter = OTLPSpanExporter(
-     endpoint=otlp_endpoint,
-     headers=(("Content-Type", "application/json"),))
- langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)
-
-
- def chat_with_ollama():
-     # Use the OpenAI endpoint, not the Ollama API.
-     base_url = "http://localhost:11434/v1"
-     client = OpenAI(base_url=base_url, api_key="unused")
-     messages = [
-         {
-             "role": "user",
-             "content": "Hello, I'm a human.",
-         },
-     ]
-     chat_completion = client.chat.completions.create(
-         model="llama3", messages=messages
-     )
-     print(chat_completion.choices[0].message.content)
-
-
- def main():
-     chat_with_ollama()
-
-
- if __name__ == "__main__":
-     main()