langtrace-python-sdk 2.2.21__py3-none-any.whl → 2.2.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
langtrace_python_sdk/extensions/langtrace_exporter.py

@@ -1,6 +1,7 @@
 import json
 import os
 import typing
+import sys
 
 import requests
 from opentelemetry.sdk.trace.export import ReadableSpan, SpanExporter, SpanExportResult
@@ -49,11 +50,13 @@ class LangTraceExporter(SpanExporter):
 
     api_key: str
     api_host: str
+    disable_logging: bool
 
     def __init__(
         self,
         api_host,
         api_key: str = None,
+        disable_logging: bool = False,
     ) -> None:
         self.api_key = api_key or os.environ.get("LANGTRACE_API_KEY")
         self.api_host = (
@@ -61,6 +64,7 @@ class LangTraceExporter(SpanExporter):
             if api_host == LANGTRACE_REMOTE_URL
             else api_host
         )
+        self.disable_logging = disable_logging
 
     def export(self, spans: typing.Sequence[ReadableSpan]) -> SpanExportResult:
         """
@@ -72,7 +76,7 @@ class LangTraceExporter(SpanExporter):
         Returns:
             The result of the export SUCCESS or FAILURE
         """
-        if not self.api_key:
+        if not self.api_key and not self.disable_logging:
             print(Fore.RED)
             print(
                 "Missing Langtrace API key, proceed to https://langtrace.ai to create one"
@@ -107,14 +111,15 @@ class LangTraceExporter(SpanExporter):
 
             if not response.ok:
                 raise RequestException(response.text)
-
-            print(
-                Fore.GREEN + f"Exported {len(spans)} spans successfully." + Fore.RESET
-            )
+            if not self.disable_logging:
+                print(
+                    Fore.GREEN + f"Exported {len(spans)} spans successfully." + Fore.RESET
+                )
             return SpanExportResult.SUCCESS
         except RequestException as err:
-            print(Fore.RED + "Failed to export spans.")
-            print(Fore.RED + f"Error: {err}" + Fore.RESET)
+            if not self.disable_logging:
+                print(Fore.RED + "Failed to export spans.")
+                print(Fore.RED + f"Error: {err}" + Fore.RESET)
             return SpanExportResult.FAILURE
 
     def shutdown(self) -> None:
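Taken together, these exporter hunks put every console message in LangTraceExporter behind the new disable_logging flag. A minimal sketch of constructing the exporter directly with that flag, assuming only the signature visible above (the host and key values are placeholders; normally init() resolves them from arguments or environment variables):

    from langtrace_python_sdk.extensions.langtrace_exporter import LangTraceExporter

    # Placeholder host and key, for illustration only.
    exporter = LangTraceExporter(
        api_host="https://langtrace.ai",
        api_key="ltr-xxxx",
        disable_logging=True,  # skip the success/failure prints in export()
    )
    # export() still returns SpanExportResult.SUCCESS or FAILURE; only the
    # colorama output is suppressed when disable_logging is True.

Note that the missing-API-key guard is now conditioned on both values (if not self.api_key and not self.disable_logging), so with logging disabled that warning path is skipped entirely; whatever that branch does beyond printing is outside the lines shown in this hunk.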
langtrace_python_sdk/instrumentation/ollama/patch.py

@@ -1,6 +1,7 @@
 from langtrace_python_sdk.constants.instrumentation.ollama import APIS
 from langtrace_python_sdk.utils import set_span_attribute
 from langtrace_python_sdk.utils.llm import (
+    StreamWrapper,
     get_extra_attributes,
     get_langtrace_attributes,
     get_llm_request_attributes,
@@ -16,9 +17,10 @@ from opentelemetry.trace import SpanKind
 import json
 from opentelemetry.trace.status import Status, StatusCode
 from langtrace.trace_attributes import SpanAttributes
+from opentelemetry.trace import Tracer
 
 
-def generic_patch(operation_name, version, tracer):
+def generic_patch(operation_name, version, tracer: Tracer):
     def traced_method(wrapped, instance, args, kwargs):
         api = APIS[operation_name]
         service_provider = SERVICE_PROVIDERS["OLLAMA"]
@@ -35,36 +37,29 @@ def generic_patch(operation_name, version, tracer):
         }
 
         attributes = LLMSpanAttributes(**span_attributes)
-        with tracer.start_as_current_span(
-            name=get_span_name(f'ollama.{api["METHOD"]}'), kind=SpanKind.CLIENT
-        ) as span:
-            _set_input_attributes(span, kwargs, attributes)
-
-            try:
-                result = wrapped(*args, **kwargs)
-                if result:
-                    if span.is_recording():
-
-                        if kwargs.get("stream"):
-                            return _handle_streaming_response(
-                                span, result, api["METHOD"]
-                            )
 
-                        _set_response_attributes(span, result)
-                        span.set_status(Status(StatusCode.OK))
+        span = tracer.start_span(
+            name=get_span_name(f'ollama.{api["METHOD"]}'), kind=SpanKind.CLIENT
+        )
+        _set_input_attributes(span, kwargs, attributes)
 
-                    span.end()
-                    return result
+        try:
+            result = wrapped(*args, **kwargs)
+            if kwargs.get("stream"):
+                return StreamWrapper(result, span)
+            else:
+                _set_response_attributes(span, result)
+                return result
 
-            except Exception as err:
-                # Record the exception in the span
-                span.record_exception(err)
+        except Exception as err:
+            # Record the exception in the span
+            span.record_exception(err)
 
-                # Set the span status to indicate an error
-                span.set_status(Status(StatusCode.ERROR, str(err)))
+            # Set the span status to indicate an error
+            span.set_status(Status(StatusCode.ERROR, str(err)))
 
-                # Reraise the exception to ensure it's not swallowed
-                raise
+            # Reraise the exception to ensure it's not swallowed
+            raise
 
     return traced_method
 
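The generic_patch rewrite drops the local _handle_streaming_response helper: the span is now opened with tracer.start_span() instead of a with-block, and streaming calls return the shared StreamWrapper from utils/llm.py, which takes ownership of the span and is expected to finish it once the stream is consumed. One visible consequence of the new shape is that the non-streaming success path no longer calls span.end() explicitly. A condensed sketch of the hand-off pattern, with client_call standing in as a hypothetical name for the wrapped Ollama method and "ollama.chat" as an illustrative span name:

    # Sketch of the span hand-off used by the new generic_patch (not the SDK's code).
    span = tracer.start_span("ollama.chat", kind=SpanKind.CLIENT)
    result = client_call(stream=True)      # hypothetical wrapped call
    stream = StreamWrapper(result, span)   # span ownership moves to the wrapper
    for chunk in stream:                   # chunks pass through to the caller
        ...
    # StreamWrapper is expected to emit STREAM_END, set the status, and call
    # span.end() in its cleanup once iteration finishes.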
@@ -82,30 +77,28 @@ def ageneric_patch(operation_name, version, tracer):
             **get_extra_attributes(),
         }
         attributes = LLMSpanAttributes(**span_attributes)
-        with tracer.start_as_current_span(api["METHOD"], kind=SpanKind.CLIENT) as span:
-            _set_input_attributes(span, kwargs, attributes)
-            try:
-                result = await wrapped(*args, **kwargs)
-                if result:
-                    if span.is_recording():
-                        if kwargs.get("stream"):
-                            return _ahandle_streaming_response(
-                                span, result, api["METHOD"]
-                            )
-
-                        _set_response_attributes(span, result)
-                        span.set_status(Status(StatusCode.OK))
-                        span.end()
-                        return result
-            except Exception as err:
-                # Record the exception in the span
-                span.record_exception(err)
-
-                # Set the span status to indicate an error
-                span.set_status(Status(StatusCode.ERROR, str(err)))
-
-                # Reraise the exception to ensure it's not swallowed
-                raise
+        span = tracer.start_span(
+            name=get_span_name(f'ollama.{api["METHOD"]}'), kind=SpanKind.CLIENT
+        )
+
+        _set_input_attributes(span, kwargs, attributes)
+        try:
+            result = await wrapped(*args, **kwargs)
+            if kwargs.get("stream"):
+                return StreamWrapper(span, result)
+            else:
+                _set_response_attributes(span, result)
+                span.end()
+                return result
+        except Exception as err:
+            # Record the exception in the span
+            span.record_exception(err)
+
+            # Set the span status to indicate an error
+            span.set_status(Status(StatusCode.ERROR, str(err)))
+
+            # Reraise the exception to ensure it's not swallowed
+            raise
 
     return traced_method
 
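The async variant follows the same structure and also switches the span name from the bare api["METHOD"] to get_span_name(f'ollama.{...}'), matching the sync path. Note the constructor call, though: the synchronous path builds StreamWrapper(result, span) while this one builds StreamWrapper(span, result). Given the __init__ signature shown in the utils/llm.py hunk further down, the async argument order appears reversed:

    # StreamWrapper.__init__(self, stream, span, prompt_tokens=0, ...) per utils/llm.py
    StreamWrapper(result, span)   # sync generic_patch:   (stream, span)
    StreamWrapper(span, result)   # async ageneric_patch: (span, result) - order looks swapped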
@@ -162,63 +155,3 @@ def _set_input_attributes(span, kwargs, attributes):
                 SpanAttributes.LLM_PRESENCE_PENALTY,
                 options.get("presence_penalty"),
             )
-
-
-def _handle_streaming_response(span, response, api):
-    accumulated_tokens = None
-    if api == "chat":
-        accumulated_tokens = {"message": {"content": "", "role": ""}}
-    if api == "completion" or api == "generate":
-        accumulated_tokens = {"response": ""}
-    span.add_event(Event.STREAM_START.value)
-    try:
-        for chunk in response:
-            content = None
-            if api == "chat":
-                content = chunk["message"]["content"]
-                accumulated_tokens["message"]["content"] += chunk["message"]["content"]
-                accumulated_tokens["message"]["role"] = chunk["message"]["role"]
-            if api == "generate":
-                content = chunk["response"]
-                accumulated_tokens["response"] += chunk["response"]
-
-            set_event_completion_chunk(span, content)
-
-            _set_response_attributes(span, chunk | accumulated_tokens)
-    finally:
-        # Finalize span after processing all chunks
-        span.add_event(Event.STREAM_END.value)
-        span.set_status(StatusCode.OK)
-        span.end()
-
-    return response
-
-
-async def _ahandle_streaming_response(span, response, api):
-    accumulated_tokens = None
-    if api == "chat":
-        accumulated_tokens = {"message": {"content": "", "role": ""}}
-    if api == "completion" or api == "generate":
-        accumulated_tokens = {"response": ""}
-
-    span.add_event(Event.STREAM_START.value)
-    try:
-        async for chunk in response:
-            content = None
-            if api == "chat":
-                content = chunk["message"]["content"]
-                accumulated_tokens["message"]["content"] += chunk["message"]["content"]
-                accumulated_tokens["message"]["role"] = chunk["message"]["role"]
-            if api == "generate":
-                content = chunk["response"]
-                accumulated_tokens["response"] += chunk["response"]
-
-            set_event_completion_chunk(span, content)
-            _set_response_attributes(span, chunk | accumulated_tokens)
-    finally:
-        # Finalize span after processing all chunks
-        span.add_event(Event.STREAM_END.value)
-        span.set_status(StatusCode.OK)
-        span.end()
-
-    return response
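With streaming handled by StreamWrapper, the two module-level helpers above are deleted outright. What they did per chunk, condensed here for the chat case, now lives in StreamWrapper's generic chunk processing (see the utils/llm.py hunks below):

    # Condensed view of what the removed _handle_streaming_response did for api == "chat";
    # the same accumulation is now performed inside StreamWrapper.
    accumulated = {"message": {"content": "", "role": ""}}
    for chunk in response:  # dict chunks from the ollama client
        accumulated["message"]["content"] += chunk["message"]["content"]
        accumulated["message"]["role"] = chunk["message"]["role"]
    # followed by a STREAM_END event, an OK status, and span.end()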
langtrace_python_sdk/langtrace.py

@@ -72,7 +72,10 @@ def init(
     disable_instrumentations: Optional[DisableInstrumentations] = None,
     disable_tracing_for_functions: Optional[InstrumentationMethods] = None,
     service_name: Optional[str] = None,
+    disable_logging = False
 ):
+    if disable_logging:
+        sys.stdout = open(os.devnull, "w")
 
     host = (
         os.environ.get("LANGTRACE_API_HOST", None) or api_host or LANGTRACE_REMOTE_URL
@@ -90,7 +93,7 @@ def init(
     provider = TracerProvider(resource=resource, sampler=sampler)
 
     remote_write_exporter = (
-        LangTraceExporter(api_key=api_key, api_host=host)
+        LangTraceExporter(api_key=api_key, api_host=host, disable_logging=disable_logging)
         if custom_remote_exporter is None
         else custom_remote_exporter
     )
@@ -146,6 +149,8 @@ def init(
         print(Fore.BLUE + "Exporting spans to Langtrace cloud.." + Fore.RESET)
         provider.add_span_processor(batch_processor_remote)
 
+    sys.stdout = sys.__stdout__
+
 
 def init_instrumentations(
     disable_instrumentations: DisableInstrumentations, all_instrumentations: dict
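The langtrace.py changes wire the same flag through the public entry point: when disable_logging is set, init() points sys.stdout at os.devnull for the duration of initialization, reassigns it back to sys.__stdout__ unconditionally at the end, and forwards the flag to LangTraceExporter so export-time prints stay silent as well. A minimal usage sketch, assuming nothing beyond the signature shown above (the API key is a placeholder):

    from langtrace_python_sdk import langtrace

    # Initialize the SDK without console output from init() or the exporter.
    langtrace.init(api_key="ltr-xxxx", disable_logging=True)

Because the stdout redirection is process-wide, any print issued by instrumentations while init() runs is swallowed too; output resumes once init() restores sys.stdout.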
langtrace_python_sdk/utils/llm.py

@@ -237,7 +237,6 @@ class StreamWrapper:
     def __init__(
         self, stream, span, prompt_tokens=0, function_call=False, tool_calls=False
     ):
-
         self.stream = stream
         self.span = span
         self.prompt_tokens = prompt_tokens
@@ -284,7 +283,6 @@ class StreamWrapper:
                     }
                 ],
             )
-
             self.span.set_status(StatusCode.OK)
             self.span.end()
             self._span_started = False
@@ -377,6 +375,10 @@ class StreamWrapper:
         if hasattr(chunk, "delta") and chunk.delta is not None:
             content = [chunk.delta.text] if hasattr(chunk.delta, "text") else []
 
+        if isinstance(chunk, dict):
+            if "message" in chunk:
+                if "content" in chunk["message"]:
+                    content = [chunk["message"]["content"]]
         if content:
             self.result_content.append(content[0])
 
@@ -401,6 +403,13 @@ class StreamWrapper:
             self.completion_tokens = chunk.usage_metadata.candidates_token_count
             self.prompt_tokens = chunk.usage_metadata.prompt_token_count
 
+        # Ollama
+        if isinstance(chunk, dict):
+            if "prompt_eval_count" in chunk:
+                self.prompt_tokens = chunk["prompt_eval_count"]
+            if "eval_count" in chunk:
+                self.completion_tokens = chunk["eval_count"]
+
     def process_chunk(self, chunk):
         self.set_response_model(chunk=chunk)
         self.build_streaming_response(chunk=chunk)
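These two additions teach StreamWrapper to read Ollama's dict-shaped streaming chunks: message content is appended to result_content, and the prompt_eval_count / eval_count fields of the final chunk become the prompt and completion token counts. Roughly the chunk shape being consumed, with illustrative values (field names as emitted by the ollama Python client):

    # Final chunk of an Ollama chat stream, as a plain dict:
    chunk = {
        "message": {"role": "assistant", "content": " world"},
        "done": True,
        "prompt_eval_count": 26,  # read into self.prompt_tokens
        "eval_count": 42,         # read into self.completion_tokens
    }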
langtrace_python_sdk/version.py

@@ -1 +1 @@
-__version__ = "2.2.21"
+__version__ = "2.2.23"
langtrace_python_sdk-2.2.21.dist-info/METADATA → langtrace_python_sdk-2.2.23.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: langtrace-python-sdk
-Version: 2.2.21
+Version: 2.2.23
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>
langtrace_python_sdk-2.2.21.dist-info/RECORD → langtrace_python_sdk-2.2.23.dist-info/RECORD

@@ -73,8 +73,8 @@ examples/vertexai_example/main.py,sha256=gndId5X5ksD-ycxnAWMdEqIDbLc3kz5Vt8vm4YP
 examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56snk-Bbg2Kw,618
 examples/weaviate_example/query_text.py,sha256=sG8O-bXQpflBAiYpgE_M2X7GcHUlZNgl_wJW8_h-W6Q,127024
 langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
-langtrace_python_sdk/langtrace.py,sha256=hh3okJYyxXvC9TMm_vaFOGz-5TxxJp1zQDmZmy63aRY,7813
-langtrace_python_sdk/version.py,sha256=IrYOgPsxOieq29c1VC74EBYhBXaTVWyQEljnlRrTLJg,23
+langtrace_python_sdk/langtrace.py,sha256=5BL5lNZejLRq9AVuOCjFaPpIkFNUh2vLvlGSGVxUlE4,7974
+langtrace_python_sdk/version.py,sha256=Q8BkngWSfGy5JQxcAsViGql-GdawCdx5vm_X4sj0nJc,23
 langtrace_python_sdk/constants/__init__.py,sha256=P8QvYwt5czUNDZsKS64vxm9Dc41ptGbuF1TFtAF6nv4,44
 langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=5MNjnAOg-4am78J3gVMH6FSwq5N8TOj72ugkhsw4vi0,46
 langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -91,7 +91,7 @@ langtrace_python_sdk/constants/instrumentation/qdrant.py,sha256=yL7BopNQTXW7L7Z-
 langtrace_python_sdk/constants/instrumentation/vertexai.py,sha256=0s2vX3Y0iwjOPkUg5lAKi-7o3LaNivDSBBbF-o695Ok,1266
 langtrace_python_sdk/constants/instrumentation/weaviate.py,sha256=gtv-JBxvNGClEMxClmRKzjJ1khgOonsli4D_k9IagSE,2601
 langtrace_python_sdk/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=gTdmje-q5DGYNl2S6tmVnGPHj_nZNiCB3tux9RWDiYM,4316
+langtrace_python_sdk/extensions/langtrace_exporter.py,sha256=_GIH4zP9lpk8UO81zBJ_9HklNszg1bsndqqXwcVe2rY,4569
 langtrace_python_sdk/extensions/langtrace_filesystem.py,sha256=34fZutG28EJ66l67OvTGsydAH3ZpXgikdE7hVLqBpG4,7863
 langtrace_python_sdk/instrumentation/__init__.py,sha256=yJd3aGu4kPfm2h6oe6kiCWvzTF9awpC1UztjXF9WSO4,1391
 langtrace_python_sdk/instrumentation/anthropic/__init__.py,sha256=donrurJAGYlxrSRA3BIf76jGeUcAx9Tq8CVpah68S0Y,101
@@ -132,7 +132,7 @@ langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py,sha256=8iAg-O
 langtrace_python_sdk/instrumentation/llamaindex/patch.py,sha256=548hzPyT_k-2wmt9AArv4JzTT4j4AGKJq5Ar2bWv7o8,4615
 langtrace_python_sdk/instrumentation/ollama/__init__.py,sha256=g2zJsXnDHinXPzTc-WxDeTtHmr9gmAj3K6l_00kP8c8,82
 langtrace_python_sdk/instrumentation/ollama/instrumentation.py,sha256=jdsvkqUJAAUNLVPtAkn_rG26HXetVQXWtjn4a6eWZro,2029
-langtrace_python_sdk/instrumentation/ollama/patch.py,sha256=AYIT8N6LXTgd5HqjuDOtKSfqD-24a1_5bF3pd_R3HGM,8128
+langtrace_python_sdk/instrumentation/ollama/patch.py,sha256=5Y_pPVh1Pt8BcxyCiJWex3oNqYTcuOo5udh1-jNsCO0,5407
 langtrace_python_sdk/instrumentation/openai/__init__.py,sha256=VPHRNCQEdkizIVP2d0Uw_a7t8XOTSTprEIB8oboJFbs,95
 langtrace_python_sdk/instrumentation/openai/instrumentation.py,sha256=A0BJHRLcZ74TNVg6I0I9M5YWvSpAtXwMmME6N5CEQ_M,2945
 langtrace_python_sdk/instrumentation/openai/patch.py,sha256=4GCYJzZdUBopEDinpTwRBFf-Enb0hdNO16LiiMKqqvY,24226
@@ -151,7 +151,7 @@ langtrace_python_sdk/instrumentation/weaviate/patch.py,sha256=qX4VQqScH2kygn5lQa
 langtrace_python_sdk/types/__init__.py,sha256=KDW6S74FDxpeBa9xoH5zVEYfmRjccCCHzlW7lTJg1TA,3194
 langtrace_python_sdk/utils/__init__.py,sha256=SwYYPIh2AzEpI3zbwowQU2zJlwRwoVdWOCcrAKnkI9g,873
 langtrace_python_sdk/utils/langtrace_sampler.py,sha256=BupNndHbU9IL_wGleKetz8FdcveqHMBVz1bfKTTW80w,1753
-langtrace_python_sdk/utils/llm.py,sha256=Iftkbz16P_tvAaXdfAv2wQ_4IizDr4RTkWi0lqPD4f0,13499
+langtrace_python_sdk/utils/llm.py,sha256=hDOPyUp3kSL1g92uiHySARzBmVRMy3umGizz91vTDSI,13940
 langtrace_python_sdk/utils/misc.py,sha256=CD9NWRLxLpFd0YwlHJqzlpFNedXVWtAKGOjQWnDCo8k,838
 langtrace_python_sdk/utils/prompt_registry.py,sha256=n5dQMVLBw8aJZY8Utvf67bncc25ELf6AH9BYw8_hSzo,2619
 langtrace_python_sdk/utils/sdk_version_checker.py,sha256=FzjIWZjn53cX0LEVPdipQd1fO9lG8iGVUEVUs9Hyk6M,1713
@@ -200,8 +200,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
 tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
 tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
 tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
-langtrace_python_sdk-2.2.21.dist-info/METADATA,sha256=LnMQla3SsuwZxn4eMGBMC5sFXGQNNDs10945gGAmfQ0,14705
-langtrace_python_sdk-2.2.21.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-langtrace_python_sdk-2.2.21.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
-langtrace_python_sdk-2.2.21.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-langtrace_python_sdk-2.2.21.dist-info/RECORD,,
+langtrace_python_sdk-2.2.23.dist-info/METADATA,sha256=x2GEjOIpSYOL_HIP8dzQYBMEU8AYVsJ3QE7Gn-KfIvU,14705
+langtrace_python_sdk-2.2.23.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+langtrace_python_sdk-2.2.23.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+langtrace_python_sdk-2.2.23.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+langtrace_python_sdk-2.2.23.dist-info/RECORD,,