langtrace-python-sdk 2.1.23__py3-none-any.whl → 2.1.24__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -29,6 +29,7 @@ from langtrace_python_sdk.constants.instrumentation.common import (
 )
 from langtrace_python_sdk.constants.instrumentation.openai import APIS
 from langtrace_python_sdk.utils.llm import calculate_prompt_tokens, estimate_tokens
+from openai._types import NOT_GIVEN


 def images_generate(original_method, version, tracer):
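The substantive change in this release is the set of NOT_GIVEN guards below. The openai v1 SDK uses a NOT_GIVEN sentinel (which is not None) as the default for optional request parameters, so a plain "is not None" check can still match the sentinel and record it as a span attribute. A minimal sketch of the difference, assuming openai v1.x is installed; the kwargs dict here is illustrative, not taken from the SDK:

from openai._types import NOT_GIVEN

# Hypothetical kwargs, as a caller or wrapping library might forward them
# with the SDK's default sentinel passed explicitly.
kwargs = {"temperature": NOT_GIVEN}

# Old guard: NOT_GIVEN is not None, so the sentinel would leak into the span attributes.
if kwargs.get("temperature") is not None:
    print("old guard records:", kwargs.get("temperature"))

# New guard: the sentinel is filtered out and nothing is recorded.
if kwargs.get("temperature") is not None and kwargs.get("temperature") != NOT_GIVEN:
    print("new guard records:", kwargs.get("temperature"))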
@@ -470,16 +471,16 @@ def chat_completions_create(original_method, version, tracer):
         attributes = LLMSpanAttributes(**span_attributes)

         tools = []
-        if kwargs.get("temperature") is not None:
+        if kwargs.get("temperature") is not None and kwargs.get("temperature") != NOT_GIVEN:
             attributes.llm_temperature = kwargs.get("temperature")
-        if kwargs.get("top_p") is not None:
+        if kwargs.get("top_p") is not None and kwargs.get("top_p") != NOT_GIVEN:
             attributes.llm_top_p = kwargs.get("top_p")
-        if kwargs.get("user") is not None:
+        if kwargs.get("user") is not None and kwargs.get("user") != NOT_GIVEN:
             attributes.llm_user = kwargs.get("user")
-        if kwargs.get("functions") is not None:
+        if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
             for function in kwargs.get("functions"):
                 tools.append(json.dumps({"type": "function", "function": function}))
-        if kwargs.get("tools") is not None:
+        if kwargs.get("tools") is not None and kwargs.get("tools") != NOT_GIVEN:
             tools.append(json.dumps(kwargs.get("tools")))
         if len(tools) > 0:
             attributes.llm_tools = json.dumps(tools)
@@ -498,7 +499,7 @@ def chat_completions_create(original_method, version, tracer):
         try:
             # Attempt to call the original method
             result = wrapped(*args, **kwargs)
-            if kwargs.get("stream") is False or kwargs.get("stream") is None:
+            if kwargs.get("stream") is False or kwargs.get("stream") is None or kwargs.get("stream") == NOT_GIVEN:
                 span.set_attribute("llm.model", result.model)
                 if hasattr(result, "choices") and result.choices is not None:
                     responses = [
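The stream flag gets the same treatment: with the updated guard, a request is handled as a non-streaming response when stream is False, None, or the NOT_GIVEN default. A small sketch of that check in isolation, with illustrative values only:

from openai._types import NOT_GIVEN

for stream in (False, None, NOT_GIVEN, True):
    non_streaming = stream is False or stream is None or stream == NOT_GIVEN
    print(repr(stream), "-> non-streaming path:", non_streaming)
# False, None and NOT_GIVEN take the non-streaming path; only True falls through to the streaming branch.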
@@ -527,7 +528,7 @@ def chat_completions_create(original_method, version, tracer):
                     span.set_attribute("llm.responses", json.dumps(responses))
                 if (
                     hasattr(result, "system_fingerprint")
-                    and result.system_fingerprint is not None
+                    and result.system_fingerprint is not None and result.system_fingerprint != NOT_GIVEN
                 ):
                     span.set_attribute(
                         "llm.system.fingerprint", result.system_fingerprint
@@ -554,7 +555,7 @@ def chat_completions_create(original_method, version, tracer):
                     )

                # iterate over kwargs.get("functions") and calculate the prompt tokens
-                if kwargs.get("functions") is not None:
+                if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
                     for function in kwargs.get("functions"):
                         prompt_tokens += calculate_prompt_tokens(
                             json.dumps(function), kwargs.get("model")
@@ -640,16 +641,16 @@ def async_chat_completions_create(original_method, version, tracer):
         attributes = LLMSpanAttributes(**span_attributes)

         tools = []
-        if kwargs.get("temperature") is not None:
+        if kwargs.get("temperature") is not None and kwargs.get("temperature") != NOT_GIVEN:
             attributes.llm_temperature = kwargs.get("temperature")
-        if kwargs.get("top_p") is not None:
+        if kwargs.get("top_p") is not None and kwargs.get("top_p") != NOT_GIVEN:
             attributes.llm_top_p = kwargs.get("top_p")
-        if kwargs.get("user") is not None:
+        if kwargs.get("user") is not None and kwargs.get("user") != NOT_GIVEN:
             attributes.llm_user = kwargs.get("user")
-        if kwargs.get("functions") is not None:
+        if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
             for function in kwargs.get("functions"):
                 tools.append(json.dumps({"type": "function", "function": function}))
-        if kwargs.get("tools") is not None:
+        if kwargs.get("tools") is not None and kwargs.get("tools") != NOT_GIVEN:
             tools.append(json.dumps(kwargs.get("tools")))
         if len(tools) > 0:
             attributes.llm_tools = json.dumps(tools)
@@ -666,7 +667,7 @@ def async_chat_completions_create(original_method, version, tracer):
         try:
             # Attempt to call the original method
             result = await wrapped(*args, **kwargs)
-            if kwargs.get("stream") is False or kwargs.get("stream") is None:
+            if kwargs.get("stream") is False or kwargs.get("stream") is None or kwargs.get("stream") == NOT_GIVEN:
                 span.set_attribute("llm.model", result.model)
                 if hasattr(result, "choices") and result.choices is not None:
                     responses = [
@@ -695,7 +696,7 @@ def async_chat_completions_create(original_method, version, tracer):
                     span.set_attribute("llm.responses", json.dumps(responses))
                 if (
                     hasattr(result, "system_fingerprint")
-                    and result.system_fingerprint is not None
+                    and result.system_fingerprint is not None and result.system_fingerprint != NOT_GIVEN
                 ):
                     span.set_attribute(
                         "llm.system.fingerprint", result.system_fingerprint
@@ -722,7 +723,7 @@ def async_chat_completions_create(original_method, version, tracer):
                     )

                # iterate over kwargs.get("functions") and calculate the prompt tokens
-                if kwargs.get("functions") is not None:
+                if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
                     for function in kwargs.get("functions"):
                         prompt_tokens += calculate_prompt_tokens(
                             json.dumps(function), kwargs.get("model")
@@ -1 +1 @@
-__version__ = "2.1.23"
+__version__ = "2.1.24"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: langtrace-python-sdk
-Version: 2.1.23
+Version: 2.1.24
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -51,7 +51,7 @@ examples/weaviate_example/__init__.py,sha256=8JMDBsRSEV10HfTd-YC7xb4txBjD3la56sn
 examples/weaviate_example/query_text.py,sha256=qz9o-fTDzX5AW5m8BJF-TfmBdokxh492NfnmnPUMU3s,64814
 langtrace_python_sdk/__init__.py,sha256=VZM6i71NR7pBQK6XvJWRelknuTYUhqwqE7PlicKa5Wg,1166
 langtrace_python_sdk/langtrace.py,sha256=pG_dWzzQxUP8r5SAMUwRScb6IopINcda1CZvJERjXBo,7486
-langtrace_python_sdk/version.py,sha256=v7ybXce2sltwQL38gMDsgF0NlBfnohuHthBrSTULxMw,23
+langtrace_python_sdk/version.py,sha256=qc6AJ16ihdkGGzdf0nDZHdeSsAAbLwShB1HyYj8ASPE,23
 langtrace_python_sdk/constants/__init__.py,sha256=P8QvYwt5czUNDZsKS64vxm9Dc41ptGbuF1TFtAF6nv4,44
 langtrace_python_sdk/constants/exporter/langtrace_exporter.py,sha256=5MNjnAOg-4am78J3gVMH6FSwq5N8TOj72ugkhsw4vi0,46
 langtrace_python_sdk/constants/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -107,7 +107,7 @@ langtrace_python_sdk/instrumentation/ollama/instrumentation.py,sha256=jdsvkqUJAA
 langtrace_python_sdk/instrumentation/ollama/patch.py,sha256=CGLgt0qZg3WDr6XLn32qqs4D9USdMmeTmJxctbAsDeM,7908
 langtrace_python_sdk/instrumentation/openai/__init__.py,sha256=VPHRNCQEdkizIVP2d0Uw_a7t8XOTSTprEIB8oboJFbs,95
 langtrace_python_sdk/instrumentation/openai/instrumentation.py,sha256=A0BJHRLcZ74TNVg6I0I9M5YWvSpAtXwMmME6N5CEQ_M,2945
-langtrace_python_sdk/instrumentation/openai/patch.py,sha256=AUsboATdpAbIL9Eg2rLAjY-GPZgr2VkTlvGEyg_I1iU,37315
+langtrace_python_sdk/instrumentation/openai/patch.py,sha256=-DIYUr0yvE2qGE2o4EgP4aFZavm3lSFd65PUFO3_35Q,37981
 langtrace_python_sdk/instrumentation/pinecone/__init__.py,sha256=DzXyGh9_MGWveJvXULkFwdkf7PbG2s3bAWtT1Dmz7Ok,99
 langtrace_python_sdk/instrumentation/pinecone/instrumentation.py,sha256=HDXkRITrVPwdQEoOYJOfMzZE_2-vDDvuqHTlD8W1lQw,1845
 langtrace_python_sdk/instrumentation/pinecone/patch.py,sha256=KiIRRz8kk47FllFT746Cb_w6F6M60AN_pcsguD979E4,5172
@@ -163,8 +163,8 @@ tests/pinecone/cassettes/test_query.yaml,sha256=b5v9G3ssUy00oG63PlFUR3JErF2Js-5A
 tests/pinecone/cassettes/test_upsert.yaml,sha256=neWmQ1v3d03V8WoLl8FoFeeCYImb8pxlJBWnFd_lITU,38607
 tests/qdrant/conftest.py,sha256=9n0uHxxIjWk9fbYc4bx-uP8lSAgLBVx-cV9UjnsyCHM,381
 tests/qdrant/test_qdrant.py,sha256=pzjAjVY2kmsmGfrI2Gs2xrolfuaNHz7l1fqGQCjp5_o,3353
-langtrace_python_sdk-2.1.23.dist-info/METADATA,sha256=SFkmrJ__gq5jO8aHqumab20p7Y99I3HIccHG8Oc6eZQ,13315
-langtrace_python_sdk-2.1.23.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-langtrace_python_sdk-2.1.23.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
-langtrace_python_sdk-2.1.23.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-langtrace_python_sdk-2.1.23.dist-info/RECORD,,
+langtrace_python_sdk-2.1.24.dist-info/METADATA,sha256=50Fbxi2n64CuBiA2w9o0XF-Ktsw4PauKvoPEi4Cmjts,13315
+langtrace_python_sdk-2.1.24.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+langtrace_python_sdk-2.1.24.dist-info/entry_points.txt,sha256=1_b9-qvf2fE7uQNZcbUei9vLpFZBbbh9LrtGw95ssAo,70
+langtrace_python_sdk-2.1.24.dist-info/licenses/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+langtrace_python_sdk-2.1.24.dist-info/RECORD,,