promptlayer 1.0.52__py3-none-any.whl → 1.0.53__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of promptlayer has been flagged as potentially problematic; see the registry's advisory page for more details.

promptlayer/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
1
  from .promptlayer import AsyncPromptLayer, PromptLayer
2
2
 
3
- __version__ = "1.0.52"
3
+ __version__ = "1.0.53"
4
4
  __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
@@ -23,6 +23,11 @@ class TextContent(TypedDict, total=False):
23
23
  text: str
24
24
 
25
25
 
26
+ class ThinkingContent(TypedDict, total=False):
27
+ type: Literal["thinking"]
28
+ thinking: str
29
+
30
+
26
31
  class ImageContent(TypedDict, total=False):
27
32
  type: Literal["image_url"]
28
33
  image_url: ImageUrl
@@ -44,7 +49,7 @@ class MediaVariable(TypedDict, total=False):
44
49
  name: str
45
50
 
46
51
 
47
- Content = Union[TextContent, ImageContent, MediaContnt, MediaVariable]
52
+ Content = Union[TextContent, ThinkingContent, ImageContent, MediaContnt, MediaVariable]
48
53
 
49
54
 
50
55
  class Function(TypedDict, total=False):
promptlayer/utils.py CHANGED
@@ -788,30 +788,79 @@ class GeneratorProxy:
788
788
  response = ""
789
789
  for result in self.results:
790
790
  if hasattr(result, "completion"):
791
- response = f"{response}{result.completion}"
791
+ response += result.completion
792
792
  elif hasattr(result, "message") and isinstance(result.message, str):
793
- response = f"{response}{result.message}"
793
+ response += result.message
794
794
  elif (
795
795
  hasattr(result, "content_block")
796
796
  and hasattr(result.content_block, "text")
797
- and "type" in result
798
- and result.type != "message_stop"
797
+ and getattr(result, "type", None) != "message_stop"
799
798
  ):
800
- response = f"{response}{result.content_block.text}"
801
- elif hasattr(result, "delta") and hasattr(result.delta, "text"):
802
- response = f"{response}{result.delta.text}"
803
- if (
804
- hasattr(self.results[-1], "type") and self.results[-1].type == "message_stop"
805
- ): # this is a message stream and not the correct event
799
+ response += result.content_block.text
800
+ elif hasattr(result, "delta"):
801
+ if hasattr(result.delta, "thinking"):
802
+ response += result.delta.thinking
803
+ elif hasattr(result.delta, "text"):
804
+ response += result.delta.text
805
+
806
+ # 2) If this is a “stream” (ended by message_stop), reconstruct both ThinkingBlock & TextBlock
807
+ last_event = self.results[-1]
808
+ if getattr(last_event, "type", None) == "message_stop":
806
809
  final_result = deepcopy(self.results[0].message)
807
- final_result.usage = None
808
- content_block = deepcopy(self.results[1].content_block)
809
- content_block.text = response
810
- final_result.content = [content_block]
810
+
811
+ content_blocks = []
812
+ current_block = None
813
+ current_signature = ""
814
+ current_thinking = ""
815
+ current_text = ""
816
+
817
+ for event in self.results:
818
+ # On a new content block starting:
819
+ if getattr(event, "type", None) == "content_block_start":
820
+ current_block = deepcopy(event.content_block)
821
+
822
+ if getattr(event.content_block, "type", None) == "thinking":
823
+ current_signature = ""
824
+ current_thinking = ""
825
+ elif getattr(event.content_block, "type", None) == "text":
826
+ current_text = ""
827
+
828
+ elif getattr(event, "type", None) == "content_block_delta" and current_block is not None:
829
+ if getattr(current_block, "type", None) == "thinking":
830
+ if hasattr(event.delta, "signature"):
831
+ current_signature = event.delta.signature
832
+ if hasattr(event.delta, "thinking"):
833
+ current_thinking += event.delta.thinking
834
+
835
+ elif getattr(current_block, "type", None) == "text":
836
+ if hasattr(event.delta, "text"):
837
+ current_text += event.delta.text
838
+
839
+ elif getattr(event, "type", None) == "content_block_stop" and current_block is not None:
840
+ if getattr(current_block, "type", None) == "thinking":
841
+ current_block.signature = current_signature
842
+ current_block.thinking = current_thinking
843
+ elif getattr(current_block, "type", None) == "text":
844
+ current_block.text = current_text
845
+
846
+ content_blocks.append(current_block)
847
+
848
+ current_block = None
849
+ current_signature = ""
850
+ current_thinking = ""
851
+ current_text = ""
852
+
853
+ final_result.content = content_blocks
854
+ for event in reversed(self.results):
855
+ if hasattr(event, "usage") and hasattr(event.usage, "output_tokens"):
856
+ final_result.usage.output_tokens = event.usage.output_tokens
857
+ break
858
+
859
+ return final_result
860
+
861
+ # 3) Otherwise (not a “stream”), fall back to returning the last raw message
811
862
  else:
812
- final_result = deepcopy(self.results[-1])
813
- final_result.completion = response
814
- return final_result
863
+ return deepcopy(self.results[-1])
815
864
  if hasattr(self.results[0].choices[0], "text"): # this is regular completion
816
865
  response = ""
817
866
  for result in self.results:
@@ -1898,11 +1947,11 @@ MAP_TYPE_TO_GOOGLE_FUNCTION = {
1898
1947
  }
1899
1948
 
1900
1949
 
1901
- def google_request(request: GetPromptTemplateResponse, _: dict, function_kwargs: dict):
1950
+ def google_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
1902
1951
  from google import genai
1903
1952
 
1904
1953
  client = genai.Client()
1905
- request_to_make = MAP_TYPE_TO_GOOGLE_FUNCTION[request["prompt_template"]["type"]]
1954
+ request_to_make = MAP_TYPE_TO_GOOGLE_FUNCTION[prompt_blueprint["prompt_template"]["type"]]
1906
1955
  return request_to_make(client, **function_kwargs)
1907
1956
 
1908
1957
 
@@ -1936,11 +1985,11 @@ AMAP_TYPE_TO_GOOGLE_FUNCTION = {
1936
1985
  }
1937
1986
 
1938
1987
 
1939
- async def agoogle_request(request: GetPromptTemplateResponse, _: dict, function_kwargs: dict):
1988
+ async def agoogle_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
1940
1989
  from google import genai
1941
1990
 
1942
1991
  client = genai.Client()
1943
- request_to_make = AMAP_TYPE_TO_GOOGLE_FUNCTION[request["prompt_template"]["type"]]
1992
+ request_to_make = AMAP_TYPE_TO_GOOGLE_FUNCTION[prompt_blueprint["prompt_template"]["type"]]
1944
1993
  return await request_to_make(client, **function_kwargs)
1945
1994
 
1946
1995
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: promptlayer
3
- Version: 1.0.52
3
+ Version: 1.0.53
4
4
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
5
5
  License: Apache-2.0
6
6
  Author: Magniv
@@ -1,4 +1,4 @@
1
- promptlayer/__init__.py,sha256=iPgrSckrRsUyjKcOVRNOVz5tvY4EBFx7f6BpoAOY2Yw,140
1
+ promptlayer/__init__.py,sha256=fsZw2O6kL3S1kgqDVKNwwT-KdfELftmUFxae1FxnMtk,140
2
2
  promptlayer/groups/__init__.py,sha256=xhOAolLUBkr76ZHvJr29OwjCIk1V9qKQXjZCuyTJUIY,429
3
3
  promptlayer/groups/groups.py,sha256=YPROicy-TzpkrpA8vOpZS2lwvJ6VRtlbQ1S2oT1N0vM,338
4
4
  promptlayer/promptlayer.py,sha256=K4KRW9eB1FF_Cdllu_Z-fpof058P45WhITnap29vlOk,21644
@@ -9,10 +9,10 @@ promptlayer/templates.py,sha256=7ObDPMzHXjttDdJdCXA_pDL9XAnmcujIWucmgZJcOC8,1179
9
9
  promptlayer/track/__init__.py,sha256=tyweLTAY7UpYpBHWwY-T3pOPDIlGjcgccYXqU_r0694,1710
10
10
  promptlayer/track/track.py,sha256=A-awcYwsSwxktrlCMchy8NITIquwxU1UXbgLZMwqrA0,3164
11
11
  promptlayer/types/__init__.py,sha256=xJcvQuOk91ZBBePb40-1FDNDKYrZoH5lPE2q6_UhprM,111
12
- promptlayer/types/prompt_template.py,sha256=GoYSorgBmUgvtyXaGAOv0KgVC61Llzn8bND6PF1fW50,4929
12
+ promptlayer/types/prompt_template.py,sha256=ANmt0z_bRHKtf5V_p6uEzOcSOTXS4-7NC92yxvMscP4,5043
13
13
  promptlayer/types/request_log.py,sha256=xU6bcxQar6GaBOJlgZTavXUV3FjE8sF_nSjPu4Ya_00,174
14
- promptlayer/utils.py,sha256=iKA7kIBdOzYIORHdkS556Wf9MRCnD7ctEo4obI0etL8,69712
15
- promptlayer-1.0.52.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
16
- promptlayer-1.0.52.dist-info/METADATA,sha256=CKfXX6iAXGCTna9bETZhCupDGsQ8jbUL4Qq3gd7yTHc,4819
17
- promptlayer-1.0.52.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
18
- promptlayer-1.0.52.dist-info/RECORD,,
14
+ promptlayer/utils.py,sha256=fR3lqCRMWpOnxyLm1nHc6UrZ-HxMoyBeWa6GRQiveJg,72148
15
+ promptlayer-1.0.53.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
16
+ promptlayer-1.0.53.dist-info/METADATA,sha256=wH6BdHhRx-HyguMq0WUVvNYsI5PNafdevcs_yz9TJb0,4819
17
+ promptlayer-1.0.53.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
18
+ promptlayer-1.0.53.dist-info/RECORD,,