promptlayer 1.0.53.tar.gz → 1.0.54.tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: promptlayer
-Version: 1.0.53
+Version: 1.0.54
 Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
 License: Apache-2.0
 Author: Magniv
@@ -1,4 +1,4 @@
 from .promptlayer import AsyncPromptLayer, PromptLayer
 
-__version__ = "1.0.53"
+__version__ = "1.0.54"
 __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
@@ -24,6 +24,7 @@ class TextContent(TypedDict, total=False):
 
 
 class ThinkingContent(TypedDict, total=False):
+    signature: str | None = None
     type: Literal["thinking"]
     thinking: str
 
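The new optional signature field appears to carry the signature string Anthropic returns with extended-thinking blocks (an assumption based on how build_anthropic_content_blocks below populates it). Because the TypedDict is declared with total=False, every key remains optional. A minimal sketch of a populated block, with illustrative values and an assumed import of ThinkingContent from this module:

    # Values are illustrative; TypedDict instances are plain dicts at runtime.
    block: ThinkingContent = {
        "type": "thinking",
        "thinking": "First, verify the units cancel correctly...",
        "signature": "sig_abc123",  # opaque server-provided signature, or None
    }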
@@ -691,6 +691,52 @@ async def apromptlayer_track_score(
     return True
 
 
+def build_anthropic_content_blocks(events):
+    content_blocks = []
+    current_block = None
+    current_signature = ""
+    current_thinking = ""
+    current_text = ""
+    usage = None
+    stop_reason = None
+
+    for event in events:
+        if event.type == "content_block_start":
+            current_block = deepcopy(event.content_block)
+            if current_block.type == "thinking":
+                current_signature = ""
+                current_thinking = ""
+            elif current_block.type == "text":
+                current_text = ""
+        elif event.type == "content_block_delta" and current_block is not None:
+            if current_block.type == "thinking":
+                if hasattr(event.delta, "signature"):
+                    current_signature = event.delta.signature
+                if hasattr(event.delta, "thinking"):
+                    current_thinking += event.delta.thinking
+            elif current_block.type == "text":
+                if hasattr(event.delta, "text"):
+                    current_text += event.delta.text
+        elif event.type == "content_block_stop" and current_block is not None:
+            if current_block.type == "thinking":
+                current_block.signature = current_signature
+                current_block.thinking = current_thinking
+            elif current_block.type == "text":
+                current_block.text = current_text
+
+            content_blocks.append(current_block)
+            current_block = None
+            current_signature = ""
+            current_thinking = ""
+            current_text = ""
+        elif event.type == "message_delta":
+            if hasattr(event, "usage"):
+                usage = event.usage
+            if hasattr(event.delta, "stop_reason"):
+                stop_reason = event.delta.stop_reason
+    return content_blocks, usage, stop_reason
+
+
 class GeneratorProxy:
     def __init__(self, generator, api_request_arguments, api_key):
         self.generator = generator
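The new build_anthropic_content_blocks helper factors the stream-reassembly pass out of GeneratorProxy (next hunk) so the sync and async stream handlers can share it. A minimal sketch of its behavior, using SimpleNamespace stand-ins for the anthropic MessageStreamEvent objects it normally receives; the event shapes below are assumptions based only on the attributes the helper reads, and the helper itself is an internal, undocumented function:

    # Hypothetical stand-ins for anthropic stream events.
    from types import SimpleNamespace as NS

    events = [
        NS(type="content_block_start", content_block=NS(type="thinking")),
        NS(type="content_block_delta", delta=NS(thinking="Checking the units...")),
        NS(type="content_block_delta", delta=NS(signature="sig_abc")),
        NS(type="content_block_stop"),
        NS(type="content_block_start", content_block=NS(type="text")),
        NS(type="content_block_delta", delta=NS(text="Hello, ")),
        NS(type="content_block_delta", delta=NS(text="world.")),
        NS(type="content_block_stop"),
        NS(type="message_delta", usage=NS(output_tokens=12), delta=NS(stop_reason="end_turn")),
    ]

    blocks, usage, stop_reason = build_anthropic_content_blocks(events)
    assert blocks[0].thinking == "Checking the units..." and blocks[0].signature == "sig_abc"
    assert blocks[1].text == "Hello, world."
    assert usage.output_tokens == 12 and stop_reason == "end_turn"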
@@ -808,59 +854,15 @@ class GeneratorProxy:
         if getattr(last_event, "type", None) == "message_stop":
             final_result = deepcopy(self.results[0].message)
 
-            content_blocks = []
-            current_block = None
-            current_signature = ""
-            current_thinking = ""
-            current_text = ""
-
-            for event in self.results:
-                # On a new content block starting:
-                if getattr(event, "type", None) == "content_block_start":
-                    current_block = deepcopy(event.content_block)
-
-                    if getattr(event.content_block, "type", None) == "thinking":
-                        current_signature = ""
-                        current_thinking = ""
-                    elif getattr(event.content_block, "type", None) == "text":
-                        current_text = ""
-
-                elif getattr(event, "type", None) == "content_block_delta" and current_block is not None:
-                    if getattr(current_block, "type", None) == "thinking":
-                        if hasattr(event.delta, "signature"):
-                            current_signature = event.delta.signature
-                        if hasattr(event.delta, "thinking"):
-                            current_thinking += event.delta.thinking
-
-                    elif getattr(current_block, "type", None) == "text":
-                        if hasattr(event.delta, "text"):
-                            current_text += event.delta.text
-
-                elif getattr(event, "type", None) == "content_block_stop" and current_block is not None:
-                    if getattr(current_block, "type", None) == "thinking":
-                        current_block.signature = current_signature
-                        current_block.thinking = current_thinking
-                    elif getattr(current_block, "type", None) == "text":
-                        current_block.text = current_text
-
-                    content_blocks.append(current_block)
-
-                    current_block = None
-                    current_signature = ""
-                    current_thinking = ""
-                    current_text = ""
-
-            final_result.content = content_blocks
-            for event in reversed(self.results):
-                if hasattr(event, "usage") and hasattr(event.usage, "output_tokens"):
-                    final_result.usage.output_tokens = event.usage.output_tokens
-                    break
-
-            return final_result
-
-        # 3) Otherwise (not a “stream”), fall back to returning the last raw message
-        else:
-            return deepcopy(self.results[-1])
+            content_blocks, usage, stop_reason = build_anthropic_content_blocks(self.results)
+            final_result.content = content_blocks
+            if usage:
+                final_result.usage.output_tokens = usage.output_tokens
+            if stop_reason:
+                final_result.stop_reason = stop_reason
+            return final_result
+        else:
+            return deepcopy(self.results[-1])
 
         if hasattr(self.results[0].choices[0], "text"):  # this is regular completion
             response = ""
             for result in self.results:
@@ -1425,7 +1427,7 @@ async def aopenai_stream_completion(generator: AsyncIterable[Any]) -> Any:
 
 
 def anthropic_stream_message(results: list):
-    from anthropic.types import Message, MessageStreamEvent, TextBlock, Usage
+    from anthropic.types import Message, MessageStreamEvent, Usage
 
     message_stream_events: List[MessageStreamEvent] = results
     response: Message = Message(
@@ -1438,24 +1440,24 @@ def anthropic_stream_message(results: list):
         stop_sequence=None,
         usage=Usage(input_tokens=0, output_tokens=0),
     )
-    content = ""
-    for result in message_stream_events:
-        if result.type == "message_start":
-            response = result.message
-        elif result.type == "content_block_delta":
-            if result.delta.type == "text_delta":
-                content = f"{content}{result.delta.text}"
-        elif result.type == "message_delta":
-            if hasattr(result, "usage"):
-                response.usage.output_tokens = result.usage.output_tokens
-            if hasattr(result.delta, "stop_reason"):
-                response.stop_reason = result.delta.stop_reason
-    response.content.append(TextBlock(type="text", text=content))
+
+    for event in message_stream_events:
+        if event.type == "message_start":
+            response = event.message
+            break
+
+    content_blocks, usage, stop_reason = build_anthropic_content_blocks(message_stream_events)
+    response.content = content_blocks
+    if usage:
+        response.usage.output_tokens = usage.output_tokens
+    if stop_reason:
+        response.stop_reason = stop_reason
+
     return response
 
 
 async def aanthropic_stream_message(generator: AsyncIterable[Any]) -> Any:
-    from anthropic.types import Message, MessageStreamEvent, TextBlock, Usage
+    from anthropic.types import Message, MessageStreamEvent, Usage
 
     message_stream_events: List[MessageStreamEvent] = []
     response: Message = Message(
@@ -1468,22 +1470,19 @@ async def aanthropic_stream_message(generator: AsyncIterable[Any]) -> Any:
         stop_sequence=None,
         usage=Usage(input_tokens=0, output_tokens=0),
     )
-    content = ""
 
-    async for result in generator:
-        message_stream_events.append(result)
-        if result.type == "message_start":
-            response = result.message
-        elif result.type == "content_block_delta":
-            if result.delta.type == "text_delta":
-                content = f"{content}{result.delta.text}"
-        elif result.type == "message_delta":
-            if hasattr(result, "usage"):
-                response.usage.output_tokens = result.usage.output_tokens
-            if hasattr(result.delta, "stop_reason"):
-                response.stop_reason = result.delta.stop_reason
-
-    response.content.append(TextBlock(type="text", text=content))
+    async for event in generator:
+        if event.type == "message_start":
+            response = event.message
+        message_stream_events.append(event)
+
+    content_blocks, usage, stop_reason = build_anthropic_content_blocks(message_stream_events)
+    response.content = content_blocks
+    if usage:
+        response.usage.output_tokens = usage.output_tokens
+    if stop_reason:
+        response.stop_reason = stop_reason
+
     return response
 
 
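For context, a hedged end-to-end sketch: with stream=True the anthropic SDK yields the raw events these helpers consume, and anthropic_stream_message (an internal helper, not a documented public API) folds them back into a single Message. Model name and prompt are illustrative:

    # Assumes ANTHROPIC_API_KEY is set in the environment and that
    # anthropic_stream_message is imported from promptlayer's utils module.
    import anthropic

    client = anthropic.Anthropic()
    events = list(
        client.messages.create(
            model="claude-3-5-sonnet-latest",  # illustrative model choice
            max_tokens=256,
            messages=[{"role": "user", "content": "Say hello."}],
            stream=True,
        )
    )

    message = anthropic_stream_message(events)
    print(message.stop_reason, message.usage.output_tokens)
    for block in message.content:
        print(block.type)  # "thinking" blocks (if any) are preserved alongside "text"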
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "promptlayer"
-version = "1.0.53"
+version = "1.0.54"
 description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
 authors = ["Magniv <hello@magniv.io>"]
 license = "Apache-2.0"