promptlayer 1.0.62__tar.gz → 1.0.63__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of promptlayer might be problematic. See the registry's advisory page for more details.

Files changed (22) hide show
  1. {promptlayer-1.0.62 → promptlayer-1.0.63}/PKG-INFO +1 -1
  2. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/__init__.py +1 -1
  3. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/promptlayer_mixins.py +24 -0
  4. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/streaming/stream_processor.py +1 -1
  5. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/utils.py +42 -0
  6. {promptlayer-1.0.62 → promptlayer-1.0.63}/pyproject.toml +1 -1
  7. {promptlayer-1.0.62 → promptlayer-1.0.63}/LICENSE +0 -0
  8. {promptlayer-1.0.62 → promptlayer-1.0.63}/README.md +0 -0
  9. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/groups/__init__.py +0 -0
  10. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/groups/groups.py +0 -0
  11. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/promptlayer.py +0 -0
  12. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/promptlayer_base.py +0 -0
  13. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/span_exporter.py +0 -0
  14. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/streaming/__init__.py +0 -0
  15. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/streaming/blueprint_builder.py +0 -0
  16. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/streaming/response_handlers.py +0 -0
  17. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/templates.py +0 -0
  18. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/track/__init__.py +0 -0
  19. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/track/track.py +0 -0
  20. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/types/__init__.py +0 -0
  21. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/types/prompt_template.py +0 -0
  22. {promptlayer-1.0.62 → promptlayer-1.0.63}/promptlayer/types/request_log.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: promptlayer
3
- Version: 1.0.62
3
+ Version: 1.0.63
4
4
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
5
5
  License: Apache-2.0
6
6
  Author: Magniv
@@ -1,4 +1,4 @@
1
1
  from .promptlayer import AsyncPromptLayer, PromptLayer
2
2
 
3
- __version__ = "1.0.62"
3
+ __version__ = "1.0.63"
4
4
  __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
@@ -27,10 +27,12 @@ from promptlayer.streaming import (
27
27
  openai_stream_completion,
28
28
  )
29
29
  from promptlayer.utils import (
30
+ aanthropic_bedrock_request,
30
31
  aanthropic_request,
31
32
  aazure_openai_request,
32
33
  agoogle_request,
33
34
  amistral_request,
35
+ anthropic_bedrock_request,
34
36
  anthropic_request,
35
37
  aopenai_request,
36
38
  avertexai_request,
@@ -92,6 +94,16 @@ MAP_PROVIDER_TO_FUNCTION_NAME = {
92
94
  "stream_function": google_stream_completion,
93
95
  },
94
96
  },
97
+ "anthropic.bedrock": {
98
+ "chat": {
99
+ "function_name": "anthropic.messages.create",
100
+ "stream_function": anthropic_stream_message,
101
+ },
102
+ "completion": {
103
+ "function_name": "anthropic.completions.create",
104
+ "stream_function": anthropic_stream_completion,
105
+ },
106
+ },
95
107
  }
96
108
 
97
109
 
@@ -102,6 +114,7 @@ MAP_PROVIDER_TO_FUNCTION = {
102
114
  "openai": openai_request,
103
115
  "openai.azure": azure_openai_request,
104
116
  "vertexai": vertexai_request,
117
+ "anthropic.bedrock": anthropic_bedrock_request,
105
118
  }
106
119
 
107
120
  AMAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -155,6 +168,16 @@ AMAP_PROVIDER_TO_FUNCTION_NAME = {
155
168
  "stream_function": agoogle_stream_completion,
156
169
  },
157
170
  },
171
+ "anthropic.bedrock": {
172
+ "chat": {
173
+ "function_name": "anthropic.messages.create",
174
+ "stream_function": aanthropic_stream_message,
175
+ },
176
+ "completion": {
177
+ "function_name": "anthropic.completions.create",
178
+ "stream_function": aanthropic_stream_completion,
179
+ },
180
+ },
158
181
  }
159
182
 
160
183
 
@@ -165,6 +188,7 @@ AMAP_PROVIDER_TO_FUNCTION = {
165
188
  "openai": aopenai_request,
166
189
  "openai.azure": aazure_openai_request,
167
190
  "vertexai": avertexai_request,
191
+ "anthropic.bedrock": aanthropic_bedrock_request,
168
192
  }
169
193
 
170
194
 
@@ -18,7 +18,7 @@ def _build_stream_blueprint(result: Any, metadata: Dict) -> Any:
18
18
  elif provider == "google" or (provider == "vertexai" and model_name.startswith("gemini")):
19
19
  return build_prompt_blueprint_from_google_event(result, metadata)
20
20
 
21
- elif provider == "anthropic" or (provider == "vertexai" and model_name.startswith("claude")):
21
+ elif provider in ["anthropic", "anthropic.bedrock"] or (provider == "vertexai" and model_name.startswith("claude")):
22
22
  return build_prompt_blueprint_from_anthropic_event(result, metadata)
23
23
 
24
24
  elif provider == "mistral":
@@ -1542,3 +1542,45 @@ async def avertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_
1542
1542
  raise NotImplementedError(
1543
1543
  f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
1544
1544
  )
1545
+
1546
+
1547
+ def anthropic_bedrock_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
1548
+ from anthropic import AnthropicBedrock
1549
+
1550
+ client = AnthropicBedrock(
1551
+ aws_access_key=function_kwargs.pop("aws_access_key", None),
1552
+ aws_secret_key=function_kwargs.pop("aws_secret_key", None),
1553
+ aws_region=function_kwargs.pop("aws_region", None),
1554
+ aws_session_token=function_kwargs.pop("aws_session_token", None),
1555
+ base_url=function_kwargs.pop("base_url", None),
1556
+ **client_kwargs,
1557
+ )
1558
+ if prompt_blueprint["prompt_template"]["type"] == "chat":
1559
+ return anthropic_chat_request(client=client, **function_kwargs)
1560
+ elif prompt_blueprint["prompt_template"]["type"] == "completion":
1561
+ return anthropic_completions_request(client=client, **function_kwargs)
1562
+ raise NotImplementedError(
1563
+ f"Unsupported prompt template type '{prompt_blueprint['prompt_template']['type']}' for Anthropic Bedrock"
1564
+ )
1565
+
1566
+
1567
+ async def aanthropic_bedrock_request(
1568
+ prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict
1569
+ ):
1570
+ from anthropic import AsyncAnthropicBedrock
1571
+
1572
+ client = AsyncAnthropicBedrock(
1573
+ aws_access_key=function_kwargs.pop("aws_access_key", None),
1574
+ aws_secret_key=function_kwargs.pop("aws_secret_key", None),
1575
+ aws_region=function_kwargs.pop("aws_region", None),
1576
+ aws_session_token=function_kwargs.pop("aws_session_token", None),
1577
+ base_url=function_kwargs.pop("base_url", None),
1578
+ **client_kwargs,
1579
+ )
1580
+ if prompt_blueprint["prompt_template"]["type"] == "chat":
1581
+ return await aanthropic_chat_request(client=client, **function_kwargs)
1582
+ elif prompt_blueprint["prompt_template"]["type"] == "completion":
1583
+ return await aanthropic_completions_request(client=client, **function_kwargs)
1584
+ raise NotImplementedError(
1585
+ f"Unsupported prompt template type '{prompt_blueprint['prompt_template']['type']}' for Anthropic Bedrock"
1586
+ )
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "promptlayer"
3
- version = "1.0.62"
3
+ version = "1.0.63"
4
4
  description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
5
5
  authors = ["Magniv <hello@magniv.io>"]
6
6
  license = "Apache-2.0"
File without changes
File without changes