promptlayer 1.0.62__tar.gz → 1.0.64__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of promptlayer might be problematic.
- {promptlayer-1.0.62 → promptlayer-1.0.64}/PKG-INFO +1 -1
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/__init__.py +1 -1
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/promptlayer_mixins.py +27 -4
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/streaming/stream_processor.py +1 -1
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/utils.py +42 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/pyproject.toml +1 -1
- {promptlayer-1.0.62 → promptlayer-1.0.64}/LICENSE +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/README.md +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/groups/__init__.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/groups/groups.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/promptlayer.py +2 -2
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/promptlayer_base.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/span_exporter.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/streaming/__init__.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/streaming/blueprint_builder.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/streaming/response_handlers.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/templates.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/track/__init__.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/track/track.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/types/__init__.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/types/prompt_template.py +0 -0
- {promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/types/request_log.py +0 -0
{promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/promptlayer_mixins.py

@@ -27,10 +27,12 @@ from promptlayer.streaming import (
     openai_stream_completion,
 )
 from promptlayer.utils import (
+    aanthropic_bedrock_request,
     aanthropic_request,
     aazure_openai_request,
     agoogle_request,
     amistral_request,
+    anthropic_bedrock_request,
     anthropic_request,
     aopenai_request,
     avertexai_request,
@@ -92,6 +94,16 @@ MAP_PROVIDER_TO_FUNCTION_NAME = {
             "stream_function": google_stream_completion,
         },
     },
+    "anthropic.bedrock": {
+        "chat": {
+            "function_name": "anthropic.messages.create",
+            "stream_function": anthropic_stream_message,
+        },
+        "completion": {
+            "function_name": "anthropic.completions.create",
+            "stream_function": anthropic_stream_completion,
+        },
+    },
 }

@@ -102,6 +114,7 @@ MAP_PROVIDER_TO_FUNCTION = {
     "openai": openai_request,
     "openai.azure": azure_openai_request,
     "vertexai": vertexai_request,
+    "anthropic.bedrock": anthropic_bedrock_request,
 }

 AMAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -155,6 +168,16 @@ AMAP_PROVIDER_TO_FUNCTION_NAME = {
             "stream_function": agoogle_stream_completion,
         },
     },
+    "anthropic.bedrock": {
+        "chat": {
+            "function_name": "anthropic.messages.create",
+            "stream_function": aanthropic_stream_message,
+        },
+        "completion": {
+            "function_name": "anthropic.completions.create",
+            "stream_function": aanthropic_stream_completion,
+        },
+    },
 }

@@ -165,6 +188,7 @@ AMAP_PROVIDER_TO_FUNCTION = {
     "openai": aopenai_request,
     "openai.azure": aazure_openai_request,
     "vertexai": avertexai_request,
+    "anthropic.bedrock": aanthropic_bedrock_request,
 }
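The new map entries above mirror the shape used for the existing providers: MAP_PROVIDER_TO_FUNCTION resolves a provider key to a request function, while MAP_PROVIDER_TO_FUNCTION_NAME resolves provider plus template type to a log label and stream handler. A self-contained sketch of the lookup these maps enable follows; the dispatch wiring itself is assumed and is not part of this diff:

    def anthropic_bedrock_request(prompt_blueprint, client_kwargs, function_kwargs):
        ...  # stand-in for the real function added in utils.py below

    # Stand-in maps; only their shape matches the hunks above.
    MAP_PROVIDER_TO_FUNCTION = {"anthropic.bedrock": anthropic_bedrock_request}
    MAP_PROVIDER_TO_FUNCTION_NAME = {
        "anthropic.bedrock": {
            "chat": {"function_name": "anthropic.messages.create"},
        },
    }

    provider, template_type = "anthropic.bedrock", "chat"
    request_fn = MAP_PROVIDER_TO_FUNCTION[provider]  # callable that performs the request
    log_label = MAP_PROVIDER_TO_FUNCTION_NAME[provider][template_type]["function_name"]  # name recorded for the request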
@@ -191,6 +215,7 @@ class PromptLayerMixin:
         metadata: Union[Dict[str, str], None],
         provider: Union[str, None] = None,
         model: Union[str, None] = None,
+        model_parameter_overrides: Union[Dict[str, Any], None] = None,
     ) -> Dict[str, Any]:
         params = {}

@@ -206,6 +231,8 @@
             params["provider"] = provider
         if model:
             params["model"] = model
+        if model_parameter_overrides:
+            params["model_parameter_overrides"] = model_parameter_overrides

         return params

@@ -215,7 +242,6 @@
         prompt_blueprint,
         prompt_template,
         prompt_blueprint_model,
-        model_parameter_overrides,
         stream,
         is_async=False,
     ):
@@ -233,9 +259,6 @@
         elif provider_base_url := prompt_blueprint.get("provider_base_url"):
             client_kwargs["base_url"] = provider_base_url["url"]

-        if model_parameter_overrides:
-            function_kwargs.update(model_parameter_overrides)
-
         if stream and provider in ["openai", "openai.azure"]:
             function_kwargs["stream_options"] = {"include_usage": True}
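Note the relocation in the last three hunks: 1.0.62 applied model_parameter_overrides on the client by merging them into function_kwargs just before the provider call, whereas 1.0.64 sends them along when the prompt template is fetched, so the overrides are presumably resolved into the returned blueprint server-side. A sketch of the params dict the fetch now carries; the key names come from the hunks above, the values are made up for illustration:

    get_prompt_template_params = {
        "provider": "anthropic.bedrock",
        "model": "claude-3-haiku",  # illustrative model name
        "model_parameter_overrides": {"temperature": 0.2, "max_tokens": 256},
    }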
{promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/streaming/stream_processor.py

@@ -18,7 +18,7 @@ def _build_stream_blueprint(result: Any, metadata: Dict) -> Any:
     elif provider == "google" or (provider == "vertexai" and model_name.startswith("gemini")):
         return build_prompt_blueprint_from_google_event(result, metadata)

-    elif provider == "anthropic" or (provider == "vertexai" and model_name.startswith("claude")):
+    elif provider in ["anthropic", "anthropic.bedrock"] or (provider == "vertexai" and model_name.startswith("claude")):
         return build_prompt_blueprint_from_anthropic_event(result, metadata)

     elif provider == "mistral":
{promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/utils.py

@@ -1542,3 +1542,45 @@ async def avertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_
     raise NotImplementedError(
         f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
     )
+
+
+def anthropic_bedrock_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
+    from anthropic import AnthropicBedrock
+
+    client = AnthropicBedrock(
+        aws_access_key=function_kwargs.pop("aws_access_key", None),
+        aws_secret_key=function_kwargs.pop("aws_secret_key", None),
+        aws_region=function_kwargs.pop("aws_region", None),
+        aws_session_token=function_kwargs.pop("aws_session_token", None),
+        base_url=function_kwargs.pop("base_url", None),
+        **client_kwargs,
+    )
+    if prompt_blueprint["prompt_template"]["type"] == "chat":
+        return anthropic_chat_request(client=client, **function_kwargs)
+    elif prompt_blueprint["prompt_template"]["type"] == "completion":
+        return anthropic_completions_request(client=client, **function_kwargs)
+    raise NotImplementedError(
+        f"Unsupported prompt template type {prompt_blueprint['prompt_template']['type']}' for Anthropic Bedrock"
+    )
+
+
+async def aanthropic_bedrock_request(
+    prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict
+):
+    from anthropic import AsyncAnthropicBedrock
+
+    client = AsyncAnthropicBedrock(
+        aws_access_key=function_kwargs.pop("aws_access_key", None),
+        aws_secret_key=function_kwargs.pop("aws_secret_key", None),
+        aws_region=function_kwargs.pop("aws_region", None),
+        aws_session_token=function_kwargs.pop("aws_session_token", None),
+        base_url=function_kwargs.pop("base_url", None),
+        **client_kwargs,
+    )
+    if prompt_blueprint["prompt_template"]["type"] == "chat":
+        return await aanthropic_chat_request(client=client, **function_kwargs)
+    elif prompt_blueprint["prompt_template"]["type"] == "completion":
+        return await aanthropic_completions_request(client=client, **function_kwargs)
+    raise NotImplementedError(
+        f"Unsupported prompt template type {prompt_blueprint['prompt_template']['type']}' for Anthropic Bedrock"
+    )
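Both helpers pop the AWS credentials out of function_kwargs before delegating to the shared Anthropic chat/completions paths, so any value left unset is passed as None and the SDK's normal AWS credential resolution applies. For reference, a minimal direct use of the client they construct; the region and model ID here are examples, not values from this package:

    from anthropic import AnthropicBedrock

    # Credentials omitted here fall back to the AWS default chain, matching
    # the pop(..., None) defaults in the helper above.
    client = AnthropicBedrock(aws_region="us-east-1")  # example region
    message = client.messages.create(
        model="anthropic.claude-3-haiku-20240307-v1:0",  # example Bedrock model ID
        max_tokens=64,
        messages=[{"role": "user", "content": "Say hello."}],
    )
    print(message.content[0].text)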
{promptlayer-1.0.62 → promptlayer-1.0.64}/promptlayer/promptlayer.py

@@ -135,6 +135,7 @@ class PromptLayer(PromptLayerMixin):
             metadata=metadata,
             provider=provider,
             model=model,
+            model_parameter_overrides=model_parameter_overrides,
         )
         prompt_blueprint = self.templates.get(prompt_name, get_prompt_template_params)
         prompt_blueprint_model = self._validate_and_extract_model_from_prompt_blueprint(
@@ -144,7 +145,6 @@
             prompt_blueprint=prompt_blueprint,
             prompt_template=prompt_blueprint["prompt_template"],
             prompt_blueprint_model=prompt_blueprint_model,
-            model_parameter_overrides=model_parameter_overrides,
             stream=stream,
         )

@@ -572,6 +572,7 @@ class AsyncPromptLayer(PromptLayerMixin):
             metadata=metadata,
             provider=provider,
             model=model,
+            model_parameter_overrides=model_parameter_overrides,
         )
         prompt_blueprint = await self.templates.get(prompt_name, get_prompt_template_params)
         prompt_blueprint_model = self._validate_and_extract_model_from_prompt_blueprint(
@@ -581,7 +582,6 @@
             prompt_blueprint=prompt_blueprint,
             prompt_template=prompt_blueprint["prompt_template"],
             prompt_blueprint_model=prompt_blueprint_model,
-            model_parameter_overrides=model_parameter_overrides,
             stream=stream,
             is_async=True,
         )
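Together with the mixin changes, these four hunks mean model_parameter_overrides now travels with the template fetch in both the sync and async clients instead of being merged into the provider call's kwargs. A hypothetical end-to-end call, assuming run() exposes these parameters as the hunks suggest; the prompt name and override values are invented for illustration:

    from promptlayer import PromptLayer

    pl = PromptLayer(api_key="pl_...")  # placeholder key
    response = pl.run(
        prompt_name="welcome-email",  # hypothetical template name
        provider="anthropic.bedrock",
        model_parameter_overrides={"temperature": 0.2},
    )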