promptlayer 1.0.57__tar.gz → 1.0.59__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of promptlayer might be problematic. Click here for more details.
- {promptlayer-1.0.57 → promptlayer-1.0.59}/PKG-INFO +1 -1
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/__init__.py +1 -1
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/promptlayer_mixins.py +13 -2
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/utils.py +48 -2
- {promptlayer-1.0.57 → promptlayer-1.0.59}/pyproject.toml +2 -2
- {promptlayer-1.0.57 → promptlayer-1.0.59}/LICENSE +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/README.md +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/groups/__init__.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/groups/groups.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/promptlayer.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/promptlayer_base.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/span_exporter.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/templates.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/track/__init__.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/track/track.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/types/__init__.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/types/prompt_template.py +0 -0
- {promptlayer-1.0.57 → promptlayer-1.0.59}/promptlayer/types/request_log.py +0 -0
|
@@ -26,6 +26,7 @@ from promptlayer.utils import (
|
|
|
26
26
|
aopenai_request,
|
|
27
27
|
aopenai_stream_chat,
|
|
28
28
|
aopenai_stream_completion,
|
|
29
|
+
avertexai_request,
|
|
29
30
|
azure_openai_request,
|
|
30
31
|
google_request,
|
|
31
32
|
google_stream_chat,
|
|
@@ -35,6 +36,7 @@ from promptlayer.utils import (
|
|
|
35
36
|
openai_request,
|
|
36
37
|
openai_stream_chat,
|
|
37
38
|
openai_stream_completion,
|
|
39
|
+
vertexai_request,
|
|
38
40
|
)
|
|
39
41
|
|
|
40
42
|
MAP_PROVIDER_TO_FUNCTION_NAME = {
|
|
@@ -97,6 +99,7 @@ MAP_PROVIDER_TO_FUNCTION = {
|
|
|
97
99
|
"mistral": mistral_request,
|
|
98
100
|
"openai": openai_request,
|
|
99
101
|
"openai.azure": azure_openai_request,
|
|
102
|
+
"vertexai": vertexai_request,
|
|
100
103
|
}
|
|
101
104
|
|
|
102
105
|
AMAP_PROVIDER_TO_FUNCTION_NAME = {
|
|
@@ -159,6 +162,7 @@ AMAP_PROVIDER_TO_FUNCTION = {
|
|
|
159
162
|
"mistral": amistral_request,
|
|
160
163
|
"openai": aopenai_request,
|
|
161
164
|
"openai.azure": aazure_openai_request,
|
|
165
|
+
"vertexai": avertexai_request,
|
|
162
166
|
}
|
|
163
167
|
|
|
164
168
|
|
|
@@ -233,11 +237,18 @@ class PromptLayerMixin:
|
|
|
233
237
|
if stream and provider in ["openai", "openai.azure"]:
|
|
234
238
|
function_kwargs["stream_options"] = {"include_usage": True}
|
|
235
239
|
|
|
240
|
+
provider_function_name = provider
|
|
241
|
+
if provider_function_name == "vertexai":
|
|
242
|
+
if "gemini" in prompt_blueprint_model["name"]:
|
|
243
|
+
provider_function_name = "google"
|
|
244
|
+
elif "claude" in prompt_blueprint_model["name"]:
|
|
245
|
+
provider_function_name = "anthropic"
|
|
246
|
+
|
|
236
247
|
if is_async:
|
|
237
|
-
config = AMAP_PROVIDER_TO_FUNCTION_NAME[
|
|
248
|
+
config = AMAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][prompt_template["type"]]
|
|
238
249
|
request_function = AMAP_PROVIDER_TO_FUNCTION[provider]
|
|
239
250
|
else:
|
|
240
|
-
config = MAP_PROVIDER_TO_FUNCTION_NAME[
|
|
251
|
+
config = MAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][prompt_template["type"]]
|
|
241
252
|
request_function = MAP_PROVIDER_TO_FUNCTION[provider]
|
|
242
253
|
|
|
243
254
|
return {
|
|
@@ -1911,7 +1911,7 @@ def google_chat_request(client, **kwargs):
|
|
|
1911
1911
|
history = [Content(**item) for item in kwargs.get("history", [])]
|
|
1912
1912
|
generation_config = kwargs.get("generation_config", {})
|
|
1913
1913
|
chat = client.chats.create(model=model, history=history, config=generation_config)
|
|
1914
|
-
last_message = history[-1].parts[0] if history else
|
|
1914
|
+
last_message = history[-1].parts[0] if history else ""
|
|
1915
1915
|
if stream:
|
|
1916
1916
|
return chat.send_message_stream(message=last_message)
|
|
1917
1917
|
return chat.send_message(message=last_message)
|
|
@@ -2008,7 +2008,7 @@ async def agoogle_chat_request(client, **kwargs):
|
|
|
2008
2008
|
history = [Content(**item) for item in kwargs.get("history", [])]
|
|
2009
2009
|
generation_config = kwargs.get("generation_config", {})
|
|
2010
2010
|
chat = client.aio.chats.create(model=model, history=history, config=generation_config)
|
|
2011
|
-
last_message = history[-1].parts[0] if history else
|
|
2011
|
+
last_message = history[-1].parts[0] if history else ""
|
|
2012
2012
|
if stream:
|
|
2013
2013
|
return await chat.send_message_stream(message=last_message)
|
|
2014
2014
|
return await chat.send_message(message=last_message)
|
|
@@ -2072,3 +2072,49 @@ async def agoogle_stream_chat(generator: AsyncIterable[Any]):
|
|
|
2072
2072
|
|
|
2073
2073
|
async def agoogle_stream_completion(generator: AsyncIterable[Any]):
|
|
2074
2074
|
return await amap_google_stream_response(generator)
|
|
2075
|
+
|
|
2076
|
+
|
|
2077
|
+
def vertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
|
|
2078
|
+
if "gemini" in prompt_blueprint["metadata"]["model"]["name"]:
|
|
2079
|
+
return google_request(
|
|
2080
|
+
prompt_blueprint=prompt_blueprint,
|
|
2081
|
+
client_kwargs=client_kwargs,
|
|
2082
|
+
function_kwargs=function_kwargs,
|
|
2083
|
+
)
|
|
2084
|
+
|
|
2085
|
+
if "claude" in prompt_blueprint["metadata"]["model"]["name"]:
|
|
2086
|
+
from anthropic import AnthropicVertex
|
|
2087
|
+
|
|
2088
|
+
client = AnthropicVertex(**client_kwargs)
|
|
2089
|
+
if prompt_blueprint["prompt_template"]["type"] == "chat":
|
|
2090
|
+
return anthropic_chat_request(client=client, **function_kwargs)
|
|
2091
|
+
raise NotImplementedError(
|
|
2092
|
+
f"Unsupported prompt template type {prompt_blueprint['prompt_template']['type']}' for Anthropic Vertex AI"
|
|
2093
|
+
)
|
|
2094
|
+
|
|
2095
|
+
raise NotImplementedError(
|
|
2096
|
+
f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
|
|
2097
|
+
)
|
|
2098
|
+
|
|
2099
|
+
|
|
2100
|
+
async def avertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
|
|
2101
|
+
if "gemini" in prompt_blueprint["metadata"]["model"]["name"]:
|
|
2102
|
+
return await agoogle_request(
|
|
2103
|
+
prompt_blueprint=prompt_blueprint,
|
|
2104
|
+
client_kwargs=client_kwargs,
|
|
2105
|
+
function_kwargs=function_kwargs,
|
|
2106
|
+
)
|
|
2107
|
+
|
|
2108
|
+
if "claude" in prompt_blueprint["metadata"]["model"]["name"]:
|
|
2109
|
+
from anthropic import AsyncAnthropicVertex
|
|
2110
|
+
|
|
2111
|
+
client = AsyncAnthropicVertex(**client_kwargs)
|
|
2112
|
+
if prompt_blueprint["prompt_template"]["type"] == "chat":
|
|
2113
|
+
return await aanthropic_chat_request(client=client, **function_kwargs)
|
|
2114
|
+
raise NotImplementedError(
|
|
2115
|
+
f"Unsupported prompt template type {prompt_blueprint['prompt_template']['type']}' for Anthropic Vertex AI"
|
|
2116
|
+
)
|
|
2117
|
+
|
|
2118
|
+
raise NotImplementedError(
|
|
2119
|
+
f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
|
|
2120
|
+
)
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[tool.poetry]
|
|
2
2
|
name = "promptlayer"
|
|
3
|
-
version = "1.0.57"
|
|
3
|
+
version = "1.0.59"
|
|
4
4
|
description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
|
|
5
5
|
authors = ["Magniv <hello@magniv.io>"]
|
|
6
6
|
license = "Apache-2.0"
|
|
@@ -22,7 +22,7 @@ pytest = "^8.2.0"
|
|
|
22
22
|
pytest-asyncio = "^0.23.6"
|
|
23
23
|
openai = "^1.60.1"
|
|
24
24
|
google-genai = "^1.5.0"
|
|
25
|
-
anthropic = "0.
|
|
25
|
+
anthropic = {extras = ["vertex"], version = "^0.57.1"}
|
|
26
26
|
# TODO(dmu) MEDIUM: Upgrade to vcrpy >= 7 once it supports urllib3 >= 2.2.2
|
|
27
27
|
vcrpy = "<7.0.0"
|
|
28
28
|
pytest-network = "^0.0.1"
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|