promptlayer 1.0.58__py3-none-any.whl → 1.0.59__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of promptlayer might be problematic.
- promptlayer/__init__.py +1 -1
- promptlayer/promptlayer_mixins.py +13 -2
- promptlayer/utils.py +46 -0
- {promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/METADATA +1 -1
- {promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/RECORD +7 -7
- {promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/LICENSE +0 -0
- {promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/WHEEL +0 -0
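
The substantive change in 1.0.59 is a new "vertexai" provider: the provider maps in promptlayer/__init__.py and the PromptLayerMixin dispatch gain routing for Google Vertex AI models, covering both Gemini and Claude variants (see the reconstructed hunks below). As a hedged orientation sketch, this is roughly how the change would surface to an SDK user, assuming PromptLayer's documented client/run interface and a hypothetical template "my-vertex-prompt" configured with a Vertex AI model:

# Hedged usage sketch: "my-vertex-prompt" is a hypothetical template whose
# model provider is "vertexai" (a Gemini or Claude model served on Vertex AI);
# the client and run() call follow PromptLayer's documented SDK surface.
import os

from promptlayer import PromptLayer

pl = PromptLayer(api_key=os.environ["PROMPTLAYER_API_KEY"])

# Before 1.0.59 a "vertexai" provider had no entry in the provider maps;
# with this release the request routes through vertexai_request/avertexai_request.
response = pl.run(
    prompt_name="my-vertex-prompt",
    input_variables={"question": "What changed in 1.0.59?"},
)
print(response)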
promptlayer/__init__.py CHANGED

@@ -26,6 +26,7 @@ from promptlayer.utils import (
     aopenai_request,
     aopenai_stream_chat,
     aopenai_stream_completion,
+    avertexai_request,
     azure_openai_request,
     google_request,
     google_stream_chat,
@@ -35,6 +36,7 @@ from promptlayer.utils import (
     openai_request,
     openai_stream_chat,
     openai_stream_completion,
+    vertexai_request,
 )
 
 MAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -97,6 +99,7 @@ MAP_PROVIDER_TO_FUNCTION = {
     "mistral": mistral_request,
     "openai": openai_request,
     "openai.azure": azure_openai_request,
+    "vertexai": vertexai_request,
 }
 
 AMAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -159,6 +162,7 @@ AMAP_PROVIDER_TO_FUNCTION = {
     "mistral": amistral_request,
     "openai": aopenai_request,
     "openai.azure": aazure_openai_request,
+    "vertexai": avertexai_request,
 }
 
 
@@ -233,11 +237,18 @@ class PromptLayerMixin:
         if stream and provider in ["openai", "openai.azure"]:
             function_kwargs["stream_options"] = {"include_usage": True}
 
+        provider_function_name = provider
+        if provider_function_name == "vertexai":
+            if "gemini" in prompt_blueprint_model["name"]:
+                provider_function_name = "google"
+            elif "claude" in prompt_blueprint_model["name"]:
+                provider_function_name = "anthropic"
+
         if is_async:
-            config = AMAP_PROVIDER_TO_FUNCTION_NAME[provider][prompt_template["type"]]
+            config = AMAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][prompt_template["type"]]
             request_function = AMAP_PROVIDER_TO_FUNCTION[provider]
         else:
-            config = MAP_PROVIDER_TO_FUNCTION_NAME[provider][prompt_template["type"]]
+            config = MAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][prompt_template["type"]]
             request_function = MAP_PROVIDER_TO_FUNCTION[provider]
 
         return {
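The net effect of the mixin change: for "vertexai" the per-template function-name config is borrowed from the matching native provider ("google" for Gemini, "anthropic" for Claude), while the request itself still dispatches through the new vertexai entries. A minimal standalone sketch of that lookup, where the map contents and the resolve_config helper are illustrative (the real maps are the ones patched above):

# Minimal sketch of the new lookup; the map contents and resolve_config are
# illustrative, the real maps live in promptlayer/__init__.py.
MAP_PROVIDER_TO_FUNCTION_NAME = {
    "google": {"chat": "google_chat"},
    "anthropic": {"chat": "anthropic_chat"},
}

def resolve_config(provider: str, model_name: str, template_type: str) -> str:
    # Vertex AI hosts both Gemini and Claude models, so the config is keyed
    # by the underlying model family rather than by "vertexai" itself.
    provider_function_name = provider
    if provider_function_name == "vertexai":
        if "gemini" in model_name:
            provider_function_name = "google"
        elif "claude" in model_name:
            provider_function_name = "anthropic"
    return MAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][template_type]

print(resolve_config("vertexai", "claude-3-5-sonnet", "chat"))  # anthropic_chat
print(resolve_config("vertexai", "gemini-1.5-pro", "chat"))     # google_chat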
promptlayer/utils.py CHANGED

@@ -2072,3 +2072,49 @@ async def agoogle_stream_chat(generator: AsyncIterable[Any]):
 
 async def agoogle_stream_completion(generator: AsyncIterable[Any]):
     return await amap_google_stream_response(generator)
+
+
+def vertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
+    if "gemini" in prompt_blueprint["metadata"]["model"]["name"]:
+        return google_request(
+            prompt_blueprint=prompt_blueprint,
+            client_kwargs=client_kwargs,
+            function_kwargs=function_kwargs,
+        )
+
+    if "claude" in prompt_blueprint["metadata"]["model"]["name"]:
+        from anthropic import AnthropicVertex
+
+        client = AnthropicVertex(**client_kwargs)
+        if prompt_blueprint["prompt_template"]["type"] == "chat":
+            return anthropic_chat_request(client=client, **function_kwargs)
+        raise NotImplementedError(
+            f"Unsupported prompt template type '{prompt_blueprint['prompt_template']['type']}' for Anthropic Vertex AI"
+        )
+
+    raise NotImplementedError(
+        f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
+    )
+
+
+async def avertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
+    if "gemini" in prompt_blueprint["metadata"]["model"]["name"]:
+        return await agoogle_request(
+            prompt_blueprint=prompt_blueprint,
+            client_kwargs=client_kwargs,
+            function_kwargs=function_kwargs,
+        )
+
+    if "claude" in prompt_blueprint["metadata"]["model"]["name"]:
+        from anthropic import AsyncAnthropicVertex
+
+        client = AsyncAnthropicVertex(**client_kwargs)
+        if prompt_blueprint["prompt_template"]["type"] == "chat":
+            return await aanthropic_chat_request(client=client, **function_kwargs)
+        raise NotImplementedError(
+            f"Unsupported prompt template type '{prompt_blueprint['prompt_template']['type']}' for Anthropic Vertex AI"
+        )
+
+    raise NotImplementedError(
+        f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
+    )
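Both new helpers branch on substrings of the model name stored in the blueprint metadata, and only the chat template type is wired up for Claude on Vertex. A standalone sketch of that routing with a hand-built blueprint dict (the field shapes follow the diff above; the helper and model names here are illustrative):

# Illustrative sketch of the routing in vertexai_request: Gemini models are
# delegated to the Google code path, Claude chat models to AnthropicVertex,
# and anything else raises NotImplementedError.
from typing import Any

def route_vertexai(prompt_blueprint: dict[str, Any]) -> str:
    model_name = prompt_blueprint["metadata"]["model"]["name"]
    template_type = prompt_blueprint["prompt_template"]["type"]
    if "gemini" in model_name:
        return "google_request"
    if "claude" in model_name:
        if template_type == "chat":
            return "anthropic_chat_request (via AnthropicVertex)"
        raise NotImplementedError(
            f"Unsupported prompt template type '{template_type}' for Anthropic Vertex AI"
        )
    raise NotImplementedError(f"Vertex AI request for model {model_name} is not implemented yet.")

blueprint = {
    "metadata": {"model": {"name": "claude-3-5-sonnet"}},  # illustrative model name
    "prompt_template": {"type": "chat"},
}
print(route_vertexai(blueprint))  # anthropic_chat_request (via AnthropicVertex)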
{promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/RECORD CHANGED

@@ -1,9 +1,9 @@
-promptlayer/__init__.py,sha256=
+promptlayer/__init__.py,sha256=Gj2sG2OFxyf6LP1lCysUZUGPVSegsJn2FhSc5HBc1ww,140
 promptlayer/groups/__init__.py,sha256=xhOAolLUBkr76ZHvJr29OwjCIk1V9qKQXjZCuyTJUIY,429
 promptlayer/groups/groups.py,sha256=YPROicy-TzpkrpA8vOpZS2lwvJ6VRtlbQ1S2oT1N0vM,338
 promptlayer/promptlayer.py,sha256=4MtP_byvYdVi4yZp_VCyVpPGx3iAaijwDZHPoYDfqZc,22212
 promptlayer/promptlayer_base.py,sha256=jOgXzNZlV1LKOOsXSSAOgn8o4hXn_EV0oY9Nf3Bsu_s,6872
-promptlayer/promptlayer_mixins.py,sha256=
+promptlayer/promptlayer_mixins.py,sha256=1DfNk9woiB-EWtbydFRPp55f3Y88gt6be0m1OdVcDxs,12134
 promptlayer/span_exporter.py,sha256=Pc1-zWAcjVCSykh-4rYPqiEZvzkG9xaYLVoHFY_TWaQ,2410
 promptlayer/templates.py,sha256=7ObDPMzHXjttDdJdCXA_pDL9XAnmcujIWucmgZJcOC8,1179
 promptlayer/track/__init__.py,sha256=tyweLTAY7UpYpBHWwY-T3pOPDIlGjcgccYXqU_r0694,1710
@@ -11,8 +11,8 @@ promptlayer/track/track.py,sha256=A-awcYwsSwxktrlCMchy8NITIquwxU1UXbgLZMwqrA0,31
 promptlayer/types/__init__.py,sha256=xJcvQuOk91ZBBePb40-1FDNDKYrZoH5lPE2q6_UhprM,111
 promptlayer/types/prompt_template.py,sha256=blkVBhh4u5pMhgX_Dsn78sN7Rv2Vy_zhd1-NERLXTpM,5075
 promptlayer/types/request_log.py,sha256=xU6bcxQar6GaBOJlgZTavXUV3FjE8sF_nSjPu4Ya_00,174
-promptlayer/utils.py,sha256=
-promptlayer-1.0.
-promptlayer-1.0.
-promptlayer-1.0.
-promptlayer-1.0.
+promptlayer/utils.py,sha256=lJoEMhXteIdkztX67rRoel8EcIMEcxH7JwQXXmg_yLo,75790
+promptlayer-1.0.59.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+promptlayer-1.0.59.dist-info/METADATA,sha256=xoBHjtE7yxwwx4gsuRCCqrPIgg6PxQ7vM_1u9iOa978,4819
+promptlayer-1.0.59.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+promptlayer-1.0.59.dist-info/RECORD,,
{promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/LICENSE
File without changes

{promptlayer-1.0.58.dist-info → promptlayer-1.0.59.dist-info}/WHEEL
File without changes