promptlayer 1.0.58__py3-none-any.whl → 1.0.60__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of promptlayer might be problematic.

promptlayer/__init__.py CHANGED
@@ -1,4 +1,4 @@
  from .promptlayer import AsyncPromptLayer, PromptLayer
 
- __version__ = "1.0.58"
+ __version__ = "1.0.60"
  __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
promptlayer/promptlayer.py CHANGED
@@ -320,6 +320,7 @@ class PromptLayer(PromptLayerMixin):
  price: float = 0.0,
  function_name: str = "",
  score: int = 0,
+ prompt_id: Union[int, None] = None,
  ):
  return util_log_request(
  self.api_key,
@@ -340,6 +341,7 @@ class PromptLayer(PromptLayerMixin):
  price=price,
  function_name=function_name,
  score=score,
+ prompt_id=prompt_id,
  )
 
 
@@ -478,6 +480,7 @@ class AsyncPromptLayer(PromptLayerMixin):
  price: float = 0.0,
  function_name: str = "",
  score: int = 0,
+ prompt_id: Union[int, None] = None,
  ):
  return await autil_log_request(
  self.api_key,
@@ -498,6 +501,7 @@ class AsyncPromptLayer(PromptLayerMixin):
  price=price,
  function_name=function_name,
  score=score,
+ prompt_id=prompt_id,
  )
 
  async def _create_track_request_callable(
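
The promptlayer.py change adds one optional argument, prompt_id, to log_request on both PromptLayer and AsyncPromptLayer and forwards it to the underlying log-request utility, so a manually logged request can be linked to a prompt template. A minimal usage sketch follows; only price, function_name, score, and prompt_id are confirmed by this diff, and the remaining keyword arguments and payload shapes are assumptions about the rest of the log_request signature.

from promptlayer import PromptLayer

pl = PromptLayer(api_key="pl_...")  # placeholder API key

# Only price, function_name, score, and prompt_id appear in this diff;
# the remaining keyword arguments are assumed for illustration.
pl.log_request(
    provider="openai",                         # assumed parameter
    model="gpt-4o-mini",                       # assumed parameter
    input={"type": "chat", "messages": []},    # assumed payload shape
    output={"type": "chat", "messages": []},   # assumed payload shape
    price=0.0012,
    function_name="openai.chat.completions.create",
    score=90,
    prompt_id=1234,  # new in 1.0.60: ties this log entry to a prompt template (illustrative ID)
)
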
promptlayer/promptlayer_mixins.py CHANGED
@@ -26,6 +26,7 @@ from promptlayer.utils import (
  aopenai_request,
  aopenai_stream_chat,
  aopenai_stream_completion,
+ avertexai_request,
  azure_openai_request,
  google_request,
  google_stream_chat,
@@ -35,6 +36,7 @@ from promptlayer.utils import (
  openai_request,
  openai_stream_chat,
  openai_stream_completion,
+ vertexai_request,
  )
 
  MAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -97,6 +99,7 @@ MAP_PROVIDER_TO_FUNCTION = {
  "mistral": mistral_request,
  "openai": openai_request,
  "openai.azure": azure_openai_request,
+ "vertexai": vertexai_request,
  }
 
  AMAP_PROVIDER_TO_FUNCTION_NAME = {
@@ -159,6 +162,7 @@ AMAP_PROVIDER_TO_FUNCTION = {
  "mistral": amistral_request,
  "openai": aopenai_request,
  "openai.azure": aazure_openai_request,
+ "vertexai": avertexai_request,
  }
 
 
@@ -233,11 +237,18 @@ class PromptLayerMixin:
  if stream and provider in ["openai", "openai.azure"]:
      function_kwargs["stream_options"] = {"include_usage": True}
 
+ provider_function_name = provider
+ if provider_function_name == "vertexai":
+     if "gemini" in prompt_blueprint_model["name"]:
+         provider_function_name = "google"
+     elif "claude" in prompt_blueprint_model["name"]:
+         provider_function_name = "anthropic"
+
  if is_async:
-     config = AMAP_PROVIDER_TO_FUNCTION_NAME[provider][prompt_template["type"]]
+     config = AMAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][prompt_template["type"]]
      request_function = AMAP_PROVIDER_TO_FUNCTION[provider]
  else:
-     config = MAP_PROVIDER_TO_FUNCTION_NAME[provider][prompt_template["type"]]
+     config = MAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][prompt_template["type"]]
      request_function = MAP_PROVIDER_TO_FUNCTION[provider]
 
  return {
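
The mixin registers "vertexai" in both the sync and async request maps, but, as far as this diff shows, the function-name maps gain no "vertexai" key; instead the provider is remapped to "google" or "anthropic" for that lookup based on the model name, while the request itself still dispatches to vertexai_request / avertexai_request. A self-contained sketch of the sync path, with placeholder map entries standing in for the package's full tables:

# Self-contained sketch of the 1.0.60 lookup logic; the map contents below
# are placeholders, not the package's real tables.
def vertexai_request_stub(**kwargs):
    return "dispatched via vertexai_request"

MAP_PROVIDER_TO_FUNCTION_NAME = {
    "google": {"chat": {"function_name": "<google chat fn>"}},        # placeholder config
    "anthropic": {"chat": {"function_name": "<anthropic chat fn>"}},  # placeholder config
}
MAP_PROVIDER_TO_FUNCTION = {"vertexai": vertexai_request_stub}

def resolve(provider, model_name, template_type):
    # Remap only for the function-name lookup: Gemini models borrow the
    # "google" config, Claude models borrow the "anthropic" config.
    provider_function_name = provider
    if provider_function_name == "vertexai":
        if "gemini" in model_name:
            provider_function_name = "google"
        elif "claude" in model_name:
            provider_function_name = "anthropic"
    config = MAP_PROVIDER_TO_FUNCTION_NAME[provider_function_name][template_type]
    # The request function is still looked up under the original provider key.
    request_function = MAP_PROVIDER_TO_FUNCTION[provider]
    return config, request_function

config, fn = resolve("vertexai", "claude-3-5-sonnet-v2", "chat")
print(config["function_name"], fn())  # -> <anthropic chat fn> dispatched via vertexai_request
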
promptlayer/utils.py CHANGED
@@ -2072,3 +2072,49 @@ async def agoogle_stream_chat(generator: AsyncIterable[Any]):
 
  async def agoogle_stream_completion(generator: AsyncIterable[Any]):
      return await amap_google_stream_response(generator)
+
+
+ def vertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
+     if "gemini" in prompt_blueprint["metadata"]["model"]["name"]:
+         return google_request(
+             prompt_blueprint=prompt_blueprint,
+             client_kwargs=client_kwargs,
+             function_kwargs=function_kwargs,
+         )
+
+     if "claude" in prompt_blueprint["metadata"]["model"]["name"]:
+         from anthropic import AnthropicVertex
+
+         client = AnthropicVertex(**client_kwargs)
+         if prompt_blueprint["prompt_template"]["type"] == "chat":
+             return anthropic_chat_request(client=client, **function_kwargs)
+         raise NotImplementedError(
+             f"Unsupported prompt template type {prompt_blueprint['prompt_template']['type']}' for Anthropic Vertex AI"
+         )
+
+     raise NotImplementedError(
+         f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
+     )
+
+
+ async def avertexai_request(prompt_blueprint: GetPromptTemplateResponse, client_kwargs: dict, function_kwargs: dict):
+     if "gemini" in prompt_blueprint["metadata"]["model"]["name"]:
+         return await agoogle_request(
+             prompt_blueprint=prompt_blueprint,
+             client_kwargs=client_kwargs,
+             function_kwargs=function_kwargs,
+         )
+
+     if "claude" in prompt_blueprint["metadata"]["model"]["name"]:
+         from anthropic import AsyncAnthropicVertex
+
+         client = AsyncAnthropicVertex(**client_kwargs)
+         if prompt_blueprint["prompt_template"]["type"] == "chat":
+             return await aanthropic_chat_request(client=client, **function_kwargs)
+         raise NotImplementedError(
+             f"Unsupported prompt template type {prompt_blueprint['prompt_template']['type']}' for Anthropic Vertex AI"
+         )
+
+     raise NotImplementedError(
+         f"Vertex AI request for model {prompt_blueprint['metadata']['model']['name']} is not implemented yet."
+     )
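
Both new helpers branch purely on the model name: Gemini models are forwarded to the existing google_request / agoogle_request paths, Claude models go through the anthropic SDK's Vertex clients (AnthropicVertex / AsyncAnthropicVertex), and anything else raises NotImplementedError. The sketch below shows the shape of the inputs for the Claude branch; the project and region values are placeholders, the message payload is an assumed example, and it calls client.messages.create directly rather than the package's anthropic_chat_request helper.

from anthropic import AnthropicVertex

# client_kwargs feed the AnthropicVertex constructor; values are placeholders.
client_kwargs = {"project_id": "my-gcp-project", "region": "us-east5"}

# function_kwargs carry the chat request itself; this payload is an assumed example.
function_kwargs = {
    "model": "claude-3-5-sonnet-v2@20241022",  # Vertex AI-style model id (example)
    "max_tokens": 256,
    "messages": [{"role": "user", "content": "Hello from Vertex AI"}],
}

# Mirrors the Claude branch of vertexai_request: build the Vertex client from
# client_kwargs and send a chat request built from function_kwargs.
client = AnthropicVertex(**client_kwargs)
response = client.messages.create(**function_kwargs)
print(response.content[0].text)

Because the anthropic import sits inside the function, the anthropic package (with its Vertex support installed) is only needed when a Claude model is actually routed through Vertex AI.
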
promptlayer-1.0.58.dist-info/METADATA → promptlayer-1.0.60.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: promptlayer
- Version: 1.0.58
+ Version: 1.0.60
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
  License: Apache-2.0
  Author: Magniv
promptlayer-1.0.58.dist-info/RECORD → promptlayer-1.0.60.dist-info/RECORD
@@ -1,9 +1,9 @@
- promptlayer/__init__.py,sha256=kndG0ClzL7nS-XlOQ97smsiXqqAtHhuLSk_ofAQFZBQ,140
+ promptlayer/__init__.py,sha256=7i62BmXAXcIvBDJ1JoA82MY44zPz-tzFZyvflNG979k,140
  promptlayer/groups/__init__.py,sha256=xhOAolLUBkr76ZHvJr29OwjCIk1V9qKQXjZCuyTJUIY,429
  promptlayer/groups/groups.py,sha256=YPROicy-TzpkrpA8vOpZS2lwvJ6VRtlbQ1S2oT1N0vM,338
- promptlayer/promptlayer.py,sha256=4MtP_byvYdVi4yZp_VCyVpPGx3iAaijwDZHPoYDfqZc,22212
+ promptlayer/promptlayer.py,sha256=llLxfEBX5-j8NSaOTQqSPSsmnqy-E-msi4LM4xV9OAU,22366
  promptlayer/promptlayer_base.py,sha256=jOgXzNZlV1LKOOsXSSAOgn8o4hXn_EV0oY9Nf3Bsu_s,6872
- promptlayer/promptlayer_mixins.py,sha256=MTOIx2jyebph4pu8fZA7QQPm22W_leX_CUmCiNt1K8U,11677
+ promptlayer/promptlayer_mixins.py,sha256=1DfNk9woiB-EWtbydFRPp55f3Y88gt6be0m1OdVcDxs,12134
  promptlayer/span_exporter.py,sha256=Pc1-zWAcjVCSykh-4rYPqiEZvzkG9xaYLVoHFY_TWaQ,2410
  promptlayer/templates.py,sha256=7ObDPMzHXjttDdJdCXA_pDL9XAnmcujIWucmgZJcOC8,1179
  promptlayer/track/__init__.py,sha256=tyweLTAY7UpYpBHWwY-T3pOPDIlGjcgccYXqU_r0694,1710
@@ -11,8 +11,8 @@ promptlayer/track/track.py,sha256=A-awcYwsSwxktrlCMchy8NITIquwxU1UXbgLZMwqrA0,31
  promptlayer/types/__init__.py,sha256=xJcvQuOk91ZBBePb40-1FDNDKYrZoH5lPE2q6_UhprM,111
  promptlayer/types/prompt_template.py,sha256=blkVBhh4u5pMhgX_Dsn78sN7Rv2Vy_zhd1-NERLXTpM,5075
  promptlayer/types/request_log.py,sha256=xU6bcxQar6GaBOJlgZTavXUV3FjE8sF_nSjPu4Ya_00,174
- promptlayer/utils.py,sha256=J4QyN91Qn2aXMPoEOwYPDhSz43J2QJp4bxfbkXwQgkc,73815
- promptlayer-1.0.58.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- promptlayer-1.0.58.dist-info/METADATA,sha256=s4q7LYZADyOi5ma2WTuhXgvsvotDKssvE5cVB-er3cQ,4819
- promptlayer-1.0.58.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- promptlayer-1.0.58.dist-info/RECORD,,
+ promptlayer/utils.py,sha256=lJoEMhXteIdkztX67rRoel8EcIMEcxH7JwQXXmg_yLo,75790
+ promptlayer-1.0.60.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ promptlayer-1.0.60.dist-info/METADATA,sha256=_Xo7wCnK6PjRLC4nUEXjhVA-O_Es7sdU6xriDD4uSHk,4819
+ promptlayer-1.0.60.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ promptlayer-1.0.60.dist-info/RECORD,,