promptlayer 0.1.79__tar.gz → 0.1.80__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of promptlayer might be problematic.

Files changed (20)
  1. {promptlayer-0.1.79 → promptlayer-0.1.80}/PKG-INFO +1 -1
  2. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/prompts/prompts.py +17 -7
  3. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/track/track.py +2 -2
  4. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/utils.py +13 -5
  5. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer.egg-info/PKG-INFO +1 -1
  6. {promptlayer-0.1.79 → promptlayer-0.1.80}/setup.py +1 -1
  7. {promptlayer-0.1.79 → promptlayer-0.1.80}/LICENSE +0 -0
  8. {promptlayer-0.1.79 → promptlayer-0.1.80}/README.md +0 -0
  9. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/__init__.py +0 -0
  10. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/langchain/__init__.py +0 -0
  11. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/langchain/llms/__init__.py +0 -0
  12. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/langchain/llms/openai.py +0 -0
  13. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/promptlayer.py +0 -0
  14. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/prompts/__init__.py +0 -0
  15. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer/track/__init__.py +0 -0
  16. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer.egg-info/SOURCES.txt +0 -0
  17. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer.egg-info/dependency_links.txt +0 -0
  18. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer.egg-info/requires.txt +0 -0
  19. {promptlayer-0.1.79 → promptlayer-0.1.80}/promptlayer.egg-info/top_level.txt +0 -0
  20. {promptlayer-0.1.79 → promptlayer-0.1.80}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: promptlayer
-Version: 0.1.79
+Version: 0.1.80
 Summary: PromptLayer is a package to keep track of your GPT models training
 Home-page: https://www.promptlayer.com
 Author: Magniv
@@ -1,22 +1,32 @@
-from promptlayer.utils import get_api_key, promptlayer_get_prompt, promptlayer_publish_prompt
-from langchain.prompts.loading import load_prompt_from_config
 from langchain import PromptTemplate
+from langchain.prompts.loading import load_prompt_from_config
+
+from promptlayer.utils import (get_api_key, promptlayer_get_prompt,
+                               promptlayer_publish_prompt)
 
-def get_prompt(prompt_name, langchain=False):
+
+def get_prompt(prompt_name, langchain=False, version=None):
+    """
+    Get a prompt template from PromptLayer.
+    version: The version of the prompt to get. If not specified, the latest version will be returned.
+    """
     api_key = get_api_key()
-    prompt = promptlayer_get_prompt(prompt_name, api_key)
+    prompt = promptlayer_get_prompt(prompt_name, api_key, version)
     if langchain:
         if "_type" not in prompt["prompt_template"]:
             prompt["prompt_template"]["_type"] = "prompt"
         return load_prompt_from_config(prompt["prompt_template"])
     else:
         return prompt["prompt_template"]
-
+
+
 def publish_prompt(prompt_name, tags=[], prompt_template=None):
     api_key = get_api_key()
     if type(prompt_template) == dict:
         promptlayer_publish_prompt(prompt_name, prompt_template, tags, api_key)
     elif isinstance(prompt_template, PromptTemplate):
-        promptlayer_publish_prompt(prompt_name, prompt_template.dict(), tags, api_key)
+        promptlayer_publish_prompt(
+            prompt_name, prompt_template.dict(), tags, api_key)
     else:
-        raise Exception("Please provide either a JSON prompt template or a langchain prompt template.")
+        raise Exception(
+            "Please provide either a JSON prompt template or a langchain prompt template.")
@@ -1,9 +1,9 @@
 from promptlayer.utils import get_api_key, promptlayer_track_prompt, promptlayer_track_metadata, promptlayer_track_score
 
-def prompt(request_id, prompt_name, prompt_input_variables):
+def prompt(request_id, prompt_name, prompt_input_variables, version=None):
     if not isinstance(prompt_input_variables, dict):
         raise Exception("Please provide a dictionary of input variables.")
-    return promptlayer_track_prompt(request_id, prompt_name, prompt_input_variables, get_api_key())
+    return promptlayer_track_prompt(request_id, prompt_name, prompt_input_variables, get_api_key(), version)
 
 
 def metadata(request_id, metadata):
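Usage note (not part of the diff): a minimal sketch of the new version parameter on the track prompt function, which is forwarded to promptlayer_track_prompt. The request_id, prompt name, and input variables below are hypothetical; request_id would normally come from a previously tracked PromptLayer request.

    from promptlayer.track.track import prompt as track_prompt

    request_id = 12345  # hypothetical id from an earlier tracked request

    # Associate the tracked request with version 2 of the stored prompt template;
    # prompt_input_variables must be a dict or an Exception is raised.
    track_prompt(request_id, "my_prompt", {"topic": "weather"}, version=2)
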
@@ -176,11 +176,16 @@ def promptlayer_api_request_async(
     )
 
 
-def promptlayer_get_prompt(prompt_name, api_key):
+def promptlayer_get_prompt(prompt_name, api_key, version=None):
+    """
+    Get a prompt from the PromptLayer library
+    version: version of the prompt to get, None for latest
+    """
     try:
         request_response = requests.post(
             f"{URL_API_PROMPTLAYER}/library-get-prompt-template",
-            json={"prompt_name": prompt_name, "api_key": api_key,},
+            json={"prompt_name": prompt_name,
+                  "api_key": api_key, 'version': version},
         )
         if request_response.status_code != 200:
             if hasattr(request_response, "json"):
@@ -225,7 +230,7 @@ def promptlayer_publish_prompt(prompt_name, prompt_template, tags, api_key):
     return True
 
 
-def promptlayer_track_prompt(request_id, prompt_name, input_variables, api_key):
+def promptlayer_track_prompt(request_id, prompt_name, input_variables, api_key, version):
     try:
         request_response = requests.post(
             f"{URL_API_PROMPTLAYER}/library-track-prompt",
@@ -234,6 +239,7 @@ def promptlayer_track_prompt(request_id, prompt_name, input_variables, api_key):
                 "prompt_name": prompt_name,
                 "prompt_input_variables": input_variables,
                 "api_key": api_key,
+                "version": version,
             },
         )
         if request_response.status_code != 200:
@@ -262,7 +268,8 @@ def promptlayer_track_metadata(request_id, metadata, api_key):
     try:
         request_response = requests.post(
             f"{URL_API_PROMPTLAYER}/library-track-metadata",
-            json={"request_id": request_id, "metadata": metadata, "api_key": api_key,},
+            json={"request_id": request_id,
+                  "metadata": metadata, "api_key": api_key, },
         )
         if request_response.status_code != 200:
             if hasattr(request_response, "json"):
@@ -290,7 +297,8 @@ def promptlayer_track_score(request_id, score, api_key):
     try:
         request_response = requests.post(
             f"{URL_API_PROMPTLAYER}/library-track-score",
-            json={"request_id": request_id, "score": score, "api_key": api_key,},
+            json={"request_id": request_id,
+                  "score": score, "api_key": api_key, },
         )
         if request_response.status_code != 200:
             if hasattr(request_response, "json"):
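Usage note (not part of the diff): the lower-level helper changed above can also be called directly, as sketched below. The return shape follows the code in prompts.py (the response dict exposes a "prompt_template" key); the prompt name and version number are hypothetical, and passing version=None still fetches the latest version.

    from promptlayer.utils import get_api_key, promptlayer_get_prompt

    api_key = get_api_key()
    response = promptlayer_get_prompt("my_prompt", api_key, version=3)
    print(response["prompt_template"])
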
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: promptlayer
-Version: 0.1.79
+Version: 0.1.80
 Summary: PromptLayer is a package to keep track of your GPT models training
 Home-page: https://www.promptlayer.com
 Author: Magniv
@@ -17,7 +17,7 @@ setup(
     author_email="hello@magniv.io",
     url="https://www.promptlayer.com",
     project_urls={"Documentation": "https://magniv.notion.site/Prompt-Layer-Docs-db0e6f50cacf4564a6d09824ba17a629",},
-    version="0.1.79",
+    version="0.1.80",
     py_modules=["promptlayer"],
     packages=find_packages(),
     install_requires=["requests", "openai", "langchain"],