promptlayer 0.5.6__tar.gz → 1.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of promptlayer might be problematic. See the package's registry page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: promptlayer
3
- Version: 0.5.6
3
+ Version: 1.0.1
4
4
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
5
5
  License: Apache-2.0
6
6
  Author: Magniv
@@ -12,6 +12,9 @@ Classifier: Programming Language :: Python :: 3.9
12
12
  Classifier: Programming Language :: Python :: 3.10
13
13
  Classifier: Programming Language :: Python :: 3.11
14
14
  Classifier: Programming Language :: Python :: 3.12
15
+ Requires-Dist: anthropic (>=0.25.8,<0.26.0)
16
+ Requires-Dist: anyio (>=4.3.0,<5.0.0)
17
+ Requires-Dist: openai (>=1.26.0,<2.0.0)
15
18
  Requires-Dist: requests (>=2.31.0,<3.0.0)
16
19
  Description-Content-Type: text/markdown
17
20
 
@@ -56,8 +59,9 @@ Once you have that all set up, [install PromptLayer using](https://pypi.org/proj
56
59
  In the Python file where you use OpenAI APIs, add the following. This allows us to keep track of your requests without needing any other code changes.
57
60
 
58
61
  ```python
59
- import promptlayer
60
- promptlayer.api_key = "<YOUR PromptLayer API KEY pl_xxxxxx>"
62
+ from promptlayer import PromptLayer
63
+
64
+ promptlayer = PromptLayer(api_key="<YOUR PromptLayer API KEY pl_xxxxxx>")
61
65
  openai = promptlayer.openai
62
66
  ```
63
67
 
@@ -39,8 +39,9 @@ Once you have that all set up, [install PromptLayer using](https://pypi.org/proj
39
39
  In the Python file where you use OpenAI APIs, add the following. This allows us to keep track of your requests without needing any other code changes.
40
40
 
41
41
  ```python
42
- import promptlayer
43
- promptlayer.api_key = "<YOUR PromptLayer API KEY pl_xxxxxx>"
42
+ from promptlayer import PromptLayer
43
+
44
+ promptlayer = PromptLayer(api_key="<YOUR PromptLayer API KEY pl_xxxxxx>")
44
45
  openai = promptlayer.openai
45
46
  ```
46
47
 
@@ -0,0 +1,48 @@
1
+ import os
2
+ from typing import Literal, Union
3
+
4
+ from promptlayer.groups import GroupManager
5
+ from promptlayer.promptlayer import PromptLayerBase
6
+ from promptlayer.templates import TemplateManager
7
+ from promptlayer.track import TrackManager
8
+
9
+
10
+ class PromptLayer:
11
+ def __init__(self, api_key: str = None):
12
+ if api_key is None:
13
+ api_key = os.environ.get("PROMPTLAYER_API_KEY")
14
+ if api_key is None:
15
+ raise ValueError(
16
+ "PromptLayer API key not provided. Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter."
17
+ )
18
+ self.api_key = api_key
19
+ self.templates = TemplateManager(api_key)
20
+ self.group = GroupManager(api_key)
21
+ self.track = TrackManager(api_key)
22
+
23
+ def __getattr__(
24
+ self,
25
+ name: Union[Literal["openai"], Literal["anthropic"], Literal["prompts"]],
26
+ ):
27
+ if name == "openai":
28
+ import openai as openai_module
29
+
30
+ openai = PromptLayerBase(
31
+ openai_module, function_name="openai", api_key=self.api_key
32
+ )
33
+ return openai
34
+ elif name == "anthropic":
35
+ import anthropic as anthropic_module
36
+
37
+ anthropic = PromptLayerBase(
38
+ anthropic_module,
39
+ function_name="anthropic",
40
+ provider_type="anthropic",
41
+ api_key=self.api_key,
42
+ )
43
+ return anthropic
44
+ else:
45
+ raise AttributeError(f"module {__name__} has no attribute {name}")
46
+
47
+
48
+ __all__ = ["PromptLayer"]
@@ -0,0 +1,12 @@
1
+ from promptlayer.groups.groups import create
2
+
3
+
4
+ class GroupManager:
5
+ def __init__(self, api_key: str):
6
+ self.api_key = api_key
7
+
8
+ def create(self):
9
+ return create(self.api_key)
10
+
11
+
12
+ __all__ = ["GroupManager"]
@@ -1,6 +1,6 @@
1
1
  from promptlayer.utils import promptlayer_create_group
2
2
 
3
3
 
4
- def create():
4
+ def create(api_key: str = None):
5
5
  """Create a new group."""
6
- return promptlayer_create_group()
6
+ return promptlayer_create_group(api_key)
@@ -2,26 +2,27 @@ import datetime
2
2
  import inspect
3
3
  import re
4
4
 
5
- from promptlayer.utils import async_wrapper, get_api_key, promptlayer_api_handler
5
+ from promptlayer.utils import async_wrapper, promptlayer_api_handler
6
6
 
7
7
 
8
8
  class PromptLayerBase(object):
9
- __slots__ = ["_obj", "__weakref__", "_function_name", "_provider_type"]
9
+ __slots__ = ["_obj", "__weakref__", "_function_name", "_provider_type", "_api_key"]
10
10
 
11
- def __init__(self, obj, function_name="", provider_type="openai"):
11
+ def __init__(self, obj, function_name="", provider_type="openai", api_key=None):
12
12
  object.__setattr__(self, "_obj", obj)
13
13
  object.__setattr__(self, "_function_name", function_name)
14
14
  object.__setattr__(self, "_provider_type", provider_type)
15
+ object.__setattr__(self, "_api_key", api_key)
15
16
 
16
17
  def __getattr__(self, name):
17
18
  attr = getattr(object.__getattribute__(self, "_obj"), name)
18
19
  if (
19
20
  name != "count_tokens" # fix for anthropic count_tokens
20
21
  and not re.match(
21
- "<class 'anthropic\..*Error'>", str(attr)
22
+ r"<class 'anthropic\..*Error'>", str(attr)
22
23
  ) # fix for anthropic errors
23
24
  and not re.match(
24
- "<class 'openai\..*Error'>", str(attr)
25
+ r"<class 'openai\..*Error'>", str(attr)
25
26
  ) # fix for openai errors
26
27
  and (
27
28
  inspect.isclass(attr)
@@ -34,13 +35,14 @@ class PromptLayerBase(object):
34
35
  or str(type(attr)) == "<class 'anthropic.resources.messages.Messages'>"
35
36
  or str(type(attr))
36
37
  == "<class 'anthropic.resources.messages.AsyncMessages'>"
37
- or re.match("<class 'openai\.resources.*'>", str(type(attr)))
38
+ or re.match(r"<class 'openai\.resources.*'>", str(type(attr)))
38
39
  )
39
40
  ):
40
41
  return PromptLayerBase(
41
42
  attr,
42
43
  function_name=f'{object.__getattribute__(self, "_function_name")}.{name}',
43
44
  provider_type=object.__getattribute__(self, "_provider_type"),
45
+ api_key=object.__getattribute__(self, "_api_key"),
44
46
  )
45
47
  return attr
46
48
 
@@ -62,6 +64,7 @@ class PromptLayerBase(object):
62
64
  function_object(*args, **kwargs),
63
65
  function_name=object.__getattribute__(self, "_function_name"),
64
66
  provider_type=object.__getattribute__(self, "_provider_type"),
67
+ api_key=object.__getattribute__(self, "_api_key"),
65
68
  )
66
69
  function_response = function_object(*args, **kwargs)
67
70
  if inspect.iscoroutinefunction(function_object) or inspect.iscoroutine(
@@ -74,6 +77,7 @@ class PromptLayerBase(object):
74
77
  object.__getattribute__(self, "_function_name"),
75
78
  object.__getattribute__(self, "_provider_type"),
76
79
  tags,
80
+ api_key=object.__getattribute__(self, "_api_key"),
77
81
  *args,
78
82
  **kwargs,
79
83
  )
@@ -87,6 +91,6 @@ class PromptLayerBase(object):
87
91
  function_response,
88
92
  request_start_time,
89
93
  request_end_time,
90
- get_api_key(),
94
+ object.__getattribute__(self, "_api_key"),
91
95
  return_pl_id=return_pl_id,
92
96
  )
@@ -0,0 +1,22 @@
1
+ from typing import Union
2
+
3
+ from promptlayer.types.prompt_template import GetPromptTemplate, PublishPromptTemplate
4
+ from promptlayer.utils import (
5
+ get_all_prompt_templates,
6
+ get_prompt_template,
7
+ publish_prompt_template,
8
+ )
9
+
10
+
11
+ class TemplateManager:
12
+ def __init__(self, api_key: str):
13
+ self.api_key = api_key
14
+
15
+ def get(self, prompt_name: str, params: Union[GetPromptTemplate, None] = None):
16
+ return get_prompt_template(prompt_name, params, self.api_key)
17
+
18
+ def publish(self, body: PublishPromptTemplate):
19
+ return publish_prompt_template(body, self.api_key)
20
+
21
+ def all(self, page: int = 1, per_page: int = 30):
22
+ return get_all_prompt_templates(page, per_page, self.api_key)
@@ -0,0 +1,33 @@
1
+ from promptlayer.track.track import group
2
+ from promptlayer.track.track import metadata as metadata_
3
+ from promptlayer.track.track import prompt
4
+ from promptlayer.track.track import score as score_
5
+
6
+
7
+ class TrackManager:
8
+ def __init__(self, api_key: str):
9
+ self.api_key = api_key
10
+
11
+ def group(self, request_id, group_id):
12
+ return group(request_id, group_id, self.api_key)
13
+
14
+ def metadata(self, request_id, metadata):
15
+ return metadata_(request_id, metadata, self.api_key)
16
+
17
+ def prompt(
18
+ self, request_id, prompt_name, prompt_input_variables, version=None, label=None
19
+ ):
20
+ return prompt(
21
+ request_id,
22
+ prompt_name,
23
+ prompt_input_variables,
24
+ version,
25
+ label,
26
+ self.api_key,
27
+ )
28
+
29
+ def score(self, request_id, score, score_name=None):
30
+ return score_(request_id, score, score_name, self.api_key)
31
+
32
+
33
+ __all__ = ["TrackManager"]
@@ -1,5 +1,4 @@
1
1
  from promptlayer.utils import (
2
- get_api_key,
3
2
  promptlayer_track_group,
4
3
  promptlayer_track_metadata,
5
4
  promptlayer_track_prompt,
@@ -7,15 +6,22 @@ from promptlayer.utils import (
7
6
  )
8
7
 
9
8
 
10
- def prompt(request_id, prompt_name, prompt_input_variables, version=None, label=None):
9
+ def prompt(
10
+ request_id,
11
+ prompt_name,
12
+ prompt_input_variables,
13
+ version=None,
14
+ label=None,
15
+ api_key: str = None,
16
+ ):
11
17
  if not isinstance(prompt_input_variables, dict):
12
18
  raise Exception("Please provide a dictionary of input variables.")
13
19
  return promptlayer_track_prompt(
14
- request_id, prompt_name, prompt_input_variables, get_api_key(), version, label
20
+ request_id, prompt_name, prompt_input_variables, api_key, version, label
15
21
  )
16
22
 
17
23
 
18
- def metadata(request_id, metadata):
24
+ def metadata(request_id, metadata, api_key: str = None):
19
25
  if not isinstance(metadata, dict):
20
26
  raise Exception("Please provide a dictionary of metadata.")
21
27
  for key, value in metadata.items():
@@ -23,18 +29,18 @@ def metadata(request_id, metadata):
23
29
  raise Exception(
24
30
  "Please provide a dictionary of metadata with key value pair of strings."
25
31
  )
26
- return promptlayer_track_metadata(request_id, metadata, get_api_key())
32
+ return promptlayer_track_metadata(request_id, metadata, api_key)
27
33
 
28
34
 
29
- def score(request_id, score, score_name=None):
35
+ def score(request_id, score, score_name=None, api_key: str = None):
30
36
  if not isinstance(score, int):
31
37
  raise Exception("Please provide a int score.")
32
38
  if not isinstance(score_name, str) and score_name is not None:
33
39
  raise Exception("Please provide a string as score name.")
34
40
  if score < 0 or score > 100:
35
41
  raise Exception("Please provide a score between 0 and 100.")
36
- return promptlayer_track_score(request_id, score, score_name, get_api_key())
42
+ return promptlayer_track_score(request_id, score, score_name, api_key)
37
43
 
38
44
 
39
- def group(request_id, group_id):
40
- return promptlayer_track_group(request_id, group_id)
45
+ def group(request_id, group_id, api_key: str = None):
46
+ return promptlayer_track_group(request_id, group_id, api_key)
@@ -12,7 +12,6 @@ from typing import List, Union
12
12
 
13
13
  import requests
14
14
 
15
- import promptlayer
16
15
  from promptlayer.types.prompt_template import (
17
16
  GetPromptTemplate,
18
17
  GetPromptTemplateResponse,
@@ -26,16 +25,6 @@ URL_API_PROMPTLAYER = os.environ.setdefault(
26
25
  )
27
26
 
28
27
 
29
- def get_api_key():
30
- # raise an error if the api key is not set
31
- if promptlayer.api_key is None:
32
- raise Exception(
33
- "Please set your PROMPTLAYER_API_KEY environment variable or set API KEY in code using 'promptlayer.api_key = <your_api_key>' "
34
- )
35
- else:
36
- return promptlayer.api_key
37
-
38
-
39
28
  def promptlayer_api_handler(
40
29
  function_name,
41
30
  provider_type,
@@ -66,6 +55,7 @@ def promptlayer_api_handler(
66
55
  "request_end_time": request_end_time,
67
56
  "return_pl_id": return_pl_id,
68
57
  },
58
+ api_key,
69
59
  )
70
60
  else:
71
61
  request_id = promptlayer_api_request(
@@ -108,7 +98,7 @@ async def promptlayer_api_handler_async(
108
98
  response,
109
99
  request_start_time,
110
100
  request_end_time,
111
- get_api_key(),
101
+ api_key,
112
102
  return_pl_id=return_pl_id,
113
103
  )
114
104
 
@@ -345,10 +335,11 @@ def promptlayer_track_score(request_id, score, score_name, api_key):
345
335
 
346
336
 
347
337
  class GeneratorProxy:
348
- def __init__(self, generator, api_request_arguments):
338
+ def __init__(self, generator, api_request_arguments, api_key):
349
339
  self.generator = generator
350
340
  self.results = []
351
341
  self.api_request_arugments = api_request_arguments
342
+ self.api_key = api_key
352
343
 
353
344
  def __iter__(self):
354
345
  return self
@@ -362,6 +353,7 @@ class GeneratorProxy:
362
353
  return GeneratorProxy(
363
354
  await self.generator._AsyncMessageStreamManager__api_request,
364
355
  api_request_arguments,
356
+ self.api_key,
365
357
  )
366
358
 
367
359
  async def __aexit__(self, exc_type, exc_val, exc_tb):
@@ -378,7 +370,7 @@ class GeneratorProxy:
378
370
  def __getattr__(self, name):
379
371
  if name == "text_stream": # anthropic async stream
380
372
  return GeneratorProxy(
381
- self.generator.text_stream, self.api_request_arugments
373
+ self.generator.text_stream, self.api_request_arugments, self.api_key
382
374
  )
383
375
  return getattr(self.generator, name)
384
376
 
@@ -408,7 +400,7 @@ class GeneratorProxy:
408
400
  self.cleaned_result(),
409
401
  self.api_request_arugments["request_start_time"],
410
402
  self.api_request_arugments["request_end_time"],
411
- get_api_key(),
403
+ self.api_key,
412
404
  return_pl_id=self.api_request_arugments["return_pl_id"],
413
405
  )
414
406
  if self.api_request_arugments["return_pl_id"]:
@@ -517,6 +509,7 @@ async def async_wrapper(
517
509
  function_name,
518
510
  provider_type,
519
511
  tags,
512
+ api_key: str = None,
520
513
  *args,
521
514
  **kwargs,
522
515
  ):
@@ -531,17 +524,17 @@ async def async_wrapper(
531
524
  response,
532
525
  request_start_time,
533
526
  request_end_time,
534
- get_api_key(),
527
+ api_key,
535
528
  return_pl_id=return_pl_id,
536
529
  )
537
530
 
538
531
 
539
- def promptlayer_create_group():
532
+ def promptlayer_create_group(api_key: str = None):
540
533
  try:
541
534
  request_response = requests.post(
542
535
  f"{URL_API_PROMPTLAYER}/create-group",
543
536
  json={
544
- "api_key": get_api_key(),
537
+ "api_key": api_key,
545
538
  },
546
539
  )
547
540
  if request_response.status_code != 200:
@@ -558,12 +551,12 @@ def promptlayer_create_group():
558
551
  return request_response.json()["id"]
559
552
 
560
553
 
561
- def promptlayer_track_group(request_id, group_id):
554
+ def promptlayer_track_group(request_id, group_id, api_key: str = None):
562
555
  try:
563
556
  request_response = requests.post(
564
557
  f"{URL_API_PROMPTLAYER}/track-group",
565
558
  json={
566
- "api_key": get_api_key(),
559
+ "api_key": api_key,
567
560
  "request_id": request_id,
568
561
  "group_id": group_id,
569
562
  },
@@ -579,18 +572,19 @@ def promptlayer_track_group(request_id, group_id):
579
572
  raise Exception(
580
573
  f"PromptLayer had the following error while tracking your group: {e}"
581
574
  )
575
+ return True
582
576
 
583
577
 
584
578
  def get_prompt_template(
585
- prompt_name: str, params: Union[GetPromptTemplate, None] = None
579
+ prompt_name: str, params: Union[GetPromptTemplate, None] = None, api_key: str = None
586
580
  ) -> GetPromptTemplateResponse:
587
581
  try:
588
- json_body = {"api_key": get_api_key()}
582
+ json_body = {"api_key": api_key}
589
583
  if params:
590
584
  json_body = {**json_body, **params}
591
585
  response = requests.post(
592
586
  f"{URL_API_PROMPTLAYER}/prompt-templates/{prompt_name}",
593
- headers={"X-API-KEY": get_api_key()},
587
+ headers={"X-API-KEY": api_key},
594
588
  json=json_body,
595
589
  )
596
590
  if response.status_code != 200:
@@ -606,11 +600,12 @@ def get_prompt_template(
606
600
 
607
601
  def publish_prompt_template(
608
602
  body: PublishPromptTemplate,
603
+ api_key: str = None,
609
604
  ) -> PublishPromptTemplateResponse:
610
605
  try:
611
606
  response = requests.post(
612
607
  f"{URL_API_PROMPTLAYER}/rest/prompt-templates",
613
- headers={"X-API-KEY": get_api_key()},
608
+ headers={"X-API-KEY": api_key},
614
609
  json={
615
610
  "prompt_template": {**body},
616
611
  "prompt_version": {**body},
@@ -629,12 +624,12 @@ def publish_prompt_template(
629
624
 
630
625
 
631
626
  def get_all_prompt_templates(
632
- page: int = 1, per_page: int = 30
627
+ page: int = 1, per_page: int = 30, api_key: str = None
633
628
  ) -> List[ListPromptTemplateResponse]:
634
629
  try:
635
630
  response = requests.get(
636
631
  f"{URL_API_PROMPTLAYER}/prompt-templates",
637
- headers={"X-API-KEY": get_api_key()},
632
+ headers={"X-API-KEY": api_key},
638
633
  params={"page": page, "per_page": per_page},
639
634
  )
640
635
  if response.status_code != 200:
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "promptlayer"
3
- version = "0.5.6"
3
+ version = "1.0.1"
4
4
  description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
5
5
  authors = ["Magniv <hello@magniv.io>"]
6
6
  license = "Apache-2.0"
@@ -9,11 +9,15 @@ readme = "README.md"
9
9
  [tool.poetry.dependencies]
10
10
  python = ">=3.8.1,<4.0"
11
11
  requests = "^2.31.0"
12
+ openai = "^1.26.0"
13
+ anyio = "^4.3.0"
14
+ anthropic = "^0.25.8"
12
15
 
13
16
  [tool.poetry.group.dev.dependencies]
14
- openai = "^0.27.8"
15
17
  langchain = "^0.0.260"
16
18
  behave = "^1.2.6"
19
+ pytest = "^8.2.0"
20
+ pytest-asyncio = "^0.23.6"
17
21
 
18
22
  [build-system]
19
23
  requires = ["poetry-core"]
@@ -1,44 +0,0 @@
1
- import os
2
- from typing import Literal, Union
3
-
4
- from promptlayer.promptlayer import PromptLayerBase
5
-
6
- from . import templates
7
-
8
- api_key = os.environ.get("PROMPTLAYER_API_KEY")
9
-
10
-
11
- def __getattr__(
12
- name: Union[Literal["openai"], Literal["anthropic"], Literal["prompts"]],
13
- ):
14
- if name == "openai":
15
- import openai as openai_module
16
-
17
- openai = PromptLayerBase(openai_module, function_name="openai")
18
- return openai
19
- elif name == "anthropic":
20
- import anthropic as anthropic_module
21
-
22
- anthropic = PromptLayerBase(
23
- anthropic_module,
24
- function_name="anthropic",
25
- provider_type="anthropic",
26
- )
27
- return anthropic
28
- elif name == "prompts":
29
- import promptlayer.prompts as prompts
30
-
31
- return prompts
32
- elif name == "group":
33
- import promptlayer.groups as group
34
-
35
- return group
36
- elif name == "track":
37
- import promptlayer.track as track
38
-
39
- return track
40
- else:
41
- raise AttributeError(f"module {__name__} has no attribute {name}")
42
-
43
-
44
- __all__ = ["api_key", "openai", "anthropic", "templates"]
@@ -1,3 +0,0 @@
1
- from promptlayer.groups.groups import create
2
-
3
- __all__ = ["create"]
@@ -1,5 +0,0 @@
1
- from promptlayer.prompts.prompts import all
2
- from promptlayer.prompts.prompts import get_prompt as get
3
- from promptlayer.prompts.prompts import publish_prompt as publish
4
-
5
- __all__ = ["get", "all", "publish"]
@@ -1,61 +0,0 @@
1
- import json
2
-
3
- from langchain import prompts
4
-
5
- CHAT_PROMPTLAYER_LANGCHAIN = "chat_promptlayer_langchain"
6
- ROLE_SYSTEM = "system"
7
- ROLE_ASSISTANT = "assistant"
8
- ROLE_USER = "user"
9
-
10
-
11
- def to_dict(prompt_template: prompts.ChatPromptTemplate):
12
- prompt_dict = json.loads(prompt_template.json())
13
- prompt_dict["_type"] = CHAT_PROMPTLAYER_LANGCHAIN
14
- for index, message in enumerate(prompt_template.messages):
15
- if isinstance(message, prompts.SystemMessagePromptTemplate):
16
- prompt_dict["messages"][index]["role"] = ROLE_SYSTEM
17
- elif isinstance(message, prompts.AIMessagePromptTemplate):
18
- prompt_dict["messages"][index]["role"] = ROLE_ASSISTANT
19
- elif isinstance(message, prompts.HumanMessagePromptTemplate):
20
- prompt_dict["messages"][index]["role"] = ROLE_USER
21
-
22
- return prompt_dict
23
-
24
-
25
- def to_prompt(prompt_dict: dict):
26
- try:
27
- messages = []
28
- for message in prompt_dict.get("messages", []):
29
- role = message.get("role")
30
- prompt = message.get("prompt", {})
31
- if not prompt:
32
- continue
33
- prompt_template = prompts.PromptTemplate(
34
- **{k: v for k, v in prompt.items() if k != "_type"}
35
- )
36
- message_fields = {
37
- k: v for k, v in message.items() if k != "role" and k != "prompt"
38
- }
39
- if role == ROLE_SYSTEM:
40
- message = prompts.SystemMessagePromptTemplate(
41
- prompt=prompt_template, **message_fields
42
- )
43
- elif role == ROLE_ASSISTANT:
44
- message = prompts.AIMessagePromptTemplate(
45
- prompt=prompt_template, **message_fields
46
- )
47
- elif role == ROLE_USER:
48
- message = prompts.HumanMessagePromptTemplate(
49
- prompt=prompt_template, **message_fields
50
- )
51
- messages.append(message)
52
- prompt_template = prompts.ChatPromptTemplate(
53
- messages=messages,
54
- input_variables=prompt_dict.get("input_variables", []),
55
- output_parser=prompt_dict.get("output_parser", None),
56
- partial_variables=prompt_dict.get("partial_variables", {}),
57
- )
58
- return prompt_template
59
- except Exception as e:
60
- print("Unknown error occurred. ", e)
61
- return None
@@ -1,91 +0,0 @@
1
- from promptlayer.prompts.chat import CHAT_PROMPTLAYER_LANGCHAIN, to_dict, to_prompt
2
- from promptlayer.resources.prompt import Prompt
3
- from promptlayer.utils import (
4
- get_api_key,
5
- promptlayer_get_prompt,
6
- promptlayer_publish_prompt,
7
- )
8
-
9
-
10
- def get_prompt(
11
- prompt_name,
12
- langchain=False,
13
- version: int = None,
14
- label: str = None,
15
- include_metadata: bool = False,
16
- ):
17
- """
18
- Get a prompt template from PromptLayer.
19
- prompt_name: the prompt name
20
- langchain: Enable this for langchain compatible prompt
21
- version: The version of the prompt to get. If not specified, the latest version will be returned.
22
- label: The specific label of a prompt you want to get. Setting this will supercede version
23
- include_metadata: Whether or not to include the metadata of the prompt in the response.
24
- """
25
- api_key = get_api_key()
26
- prompt = promptlayer_get_prompt(prompt_name, api_key, version, label)
27
- if langchain:
28
- try:
29
- from langchain.prompts.loading import load_prompt_from_config
30
- except ImportError:
31
- raise Exception(
32
- "Please install langchain to use langchain compatible prompts."
33
- )
34
- if "_type" not in prompt["prompt_template"]:
35
- prompt["prompt_template"]["_type"] = "prompt"
36
- if prompt["prompt_template"]["_type"] == CHAT_PROMPTLAYER_LANGCHAIN:
37
- prompt_template = to_prompt(prompt["prompt_template"])
38
- else:
39
- prompt_template = load_prompt_from_config(prompt["prompt_template"])
40
- else:
41
- prompt_template = prompt["prompt_template"]
42
- if include_metadata:
43
- return prompt_template, prompt["metadata"]
44
- return prompt_template
45
-
46
-
47
- def publish_prompt(
48
- prompt_name, tags=[], commit_message=None, prompt_template=None, metadata=None
49
- ):
50
- try:
51
- from langchain.prompts import ChatPromptTemplate, PromptTemplate
52
- except ImportError:
53
- raise Exception("Please install langchain to use langchain compatible prompts.")
54
-
55
- api_key = get_api_key()
56
- if commit_message is not None and len(commit_message) > 72:
57
- raise Exception("Commit message must be less than 72 characters.")
58
- if isinstance(prompt_template, ChatPromptTemplate):
59
- prompt_template = to_dict(prompt_template)
60
- elif isinstance(prompt_template, PromptTemplate):
61
- prompt_template = prompt_template.dict()
62
- elif not isinstance(prompt_template, dict):
63
- raise Exception(
64
- "Please provide either a JSON prompt template or a langchain prompt template."
65
- )
66
- promptlayer_publish_prompt(
67
- prompt_name, prompt_template, commit_message, tags, api_key, metadata
68
- )
69
-
70
-
71
- def all(page: int = 1, per_page: int = 30):
72
- """
73
- List all prompts on PromptLayer.
74
-
75
- Parameters:
76
- ----------
77
- page: int
78
- The page of prompts to get.
79
- per_page: int
80
- The number of prompts to get per page.
81
-
82
- Returns:
83
- -------
84
- list of prompts
85
- """
86
- response = Prompt.list({"page": page, "per_page": per_page})
87
- if not response.get("success", True):
88
- raise Exception(
89
- f"Failed to get prompts from PromptLayer. {response.get('message')}"
90
- )
91
- return response["items"]
@@ -1,3 +0,0 @@
1
- from promptlayer.resources import prompt
2
-
3
- __all__ = ["prompt"]
@@ -1,23 +0,0 @@
1
- import os
2
-
3
- import requests
4
-
5
- from promptlayer.utils import get_api_key
6
-
7
-
8
- class Base:
9
- API_KEY = get_api_key()
10
- BASE_URL = (
11
- os.environ.get("URL_API_PROMPTLAYER", "https://api.promptlayer.com") + "/rest"
12
- )
13
-
14
- @classmethod
15
- def list(cls, params={}):
16
- """
17
- List all resources
18
- """
19
- return requests.get(
20
- cls.BASE_URL,
21
- headers={"X-API-KEY": cls.API_KEY},
22
- params=params,
23
- ).json()
@@ -1,12 +0,0 @@
1
- from .base import Base
2
-
3
-
4
- class Prompt(Base):
5
- BASE_URL = Base.BASE_URL + "/prompts"
6
-
7
- @classmethod
8
- def list(cls, params={}):
9
- """
10
- List all prompts
11
- """
12
- return super().list(params)
@@ -1,20 +0,0 @@
1
- from typing import Union
2
-
3
- from promptlayer.types.prompt_template import GetPromptTemplate, PublishPromptTemplate
4
- from promptlayer.utils import (
5
- get_all_prompt_templates,
6
- get_prompt_template,
7
- publish_prompt_template,
8
- )
9
-
10
-
11
- def get(prompt_name: str, params: Union[GetPromptTemplate, None] = None):
12
- return get_prompt_template(prompt_name, params)
13
-
14
-
15
- def publish(body: PublishPromptTemplate):
16
- return publish_prompt_template(body)
17
-
18
-
19
- def all(page: int = 1, per_page: int = 30):
20
- return get_all_prompt_templates(page, per_page)
@@ -1,3 +0,0 @@
1
- from promptlayer.track.track import group, metadata, prompt, score
2
-
3
- __all__ = ["group", "metadata", "prompt", "score"]
File without changes