promptlayer-1.0.25.tar.gz → promptlayer-1.0.27.tar.gz (source distribution diff)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of promptlayer has been flagged as potentially problematic; review the changes below and the registry's advisory for more details before upgrading.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: promptlayer
3
- Version: 1.0.25
3
+ Version: 1.0.27
4
4
  Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
5
5
  License: Apache-2.0
6
6
  Author: Magniv
@@ -14,6 +14,8 @@ Classifier: Programming Language :: Python :: 3.11
14
14
  Classifier: Programming Language :: Python :: 3.12
15
15
  Classifier: Programming Language :: Python :: 3.13
16
16
  Requires-Dist: ably (>=2.0.6,<3.0.0)
17
+ Requires-Dist: aiohttp (>=3.10.10,<4.0.0)
18
+ Requires-Dist: httpx (>=0.27.2,<0.28.0)
17
19
  Requires-Dist: opentelemetry-api (>=1.26.0,<2.0.0)
18
20
  Requires-Dist: opentelemetry-sdk (>=1.26.0,<2.0.0)
19
21
  Requires-Dist: requests (>=2.31.0,<3.0.0)
@@ -0,0 +1,4 @@
1
+ from .promptlayer import AsyncPromptLayer, PromptLayer
2
+
3
+ __version__ = "1.0.27"
4
+ __all__ = ["PromptLayer", "AsyncPromptLayer", "__version__"]
@@ -0,0 +1,20 @@
1
+ from promptlayer.groups.groups import acreate, create
2
+
3
+
4
+ class GroupManager:
5
+ def __init__(self, api_key: str):
6
+ self.api_key = api_key
7
+
8
+ def create(self):
9
+ return create(self.api_key)
10
+
11
+
12
+ class AsyncGroupManager:
13
+ def __init__(self, api_key: str):
14
+ self.api_key = api_key
15
+
16
+ async def create(self) -> str:
17
+ return await acreate(self.api_key)
18
+
19
+
20
+ __all__ = ["GroupManager", "AsyncGroupManager"]
@@ -0,0 +1,11 @@
1
+ from promptlayer.utils import apromptlayer_create_group, promptlayer_create_group
2
+
3
+
4
+ def create(api_key: str = None):
5
+ """Create a new group."""
6
+ return promptlayer_create_group(api_key)
7
+
8
+
9
+ async def acreate(api_key: str = None) -> str:
10
+ """Asynchronously create a new group."""
11
+ return await apromptlayer_create_group(api_key)
@@ -10,16 +10,17 @@ from opentelemetry.sdk.trace import TracerProvider
10
10
  from opentelemetry.sdk.trace.export import BatchSpanProcessor
11
11
  from opentelemetry.semconv.resource import ResourceAttributes
12
12
 
13
- from promptlayer.groups import GroupManager
13
+ from promptlayer.groups import AsyncGroupManager, GroupManager
14
14
  from promptlayer.promptlayer_base import PromptLayerBase
15
15
  from promptlayer.span_exporter import PromptLayerSpanExporter
16
- from promptlayer.templates import TemplateManager
17
- from promptlayer.track import TrackManager
16
+ from promptlayer.templates import AsyncTemplateManager, TemplateManager
17
+ from promptlayer.track import AsyncTrackManager, TrackManager
18
18
  from promptlayer.types.prompt_template import PromptTemplate
19
19
  from promptlayer.utils import (
20
20
  anthropic_request,
21
21
  anthropic_stream_completion,
22
22
  anthropic_stream_message,
23
+ autil_log_request,
23
24
  azure_openai_request,
24
25
  openai_request,
25
26
  openai_stream_chat,
@@ -481,3 +482,65 @@ class PromptLayer:
481
482
  function_name=function_name,
482
483
  score=score,
483
484
  )
485
+
486
+
487
+ class AsyncPromptLayer:
488
+ def __init__(
489
+ self,
490
+ api_key: str = None,
491
+ ):
492
+ if api_key is None:
493
+ api_key = os.environ.get("PROMPTLAYER_API_KEY")
494
+
495
+ if api_key is None:
496
+ raise ValueError(
497
+ "PromptLayer API key not provided. "
498
+ "Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter."
499
+ )
500
+
501
+ self.api_key = api_key
502
+ self.templates = AsyncTemplateManager(api_key)
503
+ self.group = AsyncGroupManager(api_key)
504
+ self.track = AsyncTrackManager(api_key)
505
+
506
+ async def log_request(
507
+ self,
508
+ *,
509
+ provider: str,
510
+ model: str,
511
+ input: PromptTemplate,
512
+ output: PromptTemplate,
513
+ request_start_time: float,
514
+ request_end_time: float,
515
+ parameters: Dict[str, Any] = {},
516
+ tags: List[str] = [],
517
+ metadata: Dict[str, str] = {},
518
+ prompt_name: Union[str, None] = None,
519
+ prompt_version_number: Union[int, None] = None,
520
+ prompt_input_variables: Dict[str, Any] = {},
521
+ input_tokens: int = 0,
522
+ output_tokens: int = 0,
523
+ price: float = 0.0,
524
+ function_name: str = "",
525
+ score: int = 0,
526
+ ):
527
+ return await autil_log_request(
528
+ self.api_key,
529
+ provider=provider,
530
+ model=model,
531
+ input=input,
532
+ output=output,
533
+ request_start_time=request_start_time,
534
+ request_end_time=request_end_time,
535
+ parameters=parameters,
536
+ tags=tags,
537
+ metadata=metadata,
538
+ prompt_name=prompt_name,
539
+ prompt_version_number=prompt_version_number,
540
+ prompt_input_variables=prompt_input_variables,
541
+ input_tokens=input_tokens,
542
+ output_tokens=output_tokens,
543
+ price=price,
544
+ function_name=function_name,
545
+ score=score,
546
+ )
@@ -2,6 +2,8 @@ from typing import Union
2
2
 
3
3
  from promptlayer.types.prompt_template import GetPromptTemplate, PublishPromptTemplate
4
4
  from promptlayer.utils import (
5
+ aget_all_prompt_templates,
6
+ aget_prompt_template,
5
7
  get_all_prompt_templates,
6
8
  get_prompt_template,
7
9
  publish_prompt_template,
@@ -20,3 +22,16 @@ class TemplateManager:
20
22
 
21
23
  def all(self, page: int = 1, per_page: int = 30):
22
24
  return get_all_prompt_templates(page, per_page, self.api_key)
25
+
26
+
27
+ class AsyncTemplateManager:
28
+ def __init__(self, api_key: str):
29
+ self.api_key = api_key
30
+
31
+ async def get(
32
+ self, prompt_name: str, params: Union[GetPromptTemplate, None] = None
33
+ ):
34
+ return await aget_prompt_template(prompt_name, params, self.api_key)
35
+
36
+ async def all(self, page: int = 1, per_page: int = 30):
37
+ return await aget_all_prompt_templates(page, per_page, self.api_key)
@@ -1,4 +1,4 @@
1
- from promptlayer.track.track import group
1
+ from promptlayer.track.track import agroup, ametadata, aprompt, ascore, group
2
2
  from promptlayer.track.track import metadata as metadata_
3
3
  from promptlayer.track.track import prompt
4
4
  from promptlayer.track.track import score as score_
@@ -30,4 +30,30 @@ class TrackManager:
30
30
  return score_(request_id, score, score_name, self.api_key)
31
31
 
32
32
 
33
+ class AsyncTrackManager:
34
+ def __init__(self, api_key: str):
35
+ self.api_key = api_key
36
+
37
+ async def group(self, request_id, group_id):
38
+ return await agroup(request_id, group_id, self.api_key)
39
+
40
+ async def metadata(self, request_id, metadata):
41
+ return await ametadata(request_id, metadata, self.api_key)
42
+
43
+ async def prompt(
44
+ self, request_id, prompt_name, prompt_input_variables, version=None, label=None
45
+ ):
46
+ return await aprompt(
47
+ request_id,
48
+ prompt_name,
49
+ prompt_input_variables,
50
+ version,
51
+ label,
52
+ self.api_key,
53
+ )
54
+
55
+ async def score(self, request_id, score, score_name=None):
56
+ return await ascore(request_id, score, score_name, self.api_key)
57
+
58
+
33
59
  __all__ = ["TrackManager"]
@@ -0,0 +1,90 @@
1
+ from promptlayer.utils import (
2
+ apromptlayer_track_group,
3
+ apromptlayer_track_metadata,
4
+ apromptlayer_track_prompt,
5
+ apromptlayer_track_score,
6
+ promptlayer_track_group,
7
+ promptlayer_track_metadata,
8
+ promptlayer_track_prompt,
9
+ promptlayer_track_score,
10
+ )
11
+
12
+
13
+ def prompt(
14
+ request_id,
15
+ prompt_name,
16
+ prompt_input_variables,
17
+ version=None,
18
+ label=None,
19
+ api_key: str = None,
20
+ ):
21
+ if not isinstance(prompt_input_variables, dict):
22
+ raise Exception("Please provide a dictionary of input variables.")
23
+ return promptlayer_track_prompt(
24
+ request_id, prompt_name, prompt_input_variables, api_key, version, label
25
+ )
26
+
27
+
28
+ def metadata(request_id, metadata, api_key: str = None):
29
+ if not isinstance(metadata, dict):
30
+ raise Exception("Please provide a dictionary of metadata.")
31
+ for key, value in metadata.items():
32
+ if not isinstance(key, str) or not isinstance(value, str):
33
+ raise Exception(
34
+ "Please provide a dictionary of metadata with key value pair of strings."
35
+ )
36
+ return promptlayer_track_metadata(request_id, metadata, api_key)
37
+
38
+
39
+ def score(request_id, score, score_name=None, api_key: str = None):
40
+ if not isinstance(score, int):
41
+ raise Exception("Please provide a int score.")
42
+ if not isinstance(score_name, str) and score_name is not None:
43
+ raise Exception("Please provide a string as score name.")
44
+ if score < 0 or score > 100:
45
+ raise Exception("Please provide a score between 0 and 100.")
46
+ return promptlayer_track_score(request_id, score, score_name, api_key)
47
+
48
+
49
+ def group(request_id, group_id, api_key: str = None):
50
+ return promptlayer_track_group(request_id, group_id, api_key)
51
+
52
+
53
+ async def aprompt(
54
+ request_id,
55
+ prompt_name,
56
+ prompt_input_variables,
57
+ version=None,
58
+ label=None,
59
+ api_key: str = None,
60
+ ):
61
+ if not isinstance(prompt_input_variables, dict):
62
+ raise Exception("Please provide a dictionary of input variables.")
63
+ return await apromptlayer_track_prompt(
64
+ request_id, prompt_name, prompt_input_variables, api_key, version, label
65
+ )
66
+
67
+
68
+ async def ametadata(request_id, metadata, api_key: str = None):
69
+ if not isinstance(metadata, dict):
70
+ raise Exception("Please provide a dictionary of metadata.")
71
+ for key, value in metadata.items():
72
+ if not isinstance(key, str) or not isinstance(value, str):
73
+ raise Exception(
74
+ "Please provide a dictionary of metadata with key-value pairs of strings."
75
+ )
76
+ return await apromptlayer_track_metadata(request_id, metadata, api_key)
77
+
78
+
79
+ async def ascore(request_id, score, score_name=None, api_key: str = None):
80
+ if not isinstance(score, int):
81
+ raise Exception("Please provide an integer score.")
82
+ if not isinstance(score_name, str) and score_name is not None:
83
+ raise Exception("Please provide a string as score name.")
84
+ if score < 0 or score > 100:
85
+ raise Exception("Please provide a score between 0 and 100.")
86
+ return await apromptlayer_track_score(request_id, score, score_name, api_key)
87
+
88
+
89
+ async def agroup(request_id, group_id, api_key: str = None):
90
+ return await apromptlayer_track_group(request_id, group_id, api_key)
@@ -11,6 +11,7 @@ from enum import Enum
11
11
  from typing import Any, Callable, Dict, Generator, List, Optional, Union
12
12
 
13
13
  import aiohttp
14
+ import httpx
14
15
  import requests
15
16
  from ably import AblyRealtime
16
17
  from opentelemetry import context, trace
@@ -429,6 +430,42 @@ def promptlayer_track_prompt(
429
430
  return True
430
431
 
431
432
 
433
+ async def apromptlayer_track_prompt(
434
+ request_id: str,
435
+ prompt_name: str,
436
+ input_variables: Dict[str, Any],
437
+ api_key: Optional[str] = None,
438
+ version: Optional[int] = None,
439
+ label: Optional[str] = None,
440
+ ) -> bool:
441
+ url = f"{URL_API_PROMPTLAYER}/library-track-prompt"
442
+ payload = {
443
+ "request_id": request_id,
444
+ "prompt_name": prompt_name,
445
+ "prompt_input_variables": input_variables,
446
+ "api_key": api_key,
447
+ "version": version,
448
+ "label": label,
449
+ }
450
+ try:
451
+ async with httpx.AsyncClient() as client:
452
+ response = await client.post(url, json=payload)
453
+ if response.status_code != 200:
454
+ warn_on_bad_response(
455
+ response,
456
+ "WARNING: While tracking your prompt, PromptLayer had the following error",
457
+ )
458
+ return False
459
+ except httpx.RequestError as e:
460
+ print(
461
+ f"WARNING: While tracking your prompt PromptLayer had the following error: {e}",
462
+ file=sys.stderr,
463
+ )
464
+ return False
465
+
466
+ return True
467
+
468
+
432
469
  def promptlayer_track_metadata(request_id, metadata, api_key):
433
470
  try:
434
471
  request_response = requests.post(
@@ -454,6 +491,34 @@ def promptlayer_track_metadata(request_id, metadata, api_key):
454
491
  return True
455
492
 
456
493
 
494
+ async def apromptlayer_track_metadata(
495
+ request_id: str, metadata: Dict[str, Any], api_key: Optional[str] = None
496
+ ) -> bool:
497
+ url = f"{URL_API_PROMPTLAYER}/library-track-metadata"
498
+ payload = {
499
+ "request_id": request_id,
500
+ "metadata": metadata,
501
+ "api_key": api_key,
502
+ }
503
+ try:
504
+ async with httpx.AsyncClient() as client:
505
+ response = await client.post(url, json=payload)
506
+ if response.status_code != 200:
507
+ warn_on_bad_response(
508
+ response,
509
+ "WARNING: While tracking your metadata, PromptLayer had the following error",
510
+ )
511
+ return False
512
+ except httpx.RequestError as e:
513
+ print(
514
+ f"WARNING: While tracking your metadata PromptLayer had the following error: {e}",
515
+ file=sys.stderr,
516
+ )
517
+ return False
518
+
519
+ return True
520
+
521
+
457
522
  def promptlayer_track_score(request_id, score, score_name, api_key):
458
523
  try:
459
524
  data = {"request_id": request_id, "score": score, "api_key": api_key}
@@ -478,6 +543,39 @@ def promptlayer_track_score(request_id, score, score_name, api_key):
478
543
  return True
479
544
 
480
545
 
546
+ async def apromptlayer_track_score(
547
+ request_id: str,
548
+ score: float,
549
+ score_name: Optional[str],
550
+ api_key: Optional[str] = None,
551
+ ) -> bool:
552
+ url = f"{URL_API_PROMPTLAYER}/library-track-score"
553
+ data = {
554
+ "request_id": request_id,
555
+ "score": score,
556
+ "api_key": api_key,
557
+ }
558
+ if score_name is not None:
559
+ data["name"] = score_name
560
+ try:
561
+ async with httpx.AsyncClient() as client:
562
+ response = await client.post(url, json=data)
563
+ if response.status_code != 200:
564
+ warn_on_bad_response(
565
+ response,
566
+ "WARNING: While tracking your score, PromptLayer had the following error",
567
+ )
568
+ return False
569
+ except httpx.RequestError as e:
570
+ print(
571
+ f"WARNING: While tracking your score PromptLayer had the following error: {str(e)}",
572
+ file=sys.stderr,
573
+ )
574
+ return False
575
+
576
+ return True
577
+
578
+
481
579
  class GeneratorProxy:
482
580
  def __init__(self, generator, api_request_arguments, api_key):
483
581
  self.generator = generator
@@ -735,6 +833,28 @@ def promptlayer_create_group(api_key: str = None):
735
833
  return request_response.json()["id"]
736
834
 
737
835
 
836
+ async def apromptlayer_create_group(api_key: Optional[str] = None) -> str:
837
+ try:
838
+ async with httpx.AsyncClient() as client:
839
+ response = await client.post(
840
+ f"{URL_API_PROMPTLAYER}/create-group",
841
+ json={
842
+ "api_key": api_key,
843
+ },
844
+ )
845
+ if response.status_code != 200:
846
+ warn_on_bad_response(
847
+ response,
848
+ "WARNING: While creating your group, PromptLayer had the following error",
849
+ )
850
+ return False
851
+ return response.json()["id"]
852
+ except httpx.RequestError as e:
853
+ raise Exception(
854
+ f"PromptLayer had the following error while creating your group: {str(e)}"
855
+ ) from e
856
+
857
+
738
858
  def promptlayer_track_group(request_id, group_id, api_key: str = None):
739
859
  try:
740
860
  request_response = requests.post(
@@ -759,6 +879,35 @@ def promptlayer_track_group(request_id, group_id, api_key: str = None):
759
879
  return True
760
880
 
761
881
 
882
+ async def apromptlayer_track_group(request_id, group_id, api_key: str = None):
883
+ try:
884
+ payload = {
885
+ "api_key": api_key,
886
+ "request_id": request_id,
887
+ "group_id": group_id,
888
+ }
889
+ async with httpx.AsyncClient() as client:
890
+ response = await client.post(
891
+ f"{URL_API_PROMPTLAYER}/track-group",
892
+ headers={"X-API-KEY": api_key},
893
+ json=payload,
894
+ )
895
+ if response.status_code != 200:
896
+ warn_on_bad_response(
897
+ response,
898
+ "WARNING: While tracking your group, PromptLayer had the following error",
899
+ )
900
+ return False
901
+ except httpx.RequestError as e:
902
+ print(
903
+ f"WARNING: While tracking your group PromptLayer had the following error: {e}",
904
+ file=sys.stderr,
905
+ )
906
+ return False
907
+
908
+ return True
909
+
910
+
762
911
  def get_prompt_template(
763
912
  prompt_name: str, params: Union[GetPromptTemplate, None] = None, api_key: str = None
764
913
  ) -> GetPromptTemplateResponse:
@@ -789,6 +938,39 @@ def get_prompt_template(
789
938
  )
790
939
 
791
940
 
941
+ async def aget_prompt_template(
942
+ prompt_name: str,
943
+ params: Union[GetPromptTemplate, None] = None,
944
+ api_key: str = None,
945
+ ) -> GetPromptTemplateResponse:
946
+ try:
947
+ json_body = {"api_key": api_key}
948
+ if params:
949
+ json_body.update(params)
950
+ async with httpx.AsyncClient() as client:
951
+ response = await client.post(
952
+ f"{URL_API_PROMPTLAYER}/prompt-templates/{prompt_name}",
953
+ headers={"X-API-KEY": api_key},
954
+ json=json_body,
955
+ )
956
+ response.raise_for_status()
957
+ warning = response.json().get("warning", None)
958
+ if warning:
959
+ warn_on_bad_response(
960
+ warning,
961
+ "WARNING: While getting your prompt template",
962
+ )
963
+ return response.json()
964
+ except httpx.HTTPStatusError as e:
965
+ raise Exception(
966
+ f"PromptLayer had the following error while getting your prompt template: {e.response.text}"
967
+ ) from e
968
+ except httpx.RequestError as e:
969
+ raise Exception(
970
+ f"PromptLayer had the following error while getting your prompt template: {str(e)}"
971
+ ) from e
972
+
973
+
792
974
  def publish_prompt_template(
793
975
  body: PublishPromptTemplate,
794
976
  api_key: str = None,
@@ -814,6 +996,37 @@ def publish_prompt_template(
814
996
  )
815
997
 
816
998
 
999
+ async def apublish_prompt_template(
1000
+ body: PublishPromptTemplate,
1001
+ api_key: str = None,
1002
+ ) -> PublishPromptTemplateResponse:
1003
+ try:
1004
+ async with httpx.AsyncClient() as client:
1005
+ response = await client.post(
1006
+ f"{URL_API_PROMPTLAYER}/rest/prompt-templates",
1007
+ headers={"X-API-KEY": api_key},
1008
+ json={
1009
+ "prompt_template": {**body},
1010
+ "prompt_version": {**body},
1011
+ "release_labels": body.get("release_labels"),
1012
+ },
1013
+ )
1014
+ if response.status_code == 400:
1015
+ raise Exception(
1016
+ f"PromptLayer had the following error while publishing your prompt template: {response.text}"
1017
+ )
1018
+ response.raise_for_status()
1019
+ return response.json()
1020
+ except httpx.HTTPStatusError as e:
1021
+ raise Exception(
1022
+ f"PromptLayer had the following error while publishing your prompt template: {e.response.text}"
1023
+ ) from e
1024
+ except httpx.RequestError as e:
1025
+ raise Exception(
1026
+ f"PromptLayer had the following error while publishing your prompt template: {str(e)}"
1027
+ ) from e
1028
+
1029
+
817
1030
  def get_all_prompt_templates(
818
1031
  page: int = 1, per_page: int = 30, api_key: str = None
819
1032
  ) -> List[ListPromptTemplateResponse]:
@@ -835,6 +1048,29 @@ def get_all_prompt_templates(
835
1048
  )
836
1049
 
837
1050
 
1051
+ async def aget_all_prompt_templates(
1052
+ page: int = 1, per_page: int = 30, api_key: str = None
1053
+ ) -> List[ListPromptTemplateResponse]:
1054
+ try:
1055
+ async with httpx.AsyncClient() as client:
1056
+ response = await client.get(
1057
+ f"{URL_API_PROMPTLAYER}/prompt-templates",
1058
+ headers={"X-API-KEY": api_key},
1059
+ params={"page": page, "per_page": per_page},
1060
+ )
1061
+ response.raise_for_status()
1062
+ items = response.json().get("items", [])
1063
+ return items
1064
+ except httpx.HTTPStatusError as e:
1065
+ raise Exception(
1066
+ f"PromptLayer had the following error while getting all your prompt templates: {e.response.text}"
1067
+ ) from e
1068
+ except httpx.RequestError as e:
1069
+ raise Exception(
1070
+ f"PromptLayer had the following error while getting all your prompt templates: {str(e)}"
1071
+ ) from e
1072
+
1073
+
838
1074
  def openai_stream_chat(results: list):
839
1075
  from openai.types.chat import (
840
1076
  ChatCompletion,
@@ -1090,3 +1326,26 @@ def util_log_request(api_key: str, **kwargs) -> Union[RequestLog, None]:
1090
1326
  file=sys.stderr,
1091
1327
  )
1092
1328
  return None
1329
+
1330
+
1331
+ async def autil_log_request(api_key: str, **kwargs) -> Union[RequestLog, None]:
1332
+ try:
1333
+ async with httpx.AsyncClient() as client:
1334
+ response = await client.post(
1335
+ f"{URL_API_PROMPTLAYER}/log-request",
1336
+ headers={"X-API-KEY": api_key},
1337
+ json=kwargs,
1338
+ )
1339
+ if response.status_code != 201:
1340
+ warn_on_bad_response(
1341
+ response,
1342
+ "WARNING: While logging your request PromptLayer had the following error",
1343
+ )
1344
+ return None
1345
+ return response.json()
1346
+ except Exception as e:
1347
+ print(
1348
+ f"WARNING: While tracking your prompt PromptLayer had the following error: {e}",
1349
+ file=sys.stderr,
1350
+ )
1351
+ return None
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "promptlayer"
3
- version = "1.0.25"
3
+ version = "1.0.27"
4
4
  description = "PromptLayer is a platform for prompt engineering and tracks your LLM requests."
5
5
  authors = ["Magniv <hello@magniv.io>"]
6
6
  license = "Apache-2.0"
@@ -12,6 +12,8 @@ requests = "^2.31.0"
12
12
  opentelemetry-api = "^1.26.0"
13
13
  opentelemetry-sdk = "^1.26.0"
14
14
  ably = "^2.0.6"
15
+ aiohttp = "^3.10.10"
16
+ httpx = "^0.27.2"
15
17
 
16
18
  [tool.poetry.group.dev.dependencies]
17
19
  langchain = "^0.0.260"
@@ -1,4 +0,0 @@
1
- from .promptlayer import PromptLayer
2
-
3
- __version__ = "1.0.25"
4
- __all__ = ["PromptLayer", "__version__"]
@@ -1,12 +0,0 @@
1
- from promptlayer.groups.groups import create
2
-
3
-
4
- class GroupManager:
5
- def __init__(self, api_key: str):
6
- self.api_key = api_key
7
-
8
- def create(self):
9
- return create(self.api_key)
10
-
11
-
12
- __all__ = ["GroupManager"]
@@ -1,6 +0,0 @@
1
- from promptlayer.utils import promptlayer_create_group
2
-
3
-
4
- def create(api_key: str = None):
5
- """Create a new group."""
6
- return promptlayer_create_group(api_key)
@@ -1,46 +0,0 @@
1
- from promptlayer.utils import (
2
- promptlayer_track_group,
3
- promptlayer_track_metadata,
4
- promptlayer_track_prompt,
5
- promptlayer_track_score,
6
- )
7
-
8
-
9
- def prompt(
10
- request_id,
11
- prompt_name,
12
- prompt_input_variables,
13
- version=None,
14
- label=None,
15
- api_key: str = None,
16
- ):
17
- if not isinstance(prompt_input_variables, dict):
18
- raise Exception("Please provide a dictionary of input variables.")
19
- return promptlayer_track_prompt(
20
- request_id, prompt_name, prompt_input_variables, api_key, version, label
21
- )
22
-
23
-
24
- def metadata(request_id, metadata, api_key: str = None):
25
- if not isinstance(metadata, dict):
26
- raise Exception("Please provide a dictionary of metadata.")
27
- for key, value in metadata.items():
28
- if not isinstance(key, str) or not isinstance(value, str):
29
- raise Exception(
30
- "Please provide a dictionary of metadata with key value pair of strings."
31
- )
32
- return promptlayer_track_metadata(request_id, metadata, api_key)
33
-
34
-
35
- def score(request_id, score, score_name=None, api_key: str = None):
36
- if not isinstance(score, int):
37
- raise Exception("Please provide a int score.")
38
- if not isinstance(score_name, str) and score_name is not None:
39
- raise Exception("Please provide a string as score name.")
40
- if score < 0 or score > 100:
41
- raise Exception("Please provide a score between 0 and 100.")
42
- return promptlayer_track_score(request_id, score, score_name, api_key)
43
-
44
-
45
- def group(request_id, group_id, api_key: str = None):
46
- return promptlayer_track_group(request_id, group_id, api_key)
File without changes
File without changes