promptlayer 0.5.1__py3-none-any.whl → 0.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of promptlayer might be problematic.

@@ -32,6 +32,8 @@ class PromptLayerBase(object):
             or str(type(attr))
             == "<class 'anthropic.resources.completions.AsyncCompletions'>"
             or str(type(attr)) == "<class 'anthropic.resources.messages.Messages'>"
+            or str(type(attr))
+            == "<class 'anthropic.resources.messages.AsyncMessages'>"
             or re.match("<class 'openai\.resources.*'>", str(type(attr)))
         )
     ):
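The added condition extends the same string-based type check to Anthropic's async messages resource, so attributes of that type are wrapped as well. A minimal sketch of the pattern this hunk extends follows; `_should_wrap` is an illustrative name, not a function in the promptlayer package:

import re

def _should_wrap(attr):
    # Compare the fully qualified class name as a string so the check works
    # without importing the anthropic or openai SDKs directly.
    type_name = str(type(attr))
    return (
        type_name == "<class 'anthropic.resources.completions.AsyncCompletions'>"
        or type_name == "<class 'anthropic.resources.messages.Messages'>"
        # Added in 0.5.3: the async messages resource is recognized too.
        or type_name == "<class 'anthropic.resources.messages.AsyncMessages'>"
        or re.match(r"<class 'openai\.resources.*'>", type_name) is not None
    )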
@@ -1,7 +1,5 @@
 from typing import Dict, List, Literal, Sequence, TypedDict, Union
 
-from typing_extensions import NotRequired
-
 
 class GetPromptTemplate(TypedDict, total=False):
     version: int
@@ -48,18 +46,18 @@ class FunctionCall(TypedDict, total=False):
 
 class SystemMessage(TypedDict, total=False):
     role: Literal["system"]
-    input_variables: NotRequired[List[str]]
-    template_format: NotRequired[TemplateFormat]
+    input_variables: List[str]
+    template_format: TemplateFormat
     content: Sequence[Content]
-    name: NotRequired[str]
+    name: str
 
 
 class UserMessage(TypedDict, total=False):
     role: Literal["user"]
-    input_variables: NotRequired[List[str]]
-    template_format: NotRequired[TemplateFormat]
+    input_variables: List[str]
+    template_format: TemplateFormat
     content: Sequence[Content]
-    name: NotRequired[str]
+    name: str
 
 
 class ToolCall(TypedDict, total=False):
@@ -70,29 +68,29 @@ class ToolCall(TypedDict, total=False):
 
 class AssistantMessage(TypedDict, total=False):
     role: Literal["assistant"]
-    input_variables: NotRequired[List[str]]
-    template_format: NotRequired[TemplateFormat]
-    content: NotRequired[Sequence[Content]]
-    function_call: NotRequired[FunctionCall]
-    name: NotRequired[str]
-    tool_calls: NotRequired[List[ToolCall]]
+    input_variables: List[str]
+    template_format: TemplateFormat
+    content: Sequence[Content]
+    function_call: FunctionCall
+    name: str
+    tool_calls: List[ToolCall]
 
 
 class FunctionMessage(TypedDict, total=False):
     role: Literal["function"]
-    input_variables: NotRequired[List[str]]
-    template_format: NotRequired[TemplateFormat]
-    content: NotRequired[Sequence[Content]]
+    input_variables: List[str]
+    template_format: TemplateFormat
+    content: Sequence[Content]
     name: str
 
 
 class ToolMessage(TypedDict, total=False):
     role: Literal["tool"]
-    input_variables: NotRequired[List[str]]
-    template_format: NotRequired[TemplateFormat]
+    input_variables: List[str]
+    template_format: TemplateFormat
     content: Sequence[Content]
     tool_call_id: str
-    name: NotRequired[str]
+    name: str
 
 
 class ChatFunctionCall(TypedDict, total=False):
@@ -113,19 +111,19 @@ Message = Union[
 
 class CompletionPromptTemplate(TypedDict, total=False):
     type: Literal["completion"]
-    template_format: NotRequired[TemplateFormat]
+    template_format: TemplateFormat
     content: Sequence[Content]
-    input_variables: NotRequired[List[str]]
+    input_variables: List[str]
 
 
 class ChatPromptTemplate(TypedDict, total=False):
     type: Literal["chat"]
     messages: Sequence[Message]
-    functions: NotRequired[Sequence[Function]]
-    function_call: NotRequired[Union[Literal["auto", "none"], ChatFunctionCall]]
-    input_variables: NotRequired[List[str]]
-    tools: NotRequired[Sequence[Tool]]
-    tool_choice: NotRequired[ToolChoice]
+    functions: Sequence[Function]
+    function_call: Union[Literal["auto", "none"], ChatFunctionCall]
+    input_variables: List[str]
+    tools: Sequence[Tool]
+    tool_choice: ToolChoice
 
 
 PromptTemplate = Union[CompletionPromptTemplate, ChatPromptTemplate]
@@ -138,39 +136,39 @@ class Model(TypedDict, total=False):
 
 
 class Metadata(TypedDict, total=False):
-    model: NotRequired[Model]
+    model: Model
 
 
 class BasePromptTemplate(TypedDict, total=False):
     prompt_name: str
-    tags: NotRequired[List[str]]
+    tags: List[str]
 
 
 class PromptVersion(TypedDict, total=False):
     prompt_template: PromptTemplate
-    commit_message: NotRequired[str]
-    metadata: NotRequired[Metadata]
+    commit_message: str
+    metadata: Metadata
 
 
 class PublishPromptTemplate(BasePromptTemplate, PromptVersion):
     pass
 
 
-class BasePromptTemplateResponse(TypedDict, total=False):
+class BasePromptTemplateResponse(TypedDict):
     id: int
     prompt_name: str
     tags: List[str]
     prompt_template: PromptTemplate
-    commit_message: NotRequired[str]
-    metadata: NotRequired[Metadata]
+    commit_message: str
+    metadata: Metadata
 
 
-class PublishPromptTemplateResponse(TypedDict, total=False):
+class PublishPromptTemplateResponse(BasePromptTemplateResponse):
     pass
 
 
 class GetPromptTemplateResponse(BasePromptTemplateResponse):
-    llm_kwargs: NotRequired[Dict[str, object]]
+    llm_kwargs: Union[Dict[str, object], None]
     version: int
 
 
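Because the message and template TypedDicts above are declared with total=False, every key stays optional even after NotRequired is dropped, so removing the typing_extensions import does not force callers to populate all fields. The response types do shift: BasePromptTemplateResponse loses total=False, and GetPromptTemplateResponse.llm_kwargs changes from an optional key to one typed Union[Dict[str, object], None]. A minimal illustrative sketch of the total=False behavior, using simplified stand-ins rather than the package's actual type aliases:

from typing import List, Literal, Sequence, TypedDict

class UserMessage(TypedDict, total=False):
    role: Literal["user"]
    input_variables: List[str]
    template_format: str      # simplified stand-in for TemplateFormat
    content: Sequence[dict]   # simplified stand-in for Sequence[Content]
    name: str

# total=False keeps every key optional, so a partial dict still type-checks.
message: UserMessage = {"role": "user", "content": [{"type": "text", "text": "hi"}]}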
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: promptlayer
-Version: 0.5.1
+Version: 0.5.3
 Summary: PromptLayer is a platform for prompt engineering and tracks your LLM requests.
 License: Apache-2.0
 Author: Magniv
@@ -1,7 +1,7 @@
 promptlayer/__init__.py,sha256=rA-DPHeuPejv0A0ehtwprcT4oIKO2-EQvASPtn74CQQ,1110
 promptlayer/groups/__init__.py,sha256=vVvaDeWgNq7QVtA9xa0z0IEonBa2zuVNkuCvCFYO0rQ,67
 promptlayer/groups/groups.py,sha256=tWMQMtyptbco-BF9CJvOMerHH8q-SgYZzaKjh9mPlgk,139
-promptlayer/promptlayer.py,sha256=0_s4gOVIvYy7QgqSot8Htm-KtTKCMkpe8rtSxEOrJcw,3627
+promptlayer/promptlayer.py,sha256=xhUKh0zNSes8ywfsLJw3yZ_XZ8l1SxH9J3Rb79pNdag,3736
 promptlayer/prompts/__init__.py,sha256=grp4oTduHc1yQqWa48e6QR5V9j1a7nIfPh4RsFjv4l0,205
 promptlayer/prompts/chat.py,sha256=aNyuDC21Mk3ULPFm-nD1LIqIAv5R8032rRawNRFMGbk,2348
 promptlayer/prompts/prompts.py,sha256=oAkVnI8_5CezPrIvwyGT15Mg40Yz4Bx8MewDA5oWzdc,3215
@@ -12,9 +12,9 @@ promptlayer/templates.py,sha256=CGRpioqb6jmlR4dnE1Cah27jlhQj1YBCIb8NcNOCpPI,542
 promptlayer/track/__init__.py,sha256=tkIlHRZMX5GCKBlu4vYpQrxauPno2JDmP0M9RQxUBkQ,119
 promptlayer/track/track.py,sha256=gNM3aAKAAtOc8TOjogpZorlwg4zM2hoWgRnBQfexmmo,1525
 promptlayer/types/__init__.py,sha256=ulWSyCrk5hZ_PI-nKGpd6GPcRaK8lqP4wFl0LPNUYWk,61
-promptlayer/types/prompt_template.py,sha256=akKn8SIkYx97jsnmAugOQbG_oMVgLquu_vvzo9zvpuw,4207
+promptlayer/types/prompt_template.py,sha256=pMFlCG8YAIajc3-GjW0Y9_9-m7_ysbuGDC0WZzFHZbI,3752
 promptlayer/utils.py,sha256=QvhwdJ3T-InIWTH3CPzyQkJTp_7J35HeYYPUc0Y8cR0,19090
-promptlayer-0.5.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-promptlayer-0.5.1.dist-info/METADATA,sha256=4CJDFdRkOfuVxOdasWCzmfyH6jkJTNSHP6FOOOgR3gw,4475
-promptlayer-0.5.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-promptlayer-0.5.1.dist-info/RECORD,,
+promptlayer-0.5.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+promptlayer-0.5.3.dist-info/METADATA,sha256=9dP3yjIb1sLnuSoG446cuPE1hPlsVvc_ckvoGwXptZw,4475
+promptlayer-0.5.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+promptlayer-0.5.3.dist-info/RECORD,,