prompty 0.1.34__py3-none-any.whl → 0.1.37__py3-none-any.whl

prompty/azure/processor.py CHANGED
@@ -9,6 +9,8 @@ from openai.types.create_embedding_response import CreateEmbeddingResponse
 
 @InvokerFactory.register_processor("azure")
 @InvokerFactory.register_processor("azure_openai")
+@InvokerFactory.register_processor("azure_beta")
+@InvokerFactory.register_processor("azure_openai_beta")
 class AzureOpenAIProcessor(Invoker):
     """Azure OpenAI Processor"""
 
prompty/azure_beta/__init__.py ADDED
@@ -0,0 +1,11 @@
+# __init__.py
+from prompty.invoker import InvokerException
+
+try:
+    from .executor import AzureOpenAIBetaExecutor
+    # Reuse the common Azure OpenAI Processor
+    from ..azure.processor import AzureOpenAIProcessor
+except ImportError:
+    raise InvokerException(
+        "Error registering AzureOpenAIBetaExecutor and AzureOpenAIProcessor", "azure_beta"
+    )
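Taken together, these registrations mean a prompty file whose model configuration sets type "azure_beta" (or "azure_openai_beta") resolves to the AzureOpenAIProcessor above and to the AzureOpenAIBetaExecutor added below. A minimal usage sketch, assuming the standard prompty.load/prompty.execute entry points; "structured_output.prompty" is a hypothetical file that sets that type, and the except branch shows where the InvokerException raised in this __init__.py surfaces when the "azure" extras are missing:

    # Hedged sketch, not part of the package: run a prompty through the new beta invokers.
    import prompty
    from prompty.invoker import InvokerException

    try:
        import prompty.azure_beta  # noqa: F401 - imported for its registration side effect
    except InvokerException as exc:
        raise SystemExit(f"azure_beta invokers unavailable (install the 'azure' extra): {exc}")

    p = prompty.load("structured_output.prompty")  # hypothetical file with type: azure_beta
    result = prompty.execute(p, inputs={"question": "What does Prompty do?"})
    print(result)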
prompty/azure_beta/executor.py ADDED
@@ -0,0 +1,281 @@
+import azure.identity
+import importlib.metadata
+from typing import AsyncIterator, Iterator
+from openai import AzureOpenAI, AsyncAzureOpenAI
+
+from prompty.tracer import Tracer
+from ..core import AsyncPromptyStream, Prompty, PromptyStream
+from ..invoker import Invoker, InvokerFactory
+import re
+from datetime import datetime
+
+def extract_date(data: str) -> datetime:
+    """Extract date from a string
+
+    Parameters
+    ----------
+    data : str
+        The string containing the date
+
+    Returns
+    -------
+    datetime
+        The extracted date as a datetime object
+    """
+
+    # Regular expression to find dates in the format YYYY-MM-DD
+    date_pattern = re.compile(r'\b\d{4}-\d{2}-\d{2}\b')
+    match = date_pattern.search(data)
+    if match:
+        date_str = match.group(0)
+        # Validate the date format
+        try:
+            return datetime.strptime(date_str, '%Y-%m-%d')
+        except ValueError:
+            pass
+    return None
+
+def is_structured_output_available(api_version: str) -> bool:
+    """Check if the structured output API is available for the given API version
+
+    Parameters
+    ----------
+    api_version : str
+        The API version
+
+    Returns
+    -------
+    bool
+        True if the structured output API is available, False otherwise
+    """
+
+    # Define the threshold date
+    threshold_api_version_date = datetime(2024, 8, 1)
+
+    api_version_date = extract_date(api_version)
+
+    # Check if the API version is on or after the threshold date
+    if api_version_date and api_version_date >= threshold_api_version_date:
+        return True
+    return False
+
+VERSION = importlib.metadata.version("prompty")
+
+
+@InvokerFactory.register_executor("azure_beta")
+@InvokerFactory.register_executor("azure_openai_beta")
+class AzureOpenAIBetaExecutor(Invoker):
+    """Azure OpenAI Beta Executor"""
+
+    def __init__(self, prompty: Prompty) -> None:
+        super().__init__(prompty)
+        self.kwargs = {
+            key: value
+            for key, value in self.prompty.model.configuration.items()
+            if key != "type"
+        }
+
+        # no key, use default credentials
+        if "api_key" not in self.kwargs:
+            # managed identity if client id
+            if "client_id" in self.kwargs:
+                default_credential = azure.identity.ManagedIdentityCredential(
+                    client_id=self.kwargs.pop("client_id"),
+                )
+            # default credential
+            else:
+                default_credential = azure.identity.DefaultAzureCredential(
+                    exclude_shared_token_cache_credential=True
+                )
+
+            self.kwargs["azure_ad_token_provider"] = (
+                azure.identity.get_bearer_token_provider(
+                    default_credential, "https://cognitiveservices.azure.com/.default"
+                )
+            )
+
+        self.api = self.prompty.model.api
+        self.api_version = self.prompty.model.configuration["api_version"]
+        self.deployment = self.prompty.model.configuration["azure_deployment"]
+        self.parameters = self.prompty.model.parameters
+
+    def invoke(self, data: any) -> any:
+        """Invoke the Azure OpenAI API
+
+        Parameters
+        ----------
+        data : any
+            The data to send to the Azure OpenAI API
+
+        Returns
+        -------
+        any
+            The response from the Azure OpenAI API
+        """
+
+        with Tracer.start("AzureOpenAI") as trace:
+            trace("type", "LLM")
+            trace("signature", "AzureOpenAI.ctor")
+            trace("description", "Azure OpenAI Constructor")
+            trace("inputs", self.kwargs)
+            client = AzureOpenAI(
+                default_headers={
+                    "User-Agent": f"prompty/{VERSION}",
+                    "x-ms-useragent": f"prompty/{VERSION}",
+                },
+                **self.kwargs,
+            )
+            trace("result", client)
+
+        with Tracer.start("create") as trace:
+            trace("type", "LLM")
+            trace("description", "Azure OpenAI Client")
+
+            if self.api == "chat":
+                # We can only verify the API version as the model and its version are not part of prompty configuration
+                # Should be gpt-4o and 2024-08-06 or later
+                choose_beta = is_structured_output_available(self.api_version)
+                if choose_beta:
+                    trace("signature", "AzureOpenAI.beta.chat.completions.parse")
+                else:
+                    trace("signature", "AzureOpenAI.chat.completions.create")
+
+                args = {
+                    "model": self.deployment,
+                    "messages": data if isinstance(data, list) else [data],
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                if choose_beta:
+                    response = client.beta.chat.completions.parse(**args)
+                else:
+                    response = client.chat.completions.create(**args)
+                trace("result", response)
+
+            elif self.api == "completion":
+                trace("signature", "AzureOpenAI.completions.create")
+                args = {
+                    "prompt": data,
+                    "model": self.deployment,
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = client.completions.create(**args)
+                trace("result", response)
+
+            elif self.api == "embedding":
+                trace("signature", "AzureOpenAI.embeddings.create")
+                args = {
+                    "input": data if isinstance(data, list) else [data],
+                    "model": self.deployment,
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = client.embeddings.create(**args)
+                trace("result", response)
+
+            elif self.api == "image":
+                trace("signature", "AzureOpenAI.images.generate")
+                args = {
+                    "prompt": data,
+                    "model": self.deployment,
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = client.images.generate(**args)
+                trace("result", response)
+
+        # stream response
+        if isinstance(response, Iterator):
+            if self.api == "chat":
+                # TODO: handle the case where there might be no usage in the stream
+                return PromptyStream("AzureOpenAIBetaExecutor", response)
+            else:
+                return PromptyStream("AzureOpenAIBetaExecutor", response)
+        else:
+            return response
+
+    async def invoke_async(self, data: str) -> str:
+        """Invoke the Prompty Chat Parser (Async)
+
+        Parameters
+        ----------
+        data : str
+            The data to parse
+
+        Returns
+        -------
+        str
+            The parsed data
+        """
+        with Tracer.start("AzureOpenAIAsync") as trace:
+            trace("type", "LLM")
+            trace("signature", "AzureOpenAIAsync.ctor")
+            trace("description", "Async Azure OpenAI Constructor")
+            trace("inputs", self.kwargs)
+            client = AsyncAzureOpenAI(
+                default_headers={
+                    "User-Agent": f"prompty/{VERSION}",
+                    "x-ms-useragent": f"prompty/{VERSION}",
+                },
+                **self.kwargs,
+            )
+            trace("result", client)
+
+        with Tracer.start("create") as trace:
+            trace("type", "LLM")
+            trace("description", "Azure OpenAI Client")
+
+            if self.api == "chat":
+                trace("signature", "AzureOpenAIAsync.chat.completions.create")
+                args = {
+                    "model": self.deployment,
+                    "messages": data if isinstance(data, list) else [data],
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = await client.chat.completions.create(**args)
+                trace("result", response)
+
+            elif self.api == "completion":
+                trace("signature", "AzureOpenAIAsync.completions.create")
+                args = {
+                    "prompt": data,
+                    "model": self.deployment,
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = await client.completions.create(**args)
+                trace("result", response)
+
+            elif self.api == "embedding":
+                trace("signature", "AzureOpenAIAsync.embeddings.create")
+                args = {
+                    "input": data if isinstance(data, list) else [data],
+                    "model": self.deployment,
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = await client.embeddings.create(**args)
+                trace("result", response)
+
+            elif self.api == "image":
+                trace("signature", "AzureOpenAIAsync.images.generate")
+                args = {
+                    "prompt": data,
+                    "model": self.deployment,
+                    **self.parameters,
+                }
+                trace("inputs", args)
+                response = await client.images.generate(**args)
+                trace("result", response)
+
+        # stream response
+        if isinstance(response, AsyncIterator):
+            if self.api == "chat":
+                # TODO: handle the case where there might be no usage in the stream
+                return AsyncPromptyStream("AzureOpenAIBetaExecutorAsync", response)
+            else:
+                return AsyncPromptyStream("AzureOpenAIBetaExecutorAsync", response)
+        else:
+            return response
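Because the model name and version are not part of the prompty configuration, the executor above gates the beta path purely on the API version string: versions dated 2024-08-01 or later go through client.beta.chat.completions.parse, earlier ones fall back to chat.completions.create. An illustrative check of that gate (requires the "azure" extras so the module imports; the version strings are examples only):

    # Sketch: exercise the version gate defined in the executor above.
    from prompty.azure_beta.executor import is_structured_output_available

    for api_version in ["2024-08-01-preview", "2024-10-21", "2024-06-01", "2023-03-15-preview"]:
        print(api_version, "->", is_structured_output_available(api_version))
    # Expected: True for the first two, False for the last two.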
prompty/tracer.py CHANGED
@@ -28,6 +28,10 @@ def sanitize(key: str, value: Any) -> Any:
 class Tracer:
     _tracers: Dict[str, Callable[[str], Iterator[Callable[[str, Any], None]]]] = {}
 
+    SIGNATURE = "signature"
+    INPUTS = "inputs"
+    RESULT = "result"
+
     @classmethod
     def add(
         cls, name: str, tracer: Callable[[str], Iterator[Callable[[str, Any], None]]]
@@ -40,11 +44,17 @@
 
     @classmethod
     @contextlib.contextmanager
-    def start(cls, name: str) -> Iterator[Callable[[str, Any], None]]:
+    def start(cls, name: str, attributes: Dict[str, Any] = None) -> Iterator[Callable[[str, Any], None]]:
         with contextlib.ExitStack() as stack:
             traces = [
                 stack.enter_context(tracer(name)) for tracer in cls._tracers.values()
             ]
+
+            if attributes:
+                for trace in traces:
+                    for key, value in attributes.items():
+                        trace(key, value)
+
             yield lambda key, value: [
                 # normalize and sanitize any trace values
                 trace(key, sanitize(key, to_dict(value)))
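The new attributes parameter lets callers attach common span attributes the moment a span opens, before any explicit trace calls, and SIGNATURE/INPUTS/RESULT give the runtime's usual keys a single definition. A minimal sketch with a throwaway tracer; print_tracer below is illustrative and not part of prompty:

    import contextlib
    from prompty.tracer import Tracer

    @contextlib.contextmanager
    def print_tracer(name):
        # a registered tracer is entered once per span and yields a (key, value) callable
        print(f"enter {name}")
        yield lambda key, value: print(f"  {name}.{key} = {value}")
        print(f"exit {name}")

    Tracer.add("print", print_tracer)

    # attributes are emitted as soon as the span opens; the yielded callable works as before
    with Tracer.start("demo", attributes={"type": "LLM", Tracer.SIGNATURE: "demo.run"}) as trace:
        trace(Tracer.INPUTS, {"question": "hello"})
        trace(Tracer.RESULT, "world")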
{prompty-0.1.34.dist-info → prompty-0.1.37.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: prompty
-Version: 0.1.34
+Version: 0.1.37
 Summary: Prompty is a new asset class and format for LLM prompts that aims to provide observability, understandability, and portability for developers. It includes spec, tooling, and a runtime. This Prompty runtime supports Python
 Author-Email: Seth Juarez <seth.juarez@microsoft.com>
 License: MIT
@@ -13,9 +13,9 @@ Requires-Dist: click>=8.1.7
 Requires-Dist: aiofiles>=24.1.0
 Provides-Extra: azure
 Requires-Dist: azure-identity>=1.17.1; extra == "azure"
-Requires-Dist: openai>=1.35.10; extra == "azure"
+Requires-Dist: openai>=1.43.0; extra == "azure"
 Provides-Extra: openai
-Requires-Dist: openai>=1.35.10; extra == "openai"
+Requires-Dist: openai>=1.43.0; extra == "openai"
 Provides-Extra: serverless
 Requires-Dist: azure-identity>=1.17.1; extra == "serverless"
 Requires-Dist: azure-ai-inference>=1.0.0b3; extra == "serverless"
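The openai floor rises from 1.35.10 to 1.43.0 because the new beta executor calls client.beta.chat.completions.parse, which older openai releases do not expose. A stdlib-only sanity check for an installed environment (illustrative, not part of the package):

    # Sketch: confirm the installed openai package satisfies the new minimum.
    import importlib.metadata

    installed = importlib.metadata.version("openai")
    print(f"openai {installed} (prompty[azure] / prompty[openai] now require >= 1.43.0)")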
{prompty-0.1.34.dist-info → prompty-0.1.37.dist-info}/RECORD CHANGED
@@ -1,11 +1,13 @@
-prompty-0.1.34.dist-info/METADATA,sha256=tZPxYcZ2mi_pOO-Z-6wrQ6zTeadaPLpFdkLZXbfWDF0,9166
-prompty-0.1.34.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
-prompty-0.1.34.dist-info/entry_points.txt,sha256=a3i7Kvf--3DOkkv9VQpstwaNKgsnXwDGaPL18lPpKeI,60
-prompty-0.1.34.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
+prompty-0.1.37.dist-info/METADATA,sha256=q2jDy8mc-b5UPlsKAkel1SzCP8XPwspLr0mfcZA5OlQ,9164
+prompty-0.1.37.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+prompty-0.1.37.dist-info/entry_points.txt,sha256=a3i7Kvf--3DOkkv9VQpstwaNKgsnXwDGaPL18lPpKeI,60
+prompty-0.1.37.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
 prompty/__init__.py,sha256=HCAvInBgNcIDO54rR4-RDIF4KUmGVQ2TRam_dS7xHEk,16561
 prompty/azure/__init__.py,sha256=WI8qeNWfxqggj21bznL-mxGUS-v67bUrunX0Lf2hsI8,295
 prompty/azure/executor.py,sha256=RJXMB0W7KcVvQ7l3xJaau7YM8PqOCQwuN4IwIe0sTLg,7930
-prompty/azure/processor.py,sha256=eWcHTLwxxBw7ZfK-rSf2cdljJgouxGXuRh_7EtV-MGk,4974
+prompty/azure/processor.py,sha256=-CWc_1h4xdb0nyHwUkaI40NtzTxxenCXkgjJTh76AOk,5079
+prompty/azure_beta/__init__.py,sha256=QF4qcILpsryBLl1nvc1AhRzkKI2uqc6OAU_fA3LISNE,361
+prompty/azure_beta/executor.py,sha256=PIPfeOTLk9YEM80adktL2zxpa51gO4itlQzUDoq0QVg,9896
 prompty/cli.py,sha256=k8Rxm41fMFNvmnsX737UiN6v-7756tpoJPN4rPXMNcU,3726
 prompty/core.py,sha256=EvkXV_mH7Mj1skT21XMZ4VX-Jlwx6AF-WEJ9yPc50AE,13061
 prompty/invoker.py,sha256=O77E5iQ1552wQXxL8FhZGERbCi_0O3mDTd5Ozqw-O-E,8593
@@ -17,6 +19,6 @@ prompty/renderers.py,sha256=80HNtCp3osgaLfhKxkG4j1kiRhJ727ITzT_yL5JLjEQ,1104
 prompty/serverless/__init__.py,sha256=xoXOTRXO8C631swNKaa-ek5_R3X-87bJpTm0z_Rsg6A,282
 prompty/serverless/executor.py,sha256=PUDJsYcJLQx9JSTh-R3HdJd0ehEC6w2Ch5OEqz52uVI,8395
 prompty/serverless/processor.py,sha256=ZSL9y8JC-G4qbtWOSbQAqEcFMWEaLskyOr5VjLthelU,3660
-prompty/tracer.py,sha256=WUR7PsvhBLQf7WcnKQPOeLeii9l4xPKB2owjvJ50d0E,10907
+prompty/tracer.py,sha256=7z9IsJgOyE3tJkRIAhbo3QRSKrjFjH0-ZoN5fKIT_9w,11181
 prompty/utils.py,sha256=jm7HEzOGk3zz8d5aquXK3zWIQWuDpBpJTzlz5sswtdg,2836
-prompty-0.1.34.dist-info/RECORD,,
+prompty-0.1.37.dist-info/RECORD,,