prompty 0.1.39__py3-none-any.whl → 0.1.44__py3-none-any.whl
- prompty/__init__.py +35 -31
- prompty/azure/__init__.py +2 -2
- prompty/azure/executor.py +23 -19
- prompty/azure/processor.py +18 -7
- prompty/azure_beta/__init__.py +2 -2
- prompty/azure_beta/executor.py +26 -17
- prompty/cli.py +47 -13
- prompty/core.py +45 -40
- prompty/invoker.py +51 -37
- prompty/openai/__init__.py +4 -4
- prompty/openai/executor.py +9 -5
- prompty/openai/processor.py +9 -6
- prompty/parsers.py +21 -17
- prompty/renderers.py +18 -7
- prompty/serverless/__init__.py +2 -2
- prompty/serverless/executor.py +32 -16
- prompty/serverless/processor.py +11 -7
- prompty/tracer.py +41 -32
- prompty/utils.py +20 -15
- {prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/METADATA +1 -1
- prompty-0.1.44.dist-info/RECORD +24 -0
- prompty-0.1.39.dist-info/RECORD +0 -24
- {prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/WHEEL +0 -0
- {prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/entry_points.txt +0 -0
- {prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/licenses/LICENSE +0 -0
prompty/serverless/executor.py
CHANGED
@@ -1,24 +1,27 @@
-import azure.identity
 import importlib.metadata
-
-from
+import typing
+from collections.abc import Iterator
+
+import azure.identity
 from azure.ai.inference import (
     ChatCompletionsClient,
     EmbeddingsClient,
 )
-
 from azure.ai.inference.aio import (
     ChatCompletionsClient as AsyncChatCompletionsClient,
+)
+from azure.ai.inference.aio import (
     EmbeddingsClient as AsyncEmbeddingsClient,
 )
 from azure.ai.inference.models import (
-    StreamingChatCompletions,
     AsyncStreamingChatCompletions,
+    StreamingChatCompletions,
 )
+from azure.core.credentials import AzureKeyCredential
 
-from ..
+from ..core import AsyncPromptyStream, Prompty, PromptyStream
 from ..invoker import Invoker, InvokerFactory
-from ..
+from ..tracer import Tracer
 
 VERSION = importlib.metadata.version("prompty")
 
@@ -29,13 +32,20 @@ class ServerlessExecutor(Invoker):
 
     def __init__(self, prompty: Prompty) -> None:
         super().__init__(prompty)
+        self.kwargs = {
+            key: value
+            for key, value in self.prompty.model.configuration.items()
+            if key != "type"
+        }
 
         self.endpoint = self.prompty.model.configuration["endpoint"]
         self.model = self.prompty.model.configuration["model"]
 
         # no key, use default credentials
         if "key" not in self.kwargs:
-            self.credential
+            self.credential: typing.Union[
+                azure.identity.DefaultAzureCredential, AzureKeyCredential
+            ] = azure.identity.DefaultAzureCredential(
                 exclude_shared_token_cache_credential=True
             )
         else:
@@ -46,11 +56,13 @@ class ServerlessExecutor(Invoker):
         # api type
         self.api = self.prompty.model.api
 
-    def _response(self, response:
+    def _response(self, response: typing.Any) -> typing.Any:
         # stream response
         if isinstance(response, Iterator):
             if isinstance(response, StreamingChatCompletions):
-                stream = PromptyStream(
+                stream: typing.Union[PromptyStream, AsyncPromptyStream] = PromptyStream(
+                    "ServerlessExecutor", response
+                )
                 return stream
             elif isinstance(response, AsyncStreamingChatCompletions):
                 stream = AsyncPromptyStream("ServerlessExecutor", response)
@@ -62,7 +74,7 @@ class ServerlessExecutor(Invoker):
         else:
             return response
 
-    def invoke(self, data:
+    def invoke(self, data: typing.Any) -> typing.Any:
         """Invoke the Serverless SDK
 
         Parameters
@@ -89,7 +101,7 @@ class ServerlessExecutor(Invoker):
                    "description", "Azure Unified Inference SDK Chat Completions Client"
                )
                trace("inputs", cargs)
-                client = ChatCompletionsClient(
+                client: typing.Any = ChatCompletionsClient(
                    user_agent=f"prompty/{VERSION}",
                    **cargs,
                )
@@ -174,10 +186,11 @@ class ServerlessExecutor(Invoker):
                trace("type", "LLM")
                trace("signature", "azure.ai.inference.aio.ChatCompletionsClient.ctor")
                trace(
-                    "description",
+                    "description",
+                    "Azure Unified Inference SDK Async Chat Completions Client",
                )
                trace("inputs", cargs)
-                client = AsyncChatCompletionsClient(
+                client: typing.Any = AsyncChatCompletionsClient(
                    user_agent=f"prompty/{VERSION}",
                    **cargs,
                )
@@ -187,7 +200,8 @@ class ServerlessExecutor(Invoker):
                trace("type", "LLM")
                trace("signature", "azure.ai.inference.ChatCompletionsClient.complete")
                trace(
-                    "description",
+                    "description",
+                    "Azure Unified Inference SDK Async Chat Completions Client",
                )
                eargs = {
                    "model": self.model,
@@ -209,7 +223,9 @@ class ServerlessExecutor(Invoker):
            with Tracer.start("EmbeddingsClient") as trace:
                trace("type", "LLM")
                trace("signature", "azure.ai.inference.aio.EmbeddingsClient.ctor")
-                trace(
+                trace(
+                    "description", "Azure Unified Inference SDK Async Embeddings Client"
+                )
                trace("inputs", cargs)
                client = AsyncEmbeddingsClient(
                    user_agent=f"prompty/{VERSION}",
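For context on the credential hunk above: the executor now annotates self.credential as typing.Union[DefaultAzureCredential, AzureKeyCredential] so both branches of the key check type-check. Below is a minimal sketch of the same selection logic outside the class. The config dict and endpoint value are illustrative only, and the key branch is an assumption, since the hunk truncates the else clause.

    import typing

    import azure.identity
    from azure.core.credentials import AzureKeyCredential

    # illustrative stand-in for prompty.model.configuration
    config = {"type": "serverless", "endpoint": "https://example.invalid", "model": "my-model"}

    # mirror the __init__ comprehension: keep everything except the "type" key
    kwargs = {key: value for key, value in config.items() if key != "type"}

    credential: typing.Union[azure.identity.DefaultAzureCredential, AzureKeyCredential]
    if "key" not in kwargs:
        # no key, use default credentials (same call as in the hunk)
        credential = azure.identity.DefaultAzureCredential(
            exclude_shared_token_cache_credential=True
        )
    else:
        # assumed: build a key credential from the configured key
        credential = AzureKeyCredential(kwargs["key"])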
prompty/serverless/processor.py
CHANGED
@@ -1,9 +1,11 @@
-
-from
-from ..core import AsyncPromptyStream, Prompty, PromptyStream, ToolCall
+import typing
+from collections.abc import AsyncIterator, Iterator
 
 from azure.ai.inference.models import ChatCompletions, EmbeddingsResult
 
+from ..core import AsyncPromptyStream, Prompty, PromptyStream, ToolCall
+from ..invoker import Invoker, InvokerFactory
+
 
 @InvokerFactory.register_processor("serverless")
 class ServerlessProcessor(Invoker):
@@ -12,7 +14,9 @@ class ServerlessProcessor(Invoker):
     def __init__(self, prompty: Prompty) -> None:
         super().__init__(prompty)
 
-    def invoke(
+    def invoke(
+        self, data: typing.Any
+    ) -> typing.Any:
         """Invoke the OpenAI API
 
         Parameters
@@ -53,7 +57,7 @@ class ServerlessProcessor(Invoker):
            for chunk in data:
                if (
                    len(chunk.choices) == 1
-                    and chunk.choices[0].delta.content
+                    and chunk.choices[0].delta.content is not None
                ):
                    content = chunk.choices[0].delta.content
                    yield content
@@ -62,7 +66,7 @@ class ServerlessProcessor(Invoker):
        else:
            return data
 
-    async def invoke_async(self, data: str) -> str:
+    async def invoke_async(self, data: str) -> typing.Union[str, AsyncPromptyStream]:
        """Invoke the Prompty Chat Parser (Async)
 
        Parameters
@@ -103,7 +107,7 @@ class ServerlessProcessor(Invoker):
            async for chunk in data:
                if (
                    len(chunk.choices) == 1
-                    and chunk.choices[0].delta.content
+                    and chunk.choices[0].delta.content is not None
                ):
                    content = chunk.choices[0].delta.content
                    yield content
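The two streaming hunks swap a truthiness test for an explicit "is not None" check, so a chunk whose delta is an empty string is still yielded and only genuinely missing content is skipped. A self-contained illustration of the difference follows; the Delta and Choice classes are stand-ins, not the azure.ai.inference models.

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Delta:
        content: Optional[str]

    @dataclass
    class Choice:
        delta: Delta

    chunks = [Choice(Delta("Hello")), Choice(Delta("")), Choice(Delta(None))]

    # old behaviour: all falsy values, including "", are dropped
    truthy = [c.delta.content for c in chunks if c.delta.content]

    # new behaviour: only None is filtered out
    explicit = [c.delta.content for c in chunks if c.delta.content is not None]

    print(truthy)    # ['Hello']
    print(explicit)  # ['Hello', '']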
prompty/tracer.py
CHANGED
@@ -1,16 +1,17 @@
-import
-import
+import contextlib
+import importlib
 import inspect
-import
+import json
+import os
 import traceback
-import
-import contextlib
-from pathlib import Path
-from numbers import Number
+from collections.abc import Iterator
 from datetime import datetime
+from functools import partial, wraps
+from numbers import Number
+from pathlib import Path
+from typing import Any, Callable, Union
+
 from pydantic import BaseModel
-from functools import wraps, partial
-from typing import Any, Callable, Dict, Iterator, List
 
 
 # clean up key value pairs for sensitive values
@@ -26,7 +27,12 @@ def sanitize(key: str, value: Any) -> Any:
 
 
 class Tracer:
-    _tracers:
+    _tracers: dict[
+        str,
+        Callable[
+            [str], contextlib._GeneratorContextManager[Callable[[str, Any], None]]
+        ],
+    ] = {}
 
     SIGNATURE = "signature"
     INPUTS = "inputs"
@@ -34,7 +40,11 @@ class Tracer:
 
     @classmethod
     def add(
-        cls,
+        cls,
+        name: str,
+        tracer: Callable[
+            [str], contextlib._GeneratorContextManager[Callable[[str, Any], None]]
+        ],
     ) -> None:
         cls._tracers[name] = tracer
 
@@ -44,9 +54,11 @@
 
     @classmethod
     @contextlib.contextmanager
-    def start(
+    def start(
+        cls, name: str, attributes: Union[dict[str, Any], None] = None
+    ) -> Iterator[Callable[[str, Any], list[None]]]:
        with contextlib.ExitStack() as stack:
-            traces = [
+            traces: list[Callable[[str, Any], None]] = [
                stack.enter_context(tracer(name)) for tracer in cls._tracers.values()
            ]
 
@@ -62,7 +74,7 @@
        ]
 
 
-def to_dict(obj: Any) ->
+def to_dict(obj: Any) -> Any:
    # simple json types
    if isinstance(obj, str) or isinstance(obj, Number) or isinstance(obj, bool):
        return obj
@@ -123,18 +135,16 @@ def _inputs(func: Callable, args, kwargs) -> dict:
    return inputs
 
 
-def _results(result: Any) ->
+def _results(result: Any) -> Any:
    return to_dict(result) if result is not None else "None"
 
 
-def _trace_sync(
-    func: Callable = None, **okwargs: Any
-) -> Callable:
+def _trace_sync(func: Callable, **okwargs: Any) -> Callable:
 
    @wraps(func)
    def wrapper(*args, **kwargs):
        name, signature = _name(func, args)
-        altname: str = None
+        altname: Union[str, None] = None
        # special case
        if "name" in okwargs:
            altname = name
@@ -142,7 +152,7 @@ def _trace_sync(
            del okwargs["name"]
 
        with Tracer.start(name) as trace:
-            if altname
+            if altname is not None:
                trace("function", altname)
 
            trace("signature", signature)
@@ -181,14 +191,12 @@
    return wrapper
 
 
-def _trace_async(
-    func: Callable = None, **okwargs: Any
-) -> Callable:
+def _trace_async(func: Callable, **okwargs: Any) -> Callable:
 
    @wraps(func)
    async def wrapper(*args, **kwargs):
        name, signature = _name(func, args)
-        altname: str = None
+        altname: Union[str, None] = None
        # special case
        if "name" in okwargs:
            altname = name
@@ -196,9 +204,9 @@ def _trace_async(
            del okwargs["name"]
 
        with Tracer.start(name) as trace:
-            if altname
+            if altname is not None:
                trace("function", altname)
-
+
            trace("signature", signature)
 
            # support arbitrary keyword
@@ -234,7 +242,7 @@ def _trace_async(
    return wrapper
 
 
-def trace(func: Callable = None, **kwargs: Any) -> Callable:
+def trace(func: Union[Callable, None] = None, **kwargs: Any) -> Callable:
    if func is None:
        return partial(trace, **kwargs)
    wrapped_method = _trace_async if inspect.iscoroutinefunction(func) else _trace_sync
@@ -242,7 +250,7 @@ def trace(func: Callable = None, **kwargs: Any) -> Callable:
 
 
 class PromptyTracer:
-    def __init__(self, output_dir: str = None) -> None:
+    def __init__(self, output_dir: Union[str, None] = None) -> None:
        if output_dir:
            self.output = Path(output_dir).resolve().absolute()
        else:
@@ -251,7 +259,7 @@ class PromptyTracer:
        if not self.output.exists():
            self.output.mkdir(parents=True, exist_ok=True)
 
-        self.stack:
+        self.stack: list[dict[str, Any]] = []
 
    @contextlib.contextmanager
    def tracer(self, name: str) -> Iterator[Callable[[str, Any], None]]:
@@ -324,7 +332,7 @@ class PromptyTracer:
        self.stack[-1]["__frames"] = []
        self.stack[-1]["__frames"].append(frame)
 
-    def hoist_item(self, src:
+    def hoist_item(self, src: dict[str, Any], cur: dict[str, Any]) -> dict[str, Any]:
        for key, value in src.items():
            if value is None or isinstance(value, list) or isinstance(value, dict):
                continue
@@ -333,12 +341,13 @@ class PromptyTracer:
                    cur[key] = value
                else:
                    cur[key] += value
-            except:
+            except Exception:
+                # TODO: Be more specific about exceptions here
                continue
 
        return cur
 
-    def write_trace(self, frame:
+    def write_trace(self, frame: dict[str, Any]) -> None:
        trace_file = (
            self.output
            / f"{frame['name']}.{datetime.now().strftime('%Y%m%d.%H%M%S')}.tracy"
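The trace decorator keeps its func: Union[Callable, None] = None signature together with the partial(trace, **kwargs) early return, which is the usual pattern for a decorator that works both bare and with keyword arguments. A usage sketch against the API visible in this diff follows; the decorated functions are made up for illustration.

    from prompty.tracer import PromptyTracer, Tracer, trace

    # Tracer.add now takes an explicit name plus a context-manager factory,
    # per the annotated signature in the hunk above
    json_tracer = PromptyTracer()
    Tracer.add("PromptyTracer", json_tracer.tracer)

    @trace                    # bare form: func is passed straight through
    def add_numbers(a: int, b: int) -> int:
        return a + b

    @trace(name="multiply")   # keyword form: trace() returns partial(trace, **kwargs)
    def mul_numbers(a: int, b: int) -> int:
        return a * b

    add_numbers(2, 3)
    mul_numbers(2, 3)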
prompty/utils.py
CHANGED
@@ -1,34 +1,39 @@
-import re
-import yaml
 import json
-import
-import
-from typing import Dict
+import re
+import typing
 from pathlib import Path
 
+import aiofiles
+import yaml
+
 _yaml_regex = re.compile(
     r"^\s*" + r"(?:---|\+\+\+)" + r"(.*?)" + r"(?:---|\+\+\+)" + r"\s*(.+)$",
     re.S | re.M,
 )
 
-
-
+
+def load_text(file_path, encoding="utf-8"):
+    with open(file_path, encoding=encoding) as file:
         return file.read()
 
-
-
+
+async def load_text_async(file_path, encoding="utf-8"):
+    async with aiofiles.open(file_path, encoding=encoding) as f:
         content = await f.read()
         return content
 
-
+
+def load_json(file_path, encoding="utf-8"):
     return json.loads(load_text(file_path, encoding=encoding))
 
-
+
+async def load_json_async(file_path, encoding="utf-8"):
     # async file open
     content = await load_text_async(file_path, encoding=encoding)
     return json.loads(content)
 
-
+
+def _find_global_config(prompty_path: Path = Path.cwd()) -> typing.Union[Path, None]:
     prompty_config = list(Path.cwd().glob("**/prompty.json"))
 
     if len(prompty_config) > 0:
@@ -46,7 +51,7 @@ def _find_global_config(prompty_path: Path = Path.cwd()) -> Path:
 
 def load_global_config(
     prompty_path: Path = Path.cwd(), configuration: str = "default"
-) ->
+) -> dict[str, typing.Any]:
     # prompty.config laying around?
     config = _find_global_config(prompty_path)
 
@@ -63,7 +68,7 @@ def load_global_config(
 
 async def load_global_config_async(
     prompty_path: Path = Path.cwd(), configuration: str = "default"
-) ->
+) -> dict[str, typing.Any]:
     # prompty.config laying around?
     config = _find_global_config(prompty_path)
 
@@ -78,7 +83,7 @@ async def load_global_config_async(
     return {}
 
 
-def load_prompty(file_path, encoding=
+def load_prompty(file_path, encoding="utf-8"):
     contents = load_text(file_path, encoding=encoding)
     return parse(contents)
 
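utils.py now reads files asynchronously through aiofiles, and both load_global_config variants are annotated to return dict[str, typing.Any]. A small usage sketch follows, assuming a prompty.json exists somewhere under the working directory (that file name comes from _find_global_config; everything else here is illustrative).

    import asyncio
    from pathlib import Path

    from prompty.utils import load_global_config, load_global_config_async

    # synchronous: look for the nearest prompty.json and read the "default" configuration
    config = load_global_config(Path.cwd(), configuration="default")
    print(config)

    async def main() -> None:
        # async variant reads the file through aiofiles under the hood
        async_config = await load_global_config_async(Path.cwd(), configuration="default")
        print(async_config)

    asyncio.run(main())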
{prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: prompty
-Version: 0.1.39
+Version: 0.1.44
 Summary: Prompty is a new asset class and format for LLM prompts that aims to provide observability, understandability, and portability for developers. It includes spec, tooling, and a runtime. This Prompty runtime supports Python
 Author-Email: Seth Juarez <seth.juarez@microsoft.com>
 License: MIT
prompty-0.1.44.dist-info/RECORD
ADDED
@@ -0,0 +1,24 @@
+prompty-0.1.44.dist-info/METADATA,sha256=v_DCka2hLvSXEYPdWGYCAziC2WZ0cN13Paga30I-WWY,9164
+prompty-0.1.44.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+prompty-0.1.44.dist-info/entry_points.txt,sha256=a3i7Kvf--3DOkkv9VQpstwaNKgsnXwDGaPL18lPpKeI,60
+prompty-0.1.44.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
+prompty/__init__.py,sha256=geIhhprDhbqqkj5jQhy3Nv_b83T38VhleEUfB7Xkd4U,16861
+prompty/azure/__init__.py,sha256=zBxzOMQCwtiz2CsI8gNWzr7T2ZJHF3TZNIUtvfuaEQI,309
+prompty/azure/executor.py,sha256=UaojJ4aUObdGvR-aBRRSntrFJZynvZ0Q1z8j2XHyc3I,9269
+prompty/azure/processor.py,sha256=gNmUkPBoSLPE0t7IVJUxGdwZ2otiycDE4Fu6L1vurZI,5330
+prompty/azure_beta/__init__.py,sha256=-LPrqoAv2UGI29SUjiZCkPlAGARvixlYe2IW1edswWo,375
+prompty/azure_beta/executor.py,sha256=bARyO2k42ZQfd5NrNKyMUd7JZ2Rcm8urUP0sJ1P1Qxk,10157
+prompty/cli.py,sha256=7aDL8Lup5EONizwn1zQUhGq9vk6z62EA0lpdf80Ybdk,4836
+prompty/core.py,sha256=kAREbThLiVUikvikZX27riW0jJKIldkLusidzmdOftg,13070
+prompty/invoker.py,sha256=ARmyVkinm_Nk91k8mRlsDffox7MtGSVLpwNycavMSSI,9028
+prompty/openai/__init__.py,sha256=aRdXZ5pL4tURFqRwVX7gNdVy3PPWE6BvUbK1o73AqQc,303
+prompty/openai/executor.py,sha256=v5K8kDpGUsbtTpCb1hOVxPt7twX2p-a5yL2iC3Xm8TQ,3752
+prompty/openai/processor.py,sha256=X6yfSlHoTgXxCfGvUwADACjNtzLMNumcn0SX3U4T9as,2542
+prompty/parsers.py,sha256=ptET4j8Rf55Ob0uOx6A41nlQ6cwo1CHRytSjeMIp6JE,5083
+prompty/renderers.py,sha256=UMgJOjcAWd8LXX1ynXEdT4i9bqEAaJkeFp6KOD74lqU,1453
+prompty/serverless/__init__.py,sha256=ILtbqhy6E-wWodbnq-_aftruTGk1Z0EI9zURFH4FECM,297
+prompty/serverless/executor.py,sha256=QZXeFTXv_aT22rbBBgBFLtdAJqGp4W8GS3H3Rtoy8bE,8936
+prompty/serverless/processor.py,sha256=d42MalWRf8RUpHRiueqAPAj_nGmJSMUE2yL-Tgxrss4,3754
+prompty/tracer.py,sha256=jM2nQ7Cb-tSb7wog9w9QzhmB1IGwYb0B0-TBClZOn_A,11980
+prompty/utils.py,sha256=P9nW0QfY1F5PM4pLxsFVlnul1BaRBatmvC2tpqydPKQ,2836
+prompty-0.1.44.dist-info/RECORD,,
prompty-0.1.39.dist-info/RECORD
DELETED
@@ -1,24 +0,0 @@
-prompty-0.1.39.dist-info/METADATA,sha256=MIUpA37K8Wo4wUD7xtK6oIeLFJj5uuZT-5rM18LbI5w,9164
-prompty-0.1.39.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
-prompty-0.1.39.dist-info/entry_points.txt,sha256=a3i7Kvf--3DOkkv9VQpstwaNKgsnXwDGaPL18lPpKeI,60
-prompty-0.1.39.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
-prompty/__init__.py,sha256=HCAvInBgNcIDO54rR4-RDIF4KUmGVQ2TRam_dS7xHEk,16561
-prompty/azure/__init__.py,sha256=WI8qeNWfxqggj21bznL-mxGUS-v67bUrunX0Lf2hsI8,295
-prompty/azure/executor.py,sha256=LZG0U5AZB4H6CDfZxlsvcUnmq0LnTxMEFgLd9nzHAgc,9126
-prompty/azure/processor.py,sha256=-CWc_1h4xdb0nyHwUkaI40NtzTxxenCXkgjJTh76AOk,5079
-prompty/azure_beta/__init__.py,sha256=QF4qcILpsryBLl1nvc1AhRzkKI2uqc6OAU_fA3LISNE,361
-prompty/azure_beta/executor.py,sha256=PIPfeOTLk9YEM80adktL2zxpa51gO4itlQzUDoq0QVg,9896
-prompty/cli.py,sha256=k8Rxm41fMFNvmnsX737UiN6v-7756tpoJPN4rPXMNcU,3726
-prompty/core.py,sha256=haX415_MciCn6K0zU4nZ394ZaJemZe9-iOsdRbe9G6A,12985
-prompty/invoker.py,sha256=O77E5iQ1552wQXxL8FhZGERbCi_0O3mDTd5Ozqw-O-E,8593
-prompty/openai/__init__.py,sha256=hbBhgCwB_uSq-1NWL02yiOiNkyi39-G-AyVlTSgKTkU,276
-prompty/openai/executor.py,sha256=qkFSMA-pWlA1c602Dx5aR1cFEOnYsUUp_E7P3zFhSPs,3644
-prompty/openai/processor.py,sha256=l9-91_CCgRtYvkwMO-jV6rkgeCA4gV_MFamQcvoNGQ0,2499
-prompty/parsers.py,sha256=zHqcRpFPUDG6BOI7ipaJf6yGc6ZbKnsLmO7jKEYNct4,5013
-prompty/renderers.py,sha256=80HNtCp3osgaLfhKxkG4j1kiRhJ727ITzT_yL5JLjEQ,1104
-prompty/serverless/__init__.py,sha256=xoXOTRXO8C631swNKaa-ek5_R3X-87bJpTm0z_Rsg6A,282
-prompty/serverless/executor.py,sha256=PUDJsYcJLQx9JSTh-R3HdJd0ehEC6w2Ch5OEqz52uVI,8395
-prompty/serverless/processor.py,sha256=ZSL9y8JC-G4qbtWOSbQAqEcFMWEaLskyOr5VjLthelU,3660
-prompty/tracer.py,sha256=VcrXkM71VO45xWY2b7H2AdCp5bSsCtcAv1m00ycf7XM,11678
-prompty/utils.py,sha256=jm7HEzOGk3zz8d5aquXK3zWIQWuDpBpJTzlz5sswtdg,2836
-prompty-0.1.39.dist-info/RECORD,,
{prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/WHEEL
File without changes
{prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/entry_points.txt
File without changes
{prompty-0.1.39.dist-info → prompty-0.1.44.dist-info}/licenses/LICENSE
File without changes