prompty-0.1.8-py3-none-any.whl → prompty-0.1.10-py3-none-any.whl
- prompty/core.py +62 -2
- prompty/executors.py +8 -9
- prompty/processors.py +2 -5
- prompty/tracer.py +122 -151
- {prompty-0.1.8.dist-info → prompty-0.1.10.dist-info}/METADATA +3 -3
- prompty-0.1.10.dist-info/RECORD +12 -0
- prompty-0.1.8.dist-info/RECORD +0 -12
- {prompty-0.1.8.dist-info → prompty-0.1.10.dist-info}/WHEEL +0 -0
- {prompty-0.1.8.dist-info → prompty-0.1.10.dist-info}/licenses/LICENSE +0 -0
prompty/core.py  CHANGED

@@ -6,9 +6,9 @@ import yaml
 import json
 import abc
 from pathlib import Path
+from .tracer import Tracer, trace, to_dict
 from pydantic import BaseModel, Field, FilePath
-from typing import List, Literal, Dict, Callable, Set
-from .tracer import trace
+from typing import AsyncIterator, Iterator, List, Literal, Dict, Callable, Set


 class PropertySettings(BaseModel):
@@ -449,3 +449,63 @@ class Frontmatter:
             "body": body,
             "frontmatter": fmatter,
         }
+
+
+class PromptyStream(Iterator):
+    """PromptyStream class to iterate over LLM stream.
+    Necessary for Prompty to handle streaming data when tracing."""
+
+    def __init__(self, name: str, iterator: Iterator):
+        self.name = name
+        self.iterator = iterator
+        self.items: List[any] = []
+        self.__name__ = "PromptyStream"
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        try:
+            # enumerate but add to list
+            o = self.iterator.__next__()
+            self.items.append(o)
+            return o
+
+        except StopIteration:
+            # StopIteration is raised
+            # contents are exhausted
+            if len(self.items) > 0:
+                with Tracer.start(f"{self.name}.PromptyStream") as trace:
+                    trace("items", [to_dict(s) for s in self.items])
+
+            raise StopIteration
+
+
+class AsyncPromptyStream(AsyncIterator):
+    """AsyncPromptyStream class to iterate over LLM stream.
+    Necessary for Prompty to handle streaming data when tracing."""
+
+    def __init__(self, name: str, iterator: AsyncIterator):
+        self.name = name
+        self.iterator = iterator
+        self.items: List[any] = []
+        self.__name__ = "AsyncPromptyStream"
+
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        try:
+            # enumerate but add to list
+            o = await self.iterator.__anext__()
+            self.items.append(o)
+            return o
+
+        except StopIteration:
+            # StopIteration is raised
+            # contents are exhausted
+            if len(self.items) > 0:
+                with Tracer.start(f"{self.name}.AsyncPromptyStream") as trace:
+                    trace("items", [to_dict(s) for s in self.items])
+
+            raise StopIteration
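For orientation, a minimal usage sketch (not taken from the package itself): PromptyStream passes chunks through unchanged while collecting them, then reports the collected items to any registered tracers once the wrapped iterator is exhausted.

# minimal sketch, assuming prompty 0.1.10 is installed; the plain iterator
# below stands in for an LLM chunk stream
from prompty.core import PromptyStream

chunks = iter(["Hello", ", ", "world"])
stream = PromptyStream("example", chunks)

for piece in stream:        # consumed exactly like the wrapped iterator
    print(piece, end="")

# once exhausted, the collected chunks are emitted as a single "items"
# trace event to whatever tracers are registered on Tracer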
prompty/executors.py  CHANGED

@@ -1,8 +1,8 @@
 import azure.identity
-from .tracer import Trace
-from openai import AzureOpenAI
-from .core import Invoker, InvokerFactory, Prompty
 import importlib.metadata
+from typing import Iterator
+from openai import AzureOpenAI
+from .core import Invoker, InvokerFactory, Prompty, PromptyStream

 VERSION = importlib.metadata.version("prompty")

@@ -87,9 +87,8 @@ class AzureOpenAIExecutor(Invoker):
         elif self.api == "image":
             raise NotImplementedError("Azure OpenAI Image API is not implemented yet")

-
-
-
-
-
-        return response
+        # stream response
+        if isinstance(response, Iterator):
+            return PromptyStream("AzureOpenAIExecutor", response)
+        else:
+            return response
prompty/processors.py  CHANGED

@@ -1,10 +1,8 @@
-from .tracer import Trace
-from openai import Stream
 from typing import Iterator
 from pydantic import BaseModel
 from openai.types.completion import Completion
-from .core import Invoker, InvokerFactory, Prompty
 from openai.types.chat.chat_completion import ChatCompletion
+from .core import Invoker, InvokerFactory, Prompty, PromptyStream
 from openai.types.create_embedding_response import CreateEmbeddingResponse


@@ -66,9 +64,8 @@ class OpenAIProcessor(Invoker):
                 for chunk in data:
                     if len(chunk.choices) == 1 and chunk.choices[0].delta.content != None:
                         content = chunk.choices[0].delta.content
-                        Trace.add("stream", content)
                         yield content

-            return generator()
+            return PromptyStream("OpenAIProcessor", generator())
         else:
             return data
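A hypothetical consumer-side sketch of the same change: the streaming branch now returns a PromptyStream wrapping the old generator, so code that iterated the generator keeps working unchanged while the chunks also become traceable.

# hypothetical sketch; `result` stands for whatever the processor returned
from typing import Iterator

def collect(result) -> str:
    # streaming: a PromptyStream of content deltas, iterated like the old generator
    if isinstance(result, Iterator):
        return "".join(result)
    # non-streaming responses come back from the processor as before
    return result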
prompty/tracer.py  CHANGED

@@ -1,92 +1,66 @@
-import …
+import os
 import json
 import inspect
-import …
+import contextlib
+from pathlib import Path
 from numbers import Number
-import os
 from datetime import datetime
-from pathlib import Path
 from pydantic import BaseModel
 from functools import wraps, partial
-from typing import Any, Callable, Dict, List
-
-
-class Tracer(abc.ABC):
-
-    @abc.abstractmethod
-    def start(self, name: str) -> None:
-        pass
-
-    @abc.abstractmethod
-    def add(self, key: str, value: Any) -> None:
-        pass
+from typing import Any, Callable, Dict, Iterator, List

-    @abc.abstractmethod
-    def end(self) -> None:
-        pass

-
-
-    _tracers: Dict[str, Tracer] = {}
+class Tracer:
+    _tracers: Dict[str, Callable[[str], Iterator[Callable[[str, Any], None]]]] = {}

     @classmethod
-    def …
+    def add(
+        cls, name: str, tracer: Callable[[str], Iterator[Callable[[str, Any], None]]]
+    ) -> None:
         cls._tracers[name] = tracer

-    @classmethod
-    def start(cls, name: str) -> None:
-        for tracer in cls._tracers.values():
-            tracer.start(name)
-
-    @classmethod
-    def add(cls, name: str, value: Any) -> None:
-        for tracer in cls._tracers.values():
-            tracer.add(name, value)
-
-    @classmethod
-    def end(cls) -> None:
-        for tracer in cls._tracers.values():
-            tracer.end()
-
     @classmethod
     def clear(cls) -> None:
         cls._tracers = {}

     @classmethod
-    … (34 removed lines not shown in this listing)
+    @contextlib.contextmanager
+    def start(cls, name: str) -> Iterator[Callable[[str, Any], None]]:
+        with contextlib.ExitStack() as stack:
+            traces = [
+                stack.enter_context(tracer(name)) for tracer in cls._tracers.values()
+            ]
+            yield lambda key, value: [trace(key, value) for trace in traces]
+
+
+def to_dict(obj: Any) -> Dict[str, Any]:
+    # simple json types
+    if isinstance(obj, str) or isinstance(obj, Number) or isinstance(obj, bool):
+        return obj
+    # datetime
+    elif isinstance(obj, datetime):
+        return obj.isoformat()
+    # safe Prompty obj serialization
+    elif type(obj).__name__ == "Prompty":
+        return obj.to_safe_dict()
+    # safe PromptyStream obj serialization
+    elif type(obj).__name__ == "PromptyStream":
+        return "PromptyStream"
+    elif type(obj).__name__ == "AsyncPromptyStream":
+        return "AsyncPromptyStream"
+    # pydantic models have their own json serialization
+    elif isinstance(obj, BaseModel):
+        return obj.model_dump()
+    # recursive list and dict
+    elif isinstance(obj, list):
+        return [to_dict(item) for item in obj]
+    elif isinstance(obj, dict):
+        return {k: v if isinstance(v, str) else to_dict(v) for k, v in obj.items()}
+    elif isinstance(obj, Path):
+        return str(obj)
+    # cast to string otherwise...
+    else:
+        return str(obj)


 def _name(func: Callable, args):
@@ -110,14 +84,14 @@ def _inputs(func: Callable, args, kwargs) -> dict:
     ba = inspect.signature(func).bind(*args, **kwargs)
     ba.apply_defaults()

-    inputs = {k: …
+    inputs = {k: to_dict(v) for k, v in ba.arguments.items() if k != "self"}

     return inputs

+
 def _results(result: Any) -> dict:
-    return …
-    …
-    }
+    return to_dict(result) if result is not None else "None"
+

 def _trace_sync(func: Callable = None, *, description: str = None) -> Callable:
     description = description or ""
@@ -125,107 +99,104 @@ def _trace_sync(func: Callable = None, *, description: str = None) -> Callable:
     @wraps(func)
     def wrapper(*args, **kwargs):
         name, signature = _name(func, args)
-        … (4 removed lines not shown in this listing)
+        with Tracer.start(name) as trace:
+            trace("signature", signature)
+            if description and description != "":
+                trace("description", description)

-        … (2 removed lines not shown in this listing)
+            inputs = _inputs(func, args, kwargs)
+            trace("inputs", inputs)

-        … (2 removed lines not shown in this listing)
+            result = func(*args, **kwargs)
+            trace("result", _results(result))

-        …
+            return result

-        return result
-
     return wrapper

+
 def _trace_async(func: Callable = None, *, description: str = None) -> Callable:
     description = description or ""

     @wraps(func)
     async def wrapper(*args, **kwargs):
         name, signature = _name(func, args)
-        … (4 removed lines not shown in this listing)
+        with Tracer.start(name) as trace:
+            trace("signature", signature)
+            if description and description != "":
+                trace("description", description)

-        … (2 removed lines not shown in this listing)
+            inputs = _inputs(func, args, kwargs)
+            trace("inputs", inputs)

-        … (2 removed lines not shown in this listing)
+            result = await func(*args, **kwargs)
+            trace("result", _results(result))

-        …
+            return result

-        return result
-
     return wrapper

+
 def trace(func: Callable = None, *, description: str = None) -> Callable:
     if func is None:
         return partial(trace, description=description)
-
-    wrapped_method = (
-        _trace_async if inspect.iscoroutinefunction(func) else _trace_sync
-    )

-    …
+    wrapped_method = _trace_async if inspect.iscoroutinefunction(func) else _trace_sync

+    return wrapped_method(func, description=description)

-class PromptyTracer(Tracer):
-    _stack: List[Dict[str, Any]] = []
-    _name: str = None

+class PromptyTracer:
     def __init__(self, output_dir: str = None) -> None:
-        super().__init__()
         if output_dir:
-            self.…
+            self.output = Path(output_dir).resolve().absolute()
         else:
-            self.…
-            …
-        if not self.…
-            self.…
-        … (15 removed lines not shown in this listing)
+            self.output = Path(Path(os.getcwd()) / ".runs").resolve().absolute()
+
+        if not self.output.exists():
+            self.output.mkdir(parents=True, exist_ok=True)
+
+        self.stack: List[Dict[str, Any]] = []
+
+    @contextlib.contextmanager
+    def tracer(self, name: str) -> Iterator[Callable[[str, Any], None]]:
+        try:
+            self.stack.append({"name": name})
+            frame = self.stack[-1]
+
+            def add(key: str, value: Any) -> None:
+                if key not in frame:
+                    frame[key] = value
+                # multiple values creates list
+                else:
+                    if isinstance(frame[key], list):
+                        frame[key].append(value)
+                    else:
+                        frame[key] = [frame[key], value]
+
+            yield add
+        finally:
+            frame = self.stack.pop()
+            # if stack is empty, dump the frame
+            if len(self.stack) == 0:
+                trace_file = (
+                    self.output
+                    / f"{frame['name']}.{datetime.now().strftime('%Y%m%d.%H%M%S')}.ptrace"
+                )
+
+                with open(trace_file, "w") as f:
+                    json.dump(frame, f, indent=4)
+            # otherwise, append the frame to the parent
             else:
-                … (12 removed lines not shown in this listing)
-                if "__frames" not in self._stack[-1]:
-                    self._stack[-1]["__frames"] = []
-                self._stack[-1]["__frames"].append(frame)
-
-    def flush(self, frame: Dict[str, Any]) -> None:
-
-        trace_file = (
-            self.root / f"{self._name}.{datetime.now().strftime('%Y%m%d.%H%M%S')}.ptrace"
-        )
-
-        with open(trace_file, "w") as f:
-            json.dump(frame, f, indent=4)
+                if "__frames" not in self.stack[-1]:
+                    self.stack[-1]["__frames"] = []
+                self.stack[-1]["__frames"].append(frame)
+
+
+@contextlib.contextmanager
+def console_tracer(name: str) -> Iterator[Callable[[str, Any], None]]:
+    try:
+        print(f"Starting {name}")
+        yield lambda key, value: print(f"{key}:\n{json.dumps(value, indent=4)}")
+    finally:
+        print(f"Ending {name}")
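As a quick orientation (a minimal sketch, not from the package docs): the hook-style API above replaces the old abstract Tracer/Trace pair, a tracer is now just a context-manager factory registered on Tracer, and @trace reports a function's signature, inputs, and result to every registered hook.

# minimal sketch, assuming prompty 0.1.10; output locations are the defaults
from prompty.tracer import Tracer, trace, console_tracer, PromptyTracer

Tracer.add("console", console_tracer)          # print events to stdout
Tracer.add("prompty", PromptyTracer().tracer)  # write .ptrace files under ./.runs

@trace
def greet(name: str) -> str:
    return f"Hello, {name}!"

greet("world")  # emits signature, inputs, and result events to both tracers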
{prompty-0.1.8.dist-info → prompty-0.1.10.dist-info}/METADATA  CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: prompty
-Version: 0.1.8
+Version: 0.1.10
 Summary: Prompty is a new asset class and format for LLM prompts that aims to provide observability, understandability, and portability for developers. It includes spec, tooling, and a runtime. This Prompty runtime supports Python
 Author-Email: Seth Juarez <seth.juarez@microsoft.com>
 License: MIT
@@ -15,7 +15,7 @@ Requires-Dist: click>=8.1.7
 Description-Content-Type: text/markdown


-Prompty is an asset class and format for LLM prompts designed to enhance observability, understandability, and portability for developers. The primary goal is to accelerate the developer inner loop of prompt engineering and prompt source management in a cross-language and cross-platform
+Prompty is an asset class and format for LLM prompts designed to enhance observability, understandability, and portability for developers. The primary goal is to accelerate the developer inner loop of prompt engineering and prompt source management in a cross-language and cross-platform implementation.

 The file format has a supporting toolchain with a VS Code extension and runtimes in multiple programming languages to simplify and accelerate your AI application development.

@@ -133,4 +133,4 @@ prompty -s path/to/prompty/file
 This will execute the prompt and print the response to the console. It also has default tracing enabled.

 ## Contributing
-We welcome contributions to the Prompty project! This community led project is open to all contributors. The project cvan be found on [GitHub](https://github.com/Microsoft/prompty).
+We welcome contributions to the Prompty project! This community led project is open to all contributors. The project cvan be found on [GitHub](https://github.com/Microsoft/prompty).
prompty-0.1.10.dist-info/RECORD  ADDED

@@ -0,0 +1,12 @@
+prompty-0.1.10.dist-info/METADATA,sha256=nO36pVf-OKjnsEZRKWV0q7XHuKLBOI7Hw2a9_8dtrIs,4669
+prompty-0.1.10.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
+prompty-0.1.10.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
+prompty/__init__.py,sha256=Msp8eiKdrDq0wyl6G5DFDH8r5BxM2_E60uzzL7_MJ5w,11183
+prompty/cli.py,sha256=_bx_l5v7OGhtAn4d_73b8tyfEw7OOkjCqGMQPu0YP5A,2489
+prompty/core.py,sha256=osKfdwc_uDhQpsRpB8TyvWELUfXPBAoS2iLEU-hPJW0,15373
+prompty/executors.py,sha256=z_SXF-i2qBbxmsBexQ4Ouiqwil6L0lU2wWfwIeSN-eE,3083
+prompty/parsers.py,sha256=4mmIn4SVNs8B0R1BufanqUJk8v4r0OEEo8yx6UOxQpA,4670
+prompty/processors.py,sha256=VaB7fGyaeIPRGuAZ9KTwktx7MIkfCtPALLQgNko1-Gk,2310
+prompty/renderers.py,sha256=RSHFQFx7AtKLUfsMLCXR0a56Mb7DL1NJNgjUqgg3IqU,776
+prompty/tracer.py,sha256=_C8bSoGNvucYnuS-DZSx6dq6iuEsVWWZxj-hUrgiTIw,6333
+prompty-0.1.10.dist-info/RECORD,,
prompty-0.1.8.dist-info/RECORD  DELETED

@@ -1,12 +0,0 @@
-prompty-0.1.8.dist-info/METADATA,sha256=1sVPpxf3pjHAhCIJoXa-v02zF6P5w4aCdBcQZV3kEm4,4665
-prompty-0.1.8.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
-prompty-0.1.8.dist-info/licenses/LICENSE,sha256=KWSC4z9cfML_t0xThoQYjzTdcZQj86Y_mhXdatzU-KM,1052
-prompty/__init__.py,sha256=Msp8eiKdrDq0wyl6G5DFDH8r5BxM2_E60uzzL7_MJ5w,11183
-prompty/cli.py,sha256=_bx_l5v7OGhtAn4d_73b8tyfEw7OOkjCqGMQPu0YP5A,2489
-prompty/core.py,sha256=WYSvognjMUl08FT0_mkcqZfymb_guKcp3sK8_RO4Kq0,13528
-prompty/executors.py,sha256=TankDTAEBTZkvnPfNUw2KNb1TnNuWhyY8TkWOogUXKs,3185
-prompty/parsers.py,sha256=4mmIn4SVNs8B0R1BufanqUJk8v4r0OEEo8yx6UOxQpA,4670
-prompty/processors.py,sha256=GmReygLx2XW1UuanlX71HG3rTZL86y0yAGyNdbGWkcg,2366
-prompty/renderers.py,sha256=RSHFQFx7AtKLUfsMLCXR0a56Mb7DL1NJNgjUqgg3IqU,776
-prompty/tracer.py,sha256=XMS4aJD_Tp76wm2UFB8amtXn7ioGmPBUy11LmklSUFQ,6490
-prompty-0.1.8.dist-info/RECORD,,
{prompty-0.1.8.dist-info → prompty-0.1.10.dist-info}/WHEEL: file without changes
{prompty-0.1.8.dist-info → prompty-0.1.10.dist-info}/licenses/LICENSE: file without changes