prompty 0.1.40__py3-none-any.whl → 0.1.45__py3-none-any.whl

This diff shows the content of publicly released package versions as published to their public registries. It is provided for informational purposes only.
prompty/core.py CHANGED
@@ -1,12 +1,13 @@
-from __future__ import annotations
-
 import os
+import typing
+from collections.abc import AsyncIterator, Iterator
 from pathlib import Path
+from typing import Literal, Union

-from .tracer import Tracer, to_dict, sanitize
 from pydantic import BaseModel, Field, FilePath
-from typing import AsyncIterator, Iterator, List, Literal, Dict, Callable, Set, Tuple
+from pydantic.main import IncEx

+from .tracer import Tracer, sanitize, to_dict
 from .utils import load_json, load_json_async


@@ -30,7 +31,7 @@ class PropertySettings(BaseModel):
     """

     type: Literal["string", "number", "array", "object", "boolean"]
-    default: str | int | float | List | dict | bool = Field(default=None)
+    default: Union[str, int, float, list, dict, bool, None] = Field(default=None)
     description: str = Field(default="")


@@ -58,21 +59,19 @@ class ModelSettings(BaseModel):
         self,
         *,
         mode: str = "python",
-        include: (
-            Set[int] | Set[str] | Dict[int, os.Any] | Dict[str, os.Any] | None
-        ) = None,
-        exclude: (
-            Set[int] | Set[str] | Dict[int, os.Any] | Dict[str, os.Any] | None
-        ) = None,
-        context: os.Any | None = None,
+        include: Union[IncEx, None] = None,
+        exclude: Union[IncEx, None] = None,
+        context: Union[typing.Any, None] = None,
         by_alias: bool = False,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
         round_trip: bool = False,
-        warnings: bool | Literal["none"] | Literal["warn"] | Literal["error"] = True,
+        warnings: Union[
+            bool, Literal["none"], Literal["warn"], Literal["error"]
+        ] = True,
         serialize_as_any: bool = False,
-    ) -> Dict[str, os.Any]:
+    ) -> dict[str, typing.Any]:
         """Method to dump the model in a safe way"""
         d = super().model_dump(
             mode=mode,
@@ -145,11 +144,11 @@ class Prompty(BaseModel):
     # metadata
     name: str = Field(default="")
     description: str = Field(default="")
-    authors: List[str] = Field(default=[])
-    tags: List[str] = Field(default=[])
+    authors: list[str] = Field(default=[])
+    tags: list[str] = Field(default=[])
     version: str = Field(default="")
     base: str = Field(default="")
-    basePrompty: Prompty | None = Field(default=None)
+    basePrompty: Union["Prompty", None] = Field(default=None)
     # model
     model: ModelSettings = Field(default_factory=ModelSettings)

@@ -157,19 +156,19 @@ class Prompty(BaseModel):
     sample: dict = Field(default={})

     # input / output
-    inputs: Dict[str, PropertySettings] = Field(default={})
-    outputs: Dict[str, PropertySettings] = Field(default={})
+    inputs: dict[str, PropertySettings] = Field(default={})
+    outputs: dict[str, PropertySettings] = Field(default={})

     # template
     template: TemplateSettings

-    file: FilePath = Field(default="")
-    content: str | List[str] | dict = Field(default="")
+    file: Union[str, FilePath] = Field(default="")
+    content: Union[str, list[str], dict] = Field(default="")

-    def to_safe_dict(self) -> Dict[str, any]:
+    def to_safe_dict(self) -> dict[str, typing.Any]:
         d = {}
         for k, v in self:
-            if v != "" and v != {} and v != [] and v != None:
+            if v != "" and v != {} and v != [] and v is not None:
                 if k == "model":
                     d[k] = v.model_dump()
                 elif k == "template":
@@ -191,7 +190,7 @@ class Prompty(BaseModel):
         return d

     @staticmethod
-    def hoist_base_prompty(top: Prompty, base: Prompty) -> Prompty:
+    def hoist_base_prompty(top: "Prompty", base: "Prompty") -> "Prompty":
         top.name = base.name if top.name == "" else top.name
         top.description = base.description if top.description == "" else top.description
         top.authors = list(set(base.authors + top.authors))
@@ -214,10 +213,10 @@ class Prompty(BaseModel):
         return top

     @staticmethod
-    def _process_file(file: str, parent: Path) -> any:
-        file = Path(parent / Path(file)).resolve().absolute()
-        if file.exists():
-            items = load_json(file)
+    def _process_file(file: str, parent: Path) -> typing.Any:
+        f = Path(parent / Path(file)).resolve().absolute()
+        if f.exists():
+            items = load_json(f)
             if isinstance(items, list):
                 return [Prompty.normalize(value, parent) for value in items]
             elif isinstance(items, dict):
@@ -231,10 +230,10 @@ class Prompty(BaseModel):
             raise FileNotFoundError(f"File {file} not found")

     @staticmethod
-    async def _process_file_async(file: str, parent: Path) -> any:
-        file = Path(parent / Path(file)).resolve().absolute()
-        if file.exists():
-            items = await load_json_async(file)
+    async def _process_file_async(file: str, parent: Path) -> typing.Any:
+        f = Path(parent / Path(file)).resolve().absolute()
+        if f.exists():
+            items = await load_json_async(f)
             if isinstance(items, list):
                 return [Prompty.normalize(value, parent) for value in items]
             elif isinstance(items, dict):
@@ -248,7 +247,9 @@ class Prompty(BaseModel):
             raise FileNotFoundError(f"File {file} not found")

     @staticmethod
-    def _process_env(variable: str, env_error=True, default: str = None) -> any:
+    def _process_env(
+        variable: str, env_error=True, default: Union[str, None] = None
+    ) -> typing.Any:
         if variable in os.environ.keys():
             return os.environ[variable]
         else:
@@ -260,7 +261,7 @@ class Prompty(BaseModel):
             return ""

     @staticmethod
-    def normalize(attribute: any, parent: Path, env_error=True) -> any:
+    def normalize(attribute: typing.Any, parent: Path, env_error=True) -> typing.Any:
         if isinstance(attribute, str):
             attribute = attribute.strip()
             if attribute.startswith("${") and attribute.endswith("}"):
@@ -289,7 +290,9 @@ class Prompty(BaseModel):
             return attribute

     @staticmethod
-    async def normalize_async(attribute: any, parent: Path, env_error=True) -> any:
+    async def normalize_async(
+        attribute: typing.Any, parent: Path, env_error=True
+    ) -> typing.Any:
         if isinstance(attribute, str):
             attribute = attribute.strip()
             if attribute.startswith("${") and attribute.endswith("}"):
@@ -319,14 +322,16 @@ class Prompty(BaseModel):


 def param_hoisting(
-    top: Dict[str, any], bottom: Dict[str, any], top_key: str = None
-) -> Dict[str, any]:
+    top: dict[str, typing.Any],
+    bottom: dict[str, typing.Any],
+    top_key: Union[str, None] = None,
+) -> dict[str, typing.Any]:
     if top_key:
         new_dict = {**top[top_key]} if top_key in top else {}
     else:
         new_dict = {**top}
     for key, value in bottom.items():
-        if not key in new_dict:
+        if key not in new_dict:
             new_dict[key] = value
     return new_dict

@@ -338,7 +343,7 @@ class PromptyStream(Iterator):
     def __init__(self, name: str, iterator: Iterator):
         self.name = name
         self.iterator = iterator
-        self.items: List[any] = []
+        self.items: list[typing.Any] = []
         self.__name__ = "PromptyStream"

     def __iter__(self):
@@ -370,7 +375,7 @@ class AsyncPromptyStream(AsyncIterator):
     def __init__(self, name: str, iterator: AsyncIterator):
         self.name = name
         self.iterator = iterator
-        self.items: List[any] = []
+        self.items: list[typing.Any] = []
         self.__name__ = "AsyncPromptyStream"

     def __aiter__(self):
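
Most of the core.py churn is a typing cleanup: `from __future__ import annotations` and PEP 604 `X | Y` unions are dropped, `typing.List`/`Dict`/`Set` give way to builtin generics, the misused builtin `any` becomes `typing.Any`, and optional values are spelled `Union[..., None]`. A minimal sketch of the resulting annotation style (a hypothetical helper, not code from the package):

import typing
from typing import Union


def first_non_empty(
    values: list[typing.Any], fallback: Union[str, None] = None
) -> typing.Any:
    # builtin generics + explicit Union[..., None] + typing.Any, matching the
    # style used in core.py above; empty values are filtered the same way
    # Prompty.to_safe_dict filters them (note `is not None` instead of `!= None`)
    for v in values:
        if v != "" and v != {} and v != [] and v is not None:
            return v
    return fallback


print(first_non_empty(["", {}, [], None, "demo"]))  # -> demo
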
prompty/invoker.py CHANGED
@@ -1,7 +1,9 @@
 import abc
-from .tracer import trace
+import typing
+from typing import Callable, Literal
+
 from .core import Prompty
-from typing import Callable, Dict, Literal
+from .tracer import trace


 class Invoker(abc.ABC):
@@ -21,7 +23,7 @@ class Invoker(abc.ABC):
         self.name = self.__class__.__name__

     @abc.abstractmethod
-    def invoke(self, data: any) -> any:
+    def invoke(self, data: typing.Any) -> typing.Any:
         """Abstract method to invoke the invoker

         Parameters
@@ -37,7 +39,7 @@ class Invoker(abc.ABC):
         pass

     @abc.abstractmethod
-    async def invoke_async(self, data: any) -> any:
+    async def invoke_async(self, data: typing.Any) -> typing.Any:
         """Abstract method to invoke the invoker asynchronously

         Parameters
@@ -53,7 +55,7 @@ class Invoker(abc.ABC):
         pass

     @trace
-    def run(self, data: any) -> any:
+    def run(self, data: typing.Any) -> typing.Any:
         """Method to run the invoker

         Parameters
@@ -69,7 +71,7 @@ class Invoker(abc.ABC):
         return self.invoke(data)

     @trace
-    async def run_async(self, data: any) -> any:
+    async def run_async(self, data: typing.Any) -> typing.Any:
         """Method to run the invoker asynchronously

         Parameters
@@ -88,30 +90,31 @@ class Invoker(abc.ABC):
 class InvokerFactory:
     """Factory class for Invoker"""

-    _renderers: Dict[str, Invoker] = {}
-    _parsers: Dict[str, Invoker] = {}
-    _executors: Dict[str, Invoker] = {}
-    _processors: Dict[str, Invoker] = {}
+    _renderers: dict[str, type[Invoker]] = {}
+    _parsers: dict[str, type[Invoker]] = {}
+    _executors: dict[str, type[Invoker]] = {}
+    _processors: dict[str, type[Invoker]] = {}

     @classmethod
-    def add_renderer(cls, name: str, invoker: Invoker) -> None:
+    def add_renderer(cls, name: str, invoker: type[Invoker]) -> None:
         cls._renderers[name] = invoker

     @classmethod
-    def add_parser(cls, name: str, invoker: Invoker) -> None:
+    def add_parser(cls, name: str, invoker: type[Invoker]) -> None:
         cls._parsers[name] = invoker

     @classmethod
-    def add_executor(cls, name: str, invoker: Invoker) -> None:
+    def add_executor(cls, name: str, invoker: type[Invoker]) -> None:
         cls._executors[name] = invoker

     @classmethod
-    def add_processor(cls, name: str, invoker: Invoker) -> None:
+    def add_processor(cls, name: str, invoker: type[Invoker]) -> None:
         cls._processors[name] = invoker

     @classmethod
     def register_renderer(cls, name: str) -> Callable:
-        def inner_wrapper(wrapped_class: Invoker) -> Callable:
+
+        def inner_wrapper(wrapped_class: type[Invoker]) -> type[Invoker]:
             cls._renderers[name] = wrapped_class
             return wrapped_class

@@ -119,7 +122,8 @@ class InvokerFactory:

     @classmethod
     def register_parser(cls, name: str) -> Callable:
-        def inner_wrapper(wrapped_class: Invoker) -> Callable:
+
+        def inner_wrapper(wrapped_class: type[Invoker]) -> type[Invoker]:
             cls._parsers[name] = wrapped_class
             return wrapped_class

@@ -127,7 +131,8 @@ class InvokerFactory:

     @classmethod
     def register_executor(cls, name: str) -> Callable:
-        def inner_wrapper(wrapped_class: Invoker) -> Callable:
+
+        def inner_wrapper(wrapped_class: type[Invoker]) -> type[Invoker]:
             cls._executors[name] = wrapped_class
             return wrapped_class

@@ -135,7 +140,8 @@ class InvokerFactory:

     @classmethod
     def register_processor(cls, name: str) -> Callable:
-        def inner_wrapper(wrapped_class: Invoker) -> Callable:
+
+        def inner_wrapper(wrapped_class: type[Invoker]) -> type[Invoker]:
             cls._processors[name] = wrapped_class
             return wrapped_class

@@ -200,11 +206,11 @@ class InvokerFactory:
         cls,
         type: Literal["renderer", "parser", "executor", "processor"],
         prompty: Prompty,
-        data: any,
-        default: any = None,
+        data: typing.Any,
+        default: typing.Any = None,
     ):
         name = cls._get_name(type, prompty)
-        if name.startswith("NOOP") and default != None:
+        if name.startswith("NOOP") and default is not None:
             return default
         elif name.startswith("NOOP"):
             return data
@@ -218,11 +224,11 @@ class InvokerFactory:
         cls,
         type: Literal["renderer", "parser", "executor", "processor"],
         prompty: Prompty,
-        data: any,
-        default: any = None,
+        data: typing.Any,
+        default: typing.Any = None,
     ):
         name = cls._get_name(type, prompty)
-        if name.startswith("NOOP") and default != None:
+        if name.startswith("NOOP") and default is not None:
             return default
         elif name.startswith("NOOP"):
             return data
@@ -231,43 +237,51 @@ class InvokerFactory:
         return value

     @classmethod
-    def run_renderer(cls, prompty: Prompty, data: any, default: any = None) -> any:
+    def run_renderer(
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return cls.run("renderer", prompty, data, default)

     @classmethod
     async def run_renderer_async(
-        cls, prompty: Prompty, data: any, default: any = None
-    ) -> any:
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return await cls.run_async("renderer", prompty, data, default)

     @classmethod
-    def run_parser(cls, prompty: Prompty, data: any, default: any = None) -> any:
+    def run_parser(
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return cls.run("parser", prompty, data, default)

     @classmethod
     async def run_parser_async(
-        cls, prompty: Prompty, data: any, default: any = None
-    ) -> any:
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return await cls.run_async("parser", prompty, data, default)

     @classmethod
-    def run_executor(cls, prompty: Prompty, data: any, default: any = None) -> any:
+    def run_executor(
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return cls.run("executor", prompty, data, default)

     @classmethod
     async def run_executor_async(
-        cls, prompty: Prompty, data: any, default: any = None
-    ) -> any:
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return await cls.run_async("executor", prompty, data, default)

     @classmethod
-    def run_processor(cls, prompty: Prompty, data: any, default: any = None) -> any:
+    def run_processor(
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return cls.run("processor", prompty, data, default)

     @classmethod
     async def run_processor_async(
-        cls, prompty: Prompty, data: any, default: any = None
-    ) -> any:
+        cls, prompty: Prompty, data: typing.Any, default: typing.Any = None
+    ) -> typing.Any:
         return await cls.run_async("processor", prompty, data, default)


@@ -290,7 +304,7 @@ class InvokerException(Exception):
 @InvokerFactory.register_parser("prompty.image")
 @InvokerFactory.register_parser("prompty.completion")
 class NoOp(Invoker):
-    def invoke(self, data: any) -> any:
+    def invoke(self, data: typing.Any) -> typing.Any:
         return data

     async def invoke_async(self, data: str) -> str:
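
The factory now stores invoker classes (`type[Invoker]`) rather than instances, and the `register_*` decorators return the wrapped class. A hedged sketch of how a custom invoker plugs into this API, mirroring the NoOp registration above; `"echo"` and `EchoParser` are made-up names, not part of the package:

import typing

from prompty.invoker import Invoker, InvokerFactory


@InvokerFactory.register_parser("echo")
class EchoParser(Invoker):
    """Toy parser used only to illustrate the registration API."""

    def invoke(self, data: typing.Any) -> typing.Any:
        # synchronous path, dispatched through Invoker.run()
        return data

    async def invoke_async(self, data: typing.Any) -> typing.Any:
        # asynchronous path, dispatched through Invoker.run_async()
        return data

Registering without the decorator should be equivalent via `InvokerFactory.add_parser("echo", EchoParser)`.
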
prompty/openai/__init__.py CHANGED
@@ -2,8 +2,8 @@
 from prompty.invoker import InvokerException

 try:
-    from .executor import OpenAIExecutor
-    from .processor import OpenAIProcessor
+    from .executor import OpenAIExecutor  # noqa
+    from .processor import OpenAIProcessor  # noqa
 except ImportError as e:
     raise InvokerException(
         f"Error registering OpenAIExecutor and OpenAIProcessor: {e}", "openai"
prompty/openai/executor.py CHANGED
@@ -1,8 +1,11 @@
 import importlib.metadata
+import typing
+from collections.abc import Iterator
+
 from openai import OpenAI
-from typing import Iterator

 from prompty.tracer import Tracer
+
 from ..core import Prompty, PromptyStream
 from ..invoker import Invoker, InvokerFactory

@@ -24,8 +27,9 @@ class OpenAIExecutor(Invoker):
         self.api = self.prompty.model.api
         self.parameters = self.prompty.model.parameters
         self.model = self.prompty.model.configuration["name"]
+        self.deployment = self.prompty.model.configuration["deployment"]

-    def invoke(self, data: any) -> any:
+    def invoke(self, data: typing.Any) -> typing.Any:
         """Invoke the OpenAI API

         Parameters
prompty/openai/processor.py CHANGED
@@ -1,10 +1,13 @@
-from typing import Iterator
-from openai.types.completion import Completion
+import typing
+from collections.abc import Iterator
+
 from openai.types.chat.chat_completion import ChatCompletion
-from ..invoker import Invoker, InvokerFactory
-from ..core import Prompty, PromptyStream, ToolCall
+from openai.types.completion import Completion
 from openai.types.create_embedding_response import CreateEmbeddingResponse

+from ..core import Prompty, PromptyStream, ToolCall
+from ..invoker import Invoker, InvokerFactory
+

 @InvokerFactory.register_processor("openai")
 class OpenAIProcessor(Invoker):
@@ -13,7 +16,7 @@ class OpenAIProcessor(Invoker):
     def __init__(self, prompty: Prompty) -> None:
         super().__init__(prompty)

-    def invoke(self, data: any) -> any:
+    def invoke(self, data: typing.Any) -> typing.Any:
         """Invoke the OpenAI API

         Parameters
@@ -56,7 +59,7 @@ class OpenAIProcessor(Invoker):
             for chunk in data:
                 if (
                     len(chunk.choices) == 1
-                    and chunk.choices[0].delta.content != None
+                    and chunk.choices[0].delta.content is not None
                 ):
                     content = chunk.choices[0].delta.content
                     yield content
prompty/parsers.py CHANGED
@@ -1,25 +1,30 @@
-import re
 import base64
+import re
+from pathlib import Path
+
 from .core import Prompty
-from .invoker import Invoker, InvokerFactory
+from .invoker import Invoker


-@InvokerFactory.register_parser("prompty.chat")
 class PromptyChatParser(Invoker):
-    """ Prompty Chat Parser """
+    """Prompty Chat Parser"""
+
     def __init__(self, prompty: Prompty) -> None:
         super().__init__(prompty)
         self.roles = ["assistant", "function", "system", "user"]
+        if isinstance(self.prompty.file, str):
+            self.prompty.file = Path(self.prompty.file).resolve().absolute()
+
         self.path = self.prompty.file.parent

     def inline_image(self, image_item: str) -> str:
-        """ Inline Image
+        """Inline Image

         Parameters
         ----------
         image_item : str
             The image item to inline
-
+
         Returns
         -------
         str
@@ -46,13 +51,13 @@ class PromptyChatParser(Invoker):
             )

     def parse_content(self, content: str):
-        """ for parsing inline images
-
+        """for parsing inline images
+
         Parameters
         ----------
         content : str
             The content to parse
-
+
         Returns
         -------
         any
@@ -97,14 +102,14 @@ class PromptyChatParser(Invoker):
         else:
             return content

-    def invoke(self, data: str) -> str:
-        """ Invoke the Prompty Chat Parser
+    def invoke(self, data: str) -> list[dict[str, str]]:
+        """Invoke the Prompty Chat Parser

         Parameters
         ----------
         data : str
             The data to parse
-
+
         Returns
         -------
         str
@@ -121,7 +126,7 @@ class PromptyChatParser(Invoker):
         ]

         # if no starter role, then inject system role
-        if not chunks[0].strip().lower() in self.roles:
+        if chunks[0].strip().lower() not in self.roles:
             chunks.insert(0, "system")

         # if last chunk is role entry, then remove (no content?)
@@ -138,16 +143,15 @@ class PromptyChatParser(Invoker):
             messages.append({"role": role, "content": self.parse_content(content)})

         return messages
-

-    async def invoke_async(self, data: str) -> str:
-        """ Invoke the Prompty Chat Parser (Async)
+    async def invoke_async(self, data: str) -> list[dict[str, str]]:
+        """Invoke the Prompty Chat Parser (Async)

         Parameters
         ----------
         data : str
             The data to parse
-
+
         Returns
         -------
         str
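
The parser's new return annotation, `list[dict[str, str]]`, describes the chat messages it produces by splitting rendered template text on role markers. A rough standalone approximation of that transformation, written only to show the input and output shapes (the library's actual regex and edge-case handling differ):

import re

ROLES = ["assistant", "function", "system", "user"]

def split_messages(text: str) -> list[dict[str, str]]:
    # split on lines that consist solely of a role marker such as "user:"
    parts = re.split(r"^\s*(assistant|function|system|user)\s*:\s*$",
                     text, flags=re.MULTILINE | re.IGNORECASE)
    chunks = [p.strip() for p in parts if p.strip()]
    if chunks and chunks[0].lower() not in ROLES:
        chunks.insert(0, "system")  # no leading role marker -> assume system
    if len(chunks) % 2 == 1:
        chunks.pop()  # trailing role marker with no content
    return [{"role": chunks[i].lower(), "content": chunks[i + 1]}
            for i in range(0, len(chunks), 2)]

print(split_messages("system:\nYou are terse.\n\nuser:\nWhat does prompty do?"))
# [{'role': 'system', 'content': 'You are terse.'},
#  {'role': 'user', 'content': 'What does prompty do?'}]
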
prompty/py.typed ADDED
(empty marker file; per PEP 561 it signals that the package now ships inline type information)
prompty/renderers.py CHANGED
@@ -1,24 +1,35 @@
-from .core import Prompty
+import typing
+from pathlib import Path
+
 from jinja2 import DictLoader, Environment
-from .invoker import Invoker, InvokerFactory
+
+from .core import Prompty
+from .invoker import Invoker


-@InvokerFactory.register_renderer("jinja2")
 class Jinja2Renderer(Invoker):
     """Jinja2 Renderer"""

     def __init__(self, prompty: Prompty) -> None:
         super().__init__(prompty)
-        self.templates = {}
+        self.templates: dict[str, str] = {}
         # generate template dictionary
-        cur_prompt = self.prompty
+        cur_prompt: typing.Union[Prompty, None] = self.prompty
         while cur_prompt:
-            self.templates[cur_prompt.file.name] = cur_prompt.content
+            if isinstance(cur_prompt.file, str):
+                cur_prompt.file = Path(cur_prompt.file).resolve().absolute()
+
+            if isinstance(cur_prompt.content, str):
+                self.templates[cur_prompt.file.name] = cur_prompt.content
+
             cur_prompt = cur_prompt.basePrompty

+        if isinstance(self.prompty.file, str):
+            self.prompty.file = Path(self.prompty.file).resolve().absolute()
+
         self.name = self.prompty.file.name

-    def invoke(self, data: any) -> any:
+    def invoke(self, data: typing.Any) -> typing.Any:
         env = Environment(loader=DictLoader(self.templates))
         t = env.get_template(self.name)
         generated = t.render(**data)
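
Note that the `@InvokerFactory.register_renderer("jinja2")` decorator is gone from this module, just as `@InvokerFactory.register_parser("prompty.chat")` disappeared from parsers.py, so registration has to happen at import time elsewhere in the package. If it ever had to be done by hand, the classmethod API from invoker.py would suffice; a sketch under that assumption, not a statement of where the library actually registers these classes:

from prompty.invoker import InvokerFactory
from prompty.parsers import PromptyChatParser
from prompty.renderers import Jinja2Renderer

# Explicit registration under the same names the removed decorators used.
InvokerFactory.add_renderer("jinja2", Jinja2Renderer)
InvokerFactory.add_parser("prompty.chat", PromptyChatParser)
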
prompty/serverless/__init__.py CHANGED
@@ -2,7 +2,7 @@
 from prompty.invoker import InvokerException

 try:
-    from .executor import ServerlessExecutor
-    from .processor import ServerlessProcessor
+    from .executor import ServerlessExecutor  # noqa
+    from .processor import ServerlessProcessor  # noqa
 except ImportError:
     raise InvokerException("Error registering ServerlessExecutor and ServerlessProcessor", "serverless")