ai-prompter 0.2.2__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ai_prompter/__init__.py CHANGED
@@ -5,7 +5,7 @@ A prompt management module using Jinja to generate complex prompts with simple t
  import os
  from dataclasses import dataclass
  from datetime import datetime
- from typing import Any, Dict, Optional, Union, Callable
+ from typing import Any, Dict, List, Optional, Union, Callable

  from jinja2 import Environment, FileSystemLoader, Template
  from pydantic import BaseModel
@@ -36,6 +36,8 @@ class Prompter:
  template: Optional[Union[str, Template]] = None
  template_text: Optional[str] = None
  parser: Optional[Any] = None
+ text_templates: Optional[Dict[str, str]] = None
+ prompt_folders: Optional[List[str]] = None

  def __init__(
  self,
@@ -69,6 +71,8 @@ class Prompter:
  self.parser = parser
  self.template: Template | None = None
  self.model = model or os.getenv("OPENAI_MODEL", "gpt-4-turbo")
+ self.text_templates = {}
+ self.prompt_folders = []
  self._setup_template(template_text, prompt_dir)

  def _setup_template(
@@ -93,16 +97,37 @@ class Prompter:
  prompts_path = os.getenv("PROMPTS_PATH")
  if prompts_path is not None:
  prompt_dirs.extend(prompts_path.split(":"))
- # Fallback to local folder and ~/ai-prompter
- prompt_dirs.extend([os.getcwd(), os.path.expanduser("~/ai-prompter")])
+
+ # Add current working directory + /prompts
+ cwd_prompts = os.path.join(os.getcwd(), "prompts")
+ if os.path.exists(cwd_prompts):
+ prompt_dirs.append(cwd_prompts)
+
+ # Try to find project root and add its prompts folder
+ current_path = os.getcwd()
+ while current_path != os.path.dirname(current_path): # Stop at root
+ # Check for common project indicators
+ if any(os.path.exists(os.path.join(current_path, indicator))
+ for indicator in ['pyproject.toml', 'setup.py', 'setup.cfg', '.git']):
+ project_prompts = os.path.join(current_path, "prompts")
+ if os.path.exists(project_prompts) and project_prompts not in prompt_dirs:
+ prompt_dirs.append(project_prompts)
+ break
+ current_path = os.path.dirname(current_path)
+
+ # Fallback to ~/ai-prompter
+ prompt_dirs.append(os.path.expanduser("~/ai-prompter"))
+
  # Default package prompts folder
  if os.path.exists(prompt_path_default):
  prompt_dirs.append(prompt_path_default)
  env = Environment(loader=FileSystemLoader(prompt_dirs))
  self.template = env.get_template(f"{self.prompt_template}.jinja")
+ self.prompt_folders = prompt_dirs
  else:
  self.template_text = template_text
  self.template = Template(template_text)
+ self.text_templates[self.prompt_template] = template_text

  def to_langchain(self):
  # Support for both text-based and file-based templates with LangChain
@@ -187,6 +212,28 @@ class Prompter:
  "Either prompt_template with a valid template or template_text must be provided for LangChain conversion"
  )

+ def template_location(self, template_name: str) -> str:
+ """
+ Returns the location of the template used for the given template name.
+ If the template is a text template (not a file), returns 'text'.
+ If the template is not found, returns 'not found'.
+
+ Args:
+ template_name (str): The name of the template to check.
+
+ Returns:
+ str: The file path of the template, or 'text' if it's a text template, or 'not found' if the template doesn't exist.
+ """
+ if template_name in self.text_templates:
+ return 'text'
+
+ for folder in self.prompt_folders:
+ template_file = os.path.join(folder, f"{template_name}.jinja")
+ if os.path.exists(template_file):
+ return template_file
+
+ return 'not found'
+
  @classmethod
  def from_text(
  cls, text: str, model: Optional[Union[str, Any]] = None
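
For reference, a minimal usage sketch (editorial, not shipped in the package) of the `template_location()` helper added in 0.3.0; the `greet` template name and the missing name below are illustrative assumptions, and `greet.jinja` must exist in one of the resolved prompt folders:

```python
from ai_prompter import Prompter

# Assumes greet.jinja exists in one of the folders collected in
# prompt_folders (PROMPTS_PATH entries, ./prompts, project-root prompts, etc.).
prompter = Prompter(prompt_template="greet")

# Returns the path of greet.jinja from the first folder where it is found.
print(prompter.template_location("greet"))

# A name that matches neither a text template nor a file returns 'not found'.
print(prompter.template_location("missing_template"))
```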
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ai-prompter
- Version: 0.2.2
+ Version: 0.3.0
  Summary: A prompt management library using Jinja2 templates to build complex prompts easily.
  Author-email: LUIS NOVO <lfnovo@gmail.com>
  License: MIT
@@ -22,8 +22,10 @@ A prompt management library using Jinja2 templates to build complex prompts easi
  - Define prompts as Jinja templates.
  - Load default templates from `src/ai_prompter/prompts`.
  - Override templates via `PROMPTS_PATH` environment variable.
+ - Automatic project root detection for prompt templates.
  - Render prompts with arbitrary data or Pydantic models.
  - Export to LangChain `ChatPromptTemplate`.
+ - Automatic output parser integration for structured outputs.

  ## Installation

@@ -85,32 +87,70 @@ Set the `PROMPTS_PATH` environment variable to point to your custom prompts dire
  export PROMPTS_PATH=/path/to/your/prompts
  ```

- The `Prompter` class will check this path if no custom directory is provided in the constructor. If not set, it will also look in the current working directory and `~/ai-prompter/` as fallback options before using the default package prompts.
+ You can specify multiple directories separated by `:` (colon):

- ### Raw text template
+ ```bash
+ export PROMPTS_PATH=/path/to/templates1:/path/to/templates2
+ ```

- ```python
- from ai_prompter import Prompter
+ ### Template Search Order

- template = """Write an article about {{ topic }}."""
- prompter = Prompter(template_text=template)
- prompt = prompter.render({"topic": "AI"})
- print(prompt) # Write an article about AI.
+ The `Prompter` class searches for templates in the following locations (in order of priority):
+
+ 1. **Custom directory** - If you provide `prompt_dir` parameter when initializing Prompter
+ 2. **Environment variable paths** - Directories specified in `PROMPTS_PATH` (colon-separated)
+ 3. **Current directory prompts** - `./prompts` subfolder in your current working directory
+ 4. **Project root prompts** - Automatically detects your Python project root (by looking for `pyproject.toml`, `setup.py`, `setup.cfg`, or `.git`) and checks for a `prompts` folder there
+ 5. **Home directory** - `~/ai-prompter` folder
+ 6. **Package defaults** - Built-in templates at `src/ai_prompter/prompts`
+
+ This allows you to organize your project with prompts at the root level, regardless of your package structure:
+ ```
+ my-project/
+ ├── prompts/ # <- Templates here will be found automatically
+ │ └── my_template.jinja
+ ├── src/
+ │ └── my_package/
+ │ └── main.py
+ └── pyproject.toml
  ```

  ### Using File-based Templates

- You can store your templates in files and reference them by name (without the `.jinja` extension). The library looks for templates in the `prompts` directory by default, or you can set a custom directory with the `PROMPTS_PATH` environment variable. You can specify multiple directories separated by `:` (colon), and the library will search through them in order until a matching template is found.
+ You can store your templates in files and reference them by name (without the `.jinja` extension). The library will search through all configured paths (see Template Search Order above) until a matching template is found.

  ```python
  from ai_prompter import Prompter

+ # Will search for 'greet.jinja' in all configured paths
+ prompter = Prompter(prompt_template="greet")
+ result = prompter.render({"name": "World"})
+ print(result) # Output depends on the content of greet.jinja
+ ```
+
+ You can also specify multiple search paths via environment variable:
+
+ ```python
+ import os
+ from ai_prompter import Prompter
+
  # Set multiple search paths
  os.environ["PROMPTS_PATH"] = "/path/to/templates1:/path/to/templates2"

  prompter = Prompter(prompt_template="greet")
  result = prompter.render({"name": "World"})
- print(result) # Output depends on the content of greet.jinja in the first found path
+ print(result) # Uses greet.jinja from the first path where it's found
+ ```
+
+ ### Raw text template
+
+ ```python
+ from ai_prompter import Prompter
+
+ template = """Write an article about {{ topic }}."""
+ prompter = Prompter(template_text=template)
+ prompt = prompter.render({"topic": "AI"})
+ print(prompt) # Write an article about AI.
  ```

  ### Using Raw Text Templates
@@ -149,6 +189,61 @@ lc_file_prompt = file_prompter.to_langchain()

  **Note**: LangChain integration requires the `langchain-core` package. Install it with `pip install .[langchain]`.

+ ### Using Output Parsers
+
+ The Prompter class supports LangChain output parsers to automatically inject formatting instructions into your prompts. When you provide a parser, it will call the parser's `get_format_instructions()` method and make the result available as `{{ format_instructions }}` in your template.
+
+ ```python
+ from ai_prompter import Prompter
+ from langchain.output_parsers import PydanticOutputParser
+ from pydantic import BaseModel, Field
+
+ # Define your output model
+ class Article(BaseModel):
+ title: str = Field(description="Article title")
+ summary: str = Field(description="Brief summary")
+ tags: list[str] = Field(description="Relevant tags")
+
+ # Create a parser
+ parser = PydanticOutputParser(pydantic_object=Article)
+
+ # Create a prompter with the parser
+ prompter = Prompter(
+ template_text="""Write an article about {{ topic }}.
+
+ {{ format_instructions }}""",
+ parser=parser
+ )
+
+ # Render the prompt - format instructions are automatically included
+ prompt = prompter.render({"topic": "AI Safety"})
+ print(prompt)
+ # Output will include the topic AND the parser's format instructions
+ ```
+
+ This works with file-based templates too:
+
+ ```jinja
+ # article_structured.jinja
+ Write an article about {{ topic }}.
+
+ Please format your response according to these instructions:
+ {{ format_instructions }}
+ ```
+
+ ```python
+ prompter = Prompter(
+ prompt_template="article_structured",
+ parser=parser
+ )
+ ```
+
+ The parser integration supports any LangChain output parser that implements `get_format_instructions()`, including:
+ - `PydanticOutputParser` - For structured Pydantic model outputs
+ - `OutputFixingParser` - For fixing malformed outputs
+ - `RetryOutputParser` - For retrying failed parsing attempts
+ - `StructuredOutputParser` - For dictionary-based structured outputs
+
  ### Including Other Templates

  You can include other template files within a template using Jinja2's `{% include %}` directive. This allows you to build modular templates.
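
The README's new output-parser section stops at rendering the prompt; as an editorial sketch (not part of the package), the downstream step uses the same parser to turn the model's reply back into the Pydantic object. The `llm_response` string below is a hypothetical stand-in for whatever your LLM actually returns:

```python
from langchain.output_parsers import PydanticOutputParser
from pydantic import BaseModel, Field

class Article(BaseModel):
    title: str = Field(description="Article title")
    summary: str = Field(description="Brief summary")
    tags: list[str] = Field(description="Relevant tags")

parser = PydanticOutputParser(pydantic_object=Article)

# Hypothetical raw LLM reply to the rendered prompt.
llm_response = '{"title": "AI Safety", "summary": "A short overview.", "tags": ["ai", "safety"]}'

article = parser.parse(llm_response)  # raises OutputParserException on malformed output
print(article.title, article.tags)
```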
@@ -0,0 +1,6 @@
+ ai_prompter/__init__.py,sha256=5T8FDK9wJPWhl69c2wQrPC4pxNkbew7snoUwrrgBz5o,12854
+ ai_prompter/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ ai_prompter-0.3.0.dist-info/METADATA,sha256=XKGpNCyLpy7uJjtIQEaCmscVkNHij_tIimOoFDcBfVM,9942
+ ai_prompter-0.3.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ai_prompter-0.3.0.dist-info/licenses/LICENSE,sha256=cS0_fa_8BoP0PvVG8D19pn_HDJrG96hd4PyEm9nkRo8,1066
+ ai_prompter-0.3.0.dist-info/RECORD,,
@@ -1,6 +0,0 @@
- ai_prompter/__init__.py,sha256=p3AyJ9R3C3c8BL2DdMxwYFEv0UMcH-Bz_d6nQ-YjKKA,10744
- ai_prompter/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- ai_prompter-0.2.2.dist-info/METADATA,sha256=wKeWaoaiYixNYEAjEmkhZYp8FfO68hwjtfjApHgtvIQ,6950
- ai_prompter-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ai_prompter-0.2.2.dist-info/licenses/LICENSE,sha256=cS0_fa_8BoP0PvVG8D19pn_HDJrG96hd4PyEm9nkRo8,1066
- ai_prompter-0.2.2.dist-info/RECORD,,