ai-prompter 0.1.1__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ai_prompter/__init__.py CHANGED
@@ -5,7 +5,7 @@ A prompt management module using Jinja to generate complex prompts with simple t
  import os
  from dataclasses import dataclass
  from datetime import datetime
- from typing import Any, Dict, Optional, Union
+ from typing import Any, Dict, Optional, Union, Callable
 
  from jinja2 import Environment, FileSystemLoader, Template
  from pydantic import BaseModel
@@ -13,7 +13,7 @@ from pydantic import BaseModel
  prompt_path_default = os.path.join(
      os.path.dirname(os.path.abspath(__file__)), "prompts"
  )
- prompt_path_custom = os.getenv("PROMPT_PATH")
+ prompt_path_custom = os.getenv("PROMPTS_PATH")
 
  env_default = Environment(loader=FileSystemLoader(prompt_path_default))
 
@@ -34,95 +34,173 @@ class Prompter:
      prompt_variation: Optional[str] = "default"
      prompt_text: Optional[str] = None
      template: Optional[Union[str, Template]] = None
+     template_text: Optional[str] = None
      parser: Optional[Any] = None
 
-     def __init__(self, prompt_template=None, prompt_text=None, parser=None):
-         """
-         Initialize the Prompter with either a template file or raw text.
+     def __init__(
+         self,
+         prompt_template: str | None = None,
+         model: str | Any | None = None,
+         prompt_variation: str = "default",
+         prompt_dir: str | None = None,
+         template_text: str | None = None,
+         parser: Callable[[str], dict[str, Any]] | None = None,
+         *args,
+         **kwargs,
+     ) -> None:
+         """Initialize the Prompter with a template name, model, and optional custom directory.
 
          Args:
-             prompt_template (str, optional): The name of the prompt template file.
-             prompt_text (str, optional): The raw prompt text.
+             prompt_template (str, optional): The name of the prompt template (without .jinja extension).
+             model (Union[str, Any], optional): The model to use for generation.
+             prompt_variation (str, optional): The variation of the prompt template.
+             prompt_dir (str, optional): Custom directory to search for templates.
+             template_text (str, optional): The raw text of the template.
+             parser (Callable[[str], dict[str, Any]], optional): The parser to use for generation.
          """
+         if template_text is not None and prompt_template is not None:
+             raise ValueError(
+                 "Cannot provide both template_text and prompt_template. Choose one or the other."
+             )
          self.prompt_template = prompt_template
-         self.prompt_text = prompt_text
+         self.prompt_variation = prompt_variation
+         self.prompt_dir = prompt_dir
+         self.template_text = template_text
          self.parser = parser
-         self.setup()
+         self.template: Template | None = None
+         self.model = model or os.getenv("OPENAI_MODEL", "gpt-4-turbo")
+         self._setup_template(template_text, prompt_dir)
 
-     def setup(self):
-         """
-         Set up the Jinja2 template based on the provided template file or text.
-         Raises:
-             ValueError: If neither prompt_template nor prompt_text is provided, or if template name is empty.
+     def _setup_template(
+         self, template_text: Optional[str] = None, prompt_dir: Optional[str] = None
+     ) -> None:
+         """Set up the Jinja2 template based on the provided template file or text.
+
+         Args:
+             template_text (str, optional): The raw text of the template.
+             prompt_dir (str, optional): Custom directory to search for templates.
          """
-         if self.prompt_template is not None:
+         if template_text is None:
+             if self.prompt_template is None:
+                 raise ValueError(
+                     "Either prompt_template or template_text must be provided"
+                 )
              if not self.prompt_template:
                  raise ValueError("Template name cannot be empty")
-             # attempt to load from custom path at runtime
-             custom_path = os.getenv("PROMPT_PATH")
-             if custom_path and os.path.exists(custom_path):
-                 try:
-                     env = Environment(loader=FileSystemLoader(custom_path))
-                     self.template = env.get_template(f"{self.prompt_template}.jinja")
-                     return
-                 except Exception:
-                     pass
-             # fallback to default path
-             try:
-                 env = Environment(loader=FileSystemLoader(prompt_path_default))
-                 self.template = env.get_template(f"{self.prompt_template}.jinja")
-             except Exception as e:
-                 raise ValueError(f"Template {self.prompt_template} not found in default folder: {e}")
-         elif self.prompt_text is not None:
-             self.template = Template(self.prompt_text)
+             prompt_dirs = []
+             if prompt_dir:
+                 prompt_dirs.append(prompt_dir)
+             prompts_path = os.getenv("PROMPTS_PATH")
+             if prompts_path is not None:
+                 prompt_dirs.extend(prompts_path.split(":"))
+             # Fallback to local folder and ~/ai-prompter
+             prompt_dirs.extend([os.getcwd(), os.path.expanduser("~/ai-prompter")])
+             # Default package prompts folder
+             if os.path.exists(prompt_path_default):
+                 prompt_dirs.append(prompt_path_default)
+             env = Environment(loader=FileSystemLoader(prompt_dirs))
+             self.template = env.get_template(f"{self.prompt_template}.jinja")
          else:
-             raise ValueError("Prompter must have a prompt_template or prompt_text")
-
-         # Removed assertion as it's redundant with the checks above
-         # assert self.prompt_template or self.prompt_text, "Prompt is required"
+             self.template_text = template_text
+             self.template = Template(template_text)
 
      def to_langchain(self):
-         # only file-based templates supported for LangChain
-         if self.prompt_text is not None:
-             raise ImportError(
-                 "langchain-core integration only supports file-based templates; install with `pip install .[langchain]`"
-             )
+         # Support for both text-based and file-based templates with LangChain
          try:
              from langchain_core.prompts import ChatPromptTemplate
          except ImportError:
              raise ImportError(
                  "langchain-core is required for to_langchain; install with `pip install .[langchain]`"
              )
-         if isinstance(self.template, str):
-             template_text = self.template
-         elif isinstance(self.template, Template):
-             # raw Jinja2 template object
-             template_text = self.prompt_text
+         from jinja2 import Template, Environment, FileSystemLoader
+         import os
+         import re
+         if self.template_text is not None:
+             template_content = self.template_text
+             return ChatPromptTemplate.from_template(
+                 template_content, template_format="jinja2"
+             )
+         elif self.prompt_template is not None and self.template is not None:
+             # For file-based templates, we need to get the raw string content with includes resolved
+             if isinstance(self.template, Template):
+                 try:
+                     # Use the same logic as Prompter initialization for finding prompt_dir
+                     if self.prompt_dir is None:
+                         # Check for PROMPTS_PATH environment variable
+                         prompts_path = os.environ.get("PROMPTS_PATH")
+                         if prompts_path:
+                             self.prompt_dir = prompts_path
+                         else:
+                             # Check a series of default directories
+                             potential_dirs = [
+                                 os.path.join(os.getcwd(), "prompts"),
+                                 os.path.join(os.path.dirname(os.path.abspath(__file__)), "prompts"),
+                                 os.path.join(os.path.expanduser("~"), ".prompts"),
+                             ]
+                             for dir_path in potential_dirs:
+                                 if os.path.exists(dir_path):
+                                     self.prompt_dir = dir_path
+                                     break
+                         if self.prompt_dir is None:
+                             raise ValueError(
+                                 "No prompt directory found. Please set PROMPTS_PATH environment variable "
+                                 "or specify prompt_dir when initializing Prompter with a prompt_template."
+                             )
+                     # Function to manually resolve includes while preserving variables
+                     def resolve_includes(template_name, base_dir, visited=None):
+                         if visited is None:
+                             visited = set()
+                         if template_name in visited:
+                             raise ValueError(f"Circular include detected for {template_name}")
+                         visited.add(template_name)
+                         # Ensure we don't add .jinja if it's already in the name
+                         if template_name.endswith('.jinja'):
+                             template_file = os.path.join(base_dir, template_name)
+                         else:
+                             template_file = os.path.join(base_dir, f"{template_name}.jinja")
+                         if not os.path.exists(template_file):
+                             raise ValueError(f"Template file {template_file} not found")
+                         with open(template_file, 'r', encoding='utf-8') as f:
+                             content = f.read()
+                         # Find all include statements
+                         include_pattern = r"{%\s*include\s*['\"]([^'\"]+)['\"]\s*%}"
+                         matches = re.findall(include_pattern, content)
+                         for included_template in matches:
+                             included_content = resolve_includes(included_template, base_dir, visited)
+                             placeholder = "{% include '" + included_template + "' %}"
+                             content = content.replace(placeholder, included_content)
+                         visited.remove(template_name)
+                         return content
+                     # Resolve includes for the main template
+                     template_content = resolve_includes(self.prompt_template, self.prompt_dir)
+                     return ChatPromptTemplate.from_template(
+                         template_content, template_format="jinja2"
+                     )
+                 except Exception as e:
+                     raise ValueError(f"Error processing template for LangChain: {str(e)}")
+             else:
+                 raise ValueError(
+                     "Template is not properly initialized for LangChain conversion"
+                 )
          else:
-             # file-based template
-             prompt_dir = (
-                 prompt_path_custom
-                 if prompt_path_custom and os.path.exists(prompt_path_custom)
-                 else prompt_path_default
+             raise ValueError(
+                 "Either prompt_template with a valid template or template_text must be provided for LangChain conversion"
              )
-             template_file = os.path.join(prompt_dir, f"{self.prompt_template}.jinja")
-             with open(template_file, "r") as f:
-                 template_text = f.read()
-         return ChatPromptTemplate.from_template(template_text, template_format="jinja2")
 
      @classmethod
-     def from_text(cls, text: str):
-         """
-         Create a Prompter instance from raw text, which can contain Jinja code.
+     def from_text(
+         cls, text: str, model: Optional[Union[str, Any]] = None
+     ) -> "Prompter":
+         """Create a Prompter instance from raw text, which can contain Jinja code.
 
          Args:
-             text (str): The raw prompt text.
+             text (str): The raw template text.
+             model (Union[str, Any], optional): The model to use for generation.
 
          Returns:
              Prompter: A new Prompter instance.
          """
-
-         return cls(prompt_text=text)
+         return cls(template_text=text, model=model)
 
      def render(self, data: Optional[Union[Dict, BaseModel]] = None) -> str:
          """
ai_prompter/py.typed ADDED
@@ -0,0 +1 @@
+
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ai-prompter
- Version: 0.1.1
+ Version: 0.2.2
  Summary: A prompt management library using Jinja2 templates to build complex prompts easily.
  Author-email: LUIS NOVO <lfnovo@gmail.com>
  License: MIT
@@ -21,7 +21,7 @@ A prompt management library using Jinja2 templates to build complex prompts easi
 
  - Define prompts as Jinja templates.
  - Load default templates from `src/ai_prompter/prompts`.
- - Override templates via `PROMPT_PATH` environment variable.
+ - Override templates via `PROMPTS_PATH` environment variable.
  - Render prompts with arbitrary data or Pydantic models.
  - Export to LangChain `ChatPromptTemplate`.
 
@@ -51,34 +51,104 @@ uv add langchain_core
  Configure a custom template path by creating a `.env` file in the project root:
 
  ```dotenv
- PROMPT_PATH=path/to/custom/templates
+ PROMPTS_PATH=path/to/custom/templates
  ```
 
  ## Usage
 
+ ### Basic Usage
+
+ ```python
+ from ai_prompter import Prompter
+
+ # Initialize with a template name
+ prompter = Prompter('my_template')
+
+ # Render a prompt with variables
+ prompt = prompter.render({'variable': 'value'})
+ print(prompt)
+ ```
+
+ ### Custom Prompt Directory
+
+ You can specify a custom directory for your prompt templates using the `prompt_dir` parameter:
+
+ ```python
+ prompter = Prompter(prompt_template='my_template', prompt_dir='/path/to/your/prompts')
+ ```
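Note that an explicit `prompt_dir` is placed at the front of the template search path built in `_setup_template`, so a template found there takes precedence over any directories supplied via `PROMPTS_PATH`.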
+
+ ### Using Environment Variable for Prompt Path
+
+ Set the `PROMPTS_PATH` environment variable to point to your custom prompts directory:
+
+ ```bash
+ export PROMPTS_PATH=/path/to/your/prompts
+ ```
+
+ The `Prompter` class will check this path if no custom directory is provided in the constructor. If not set, it will also look in the current working directory and `~/ai-prompter/` as fallback options before using the default package prompts.
+
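To make that lookup order concrete, here is a minimal sketch; the directories are hypothetical, and a `greet.jinja` must exist somewhere on the search path for these calls to succeed:

```python
import os

from ai_prompter import Prompter

# Highest priority: an explicit prompt_dir passed to the constructor (hypothetical path)
prompter = Prompter(prompt_template="greet", prompt_dir="/opt/my_prompts")

# Next: colon-separated directories from PROMPTS_PATH, searched in order
os.environ["PROMPTS_PATH"] = "/srv/prompts:/etc/prompts"
prompter = Prompter(prompt_template="greet")

# With neither set, the current working directory, ~/ai-prompter, and the
# package's bundled prompts folder are searched as fallbacks.
```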
  ### Raw text template
 
  ```python
  from ai_prompter import Prompter
 
  template = """Write an article about {{ topic }}."""
- prompter = Prompter(prompt_text=template)
+ prompter = Prompter(template_text=template)
  prompt = prompter.render({"topic": "AI"})
  print(prompt) # Write an article about AI.
  ```
 
  ### Using File-based Templates
 
- You can store your templates in files and reference them by name (without the `.jinja` extension). The library looks for templates in the `prompts` directory by default, or you can set a custom directory with the `PROMPT_PATH` environment variable.
+ You can store your templates in files and reference them by name (without the `.jinja` extension). The library looks for templates in the `prompts` directory by default, or you can set a custom directory with the `PROMPTS_PATH` environment variable. You can specify multiple directories separated by `:` (colon), and the library will search through them in order until a matching template is found.
 
  ```python
+ import os
+
  from ai_prompter import Prompter
 
+ # Set multiple search paths
+ os.environ["PROMPTS_PATH"] = "/path/to/templates1:/path/to/templates2"
+
  prompter = Prompter(prompt_template="greet")
- prompt = prompter.render({"who": "Tester"})
- print(prompt) # GREET Tester
+ result = prompter.render({"name": "World"})
+ print(result) # Output depends on the content of greet.jinja in the first found path
+ ```
+
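For instance, if the first `greet.jinja` found on the search path contained `Hello, {{ name }}!` (a hypothetical template used only for illustration), the example above would print `Hello, World!`.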
+ ### Using Raw Text Templates
+
+ Alternatively, you can provide the template content directly as raw text using the `template_text` parameter or the `from_text` class method.
+
+ ```python
+ from ai_prompter import Prompter
+
+ # Using template_text parameter
+ prompter = Prompter(template_text="Hello, {{ name }}!")
+ result = prompter.render({"name": "World"})
+ print(result) # Output: Hello, World!
+
+ # Using from_text class method
+ prompter = Prompter.from_text("Hi, {{ person }}!", model="gpt-4")
+ result = prompter.render({"person": "Alice"})
+ print(result) # Output: Hi, Alice!
+ ```
+
+ ### LangChain Integration
+
+ You can convert your prompts to LangChain's `ChatPromptTemplate` format for use in LangChain workflows. This works for both text-based and file-based templates.
+
+ ```python
+ from ai_prompter import Prompter
+
+ # With text-based template
+ text_prompter = Prompter(template_text="Hello, {{ name }}!")
+ lc_text_prompt = text_prompter.to_langchain()
+
+ # With file-based template
+ file_prompter = Prompter(prompt_template="greet")
+ lc_file_prompt = file_prompter.to_langchain()
  ```
 
+ **Note**: LangChain integration requires the `langchain-core` package. Install it with `pip install .[langchain]`.
+
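The object returned by `to_langchain()` is an ordinary `langchain-core` `ChatPromptTemplate`, so standard LangChain usage applies. A minimal sketch (the variable value is arbitrary):

```python
from ai_prompter import Prompter

lc_prompt = Prompter(template_text="Hello, {{ name }}!").to_langchain()

# Render the prompt into chat messages using LangChain's own API
messages = lc_prompt.format_messages(name="World")
print(messages)
```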
  ### Including Other Templates
 
  You can include other template files within a template using Jinja2's `{% include %}` directive. This allows you to build modular templates.
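For instance, a hypothetical `article.jinja` could start with `{% include 'header.jinja' %}` before its own body; when rendered, the contents of `header.jinja` are pulled in, and when exported via `to_langchain()` the include is flattened into the prompt text while variables such as `{{ topic }}` are preserved.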
@@ -138,7 +208,7 @@ The library also automatically provides a `current_time` variable with the curre
  ```python
  from ai_prompter import Prompter
 
- prompter = Prompter(prompt_text="Current time: {{current_time}}")
+ prompter = Prompter(template_text="Current time: {{current_time}}")
  prompt = prompter.render()
  print(prompt) # Current time: 2025-04-19 23:28:00
  ```
@@ -159,16 +229,6 @@ prompt = prompter.render({"topic": "AI"})
  print(prompt)
  ```
 
- ### LangChain integration
-
- ```python
- from ai_prompter import Prompter
-
- prompter = Prompter(prompt_template="article")
- lc_template = prompter.to_langchain()
- # use lc_template in LangChain chains
- ```
-
  ### Jupyter Notebook
 
  See `notebooks/prompter_usage.ipynb` for interactive examples.
@@ -0,0 +1,6 @@
+ ai_prompter/__init__.py,sha256=p3AyJ9R3C3c8BL2DdMxwYFEv0UMcH-Bz_d6nQ-YjKKA,10744
+ ai_prompter/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ ai_prompter-0.2.2.dist-info/METADATA,sha256=wKeWaoaiYixNYEAjEmkhZYp8FfO68hwjtfjApHgtvIQ,6950
+ ai_prompter-0.2.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ai_prompter-0.2.2.dist-info/licenses/LICENSE,sha256=cS0_fa_8BoP0PvVG8D19pn_HDJrG96hd4PyEm9nkRo8,1066
+ ai_prompter-0.2.2.dist-info/RECORD,,
@@ -1,5 +0,0 @@
- ai_prompter/__init__.py,sha256=4Zzy7drJRTmQZGJCEcqnf1-RkvDvZiLIYXLDeyFXGJw,5913
- ai_prompter-0.1.1.dist-info/METADATA,sha256=PgRjdlAIemIiqe5ANnoM52qK1FWxW4frN2SxS7MTsPs,4755
- ai_prompter-0.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ai_prompter-0.1.1.dist-info/licenses/LICENSE,sha256=cS0_fa_8BoP0PvVG8D19pn_HDJrG96hd4PyEm9nkRo8,1066
- ai_prompter-0.1.1.dist-info/RECORD,,