ai-prompter 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ PROMPTS_PATH=/your/prompt/path
@@ -0,0 +1,22 @@
+ <!--
+ Thanks for contributing a pull request! Please ensure you have taken a look at CONTRIBUTING.md
+ -->
+
+ #### Reference Issues/PRs
+ <!--
+ Example: Fixes #1234. See also #3456.
+ Please use keywords (e.g., Fixes) to create links to the issues or pull requests
+ you resolved, so that they will automatically be closed when your pull request
+ is merged. See https://github.com/blog/1506-closing-issues-via-pull-requests
+ -->
+
+
+ #### What does this implement/fix? Explain your changes.
+
+
+ #### Any other comments?
+
+
+ <!--
+ Thanks for contributing!
+ -->
@@ -0,0 +1,38 @@
+ name: Publish
+
+ on:
+   push:
+     tags:
+       - 'v*'
+       - '[0-9]*.[0-9]*.[0-9]*'
+
+ jobs:
+   publish:
+     runs-on: ubuntu-latest
+
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: "3.10"
+
+       - name: Install uv
+         run: |
+           curl -LsSf https://astral.sh/uv/install.sh | sh
+           echo "$HOME/.cargo/bin" >> $GITHUB_PATH
+
+       - name: Install dependencies and build tools
+         run: |
+           uv venv
+           rm -rf dist
+           uv sync
+
+       - name: Build package
+         run: uv build
+
+       - name: Publish to PyPI
+         env:
+           PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
+         run: uv publish --token "$PYPI_TOKEN"
@@ -0,0 +1,12 @@
+ # Python-generated files
+ __pycache__/
+ *.py[oc]
+ build/
+ dist/
+ wheels/
+ *.egg-info
+
+ # Virtual environments
+ .venv
+ .vscode
+ .*rules
@@ -0,0 +1 @@
+ 3.10.6
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Luis Novo
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is furnished to do
+ so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,206 @@
+ Metadata-Version: 2.4
+ Name: ai-prompter
+ Version: 0.1.0
+ Summary: A prompt management library using Jinja2 templates to build complex prompts easily.
+ Author-email: LUIS NOVO <lfnovo@gmail.com>
+ License: MIT
+ License-File: LICENSE
+ Requires-Python: >=3.10.6
+ Requires-Dist: jinja2>=3.1.6
+ Requires-Dist: pip>=25.0.1
+ Requires-Dist: pydantic>=2.0
+ Provides-Extra: langchain
+ Requires-Dist: langchain-core>=0.3; extra == 'langchain'
+ Description-Content-Type: text/markdown
+
+ # AI Prompter
17
+
18
+ A prompt management library using Jinja2 templates to build complex prompts easily. Supports raw text or file-based templates and integrates with LangChain.
19
+
20
+ ## Features
21
+
22
+ - Define prompts as Jinja templates.
23
+ - Load default templates from `src/ai_prompter/prompts`.
24
+ - Override templates via `PROMPT_PATH` environment variable.
25
+ - Render prompts with arbitrary data or Pydantic models.
26
+ - Export to LangChain `ChatPromptTemplate`.
27
+
28
+ ## Installation
29
+
30
+ 1. (Optional) Create and activate a virtual environment:
31
+ ```bash
32
+ python3 -m venv .venv
33
+ source .venv/bin/activate
34
+ ```
35
+ 2. Install the package:
36
+ ```bash
37
+ pip install .
38
+ ```
39
+ ### Extras
40
+
41
+ To enable LangChain integration:
42
+
43
+ ```bash
44
+ pip install .[langchain]
45
+ # or
46
+ uv add langchain_core
47
+ ```
48
+
49
+ ## Configuration
50
+
51
+ Configure a custom template path by creating a `.env` file in the project root:
52
+
53
+ ```dotenv
54
+ PROMPT_PATH=path/to/custom/templates
55
+ ```
56
+
57
+ ## Usage
58
+
59
+ ### Raw text template
60
+
61
+ ```python
62
+ from ai_prompter import Prompter
63
+
64
+ template = """Write an article about {{ topic }}."""
65
+ prompter = Prompter(prompt_text=template)
66
+ prompt = prompter.render({"topic": "AI"})
67
+ print(prompt) # Write an article about AI.
68
+ ```
69
+
70
+ ### Using File-based Templates
71
+
72
+ You can store your templates in files and reference them by name (without the `.jinja` extension). The library looks for templates in the `prompts` directory by default, or you can set a custom directory with the `PROMPT_PATH` environment variable.
73
+
74
+ ```python
75
+ from ai_prompter import Prompter
76
+
77
+ prompter = Prompter(prompt_template="greet")
78
+ prompt = prompter.render({"who": "Tester"})
79
+ print(prompt) # GREET Tester
80
+ ```
81
+
82
+ ### Including Other Templates
83
+
84
+ You can include other template files within a template using Jinja2's `{% include %}` directive. This allows you to build modular templates.
85
+
86
+ ```jinja
87
+ # outer.jinja
88
+ This is the outer file
89
+
90
+ {% include 'inner.jinja' %}
91
+
92
+ This is the end of the outer file
93
+ ```
94
+
95
+ ```jinja
96
+ # inner.jinja
97
+ This is the inner file
98
+
99
+ {% if type == 'a' %}
100
+ You selected A
101
+ {% else %}
102
+ You didn't select A
103
+ {% endif %}
104
+ ```
105
+
106
+ ```python
107
+ from ai_prompter import Prompter
108
+
109
+ prompter = Prompter(prompt_template="outer")
110
+ prompt = prompter.render(dict(type="a"))
111
+ print(prompt)
112
+ # This is the outer file
113
+ #
114
+ # This is the inner file
115
+ #
116
+ # You selected A
117
+ #
118
+ #
119
+ # This is the end of the outer file
120
+ ```
121
+
122
+ ### Using Variables
123
+
124
+ Templates can use variables that you pass in through the `render()` method. You can use Jinja2 filters and conditionals to control the output based on your data.
125
+
126
+ ```python
127
+ from ai_prompter import Prompter
128
+
129
+ prompter = Prompter(prompt_text="Hello {{name|default('Guest')}}!")
130
+ prompt = prompter.render() # No data provided, uses default
131
+ print(prompt) # Hello Guest!
132
+ prompt = prompter.render({"name": "Alice"}) # Data provided
133
+ print(prompt) # Hello Alice!
134
+ ```
135
+
136
+ The library also automatically provides a `current_time` variable with the current timestamp in format "YYYY-MM-DD HH:MM:SS".
137
+
138
+ ```python
139
+ from ai_prompter import Prompter
140
+
141
+ prompter = Prompter(prompt_text="Current time: {{current_time}}")
142
+ prompt = prompter.render()
143
+ print(prompt) # Current time: 2025-04-19 23:28:00
144
+ ```
145
+
146
+ ### File-based template
147
+
148
+ Place a Jinja file (e.g., `article.jinja`) in the default prompts directory (`src/ai_prompter/prompts`) or your custom path:
149
+
150
+ ```jinja
151
+ Write an article about {{ topic }}.
152
+ ```
153
+
154
+ ```python
155
+ from ai_prompter import Prompter
156
+
157
+ prompter = Prompter(prompt_template="article")
158
+ prompt = prompter.render({"topic": "AI"})
159
+ print(prompt)
160
+ ```
161
+
162
+ ### LangChain integration
163
+
164
+ ```python
165
+ from ai_prompter import Prompter
166
+
167
+ prompter = Prompter(prompt_template="article")
168
+ lc_template = prompter.to_langchain()
169
+ # use lc_template in LangChain chains
170
+ ```
171
+
172
+ ### Jupyter Notebook
173
+
174
+ See `notebooks/prompter_usage.ipynb` for interactive examples.
175
+
176
+ ## Project Structure
177
+
178
+ ```
179
+ ai-prompter/
180
+ ├── src/ai_prompter
181
+ │ ├── __init__.py
182
+ │ └── prompts/
183
+ │ └── *.jinja
184
+ ├── notebooks/
185
+ │ ├── prompter_usage.ipynb
186
+ │ └── prompts/
187
+ ├── pyproject.toml
188
+ ├── README.md
189
+ └── .env (optional)
190
+ ```
191
+
192
+ ## Testing
193
+
194
+ Run tests with:
195
+
196
+ ```bash
197
+ uv run pytest -v
198
+ ```
199
+
200
+ ## Contributing
201
+
202
+ Contributions welcome! Please open issues or PRs.
203
+
204
+ ## License
205
+
206
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,191 @@
+ # AI Prompter
+
+ A prompt management library using Jinja2 templates to build complex prompts easily. Supports raw text or file-based templates and integrates with LangChain.
+
+ ## Features
+
+ - Define prompts as Jinja templates.
+ - Load default templates from `src/ai_prompter/prompts`.
+ - Override templates via the `PROMPT_PATH` environment variable.
+ - Render prompts with arbitrary data or Pydantic models.
+ - Export to LangChain `ChatPromptTemplate`.
+
+ ## Installation
+
+ 1. (Optional) Create and activate a virtual environment:
+    ```bash
+    python3 -m venv .venv
+    source .venv/bin/activate
+    ```
+ 2. Install the package:
+    ```bash
+    pip install .
+    ```
+
+ ### Extras
+
+ To enable LangChain integration:
+
+ ```bash
+ pip install .[langchain]
+ # or
+ uv add langchain_core
+ ```
+
+ ## Configuration
+
+ Configure a custom template path by creating a `.env` file in the project root:
+
+ ```dotenv
+ PROMPT_PATH=path/to/custom/templates
+ ```
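
The README does not say whether the package loads the `.env` file on its own. A minimal sketch, assuming you load it yourself with `python-dotenv` (a dev dependency of this project) or set the variable directly before rendering, as the bundled notebook does:

```python
import os

from dotenv import load_dotenv

# Load PROMPT_PATH from a .env file, or set it directly in the environment;
# template names are resolved against this directory.
load_dotenv()
os.environ.setdefault("PROMPT_PATH", "path/to/custom/templates")

from ai_prompter import Prompter

prompter = Prompter(prompt_template="article")  # looks for article.jinja under PROMPT_PATH
print(prompter.render({"topic": "AI"}))
```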
+
+ ## Usage
+
+ ### Raw text template
+
+ ```python
+ from ai_prompter import Prompter
+
+ template = """Write an article about {{ topic }}."""
+ prompter = Prompter(prompt_text=template)
+ prompt = prompter.render({"topic": "AI"})
+ print(prompt) # Write an article about AI.
+ ```
+
+ ### Using File-based Templates
+
+ You can store your templates in files and reference them by name (without the `.jinja` extension). The library looks for templates in the `prompts` directory by default, or you can set a custom directory with the `PROMPT_PATH` environment variable.
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(prompt_template="greet")
+ prompt = prompter.render({"who": "Tester"})
+ print(prompt) # GREET Tester
+ ```
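
The `greet` template file itself is not shown above; a hypothetical `greet.jinja`, reconstructed from the expected output, could be as simple as:

```jinja
GREET {{ who }}
```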
+
+ ### Including Other Templates
+
+ You can include other template files within a template using Jinja2's `{% include %}` directive. This allows you to build modular templates.
+
+ ```jinja
+ # outer.jinja
+ This is the outer file
+
+ {% include 'inner.jinja' %}
+
+ This is the end of the outer file
+ ```
+
+ ```jinja
+ # inner.jinja
+ This is the inner file
+
+ {% if type == 'a' %}
+ You selected A
+ {% else %}
+ You didn't select A
+ {% endif %}
+ ```
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(prompt_template="outer")
+ prompt = prompter.render(dict(type="a"))
+ print(prompt)
+ # This is the outer file
+ #
+ # This is the inner file
+ #
+ # You selected A
+ #
+ #
+ # This is the end of the outer file
+ ```
+
+ ### Using Variables
+
+ Templates can use variables that you pass in through the `render()` method. You can use Jinja2 filters and conditionals to control the output based on your data.
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(prompt_text="Hello {{name|default('Guest')}}!")
+ prompt = prompter.render() # No data provided, uses default
+ print(prompt) # Hello Guest!
+ prompt = prompter.render({"name": "Alice"}) # Data provided
+ print(prompt) # Hello Alice!
+ ```
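
The feature list mentions rendering from Pydantic models, but no example is shown. A minimal sketch using a hypothetical `Article` model whose fields match the template variables; dumping the model to a dict is the conservative path and works whether or not your version of `render()` accepts models directly:

```python
from pydantic import BaseModel

from ai_prompter import Prompter


class Article(BaseModel):
    topic: str
    audience: str = "general readers"


prompter = Prompter(prompt_text="Write an article about {{ topic }} for {{ audience }}.")
article = Article(topic="AI")

# model_dump() turns the Pydantic model into the plain dict that render() is documented to accept.
print(prompter.render(article.model_dump()))  # Write an article about AI for general readers.
```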
+
+ The library also automatically provides a `current_time` variable with the current timestamp in the format "YYYY-MM-DD HH:MM:SS".
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(prompt_text="Current time: {{current_time}}")
+ prompt = prompter.render()
+ print(prompt) # Current time: 2025-04-19 23:28:00
+ ```
+
+ ### File-based template
+
+ Place a Jinja file (e.g., `article.jinja`) in the default prompts directory (`src/ai_prompter/prompts`) or your custom path:
+
+ ```jinja
+ Write an article about {{ topic }}.
+ ```
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(prompt_template="article")
+ prompt = prompter.render({"topic": "AI"})
+ print(prompt)
+ ```
+
+ ### LangChain integration
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(prompt_template="article")
+ lc_template = prompter.to_langchain()
+ # use lc_template in LangChain chains
+ ```
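
The snippet stops at `lc_template`. Assuming `to_langchain()` returns a LangChain `ChatPromptTemplate` (as the feature list states) and keeps `topic` as an input variable, using it looks roughly like this:

```python
from ai_prompter import Prompter

prompter = Prompter(prompt_template="article")
lc_template = prompter.to_langchain()

# ChatPromptTemplate exposes format_messages()/invoke(); either can feed a chat model or a chain.
messages = lc_template.format_messages(topic="AI")
print(messages[0].content)  # expected: Write an article about AI.
```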
+
+ ### Jupyter Notebook
+
+ See `notebooks/prompter_usage.ipynb` for interactive examples.
+
+ ## Project Structure
+
+ ```
+ ai-prompter/
+ ├── src/ai_prompter
+ │   ├── __init__.py
+ │   └── prompts/
+ │       └── *.jinja
+ ├── notebooks/
+ │   ├── prompter_usage.ipynb
+ │   └── prompts/
+ ├── pyproject.toml
+ ├── README.md
+ └── .env (optional)
+ ```
+
+ ## Testing
+
+ Run tests with:
+
+ ```bash
+ uv run pytest -v
+ ```
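
No tests are included in this diff view; as a hypothetical illustration of what one might look like, mirroring the raw-text example above:

```python
# tests/test_prompter.py (hypothetical)
from ai_prompter import Prompter


def test_raw_text_template_renders_variables():
    prompter = Prompter(prompt_text="Write an article about {{ topic }}.")
    assert prompter.render({"topic": "AI"}) == "Write an article about AI."
```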
+
+ ## Contributing
+
+ Contributions welcome! Please open issues or PRs.
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,135 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'/Users/luisnovo/dev/projetos/ai-prompter/notebooks/prompts'"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import os\n",
+ "from pathlib import Path\n",
+ "\n",
+ "os.environ['PROMPT_PATH'] = str(Path('prompts').resolve())\n",
+ "\n",
+ "os.environ['PROMPT_PATH']"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'Write an article about AI.'"
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from ai_prompter import Prompter\n",
+ "\n",
+ "template = \"\"\"Write an article about {{topic}}.\"\"\"\n",
+ "\n",
+ "prompter = Prompter(prompt_text=template)\n",
+ "\n",
+ "prompt = prompter.render(dict(topic=\"AI\"))\n",
+ "\n",
+ "prompt"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'Write an article about AI.'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "prompter = Prompter(prompt_template=\"article\")\n",
+ "\n",
+ "\n",
+ "prompt = prompter.render(dict(topic=\"AI\"))\n",
+ "\n",
+ "prompt"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Using Includes and Ifs"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'This is the outer file \\n\\nThis is the inner file\\n\\n\\n You selected A\\n\\n\\nThis is the end of the outer file'"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "prompter = Prompter(prompt_template=\"outer\")\n",
+ "\n",
+ "\n",
+ "prompt = prompter.render(dict(type=\"a\"))\n",
+ "\n",
+ "prompt"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+ }
@@ -0,0 +1 @@
+ Write an article about {{topic}}.
@@ -0,0 +1,7 @@
+ This is the inner file
+
+ {% if type == 'a' %}
+ You selected A
+ {% else %}
+ You didn't select A
+ {% endif %}
@@ -0,0 +1,5 @@
+ This is the outer file
+
+ {% include 'inner.jinja' %}
+
+ This is the end of the outer file
@@ -0,0 +1,42 @@
+ [project]
+ name = "ai-prompter"
+ version = "0.1.0"
+ description = "A prompt management library using Jinja2 templates to build complex prompts easily."
+ readme = "README.md"
+ homepage = "https://github.com/lfnovo/ai-prompter"
+ authors = [
+     { name = "LUIS NOVO", email = "lfnovo@gmail.com" }
+ ]
+ requires-python = ">=3.10.6"
+ dependencies = [
+     "jinja2>=3.1.6",
+     "pip>=25.0.1",
+     "pydantic>=2.0",
+ ]
+ optional-dependencies = { langchain = ["langchain-core>=0.3"] }
+ license = { text = "MIT" }
+
+ [tool.hatch.metadata]
+ allow-direct-references = true
+
+ [build-system]
+ requires = ["hatchling", "pip"]
+ build-backend = "hatchling.build"
+
+ [tool.setuptools]
+ package-dir = {"ai_prompter" = "src/ai_prompter"}
+
+ [dependency-groups]
+ dev = [
+     "ipykernel>=4.0.1",
+     "ipywidgets>=4.0.0",
+     "pyperclip>=1.9.0",
+     "pytest>=7.2.0",
+     "pytest-asyncio>=0.21.0",
+     "python-dotenv>=1.1.0",
+ ]
+
+ [tool.pytest.ini_options]
+ pythonpath = ["src/ai_prompter"]
+ asyncio_mode = "auto"
+ asyncio_default_fixture_loop_scope = "function"