ai-prompter 0.2.3__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ai_prompter/__init__.py CHANGED
@@ -97,13 +97,36 @@ class Prompter:
             prompts_path = os.getenv("PROMPTS_PATH")
             if prompts_path is not None:
                 prompt_dirs.extend(prompts_path.split(":"))
-            # Fallback to local folder and ~/ai-prompter
-            prompt_dirs.extend([os.getcwd(), os.path.expanduser("~/ai-prompter")])
+
+            # Add current working directory + /prompts
+            cwd_prompts = os.path.join(os.getcwd(), "prompts")
+            if os.path.exists(cwd_prompts):
+                prompt_dirs.append(cwd_prompts)
+
+            # Try to find project root and add its prompts folder
+            current_path = os.getcwd()
+            while current_path != os.path.dirname(current_path):  # Stop at root
+                # Check for common project indicators
+                if any(os.path.exists(os.path.join(current_path, indicator))
+                       for indicator in ['pyproject.toml', 'setup.py', 'setup.cfg', '.git']):
+                    project_prompts = os.path.join(current_path, "prompts")
+                    if os.path.exists(project_prompts) and project_prompts not in prompt_dirs:
+                        prompt_dirs.append(project_prompts)
+                    break
+                current_path = os.path.dirname(current_path)
+
+            # Fallback to ~/ai-prompter
+            prompt_dirs.append(os.path.expanduser("~/ai-prompter"))
+
             # Default package prompts folder
             if os.path.exists(prompt_path_default):
                 prompt_dirs.append(prompt_path_default)
             env = Environment(loader=FileSystemLoader(prompt_dirs))
-            self.template = env.get_template(f"{self.prompt_template}.jinja")
+            # Strip .jinja extension if present to avoid double extension
+            template_name = self.prompt_template
+            if template_name.endswith('.jinja'):
+                template_name = template_name[:-6]  # Remove '.jinja'
+            self.template = env.get_template(f"{template_name}.jinja")
             self.prompt_folders = prompt_dirs
         else:
             self.template_text = template_text
@@ -159,11 +182,11 @@ class Prompter:
             if template_name in visited:
                 raise ValueError(f"Circular include detected for {template_name}")
             visited.add(template_name)
-            # Ensure we don't add .jinja if it's already in the name
-            if template_name.endswith('.jinja'):
-                template_file = os.path.join(base_dir, template_name)
-            else:
-                template_file = os.path.join(base_dir, f"{template_name}.jinja")
+            # Strip .jinja extension if present to avoid double extension
+            clean_name = template_name
+            if clean_name.endswith('.jinja'):
+                clean_name = clean_name[:-6]  # Remove '.jinja'
+            template_file = os.path.join(base_dir, f"{clean_name}.jinja")
             if not os.path.exists(template_file):
                 raise ValueError(f"Template file {template_file} not found")
             with open(template_file, 'r', encoding='utf-8') as f:
@@ -209,7 +232,11 @@ class Prompter:
             return 'text'

         for folder in self.prompt_folders:
-            template_file = os.path.join(folder, f"{template_name}.jinja")
+            # Strip .jinja extension if present to avoid double extension
+            clean_name = template_name
+            if clean_name.endswith('.jinja'):
+                clean_name = clean_name[:-6]  # Remove '.jinja'
+            template_file = os.path.join(folder, f"{clean_name}.jinja")
             if os.path.exists(template_file):
                 return template_file

ai_prompter-0.3.1.dist-info/METADATA ADDED
@@ -0,0 +1,884 @@
1
+ Metadata-Version: 2.4
2
+ Name: ai-prompter
3
+ Version: 0.3.1
4
+ Summary: A prompt management library using Jinja2 templates to build complex prompts easily.
5
+ Author-email: LUIS NOVO <lfnovo@gmail.com>
6
+ License: MIT
7
+ License-File: LICENSE
8
+ Requires-Python: >=3.10
9
+ Requires-Dist: jinja2>=3.1.6
10
+ Requires-Dist: pip>=25.0.1
11
+ Requires-Dist: pydantic>=2.0
12
+ Provides-Extra: langchain
13
+ Requires-Dist: langchain-core>=0.3; extra == 'langchain'
14
+ Description-Content-Type: text/markdown
15
+
16
+ # AI Prompter: Professional Prompt Management Made Simple
17
+
18
+ **Stop hardcoding prompts. Start building maintainable, reusable AI prompt templates.**
19
+
20
+ AI Prompter is a powerful Python library that transforms how you manage AI prompts. Using familiar Jinja2 templating, you can create dynamic, reusable prompts that scale with your applications - whether you're building chatbots, content generators, or complex AI workflows.
21
+
22
+ ## Why AI Prompter?
23
+
24
+ - **🎯 Template-Driven**: Write prompts once, reuse everywhere with dynamic variables
25
+ - **📁 Organized**: Keep prompts in separate files, organized and version-controlled
26
+ - **🔧 Flexible**: Works with any LLM provider - OpenAI, Anthropic, local models
27
+ - **⚡ LangChain Ready**: Seamless integration with LangChain workflows
28
+ - **🏗️ Structured Output**: Built-in support for JSON, Pydantic models, and custom parsers
29
+ - **🎨 Modular**: Include and compose templates for complex prompt engineering
30
+
31
+ ## Quick Start
32
+
33
+ ### Installation
34
+
35
+ ```bash
36
+ pip install ai-prompter
37
+
38
+ # For LangChain integration
39
+ pip install ai-prompter[langchain]
40
+ ```
41
+
42
+ ### 30-Second Example
43
+
44
+ ```python
45
+ from ai_prompter import Prompter
46
+
47
+ # Create a simple prompt template
48
+ prompter = Prompter(template_text="""
49
+ You are a {{ role }} expert. Help the user with their {{ task_type }} question.
50
+
51
+ User Question: {{ question }}
52
+
53
+ Please provide a {{ tone }} and detailed response.
54
+ """)
55
+
56
+ # Use it with different scenarios
57
+ response = prompter.render({
58
+ "role": "Python programming",
59
+ "task_type": "debugging",
60
+ "question": "Why is my list comprehension not working?",
61
+ "tone": "friendly"
62
+ })
63
+
64
+ print(response)
65
+ # Output: You are a Python programming expert. Help the user with their debugging question...
66
+ ```
67
+
68
+ ### File-Based Templates (Recommended)
69
+
70
+ Create a `prompts/` folder in your project and save templates as `.jinja` files:
71
+
72
+ ```jinja
73
+ <!-- prompts/code_review.jinja -->
74
+ You are an experienced {{ language }} developer conducting a code review.
75
+
76
+ Code to review:
77
+ ```{{ language }}
78
+ {{ code }}
79
+ ```
80
+
81
+ Focus on:
82
+ {% for focus_area in focus_areas %}
83
+ - {{ focus_area }}
84
+ {% endfor %}
85
+
86
+ Provide specific, actionable feedback with examples.
87
+ ```
88
+
89
+ ```python
90
+ from ai_prompter import Prompter
91
+
92
+ # Load the template by name (finds prompts/code_review.jinja automatically)
93
+ reviewer = Prompter(prompt_template="code_review")
94
+
95
+ prompt = reviewer.render({
96
+ "language": "python",
97
+ "code": "def calculate(x, y): return x + y",
98
+ "focus_areas": ["error handling", "documentation", "performance"]
99
+ })
100
+ ```
101
+
102
+ ## Features
103
+
104
+ - Define prompts as Jinja templates.
105
+ - Load default templates from `src/ai_prompter/prompts`.
106
+ - Override templates via `PROMPTS_PATH` environment variable.
107
+ - Automatic project root detection for prompt templates.
108
+ - Render prompts with arbitrary data or Pydantic models.
109
+ - Export to LangChain `ChatPromptTemplate`.
110
+ - Automatic output parser integration for structured outputs.
111
+
112
+ ## Installation & Setup
113
+
114
+ ### Basic Installation
115
+
116
+ ```bash
117
+ # Install from PyPI
118
+ pip install ai-prompter
119
+
120
+ # Or using uv (recommended for Python projects)
121
+ uv add ai-prompter
122
+ ```
123
+
124
+ ### With LangChain Integration
125
+
126
+ ```bash
127
+ pip install ai-prompter[langchain]
128
+ # or
129
+ uv add ai-prompter[langchain]
130
+ ```
131
+
132
+ ### Development Installation
133
+
134
+ ```bash
135
+ git clone https://github.com/lfnovo/ai-prompter
136
+ cd ai-prompter
137
+ uv sync # installs with all dev dependencies
138
+ ```
139
+
140
+ ## Configuration
141
+
142
+ Configure a custom template path by creating a `.env` file in the project root:
143
+
144
+ ```dotenv
145
+ PROMPTS_PATH=path/to/custom/templates
146
+ ```
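+
+ The library reads `PROMPTS_PATH` from the process environment (via `os.getenv`). If your application does not already load `.env` files, a minimal sketch using python-dotenv (an assumption: python-dotenv is a separate install, not an ai-prompter dependency):
+
+ ```python
+ # Assumption: python-dotenv installed separately (pip install python-dotenv).
+ from dotenv import load_dotenv
+
+ from ai_prompter import Prompter
+
+ load_dotenv()  # reads .env and exports PROMPTS_PATH into os.environ
+
+ # "my_template" is a hypothetical template resolved from the configured path
+ prompter = Prompter(prompt_template="my_template")
+ ```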
147
+
148
+ ## Usage
149
+
150
+ ### Basic Usage
151
+
152
+ ```python
153
+ from ai_prompter import Prompter
154
+
155
+ # Initialize with a template name
156
+ prompter = Prompter('my_template')
157
+
158
+ # Render a prompt with variables
159
+ prompt = prompter.render({'variable': 'value'})
160
+ print(prompt)
161
+ ```
162
+
163
+ ### Custom Prompt Directory
164
+
165
+ You can specify a custom directory for your prompt templates using the `prompt_dir` parameter:
166
+
167
+ ```python
168
+ prompter = Prompter(template_text='Hello {{ name }}!', prompt_dir='/path/to/your/prompts')
169
+ ```
170
+
171
+ ### Using Environment Variable for Prompt Path
172
+
173
+ Set the `PROMPTS_PATH` environment variable to point to your custom prompts directory:
174
+
175
+ ```bash
176
+ export PROMPTS_PATH=/path/to/your/prompts
177
+ ```
178
+
179
+ You can specify multiple directories separated by `:` (colon):
180
+
181
+ ```bash
182
+ export PROMPTS_PATH=/path/to/templates1:/path/to/templates2
183
+ ```
184
+
185
+ ### Template Search Order
186
+
187
+ The `Prompter` class searches for templates in the following locations (in order of priority):
188
+
189
+ 1. **Custom directory** - If you provide `prompt_dir` parameter when initializing Prompter
190
+ 2. **Environment variable paths** - Directories specified in `PROMPTS_PATH` (colon-separated)
191
+ 3. **Current directory prompts** - `./prompts` subfolder in your current working directory
192
+ 4. **Project root prompts** - Automatically detects your Python project root (by looking for `pyproject.toml`, `setup.py`, `setup.cfg`, or `.git`) and checks for a `prompts` folder there
193
+ 5. **Home directory** - `~/ai-prompter` folder
194
+ 6. **Package defaults** - Built-in templates at `src/ai_prompter/prompts`
195
+
196
+ This allows you to organize your project with prompts at the root level, regardless of your package structure:
197
+ ```
198
+ my-project/
199
+ ├── prompts/ # <- Templates here will be found automatically
200
+ │ └── my_template.jinja
201
+ ├── src/
202
+ │ └── my_package/
203
+ │ └── main.py
204
+ └── pyproject.toml
205
+ ```
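+
+ For example, with the layout above, code running anywhere inside the project can load the root-level template by name (a minimal sketch; `my_template.jinja` and its `name` variable are just the hypothetical files from the tree):
+
+ ```python
+ # e.g. src/my_package/main.py
+ from ai_prompter import Prompter
+
+ # Resolves to my-project/prompts/my_template.jinja: the project root is found
+ # via pyproject.toml and its prompts/ folder joins the search path.
+ prompter = Prompter(prompt_template="my_template")
+ print(prompter.render({"name": "World"}))  # variables depend on your template
+ ```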
206
+
207
+ ### Using File-based Templates
208
+
209
+ You can store your templates in files and reference them by name. The library will search through all configured paths (see Template Search Order above) until a matching template is found.
210
+
211
+ **Template naming**: You can reference templates either with or without the `.jinja` extension:
212
+ - `prompt_template="greet"` → searches for `greet.jinja`
213
+ - `prompt_template="greet.jinja"` → also searches for `greet.jinja`
214
+
215
+ Both approaches work identically, so use whichever feels more natural for your workflow.
216
+
217
+ ```python
218
+ from ai_prompter import Prompter
219
+
220
+ # Will search for 'greet.jinja' in all configured paths
221
+ prompter = Prompter(prompt_template="greet")
222
+ result = prompter.render({"name": "World"})
223
+ print(result) # Output depends on the content of greet.jinja
224
+ ```
225
+
226
+ You can also specify multiple search paths via environment variable:
227
+
228
+ ```python
229
+ import os
230
+ from ai_prompter import Prompter
231
+
232
+ # Set multiple search paths
233
+ os.environ["PROMPTS_PATH"] = "/path/to/templates1:/path/to/templates2"
234
+
235
+ prompter = Prompter(prompt_template="greet")
236
+ result = prompter.render({"name": "World"})
237
+ print(result) # Uses greet.jinja from the first path where it's found
238
+ ```
239
+
240
+ ### Raw text template
241
+
242
+ ```python
243
+ from ai_prompter import Prompter
244
+
245
+ template = """Write an article about {{ topic }}."""
246
+ prompter = Prompter(template_text=template)
247
+ prompt = prompter.render({"topic": "AI"})
248
+ print(prompt) # Write an article about AI.
249
+ ```
250
+
251
+ ### Using Raw Text Templates
252
+
253
+ Alternatively, you can provide the template content directly as raw text using the `template_text` parameter or the `from_text` class method.
254
+
255
+ ```python
256
+ from ai_prompter import Prompter
257
+
258
+ # Using template_text parameter
259
+ prompter = Prompter(template_text="Hello, {{ name }}!")
260
+ result = prompter.render({"name": "World"})
261
+ print(result) # Output: Hello, World!
262
+
263
+ # Using from_text class method
264
+ prompter = Prompter.from_text("Hi, {{ person }}!", model="gpt-4")
265
+ result = prompter.render({"person": "Alice"})
266
+ print(result) # Output: Hi, Alice!
267
+ ```
268
+
269
+ ### LangChain Integration
270
+
271
+ You can convert your prompts to LangChain's `ChatPromptTemplate` format for use in LangChain workflows. This works for both text-based and file-based templates.
272
+
273
+ ```python
274
+ from ai_prompter import Prompter
275
+
276
+ # With text-based template
277
+ text_prompter = Prompter(template_text="Hello, {{ name }}!")
278
+ lc_text_prompt = text_prompter.to_langchain()
279
+
280
+ # With file-based template
281
+ file_prompter = Prompter(prompt_template="greet")
282
+ lc_file_prompt = file_prompter.to_langchain()
283
+ ```
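+
+ The converted object behaves like any other `ChatPromptTemplate`. A minimal usage sketch (assuming the template's Jinja variables become the prompt's input variables; the chained model call is an assumption requiring `langchain-openai` and an API key):
+
+ ```python
+ from ai_prompter import Prompter
+
+ prompter = Prompter(template_text="Hello, {{ name }}!")
+ chat_prompt = prompter.to_langchain()
+
+ # ChatPromptTemplate is a Runnable, so it can be invoked directly...
+ messages = chat_prompt.invoke({"name": "World"}).to_messages()
+
+ # ...or composed into an LCEL chain with a chat model (assumption):
+ # from langchain_openai import ChatOpenAI
+ # chain = chat_prompt | ChatOpenAI(model="gpt-4o-mini")
+ # print(chain.invoke({"name": "World"}).content)
+ ```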
284
+
285
+ **Note**: LangChain integration requires the `langchain-core` package. Install it with `pip install ai-prompter[langchain]`.
286
+
287
+ ### Using Output Parsers
288
+
289
+ The Prompter class supports LangChain output parsers to automatically inject formatting instructions into your prompts. When you provide a parser, it will call the parser's `get_format_instructions()` method and make the result available as `{{ format_instructions }}` in your template.
290
+
291
+ ```python
292
+ from ai_prompter import Prompter
293
+ from langchain_core.output_parsers import PydanticOutputParser
294
+ from pydantic import BaseModel, Field
295
+
296
+ # Define your output model
297
+ class Article(BaseModel):
298
+ title: str = Field(description="Article title")
299
+ summary: str = Field(description="Brief summary")
300
+ tags: list[str] = Field(description="Relevant tags")
301
+
302
+ # Create a parser
303
+ parser = PydanticOutputParser(pydantic_object=Article)
304
+
305
+ # Create a prompter with the parser
306
+ prompter = Prompter(
307
+ template_text="""Write an article about {{ topic }}.
308
+
309
+ {{ format_instructions }}""",
310
+ parser=parser
311
+ )
312
+
313
+ # Render the prompt - format instructions are automatically included
314
+ prompt = prompter.render({"topic": "AI Safety"})
315
+ print(prompt)
316
+ # Output will include the topic AND the parser's format instructions
317
+ ```
318
+
319
+ This works with file-based templates too:
320
+
321
+ ```jinja
322
+ # article_structured.jinja
323
+ Write an article about {{ topic }}.
324
+
325
+ Please format your response according to these instructions:
326
+ {{ format_instructions }}
327
+ ```
328
+
329
+ ```python
330
+ prompter = Prompter(
331
+ prompt_template="article_structured",
332
+ parser=parser
333
+ )
334
+ ```
335
+
336
+ The parser integration supports any LangChain output parser that implements `get_format_instructions()`, including:
337
+ - `PydanticOutputParser` - For structured Pydantic model outputs
338
+ - `OutputFixingParser` - For fixing malformed outputs
339
+ - `RetryOutputParser` - For retrying failed parsing attempts
340
+ - `StructuredOutputParser` - For dictionary-based structured outputs
341
+
342
+ ## Real-World Examples
343
+
344
+ ### Content Generation Pipeline
345
+
346
+ ```python
347
+ # prompts/blog_post.jinja
348
+ You are a professional content writer specializing in {{ niche }}.
349
+
350
+ Write a {{ post_type }} blog post about "{{ title }}" for {{ target_audience }}.
351
+
352
+ Requirements:
353
+ - Length: {{ word_count }} words
354
+ - Tone: {{ tone }}
355
+ - Include {{ num_sections }} main sections
356
+ {% if seo_keywords -%}
357
+ - SEO Keywords to include: {{ seo_keywords|join(', ') }}
358
+ {% endif %}
359
+ {% if call_to_action -%}
360
+ - End with this call-to-action: {{ call_to_action }}
361
+ {% endif %}
362
+
363
+ {{ format_instructions }}
364
+ ```
365
+
366
+ ```python
367
+ from ai_prompter import Prompter
368
+ from langchain_core.output_parsers import PydanticOutputParser
+ from pydantic import BaseModel, Field
369
+
370
+ class BlogPost(BaseModel):
371
+ title: str = Field(description="SEO-optimized title")
372
+ sections: list[dict] = Field(description="List of sections with headers and content")
373
+ meta_description: str = Field(description="SEO meta description")
374
+ tags: list[str] = Field(description="Relevant tags")
375
+
376
+ # Create content generator
377
+ blog_generator = Prompter(
378
+ prompt_template="blog_post",
379
+ parser=PydanticOutputParser(pydantic_object=BlogPost)
380
+ )
381
+
382
+ # Generate different types of content
383
+ tech_post = blog_generator.render({
384
+ "niche": "technology",
385
+ "title": "Getting Started with AI Prompt Engineering",
386
+ "target_audience": "software developers",
387
+ "post_type": "how-to guide",
388
+ "word_count": 1500,
389
+ "tone": "technical but accessible",
390
+ "num_sections": 5,
391
+ "seo_keywords": ["AI prompts", "prompt engineering", "LLM"],
392
+ "call_to_action": "Try AI Prompter in your next project!"
393
+ })
394
+ ```
395
+
396
+ ### Multi-Language Support
397
+
398
+ ```python
399
+ # prompts/customer_support.jinja
400
+ {% set greetings = {
401
+ 'en': 'Hello',
402
+ 'es': 'Hola',
403
+ 'fr': 'Bonjour',
404
+ 'de': 'Hallo'
405
+ } %}
406
+
407
+ {{ greetings[language] }}! I'm here to help you with {{ issue_type }}.
408
+
409
+ Customer Issue: {{ customer_message }}
410
+
411
+ {% if language != 'en' -%}
412
+ Please respond in {{ language }}.
413
+ {% endif %}
414
+
415
+ Provide a {{ tone }} response that:
416
+ 1. Acknowledges the customer's concern
417
+ 2. Offers a specific solution or next steps
418
+ 3. Includes relevant {{ company_name }} policies if applicable
419
+ ```
420
+
421
+ ```python
422
+ support_agent = Prompter(prompt_template="customer_support")
423
+
424
+ # Handle support tickets in different languages
425
+ spanish_response = support_agent.render({
426
+ "language": "es",
427
+ "issue_type": "billing inquiry",
428
+ "customer_message": "No puedo encontrar mi factura",
429
+ "tone": "empathetic and professional",
430
+ "company_name": "TechCorp"
431
+ })
432
+ ```
433
+
434
+ ### Dynamic Email Campaigns
435
+
436
+ ```python
437
+ # prompts/email_campaign.jinja
438
+ Subject: {% if user.is_premium %}Exclusive{% else %}Special{% endif %} {{ campaign_type }} - {{ subject_line }}
439
+
440
+ Hi {{ user.first_name|default('there') }},
441
+
442
+ {% if user.last_purchase_days_ago < 30 -%}
443
+ Thanks for your recent purchase of {{ user.last_product }}!
444
+ {% elif user.last_purchase_days_ago > 90 -%}
445
+ We miss you! It's been a while since your last order.
446
+ {% endif %}
447
+
448
+ {{ main_message }}
449
+
450
+ {% if user.is_premium -%}
451
+ As a premium member, you get:
452
+ {% for benefit in premium_benefits -%}
453
+ ✓ {{ benefit }}
454
+ {% endfor %}
455
+ {% else -%}
456
+ {% if upgrade_offer -%}
457
+ Upgrade to premium and save {{ upgrade_discount }}%!
458
+ {% endif %}
459
+ {% endif %}
460
+
461
+ {{ call_to_action }}
462
+
463
+ Best regards,
464
+ {{ sender_name }}
465
+ ```
466
+
467
+ ```python
468
+ email_generator = Prompter(prompt_template="email_campaign")
469
+
470
+ # Personalized emails based on user data
471
+ campaign_email = email_generator.render({
472
+ "user": {
473
+ "first_name": "Sarah",
474
+ "is_premium": False,
475
+ "last_purchase_days_ago": 45,
476
+ "last_product": "Python Course"
477
+ },
478
+ "campaign_type": "Sale",
479
+ "subject_line": "50% Off All Programming Courses",
480
+ "main_message": "Master new skills with our comprehensive programming courses.",
481
+ "upgrade_offer": True,
482
+ "upgrade_discount": 25,
483
+ "premium_benefits": ["Early access to new courses", "1-on-1 mentoring", "Certificate priority"],
484
+ "call_to_action": "Shop Now →",
485
+ "sender_name": "The Learning Team"
486
+ })
487
+ ```
488
+
489
+ ### API Documentation Generator
490
+
491
+ ```python
492
+ # prompts/api_docs.jinja
493
+ # {{ endpoint.method }} {{ endpoint.path }}
494
+
495
+ {{ endpoint.description }}
496
+
497
+ ## Request
498
+
499
+ {% if endpoint.parameters -%}
500
+ ### Parameters
501
+
502
+ | Name | Type | Required | Description |
503
+ |------|------|----------|-------------|
504
+ {% for param in endpoint.parameters -%}
505
+ | `{{ param.name }}` | {{ param.type }} | {{ "Yes" if param.required else "No" }} | {{ param.description }} |
506
+ {% endfor %}
507
+ {% endif %}
508
+
509
+ {% if endpoint.body_schema -%}
510
+ ### Request Body
511
+
512
+ ```json
513
+ {{ endpoint.body_schema|tojson(indent=2) }}
514
+ ```
515
+ {% endif %}
516
+
517
+ ## Response
518
+
519
+ ```json
520
+ {{ endpoint.response_example|tojson(indent=2) }}
521
+ ```
522
+
523
+ {% if endpoint.error_codes -%}
524
+ ## Error Codes
525
+
526
+ {% for error in endpoint.error_codes -%}
527
+ - **{{ error.code }}**: {{ error.description }}
528
+ {% endfor %}
529
+ {% endif %}
530
+
531
+ ## Example
532
+
533
+ ```bash
534
+ curl -X {{ endpoint.method }} "{{ base_url }}{{ endpoint.path }}" \
535
+ {% for header in endpoint.headers -%}
536
+ -H "{{ header.name }}: {{ header.value }}" \
537
+ {% endfor %}
538
+ {% if endpoint.body_example -%}
539
+ -d '{{ endpoint.body_example|tojson }}'
540
+ {% endif %}
541
+ ```
542
+ ```
543
+
544
+ ```python
545
+ docs_generator = Prompter(prompt_template="api_docs")
546
+
547
+ endpoint_doc = docs_generator.render({
548
+ "base_url": "https://api.example.com",
549
+ "endpoint": {
550
+ "method": "POST",
551
+ "path": "/users",
552
+ "description": "Create a new user account",
553
+ "parameters": [
554
+ {"name": "api_key", "type": "string", "required": True, "description": "Your API key"}
555
+ ],
556
+ "body_schema": {"name": "string", "email": "string", "role": "string"},
557
+ "body_example": {"name": "John Doe", "email": "john@example.com", "role": "user"},
558
+ "response_example": {"id": 123, "name": "John Doe", "created_at": "2024-01-01T00:00:00Z"},
559
+ "error_codes": [
560
+ {"code": 400, "description": "Invalid request data"},
561
+ {"code": 409, "description": "Email already exists"}
562
+ ],
563
+ "headers": [{"name": "Authorization", "value": "Bearer YOUR_API_KEY"}]
564
+ }
565
+ })
566
+ ```
567
+
568
+ ## Best Practices
569
+
570
+ ### 1. Organize Templates by Use Case
571
+
572
+ ```
573
+ prompts/
574
+ ├── content/
575
+ │ ├── blog_post.jinja
576
+ │ ├── social_media.jinja
577
+ │ └── email_newsletter.jinja
578
+ ├── analysis/
579
+ │ ├── code_review.jinja
580
+ │ ├── data_analysis.jinja
581
+ │ └── competitor_research.jinja
582
+ └── support/
583
+ ├── customer_support.jinja
584
+ └── technical_troubleshooting.jinja
585
+ ```
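+
+ Nested names such as `analysis/code_review` are passed through to Jinja2's `FileSystemLoader`, so referencing templates by their path relative to the prompts folder should work; treat this as an assumption to verify rather than a documented guarantee:
+
+ ```python
+ from ai_prompter import Prompter
+
+ # Assumption: subfolder paths resolve as "analysis/code_review.jinja";
+ # confirm against your installed version.
+ reviewer = Prompter(prompt_template="analysis/code_review")
+ prompt = reviewer.render({
+     "language": "python",
+     "code": "def add(x, y): return x + y",
+     "focus_areas": ["error handling", "documentation"],
+ })
+ print(prompt)
+ ```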
586
+
587
+ ### 2. Use Descriptive Variable Names
588
+
589
+ ```python
590
+ # Good ✅
591
+ prompter.render({
592
+ "user_expertise_level": "beginner",
593
+ "preferred_learning_style": "visual",
594
+ "target_completion_time": "2 weeks"
595
+ })
596
+
597
+ # Avoid ❌
598
+ prompter.render({
599
+ "level": "beginner",
600
+ "style": "visual",
601
+ "time": "2 weeks"
602
+ })
603
+ ```
604
+
605
+ ### 3. Include Validation and Defaults
606
+
607
+ ```jinja
608
+ <!-- prompts/content_generator.jinja -->
609
+ {% if not topic -%}
610
+ {{ raise_error("topic is required") }}
611
+ {% endif %}
612
+
613
+ Generate content about {{ topic }} for {{ audience|default("general audience") }}.
614
+
615
+ Word count: {{ word_count|default(500) }}
616
+ Tone: {{ tone|default("professional") }}
617
+ ```
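+
+ Note that `raise_error` is not a Jinja2 built-in; the template above assumes such a helper has been registered. A minimal sketch of the idea with plain Jinja2 (illustrative only; ai-prompter builds its own environment internally, so treat this as a standalone validation pattern rather than an ai-prompter API):
+
+ ```python
+ from jinja2 import Environment, FileSystemLoader
+
+ def raise_error(message: str):
+     """Let a template fail fast when a required variable is missing."""
+     raise ValueError(message)
+
+ env = Environment(loader=FileSystemLoader("prompts"))
+ env.globals["raise_error"] = raise_error  # now callable from any template
+
+ template = env.get_template("content_generator.jinja")
+ print(template.render(topic="prompt engineering"))
+ ```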
618
+
619
+ ### 4. Leverage Jinja2 Features
620
+
621
+ ```jinja
622
+ <!-- Use filters for formatting -->
623
+ Today's date: {{ current_time }}
624
+ Uppercase title: {{ title|upper }}
625
+ Comma-separated tags: {{ tags|join(", ") }}
626
+
627
+ <!-- Use conditionals for dynamic content -->
628
+ {% if user.subscription_type == "premium" %}
629
+ You have access to premium features!
630
+ {% else %}
631
+ Upgrade to premium for advanced features.
632
+ {% endif %}
633
+
634
+ <!-- Use loops for repetitive content -->
635
+ {% for step in instructions %}
636
+ {{ loop.index }}. {{ step }}
637
+ {% endfor %}
638
+ ```
639
+
640
+ ### 5. Version Control Your Prompts
641
+
642
+ ```bash
643
+ # Track prompt changes with git
644
+ git add prompts/
645
+ git commit -m "feat: add support for multi-language customer service prompts"
646
+
647
+ # Use branches for prompt experiments
648
+ git checkout -b experiment/new-tone-testing
649
+ ```
650
+
651
+ ### 6. Test Templates with Sample Data
652
+
653
+ ```python
654
+ # Create test data for your templates
655
+ test_data = {
656
+ "user": {"name": "Test User", "level": "beginner"},
657
+ "product": {"name": "AI Course", "price": 99.99},
658
+ "current_time": "2024-01-15 10:30:00"
659
+ }
660
+
661
+ # Test all your templates
662
+ for template_name in ["welcome", "product_recommendation", "follow_up"]:
663
+ prompter = Prompter(prompt_template=template_name)
664
+ result = prompter.render(test_data)
665
+ print(f"Template: {template_name}")
666
+ print(f"Length: {len(result)} characters")
667
+ print("---")
668
+ ```
669
+
670
+ ## Advanced Features
671
+
672
+ ### Including Other Templates
673
+
674
+ You can include other template files within a template using Jinja2's `{% include %}` directive. This allows you to build modular templates.
675
+
676
+ ```jinja
677
+ # outer.jinja
678
+ This is the outer file
679
+
680
+ {% include 'inner.jinja' %}
681
+
682
+ This is the end of the outer file
683
+ ```
684
+
685
+ ```jinja
686
+ # inner.jinja
687
+ This is the inner file
688
+
689
+ {% if type == 'a' %}
690
+ You selected A
691
+ {% else %}
692
+ You didn't select A
693
+ {% endif %}
694
+ ```
695
+
696
+ ```python
697
+ from ai_prompter import Prompter
698
+
699
+ prompter = Prompter(prompt_template="outer")
700
+ prompt = prompter.render(dict(type="a"))
701
+ print(prompt)
702
+ # This is the outer file
703
+ #
704
+ # This is the inner file
705
+ #
706
+ # You selected A
707
+ #
708
+ #
709
+ # This is the end of the outer file
710
+ ```
711
+
712
+ ### Using Variables
713
+
714
+ Templates can use variables that you pass in through the `render()` method. You can use Jinja2 filters and conditionals to control the output based on your data.
715
+
716
+ ```python
717
+ from ai_prompter import Prompter
718
+
719
+ prompter = Prompter(template_text="Hello {{name|default('Guest')}}!")
720
+ prompt = prompter.render() # No data provided, uses default
721
+ print(prompt) # Hello Guest!
722
+ prompt = prompter.render({"name": "Alice"}) # Data provided
723
+ print(prompt) # Hello Alice!
724
+ ```
725
+
726
+ The library also automatically provides a `current_time` variable with the current timestamp in the format "YYYY-MM-DD HH:MM:SS".
727
+
728
+ ```python
729
+ from ai_prompter import Prompter
730
+
731
+ prompter = Prompter(template_text="Current time: {{current_time}}")
732
+ prompt = prompter.render()
733
+ print(prompt) # Current time: 2025-04-19 23:28:00
734
+ ```
735
+
736
+ ### File-based template
737
+
738
+ Place a Jinja file (e.g., `article.jinja`) in the default prompts directory (`src/ai_prompter/prompts`) or your custom path:
739
+
740
+ ```jinja
741
+ Write an article about {{ topic }}.
742
+ ```
743
+
744
+ ```python
745
+ from ai_prompter import Prompter
746
+
747
+ prompter = Prompter(prompt_template="article")
748
+ prompt = prompter.render({"topic": "AI"})
749
+ print(prompt)
750
+ ```
751
+
752
+ ## Troubleshooting
753
+
754
+ ### Common Issues
755
+
756
+ **Template Not Found Error**
757
+ ```python
758
+ # Check where AI Prompter is looking for templates
759
+ prompter = Prompter(prompt_template="my_template")
760
+ print("Template locations searched:")
761
+ for folder in prompter.prompt_folders:
762
+ print(f" - {folder}")
763
+
764
+ # Verify template location
765
+ location = prompter.template_location("my_template")
766
+ print(f"Template location: {location}")
767
+ ```
768
+
769
+ **Jinja2 Syntax Errors**
770
+ ```python
771
+ # Test templates in isolation
772
+ from jinja2 import Template
773
+
774
+ template_content = "Hello {{ name }}!"
775
+ template = Template(template_content)
776
+ result = template.render(name="World") # Test basic rendering
777
+ ```
778
+
779
+ **Environment Variable Issues**
780
+ ```bash
781
+ # Check current PROMPTS_PATH
782
+ echo $PROMPTS_PATH
783
+
784
+ # Set for current session
785
+ export PROMPTS_PATH="/path/to/templates"
786
+
787
+ # Set permanently in ~/.bashrc or ~/.zshrc
788
+ echo 'export PROMPTS_PATH="/path/to/templates"' >> ~/.bashrc
789
+ ```
790
+
791
+ ### Performance Tips
792
+
793
+ - **Cache Prompter instances** for frequently used templates
794
+ - **Use file-based templates** for better performance with includes
795
+ - **Keep template files small** and modular
796
+ - **Minimize variable processing** in templates when possible
797
+
798
+ ```python
799
+ # Good: Reuse prompter instances
800
+ email_prompter = Prompter(prompt_template="email_template")
801
+ for user in users:
802
+ email = email_prompter.render({"user": user})
803
+ send_email(email)
804
+
805
+ # Avoid: Creating new instances repeatedly
806
+ for user in users: # Less efficient
807
+ prompter = Prompter(prompt_template="email_template")
808
+ email = prompter.render({"user": user})
809
+ ```
810
+
811
+ ## Interactive Examples
812
+
813
+ Explore AI Prompter features interactively:
814
+
815
+ ```bash
816
+ # Clone the repository
817
+ git clone https://github.com/lfnovo/ai-prompter
818
+ cd ai-prompter
819
+
820
+ # Install with dev dependencies
821
+ uv sync
822
+
823
+ # Launch Jupyter notebook
824
+ uv run jupyter lab notebooks/prompter_usage.ipynb
825
+ ```
826
+
827
+ ## Testing & Development
828
+
829
+ ```bash
830
+ # Run all tests
831
+ uv run pytest -v
832
+
833
+ # Run tests with coverage
834
+ uv run pytest --cov=ai_prompter
835
+
836
+ # Run specific test file
837
+ uv run pytest tests/test_prompter.py -v
838
+
839
+ # Format code
840
+ uv run black src/
841
+ uv run isort src/
842
+ ```
843
+
844
+ ## Community & Support
845
+
846
+ - **GitHub Issues**: [Report bugs or request features](https://github.com/lfnovo/ai-prompter/issues)
847
+ - **Discussions**: [Ask questions and share templates](https://github.com/lfnovo/ai-prompter/discussions)
848
+ - **Examples**: [Community template gallery](https://github.com/lfnovo/ai-prompter/wiki/Template-Gallery)
849
+
850
+ ## Contributing
851
+
852
+ We welcome contributions! Here's how to get started:
853
+
854
+ 1. **Fork the repository**
855
+ 2. **Create a feature branch**: `git checkout -b feature/amazing-feature`
856
+ 3. **Add tests** for new functionality
857
+ 4. **Ensure tests pass**: `uv run pytest`
858
+ 5. **Submit a Pull Request**
859
+
860
+ ### Contributing Templates
861
+
862
+ Share your templates with the community:
863
+
864
+ 1. Add your template to `examples/community-templates/`
865
+ 2. Include documentation and example usage
866
+ 3. Submit a PR with the `template-contribution` label
867
+
868
+ ## Changelog
869
+
870
+ See [CHANGELOG.md](CHANGELOG.md) for version history and updates.
871
+
872
+ ## License
873
+
874
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
875
+
876
+ ---
877
+
878
+ **Ready to transform your prompt management?**
879
+
880
+ ```bash
881
+ pip install ai-prompter
882
+ ```
883
+
884
+ Start building better AI applications with organized, maintainable prompts today!
ai_prompter-0.3.1.dist-info/RECORD ADDED
@@ -0,0 +1,6 @@
+ ai_prompter/__init__.py,sha256=oVRyjDv-V7kjIOwfkR1Hw5vXdA4pB6bwqVeviYn9AnY,13319
+ ai_prompter/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ ai_prompter-0.3.1.dist-info/METADATA,sha256=DOIo5YnRMQ5NhNMBlsaxBifoVjPZa4zbBgJ3Hpx09fo,23801
+ ai_prompter-0.3.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ ai_prompter-0.3.1.dist-info/licenses/LICENSE,sha256=cS0_fa_8BoP0PvVG8D19pn_HDJrG96hd4PyEm9nkRo8,1066
+ ai_prompter-0.3.1.dist-info/RECORD,,
ai_prompter-0.2.3.dist-info/METADATA DELETED
@@ -1,266 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: ai-prompter
3
- Version: 0.2.3
4
- Summary: A prompt management library using Jinja2 templates to build complex prompts easily.
5
- Author-email: LUIS NOVO <lfnovo@gmail.com>
6
- License: MIT
7
- License-File: LICENSE
8
- Requires-Python: >=3.10
9
- Requires-Dist: jinja2>=3.1.6
10
- Requires-Dist: pip>=25.0.1
11
- Requires-Dist: pydantic>=2.0
12
- Provides-Extra: langchain
13
- Requires-Dist: langchain-core>=0.3; extra == 'langchain'
14
- Description-Content-Type: text/markdown
15
-
16
- # AI Prompter
17
-
18
- A prompt management library using Jinja2 templates to build complex prompts easily. Supports raw text or file-based templates and integrates with LangChain.
19
-
20
- ## Features
21
-
22
- - Define prompts as Jinja templates.
23
- - Load default templates from `src/ai_prompter/prompts`.
24
- - Override templates via `PROMPTS_PATH` environment variable.
25
- - Render prompts with arbitrary data or Pydantic models.
26
- - Export to LangChain `ChatPromptTemplate`.
27
-
28
- ## Installation
29
-
30
- 1. (Optional) Create and activate a virtual environment:
31
- ```bash
32
- python3 -m venv .venv
33
- source .venv/bin/activate
34
- ```
35
- 2. Install the package:
36
- ```bash
37
- pip install .
38
- ```
39
- ### Extras
40
-
41
- To enable LangChain integration:
42
-
43
- ```bash
44
- pip install .[langchain]
45
- # or
46
- uv add langchain_core
47
- ```
48
-
49
- ## Configuration
50
-
51
- Configure a custom template path by creating a `.env` file in the project root:
52
-
53
- ```dotenv
54
- PROMPTS_PATH=path/to/custom/templates
55
- ```
56
-
57
- ## Usage
58
-
59
- ### Basic Usage
60
-
61
- ```python
62
- from ai_prompter import Prompter
63
-
64
- # Initialize with a template name
65
- prompter = Prompter('my_template')
66
-
67
- # Render a prompt with variables
68
- prompt = prompter.render({'variable': 'value'})
69
- print(prompt)
70
- ```
71
-
72
- ### Custom Prompt Directory
73
-
74
- You can specify a custom directory for your prompt templates using the `prompt_dir` parameter:
75
-
76
- ```python
77
- prompter = Prompter(template_text='Hello {{ name }}!', prompt_dir='/path/to/your/prompts')
78
- ```
79
-
80
- ### Using Environment Variable for Prompt Path
81
-
82
- Set the `PROMPTS_PATH` environment variable to point to your custom prompts directory:
83
-
84
- ```bash
85
- export PROMPTS_PATH=/path/to/your/prompts
86
- ```
87
-
88
- The `Prompter` class will check this path if no custom directory is provided in the constructor. If not set, it will also look in the current working directory and `~/ai-prompter/` as fallback options before using the default package prompts.
89
-
90
- ### Raw text template
91
-
92
- ```python
93
- from ai_prompter import Prompter
94
-
95
- template = """Write an article about {{ topic }}."""
96
- prompter = Prompter(template_text=template)
97
- prompt = prompter.render({"topic": "AI"})
98
- print(prompt) # Write an article about AI.
99
- ```
100
-
101
- ### Using File-based Templates
102
-
103
- You can store your templates in files and reference them by name (without the `.jinja` extension). The library looks for templates in the `prompts` directory by default, or you can set a custom directory with the `PROMPTS_PATH` environment variable. You can specify multiple directories separated by `:` (colon), and the library will search through them in order until a matching template is found.
104
-
105
- ```python
106
- from ai_prompter import Prompter
107
-
108
- # Set multiple search paths
109
- os.environ["PROMPTS_PATH"] = "/path/to/templates1:/path/to/templates2"
110
-
111
- prompter = Prompter(prompt_template="greet")
112
- result = prompter.render({"name": "World"})
113
- print(result) # Output depends on the content of greet.jinja in the first found path
114
- ```
115
-
116
- ### Using Raw Text Templates
117
-
118
- Alternatively, you can provide the template content directly as raw text using the `template_text` parameter or the `from_text` class method.
119
-
120
- ```python
121
- from ai_prompter import Prompter
122
-
123
- # Using template_text parameter
124
- prompter = Prompter(template_text="Hello, {{ name }}!")
125
- result = prompter.render({"name": "World"})
126
- print(result) # Output: Hello, World!
127
-
128
- # Using from_text class method
129
- prompter = Prompter.from_text("Hi, {{ person }}!", model="gpt-4")
130
- result = prompter.render({"person": "Alice"})
131
- print(result) # Output: Hi, Alice!
132
- ```
133
-
134
- ### LangChain Integration
135
-
136
- You can convert your prompts to LangChain's `ChatPromptTemplate` format for use in LangChain workflows. This works for both text-based and file-based templates.
137
-
138
- ```python
139
- from ai_prompter import Prompter
140
-
141
- # With text-based template
142
- text_prompter = Prompter(template_text="Hello, {{ name }}!")
143
- lc_text_prompt = text_prompter.to_langchain()
144
-
145
- # With file-based template
146
- file_prompter = Prompter(prompt_template="greet")
147
- lc_file_prompt = file_prompter.to_langchain()
148
- ```
149
-
150
- **Note**: LangChain integration requires the `langchain-core` package. Install it with `pip install .[langchain]`.
151
-
152
- ### Including Other Templates
153
-
154
- You can include other template files within a template using Jinja2's `{% include %}` directive. This allows you to build modular templates.
155
-
156
- ```jinja
157
- # outer.jinja
158
- This is the outer file
159
-
160
- {% include 'inner.jinja' %}
161
-
162
- This is the end of the outer file
163
- ```
164
-
165
- ```jinja
166
- # inner.jinja
167
- This is the inner file
168
-
169
- {% if type == 'a' %}
170
- You selected A
171
- {% else %}
172
- You didn't select A
173
- {% endif %}
174
- ```
175
-
176
- ```python
177
- from ai_prompter import Prompter
178
-
179
- prompter = Prompter(prompt_template="outer")
180
- prompt = prompter.render(dict(type="a"))
181
- print(prompt)
182
- # This is the outer file
183
- #
184
- # This is the inner file
185
- #
186
- # You selected A
187
- #
188
- #
189
- # This is the end of the outer file
190
- ```
191
-
192
- ### Using Variables
193
-
194
- Templates can use variables that you pass in through the `render()` method. You can use Jinja2 filters and conditionals to control the output based on your data.
195
-
196
- ```python
197
- from ai_prompter import Prompter
198
-
199
- prompter = Prompter(prompt_text="Hello {{name|default('Guest')}}!")
200
- prompt = prompter.render() # No data provided, uses default
201
- print(prompt) # Hello Guest!
202
- prompt = prompter.render({"name": "Alice"}) # Data provided
203
- print(prompt) # Hello Alice!
204
- ```
205
-
206
- The library also automatically provides a `current_time` variable with the current timestamp in format "YYYY-MM-DD HH:MM:SS".
207
-
208
- ```python
209
- from ai_prompter import Prompter
210
-
211
- prompter = Prompter(template_text="Current time: {{current_time}}")
212
- prompt = prompter.render()
213
- print(prompt) # Current time: 2025-04-19 23:28:00
214
- ```
215
-
216
- ### File-based template
217
-
218
- Place a Jinja file (e.g., `article.jinja`) in the default prompts directory (`src/ai_prompter/prompts`) or your custom path:
219
-
220
- ```jinja
221
- Write an article about {{ topic }}.
222
- ```
223
-
224
- ```python
225
- from ai_prompter import Prompter
226
-
227
- prompter = Prompter(prompt_template="article")
228
- prompt = prompter.render({"topic": "AI"})
229
- print(prompt)
230
- ```
231
-
232
- ### Jupyter Notebook
233
-
234
- See `notebooks/prompter_usage.ipynb` for interactive examples.
235
-
236
- ## Project Structure
237
-
238
- ```
239
- ai-prompter/
240
- ├── src/ai_prompter
241
- │ ├── __init__.py
242
- │ └── prompts/
243
- │ └── *.jinja
244
- ├── notebooks/
245
- │ ├── prompter_usage.ipynb
246
- │ └── prompts/
247
- ├── pyproject.toml
248
- ├── README.md
249
- └── .env (optional)
250
- ```
251
-
252
- ## Testing
253
-
254
- Run tests with:
255
-
256
- ```bash
257
- uv run pytest -v
258
- ```
259
-
260
- ## Contributing
261
-
262
- Contributions welcome! Please open issues or PRs.
263
-
264
- ## License
265
-
266
- This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
ai_prompter-0.2.3.dist-info/RECORD DELETED
@@ -1,6 +0,0 @@
- ai_prompter/__init__.py,sha256=l5PTvboNK5uUSVoAJbNuntFc3iUlVCmGsThA8Qy3WI4,11894
- ai_prompter/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- ai_prompter-0.2.3.dist-info/METADATA,sha256=o2E-K4xbBm69yJOxg6OFeqiIPUQFmwcXtDusefavdWA,6950
- ai_prompter-0.2.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- ai_prompter-0.2.3.dist-info/licenses/LICENSE,sha256=cS0_fa_8BoP0PvVG8D19pn_HDJrG96hd4PyEm9nkRo8,1066
- ai_prompter-0.2.3.dist-info/RECORD,,