textprompts-0.0.3-py3-none-any.whl → textprompts-0.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
textprompts/models.py CHANGED
@@ -21,22 +21,14 @@ class Prompt(BaseModel):
     meta: Union[PromptMeta, None]
     prompt: PromptString
 
-    def __init__(
-        self,
-        path: Union[str, Path],
-        meta: Union[PromptMeta, MetadataMode, str, None] = None,
-        prompt: Union[str, PromptString, None] = None,
-    ) -> None:
-        """Initialize Prompt from fields or load from file."""
-        if prompt is None:
-            from .loaders import load_prompt
-
-            loaded = load_prompt(path, meta=meta)
-            super().__init__(**loaded.model_dump())
-        else:
-            if isinstance(prompt, str):
-                prompt = PromptString(prompt)
-            super().__init__(path=Path(path), meta=meta, prompt=prompt)
+    @classmethod
+    def from_path(
+        cls, path: Union[str, Path], *, meta: Union[MetadataMode, str, None] = None
+    ) -> "Prompt":
+        """Load a Prompt from ``path`` using ``load_prompt``."""
+        from .loaders import load_prompt
+
+        return load_prompt(path, meta=meta)
 
     @field_validator("prompt")
     @classmethod
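The models.py change above drops the 0.0.3 `__init__` overload, which loaded a file when called as `Prompt(path)`, in favour of an explicit `Prompt.from_path()` classmethod that delegates to `load_prompt`. A minimal sketch of the call-site migration, using only names that appear elsewhere in this diff (the placeholder keywords come from the README's `greeting.txt` quick-start example):

```python
import textprompts

# 0.0.3 allowed loading straight from the constructor:
#   prompt = textprompts.Prompt("greeting.txt")
# 0.0.4 makes the file load explicit via a classmethod:
prompt = textprompts.Prompt.from_path("greeting.txt")

# load_prompt() is untouched in this release (loaders.py keeps the same hash
# in RECORD below), and from_path() simply delegates to it:
same_prompt = textprompts.load_prompt("greeting.txt")

# Formatting is unaffected: every placeholder must still be supplied.
message = prompt.prompt.format(
    customer_name="Alice",
    company_name="ACME Corp",
    issue_type="billing question",
    agent_name="Sarah",
)
```

Note that `meta` on `from_path()` is keyword-only and typed as `MetadataMode`/`str`/`None`; the old signature's option of passing a `PromptMeta` instance is gone.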
textprompts-0.0.3.dist-info/METADATA → textprompts-0.0.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: textprompts
-Version: 0.0.3
+Version: 0.0.4
 Summary: Minimal text-based prompt-loader with TOML front-matter
 Keywords: prompts,toml,frontmatter,template
 Author: Jan Siml
@@ -33,7 +33,7 @@ Description-Content-Type: text/markdown
 
 > **So simple, it's not even worth vibing about coding yet it just makes so much sense.**
 
-Are you tired of vendors trying to sell you fancy UIs for prompt management that just make your system more confusing and harder to debug? Isn't it nice to just have your prompts **next to your code**?
+Are you tired of vendors trying to sell you fancy UIs for prompt management that just make your system more confusing and harder to debug? Isn't it nice to just have your prompts **next to your code**?
 
 But then you worry: *Did my formatter change my prompt? Are those spaces at the beginning actually part of the prompt or just indentation?*
 
@@ -42,7 +42,7 @@ But then you worry: *Did my formatter change my prompt? Are those spaces at the
 ## Why textprompts?
 
 - ✅ **Prompts live next to your code** - no external systems to manage
-- ✅ **Git is your version control** - diff, branch, and experiment with ease
+- ✅ **Git is your version control** - diff, branch, and experiment with ease
 - ✅ **No formatter headaches** - your prompts stay exactly as you wrote them
 - ✅ **Minimal markup** - just TOML front-matter when you need metadata (or no metadata if you prefer!)
 - ✅ **Zero dependencies** - well, almost (just Pydantic)
@@ -81,12 +81,12 @@ import textprompts
 # Just load it - works with or without metadata
 prompt = textprompts.load_prompt("greeting.txt")
 # Or simply
-alt = textprompts.Prompt("greeting.txt")
+alt = textprompts.Prompt.from_path("greeting.txt")
 
 # Use it safely - all placeholders must be provided
 message = prompt.prompt.format(
     customer_name="Alice",
-    company_name="ACME Corp",
+    company_name="ACME Corp",
     issue_type="billing question",
     agent_name="Sarah"
 )
@@ -169,7 +169,7 @@ prompt = textprompts.load_prompt("prompt.txt") # No metadata parsing
 print(prompt.meta.title) # "prompt" (from filename)
 
 # 2. ALLOW: Load metadata if present, don't worry if it's incomplete
-textprompts.set_metadata("allow") # Flexible metadata loading
+textprompts.set_metadata("allow") # Flexible metadata loading
 prompt = textprompts.load_prompt("prompt.txt") # Loads any metadata found
 
 # 3. STRICT: Require complete metadata for production use
@@ -182,7 +182,7 @@ prompt = textprompts.load_prompt("prompt.txt", meta="strict")
 
 **Why this design?**
 - **Default = Simple**: No configuration needed, just load files
-- **Flexible**: Add metadata when you want structure
+- **Flexible**: Add metadata when you want structure
 - **Production-Safe**: Use strict mode to catch missing metadata before deployment
 
 ## Real-World Examples
@@ -207,7 +207,7 @@ response = openai.chat.completions.create(
             )
         },
         {
-            "role": "user",
+            "role": "user",
             "content": user_prompt.prompt.format(
                 query="How do I return an item?",
                 customer_tier="premium"
@@ -241,7 +241,7 @@ description = "Search our product catalog"
             "description": "Search query for products"
         },
         "category": {
-            "type": "string",
+            "type": "string",
             "enum": ["electronics", "clothing", "books"],
             "description": "Product category to search within"
         },
@@ -323,7 +323,7 @@ Use {variables} for templating.
 Choose the right level of strictness for your use case:
 
 1. **IGNORE** (default) - Simple text file loading, filename becomes title
-2. **ALLOW** - Load metadata if present, don't worry about completeness
+2. **ALLOW** - Load metadata if present, don't worry about completeness
 3. **STRICT** - Require complete metadata (title, description, version) for production safety
 
 You can also set the environment variable `TEXTPROMPTS_METADATA_MODE` to one of
@@ -333,7 +333,7 @@ default mode.
 ```python
 # Set globally
 textprompts.set_metadata("ignore") # Default: simple file loading
-textprompts.set_metadata("allow") # Flexible: load any metadata
+textprompts.set_metadata("allow") # Flexible: load any metadata
 textprompts.set_metadata("strict") # Production: require complete metadata
 
 # Or override per prompt
@@ -411,7 +411,7 @@ template = PromptString("Hello {name}, you are {role}")
 result = template.format(name="Alice", role="admin") # ✅ Works
 result = template.format(name="Alice") # ❌ Raises ValueError
 
-# Partial formatting - replace only available placeholders
+# Partial formatting - replace only available placeholders
 partial = template.format(name="Alice", skip_validation=True) # ✅ "Hello Alice, you are {role}"
 
 # Access placeholder information
@@ -503,4 +503,4 @@ MIT License - see [LICENSE](LICENSE) for details.
 
 ---
 
-**textprompts** - Because your prompts deserve better than being buried in code strings. 🚀
+**textprompts** - Because your prompts deserve better than being buried in code strings. 🚀
textprompts-0.0.3.dist-info/RECORD → textprompts-0.0.4.dist-info/RECORD CHANGED
@@ -5,13 +5,13 @@ textprompts/cli.py,sha256=8db7c429a182be9e8e69e11907fba3ee4465e391c580a96e25ee6a
 textprompts/config.py,sha256=445a439432cf25141bbaa199cef67ceabed8a6505c6169b6e386f3ad87efb17b,3530
 textprompts/errors.py,sha256=7eda4a1bdf4ee8a50b420886d2016a52923baa05a5b5a65d6f582e3e500290d2,354
 textprompts/loaders.py,sha256=f6ddb9b6ea3f35d358f7d03e94e1aa781887e2d122f0b31345818ee458364083,4199
-textprompts/models.py,sha256=08c3e29719eafa74bfc0946d8a1c013b1283e351de6489920439961068ddbbd5,2673
+textprompts/models.py,sha256=61dfcf233c3887a7573f19402ab2af7b00b287127eeb1e0b29e69814007a4ddb,2354
 textprompts/placeholder_utils.py,sha256=a6df51ea0e7474a68a4f036fce0f647069be1395f1980db57decec7e01246772,4476
 textprompts/prompt_string.py,sha256=4b7fefaca6c0c0a7edf67ae0213f77d32d21ff4735d458cd03c9f9bc885b804f,2156
 textprompts/py.typed,sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,0
 textprompts/safe_string.py,sha256=5bed8e93d404133591a9cc9d2f3260efe07de6030553d247edf3fceae025a198,195
 textprompts/savers.py,sha256=0a46c5d2a1ffb24398e6380db3a090058c4bdd13369510ad2ee8506d143c47a1,2040
-textprompts-0.0.3.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
-textprompts-0.0.3.dist-info/entry_points.txt,sha256=f8f14b032092a81e77431911104853b39293c983c9390aa11fe023e8bcd5c049,54
-textprompts-0.0.3.dist-info/METADATA,sha256=b7740af134dc39d47603f5b5f6250901dcda87dc51ca0b66f05ea2c6099d1f40,15627
-textprompts-0.0.3.dist-info/RECORD,,
+textprompts-0.0.4.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
+textprompts-0.0.4.dist-info/entry_points.txt,sha256=f8f14b032092a81e77431911104853b39293c983c9390aa11fe023e8bcd5c049,54
+textprompts-0.0.4.dist-info/METADATA,sha256=7ff0dd7d497b84f8b20922a37f0c4f97355aa1bb50a77a9216949da7e3ee49a4,15622
+textprompts-0.0.4.dist-info/RECORD,,