textprompts 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff shows the changes between publicly released versions of a package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the respective public registries.
textprompts/_parser.py CHANGED
@@ -71,8 +71,12 @@ def parse_file(path: Path, *, metadata_mode: MetadataMode) -> Prompt:
71
71
  stacklevel=2,
72
72
  )
73
73
  ignore_meta = PromptMeta(title=path.stem)
74
- return Prompt(
75
- path=path, meta=ignore_meta, prompt=PromptString(textwrap.dedent(raw))
74
+ return Prompt.model_validate(
75
+ {
76
+ "path": path,
77
+ "meta": ignore_meta,
78
+ "prompt": PromptString(textwrap.dedent(raw)),
79
+ }
76
80
  )
77
81
 
78
82
  # For STRICT and ALLOW modes, try to parse front matter
@@ -149,4 +153,10 @@ def parse_file(path: Path, *, metadata_mode: MetadataMode) -> Prompt:
149
153
  if meta.title is None:
150
154
  meta.title = path.stem
151
155
 
152
- return Prompt(path=path, meta=meta, prompt=PromptString(textwrap.dedent(body)))
156
+ return Prompt.model_validate(
157
+ {
158
+ "path": path,
159
+ "meta": meta,
160
+ "prompt": PromptString(textwrap.dedent(body)),
161
+ }
162
+ )
textprompts/config.py CHANGED
@@ -2,6 +2,7 @@
2
2
  Global configuration for textprompts metadata handling.
3
3
  """
4
4
 
5
+ import os
5
6
  from enum import Enum
6
7
  from typing import Union
7
8
 
@@ -29,7 +30,13 @@ class MetadataMode(Enum):
29
30
 
30
31
 
31
32
  # Global configuration variable
32
- _METADATA_MODE: MetadataMode = MetadataMode.IGNORE
33
+ _env_mode = os.getenv("TEXTPROMPTS_METADATA_MODE")
34
+ try:
35
+ _METADATA_MODE: MetadataMode = (
36
+ MetadataMode(_env_mode.lower()) if _env_mode else MetadataMode.IGNORE
37
+ )
38
+ except ValueError:
39
+ _METADATA_MODE = MetadataMode.IGNORE
33
40
  _WARN_ON_IGNORED_META: bool = True
34
41
 
35
42
 
textprompts/models.py CHANGED
@@ -4,6 +4,7 @@ from typing import Any, Union
4
4
 
5
5
  from pydantic import BaseModel, Field, field_validator
6
6
 
7
+ from .config import MetadataMode
7
8
  from .prompt_string import PromptString
8
9
 
9
10
 
@@ -20,6 +21,15 @@ class Prompt(BaseModel):
20
21
  meta: Union[PromptMeta, None]
21
22
  prompt: PromptString
22
23
 
24
+ @classmethod
25
+ def from_path(
26
+ cls, path: Union[str, Path], *, meta: Union[MetadataMode, str, None] = None
27
+ ) -> "Prompt":
28
+ """Load a Prompt from ``path`` using ``load_prompt``."""
29
+ from .loaders import load_prompt
30
+
31
+ return load_prompt(path, meta=meta)
32
+
23
33
  @field_validator("prompt")
24
34
  @classmethod
25
35
  def prompt_not_empty(cls, v: str) -> PromptString:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: textprompts
3
- Version: 0.0.2
3
+ Version: 0.0.4
4
4
  Summary: Minimal text-based prompt-loader with TOML front-matter
5
5
  Keywords: prompts,toml,frontmatter,template
6
6
  Author: Jan Siml
@@ -24,9 +24,16 @@ Description-Content-Type: text/markdown
24
24
 
25
25
  # textprompts
26
26
 
27
+ [![PyPI version](https://img.shields.io/pypi/v/textprompts.svg)](https://pypi.org/project/textprompts/)
28
+ [![Python versions](https://img.shields.io/pypi/pyversions/textprompts.svg)](https://pypi.org/project/textprompts/)
29
+ [![CI status](https://github.com/svilupp/textprompts/workflows/CI/badge.svg)](https://github.com/svilupp/textprompts/actions)
30
+ [![Coverage](https://img.shields.io/codecov/c/github/svilupp/textprompts)](https://codecov.io/gh/svilupp/textprompts)
31
+ [![License](https://img.shields.io/pypi/l/textprompts.svg)](https://github.com/svilupp/textprompts/blob/main/LICENSE)
32
+
33
+
27
34
  > **So simple, it's not even worth vibing about coding yet it just makes so much sense.**
28
35
 
29
- Are you tired of vendors trying to sell you fancy UIs for prompt management that just make your system more confusing and harder to debug? Isn't it nice to just have your prompts **next to your code**?
36
+ Are you tired of vendors trying to sell you fancy UIs for prompt management that just make your system more confusing and harder to debug? Isn't it nice to just have your prompts **next to your code**?
30
37
 
31
38
  But then you worry: *Did my formatter change my prompt? Are those spaces at the beginning actually part of the prompt or just indentation?*
32
39
 
@@ -35,7 +42,7 @@ But then you worry: *Did my formatter change my prompt? Are those spaces at the
35
42
  ## Why textprompts?
36
43
 
37
44
  - ✅ **Prompts live next to your code** - no external systems to manage
38
- - ✅ **Git is your version control** - diff, branch, and experiment with ease
45
+ - ✅ **Git is your version control** - diff, branch, and experiment with ease
39
46
  - ✅ **No formatter headaches** - your prompts stay exactly as you wrote them
40
47
  - ✅ **Minimal markup** - just TOML front-matter when you need metadata (or no metadata if you prefer!)
41
48
  - ✅ **Zero dependencies** - well, almost (just Pydantic)
@@ -59,7 +66,6 @@ title = "Customer Greeting"
59
66
  version = "1.0.0"
60
67
  description = "Friendly greeting for customer support"
61
68
  ---
62
-
63
69
  Hello {customer_name}!
64
70
 
65
71
  Welcome to {company_name}. We're here to help you with {issue_type}.
@@ -74,11 +80,13 @@ import textprompts
74
80
 
75
81
  # Just load it - works with or without metadata
76
82
  prompt = textprompts.load_prompt("greeting.txt")
83
+ # Or simply
84
+ alt = textprompts.Prompt.from_path("greeting.txt")
77
85
 
78
86
  # Use it safely - all placeholders must be provided
79
87
  message = prompt.prompt.format(
80
88
  customer_name="Alice",
81
- company_name="ACME Corp",
89
+ company_name="ACME Corp",
82
90
  issue_type="billing question",
83
91
  agent_name="Sarah"
84
92
  )
@@ -161,7 +169,7 @@ prompt = textprompts.load_prompt("prompt.txt") # No metadata parsing
161
169
  print(prompt.meta.title) # "prompt" (from filename)
162
170
 
163
171
  # 2. ALLOW: Load metadata if present, don't worry if it's incomplete
164
- textprompts.set_metadata("allow") # Flexible metadata loading
172
+ textprompts.set_metadata("allow") # Flexible metadata loading
165
173
  prompt = textprompts.load_prompt("prompt.txt") # Loads any metadata found
166
174
 
167
175
  # 3. STRICT: Require complete metadata for production use
@@ -174,7 +182,7 @@ prompt = textprompts.load_prompt("prompt.txt", meta="strict")
174
182
 
175
183
  **Why this design?**
176
184
  - **Default = Simple**: No configuration needed, just load files
177
- - **Flexible**: Add metadata when you want structure
185
+ - **Flexible**: Add metadata when you want structure
178
186
  - **Production-Safe**: Use strict mode to catch missing metadata before deployment
179
187
 
180
188
  ## Real-World Examples
@@ -199,7 +207,7 @@ response = openai.chat.completions.create(
199
207
  )
200
208
  },
201
209
  {
202
- "role": "user",
210
+ "role": "user",
203
211
  "content": user_prompt.prompt.format(
204
212
  query="How do I return an item?",
205
213
  customer_tier="premium"
@@ -220,7 +228,6 @@ title = "Product Search Tool"
220
228
  version = "2.1.0"
221
229
  description = "Search our product catalog"
222
230
  ---
223
-
224
231
  {
225
232
  "type": "function",
226
233
  "function": {
@@ -234,7 +241,7 @@ description = "Search our product catalog"
234
241
  "description": "Search query for products"
235
242
  },
236
243
  "category": {
237
- "type": "string",
244
+ "type": "string",
238
245
  "enum": ["electronics", "clothing", "books"],
239
246
  "description": "Product category to search within"
240
247
  },
@@ -306,7 +313,6 @@ description = "What this prompt does"
306
313
  created = "2024-01-15"
307
314
  tags = ["customer-support", "greeting"]
308
315
  ---
309
-
310
316
  Your prompt content goes here.
311
317
 
312
318
  Use {variables} for templating.
@@ -317,13 +323,17 @@ Use {variables} for templating.
317
323
  Choose the right level of strictness for your use case:
318
324
 
319
325
  1. **IGNORE** (default) - Simple text file loading, filename becomes title
320
- 2. **ALLOW** - Load metadata if present, don't worry about completeness
326
+ 2. **ALLOW** - Load metadata if present, don't worry about completeness
321
327
  3. **STRICT** - Require complete metadata (title, description, version) for production safety
322
328
 
329
+ You can also set the environment variable `TEXTPROMPTS_METADATA_MODE` to one of
330
+ `strict`, `allow`, or `ignore` before importing the library to configure the
331
+ default mode.
332
+
323
333
  ```python
324
334
  # Set globally
325
335
  textprompts.set_metadata("ignore") # Default: simple file loading
326
- textprompts.set_metadata("allow") # Flexible: load any metadata
336
+ textprompts.set_metadata("allow") # Flexible: load any metadata
327
337
  textprompts.set_metadata("strict") # Production: require complete metadata
328
338
 
329
339
  # Or override per prompt
@@ -401,7 +411,7 @@ template = PromptString("Hello {name}, you are {role}")
401
411
  result = template.format(name="Alice", role="admin") # ✅ Works
402
412
  result = template.format(name="Alice") # ❌ Raises ValueError
403
413
 
404
- # Partial formatting - replace only available placeholders
414
+ # Partial formatting - replace only available placeholders
405
415
  partial = template.format(name="Alice", skip_validation=True) # ✅ "Hello Alice, you are {role}"
406
416
 
407
417
  # Access placeholder information
@@ -493,4 +503,4 @@ MIT License - see [LICENSE](LICENSE) for details.
493
503
 
494
504
  ---
495
505
 
496
- **textprompts** - Because your prompts deserve better than being buried in code strings. 🚀
506
+ **textprompts** - Because your prompts deserve better than being buried in code strings. 🚀
@@ -1,17 +1,17 @@
1
1
  textprompts/__init__.py,sha256=f17693e986f73978dc6ebecd683d3e812bc66b0f0691326957f6ea5e54fe7a87,831
2
2
  textprompts/__main__.py,sha256=aee028e84759ad217eee72d3c3d7460ba1d46a25186d02b71c85078541db7f81,282
3
- textprompts/_parser.py,sha256=40b8b64ca70398b8ee2093bb9e279d334fcfa3c31c7b6a30c37143596ea46c86,5793
3
+ textprompts/_parser.py,sha256=c6f9b91ff06f6157003c5b7ab128bc352d81ea6bd4d63fde43287d682176e79c,5969
4
4
  textprompts/cli.py,sha256=8db7c429a182be9e8e69e11907fba3ee4465e391c580a96e25ee6ae566296ac0,855
5
- textprompts/config.py,sha256=fe7c13db48a9081d2ba4fb0c925636d08f894d49a721307f885b5a360a7bc45d,3334
5
+ textprompts/config.py,sha256=445a439432cf25141bbaa199cef67ceabed8a6505c6169b6e386f3ad87efb17b,3530
6
6
  textprompts/errors.py,sha256=7eda4a1bdf4ee8a50b420886d2016a52923baa05a5b5a65d6f582e3e500290d2,354
7
7
  textprompts/loaders.py,sha256=f6ddb9b6ea3f35d358f7d03e94e1aa781887e2d122f0b31345818ee458364083,4199
8
- textprompts/models.py,sha256=ca13c427ec92b80c00b05fe2b4fc43ddda793ba2bb1bfeecb4ef0383f6afb0b7,2030
8
+ textprompts/models.py,sha256=61dfcf233c3887a7573f19402ab2af7b00b287127eeb1e0b29e69814007a4ddb,2354
9
9
  textprompts/placeholder_utils.py,sha256=a6df51ea0e7474a68a4f036fce0f647069be1395f1980db57decec7e01246772,4476
10
10
  textprompts/prompt_string.py,sha256=4b7fefaca6c0c0a7edf67ae0213f77d32d21ff4735d458cd03c9f9bc885b804f,2156
11
11
  textprompts/py.typed,sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,0
12
12
  textprompts/safe_string.py,sha256=5bed8e93d404133591a9cc9d2f3260efe07de6030553d247edf3fceae025a198,195
13
13
  textprompts/savers.py,sha256=0a46c5d2a1ffb24398e6380db3a090058c4bdd13369510ad2ee8506d143c47a1,2040
14
- textprompts-0.0.2.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
15
- textprompts-0.0.2.dist-info/entry_points.txt,sha256=f8f14b032092a81e77431911104853b39293c983c9390aa11fe023e8bcd5c049,54
16
- textprompts-0.0.2.dist-info/METADATA,sha256=f78de4d094ffe79b3553596c21b98fe814e5cfc29d596c2b2402a5bb7c23e42f,14821
17
- textprompts-0.0.2.dist-info/RECORD,,
14
+ textprompts-0.0.4.dist-info/WHEEL,sha256=ab6157bc637547491fb4567cd7ddf26b04d63382916ca16c29a5c8e94c9c9ef7,79
15
+ textprompts-0.0.4.dist-info/entry_points.txt,sha256=f8f14b032092a81e77431911104853b39293c983c9390aa11fe023e8bcd5c049,54
16
+ textprompts-0.0.4.dist-info/METADATA,sha256=7ff0dd7d497b84f8b20922a37f0c4f97355aa1bb50a77a9216949da7e3ee49a4,15622
17
+ textprompts-0.0.4.dist-info/RECORD,,