echostash 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,51 @@
1
+ # Dependencies
2
+ node_modules/
3
+ __pycache__/
4
+ *.py[cod]
5
+ *$py.class
6
+ .Python
7
+ *.egg-info/
8
+ *.egg
9
+ dist/
10
+ build/
11
+ .eggs/
12
+
13
+ # Build outputs
14
+ packages/js/dist/
15
+ packages/python/dist/
16
+ packages/python/build/
17
+
18
+ # IDE
19
+ .idea/
20
+ .vscode/
21
+ *.swp
22
+ *.swo
23
+ .DS_Store
24
+
25
+ # Environment
26
+ .env
27
+ .env.local
28
+ .env.*.local
29
+ *.env
30
+
31
+ # Logs
32
+ *.log
33
+ npm-debug.log*
34
+ yarn-debug.log*
35
+ yarn-error.log*
36
+
37
+ # Testing
38
+ coverage/
39
+ .coverage
40
+ htmlcov/
41
+ .pytest_cache/
42
+ .mypy_cache/
43
+ .ruff_cache/
44
+
45
+ # OS
46
+ Thumbs.db
47
+
48
+ # Lock files (pnpm-lock.yaml is intentionally kept for JS; the rest are ignored)
49
+ package-lock.json
50
+ yarn.lock
51
+ poetry.lock
@@ -0,0 +1,310 @@
1
+ Metadata-Version: 2.4
2
+ Name: echostash
3
+ Version: 1.0.0
4
+ Summary: The universal prompt SDK. Fetch prompts from any PLP-compliant library and use them with any AI provider.
5
+ Project-URL: Homepage, https://echostash.com
6
+ Project-URL: Documentation, https://echostash.com/docs/sdk
7
+ Project-URL: Repository, https://github.com/GoReal-AI/echostash-sdk
8
+ Author-email: GoReal AI <support@echostash.com>
9
+ License-Expression: MIT
10
+ Keywords: ai,anthropic,echostash,langchain,llm,openai,plp,prompt,prompt-management
11
+ Classifier: Development Status :: 5 - Production/Stable
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.9
16
+ Classifier: Programming Language :: Python :: 3.10
17
+ Classifier: Programming Language :: Python :: 3.11
18
+ Classifier: Programming Language :: Python :: 3.12
19
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
20
+ Requires-Python: >=3.9
21
+ Requires-Dist: requests>=2.28.0
22
+ Requires-Dist: typing-extensions>=4.0.0; python_version < '3.11'
23
+ Provides-Extra: dev
24
+ Requires-Dist: mypy>=1.0.0; extra == 'dev'
25
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == 'dev'
26
+ Requires-Dist: pytest>=7.0.0; extra == 'dev'
27
+ Requires-Dist: responses>=0.23.0; extra == 'dev'
28
+ Requires-Dist: ruff>=0.1.0; extra == 'dev'
29
+ Description-Content-Type: text/markdown
30
+
31
+ # Echostash SDK for Python
32
+
33
+ The universal prompt SDK. Fetch prompts from any PLP-compliant library and use them with any AI provider.
34
+
35
+ ## Installation
36
+
37
+ ```bash
38
+ pip install echostash
39
+ ```
40
+
41
+ ## Quick Start
42
+
43
+ ```python
44
+ from echostash import Echostash
45
+ from openai import OpenAI
46
+
47
+ # Connect to any PLP-compliant server
48
+ es = Echostash("https://api.echostash.com", api_key="sk_...")
49
+
50
+ # Fetch a prompt and use with OpenAI
51
+ prompt = es.prompt("welcome-email").get()
52
+ message = prompt.with_vars(name="Alice").openai()
53
+
54
+ client = OpenAI()
55
+ response = client.chat.completions.create(
56
+ model="gpt-4",
57
+ messages=[message]
58
+ )
59
+ ```
60
+
61
+ ## The Fluent API
62
+
63
+ The SDK is designed to be intuitive and chainable:
64
+
65
+ ```python
66
+ # Fetch + substitute + convert in one line
67
+ msg = es.prompt("welcome").vars(name="Alice").openai()
68
+
69
+ # Or step by step
70
+ prompt = es.prompt("welcome").get()
71
+ rendered = prompt.with_vars(name="Alice")
72
+ message = rendered.openai()
73
+
74
+ # Fetch a specific version
75
+ v1 = es.prompt("welcome").version("1.0.0").get()
76
+
77
+ # Short version alias
78
+ v2 = es.prompt("welcome").v("2.0.0").get()
79
+ ```
80
+
81
+ ## Provider Support
82
+
83
+ ### OpenAI
84
+
85
+ ```python
86
+ from openai import OpenAI
87
+
88
+ message = prompt.with_vars(name="Alice").openai(role="system")
89
+ config = prompt.openai_config()
90
+
91
+ client = OpenAI()
92
+ response = client.chat.completions.create(
93
+ **config,
94
+ messages=[message]
95
+ )
96
+ ```
97
+
98
+ ### Anthropic
99
+
100
+ ```python
101
+ import anthropic
102
+
103
+ # As user message
104
+ message = prompt.with_vars(name="Bob").anthropic()
105
+
106
+ # As system message (Anthropic handles system separately)
107
+ system = prompt.anthropic_system()
108
+ config = prompt.anthropic_config()
109
+
110
+ client = anthropic.Anthropic()
111
+ response = client.messages.create(
112
+ **config,
113
+ system=system,
114
+ messages=[{"role": "user", "content": "Hello!"}]
115
+ )
116
+ ```
117
+
118
+ ### Google / Gemini
119
+
120
+ ```python
121
+ message = prompt.google(role="user")
122
+ # or
123
+ message = prompt.gemini()
124
+
125
+ config = prompt.google_config()
126
+ ```
127
+
128
+ ### Vercel AI SDK (for Node.js interop)
129
+
130
+ ```python
131
+ message = prompt.vercel(role="system")
132
+ ```
133
+
134
+ ### LangChain
135
+
136
+ ```python
137
+ from langchain.prompts import PromptTemplate
138
+
139
+ # As message
140
+ message = prompt.langchain(message_type="human")
141
+
142
+ # As template (with input variables)
143
+ template, input_variables = prompt.langchain_template()
144
+ prompt_template = PromptTemplate.from_template(template)
145
+ ```
146
+
147
+ ## Connect to Any PLP Server
148
+
149
+ The SDK works with any PLP-compliant prompt library:
150
+
151
+ ```python
152
+ import os
153
+
154
+ # Echostash Cloud
155
+ es = Echostash("https://api.echostash.com", api_key="sk_...")
156
+
157
+ # Local PLP server
158
+ local = Echostash("http://localhost:3000")
159
+
160
+ # Your company's prompt registry
161
+ corp = Echostash(
162
+ "https://prompts.mycompany.com",
163
+ api_key=os.environ["PROMPT_API_KEY"]
164
+ )
165
+
166
+ # Any PLP-compliant service
167
+ other = Echostash("https://plp.example.com")
168
+ ```
169
+
170
+ ## Working with Prompts
171
+
172
+ ### Fetch Prompts
173
+
174
+ ```python
175
+ # Latest version
176
+ prompt = es.prompt("marketing/welcome").get()
177
+
178
+ # Specific version
179
+ v1 = es.prompt("marketing/welcome").version("1.0.0").get()
180
+
181
+ # With variables pre-set
182
+ prompt = es.prompt("welcome").vars(name="Alice").get()
183
+ ```
184
+
185
+ ### Access Content
186
+
187
+ ```python
188
+ prompt = es.prompt("welcome").get()
189
+
190
+ # Raw content (string or list of ContentBlock)
191
+ raw = prompt.raw()
192
+
193
+ # As plain text
194
+ text = prompt.text()
195
+
196
+ # As string (equivalent to text())
197
+ print(str(prompt))
198
+ ```
199
+
200
+ ### Save Prompts
201
+
202
+ ```python
203
+ es.save(
204
+ "my-new-prompt",
205
+ content="Hello {{name}}!",
206
+ meta={
207
+ "version": "1.0.0",
208
+ "author": "me"
209
+ }
210
+ )
211
+ ```
212
+
213
+ ### Delete Prompts
214
+
215
+ ```python
216
+ es.delete("my-old-prompt")
217
+ ```
218
+
219
+ ### Server Discovery
220
+
221
+ ```python
222
+ info = es.discover()
223
+ print(info["plp_version"]) # "1.0"
224
+ print(info["capabilities"]) # {"versioning": True, ...}
225
+ ```
226
+
227
+ ## Context Manager Support
228
+
229
+ ```python
230
+ with Echostash("https://api.echostash.com", api_key="sk_...") as es:
231
+ prompt = es.prompt("welcome").get()
232
+ # Connection is properly closed after the block
233
+ ```
234
+
235
+ ## Type Hints
236
+
237
+ Full type hint support:
238
+
239
+ ```python
240
+ from echostash import (
241
+ Prompt,
242
+ PromptContent,
243
+ PromptMeta,
244
+ ModelConfig,
245
+ LoadedPrompt,
246
+ )
247
+ from echostash.types import (
248
+ OpenAIMessage,
249
+ AnthropicMessage,
250
+ GoogleMessage,
251
+ VercelMessage,
252
+ LangChainMessage,
253
+ )
254
+ ```
255
+
256
+ ## Advanced: Direct Provider Functions
257
+
258
+ For advanced use cases, you can import provider functions directly:
259
+
260
+ ```python
261
+ from echostash.providers import (
262
+ to_openai,
263
+ to_anthropic,
264
+ to_google,
265
+ to_vercel,
266
+ to_langchain,
267
+ extract_openai_config,
268
+ )
269
+
270
+ content = "Hello {{name}}!"
271
+ message = to_openai(content, role="system")
272
+ ```
273
+
274
+ ## Configuration
275
+
276
+ ```python
277
+ es = Echostash(
278
+ "https://api.echostash.com",
279
+
280
+ # API key for authentication
281
+ api_key="sk_...",
282
+
283
+ # Custom headers
284
+ headers={
285
+ "X-Custom-Header": "value"
286
+ },
287
+
288
+ # Request timeout (seconds)
289
+ timeout=10,
290
+
291
+ # Default parameter symbol for variable substitution
292
+ default_parameter_symbol="{{}}"
293
+ )
294
+ ```
295
+
296
+ ## Error Handling
297
+
298
+ ```python
299
+ from echostash import EchostashError
300
+
301
+ try:
302
+ prompt = es.prompt("not-found").get()
303
+ except EchostashError as e:
304
+ print(e.args[0]) # "HTTP 404"
305
+ print(e.status_code) # 404
306
+ ```
307
+
308
+ ## License
309
+
310
+ MIT
@@ -0,0 +1,280 @@
1
+ # Echostash SDK for Python
2
+
3
+ The universal prompt SDK. Fetch prompts from any PLP-compliant library and use them with any AI provider.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ pip install echostash
9
+ ```
10
+
11
+ ## Quick Start
12
+
13
+ ```python
14
+ from echostash import Echostash
15
+ from openai import OpenAI
16
+
17
+ # Connect to any PLP-compliant server
18
+ es = Echostash("https://api.echostash.com", api_key="sk_...")
19
+
20
+ # Fetch a prompt and use with OpenAI
21
+ prompt = es.prompt("welcome-email").get()
22
+ message = prompt.with_vars(name="Alice").openai()
23
+
24
+ client = OpenAI()
25
+ response = client.chat.completions.create(
26
+ model="gpt-4",
27
+ messages=[message]
28
+ )
29
+ ```
30
+
31
+ ## The Fluent API
32
+
33
+ The SDK is designed to be intuitive and chainable:
34
+
35
+ ```python
36
+ # Fetch + substitute + convert in one line
37
+ msg = es.prompt("welcome").vars(name="Alice").openai()
38
+
39
+ # Or step by step
40
+ prompt = es.prompt("welcome").get()
41
+ rendered = prompt.with_vars(name="Alice")
42
+ message = rendered.openai()
43
+
44
+ # Fetch a specific version
45
+ v1 = es.prompt("welcome").version("1.0.0").get()
46
+
47
+ # Short version alias
48
+ v2 = es.prompt("welcome").v("2.0.0").get()
49
+ ```
50
+
51
+ ## Provider Support
52
+
53
+ ### OpenAI
54
+
55
+ ```python
56
+ from openai import OpenAI
57
+
58
+ message = prompt.with_vars(name="Alice").openai(role="system")
59
+ config = prompt.openai_config()
60
+
61
+ client = OpenAI()
62
+ response = client.chat.completions.create(
63
+ **config,
64
+ messages=[message]
65
+ )
66
+ ```
67
+
68
+ ### Anthropic
69
+
70
+ ```python
71
+ import anthropic
72
+
73
+ # As user message
74
+ message = prompt.with_vars(name="Bob").anthropic()
75
+
76
+ # As system message (Anthropic handles system separately)
77
+ system = prompt.anthropic_system()
78
+ config = prompt.anthropic_config()
79
+
80
+ client = anthropic.Anthropic()
81
+ response = client.messages.create(
82
+ **config,
83
+ system=system,
84
+ messages=[{"role": "user", "content": "Hello!"}]
85
+ )
86
+ ```
87
+
88
+ ### Google / Gemini
89
+
90
+ ```python
91
+ message = prompt.google(role="user")
92
+ # or
93
+ message = prompt.gemini()
94
+
95
+ config = prompt.google_config()
96
+ ```
97
+
98
+ ### Vercel AI SDK (for Node.js interop)
99
+
100
+ ```python
101
+ message = prompt.vercel(role="system")
102
+ ```
103
+
104
+ ### LangChain
105
+
106
+ ```python
107
+ from langchain.prompts import PromptTemplate
108
+
109
+ # As message
110
+ message = prompt.langchain(message_type="human")
111
+
112
+ # As template (with input variables)
113
+ template, input_variables = prompt.langchain_template()
114
+ prompt_template = PromptTemplate.from_template(template)
115
+ ```
116
+
117
+ ## Connect to Any PLP Server
118
+
119
+ The SDK works with any PLP-compliant prompt library:
120
+
121
+ ```python
122
+ import os
123
+
124
+ # Echostash Cloud
125
+ es = Echostash("https://api.echostash.com", api_key="sk_...")
126
+
127
+ # Local PLP server
128
+ local = Echostash("http://localhost:3000")
129
+
130
+ # Your company's prompt registry
131
+ corp = Echostash(
132
+ "https://prompts.mycompany.com",
133
+ api_key=os.environ["PROMPT_API_KEY"]
134
+ )
135
+
136
+ # Any PLP-compliant service
137
+ other = Echostash("https://plp.example.com")
138
+ ```
139
+
140
+ ## Working with Prompts
141
+
142
+ ### Fetch Prompts
143
+
144
+ ```python
145
+ # Latest version
146
+ prompt = es.prompt("marketing/welcome").get()
147
+
148
+ # Specific version
149
+ v1 = es.prompt("marketing/welcome").version("1.0.0").get()
150
+
151
+ # With variables pre-set
152
+ prompt = es.prompt("welcome").vars(name="Alice").get()
153
+ ```
154
+
155
+ ### Access Content
156
+
157
+ ```python
158
+ prompt = es.prompt("welcome").get()
159
+
160
+ # Raw content (string or list of ContentBlock)
161
+ raw = prompt.raw()
162
+
163
+ # As plain text
164
+ text = prompt.text()
165
+
166
+ # As string (equivalent to text())
167
+ print(str(prompt))
168
+ ```
169
+
170
+ ### Save Prompts
171
+
172
+ ```python
173
+ es.save(
174
+ "my-new-prompt",
175
+ content="Hello {{name}}!",
176
+ meta={
177
+ "version": "1.0.0",
178
+ "author": "me"
179
+ }
180
+ )
181
+ ```
182
+
183
+ ### Delete Prompts
184
+
185
+ ```python
186
+ es.delete("my-old-prompt")
187
+ ```
188
+
189
+ ### Server Discovery
190
+
191
+ ```python
192
+ info = es.discover()
193
+ print(info["plp_version"]) # "1.0"
194
+ print(info["capabilities"]) # {"versioning": True, ...}
195
+ ```
196
+
197
+ ## Context Manager Support
198
+
199
+ ```python
200
+ with Echostash("https://api.echostash.com", api_key="sk_...") as es:
201
+ prompt = es.prompt("welcome").get()
202
+ # Connection is properly closed after the block
203
+ ```
204
+
205
+ ## Type Hints
206
+
207
+ Full type hint support:
208
+
209
+ ```python
210
+ from echostash import (
211
+ Prompt,
212
+ PromptContent,
213
+ PromptMeta,
214
+ ModelConfig,
215
+ LoadedPrompt,
216
+ )
217
+ from echostash.types import (
218
+ OpenAIMessage,
219
+ AnthropicMessage,
220
+ GoogleMessage,
221
+ VercelMessage,
222
+ LangChainMessage,
223
+ )
224
+ ```
225
+
226
+ ## Advanced: Direct Provider Functions
227
+
228
+ For advanced use cases, you can import provider functions directly:
229
+
230
+ ```python
231
+ from echostash.providers import (
232
+ to_openai,
233
+ to_anthropic,
234
+ to_google,
235
+ to_vercel,
236
+ to_langchain,
237
+ extract_openai_config,
238
+ )
239
+
240
+ content = "Hello {{name}}!"
241
+ message = to_openai(content, role="system")
242
+ ```
243
+
244
+ ## Configuration
245
+
246
+ ```python
247
+ es = Echostash(
248
+ "https://api.echostash.com",
249
+
250
+ # API key for authentication
251
+ api_key="sk_...",
252
+
253
+ # Custom headers
254
+ headers={
255
+ "X-Custom-Header": "value"
256
+ },
257
+
258
+ # Request timeout (seconds)
259
+ timeout=10,
260
+
261
+ # Default parameter symbol for variable substitution
262
+ default_parameter_symbol="{{}}"
263
+ )
264
+ ```
265
+
266
+ ## Error Handling
267
+
268
+ ```python
269
+ from echostash import EchostashError
270
+
271
+ try:
272
+ prompt = es.prompt("not-found").get()
273
+ except EchostashError as e:
274
+ print(e.args[0]) # "HTTP 404"
275
+ print(e.status_code) # 404
276
+ ```
277
+
278
+ ## License
279
+
280
+ MIT
@@ -0,0 +1,65 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "echostash"
7
+ version = "1.0.0"
8
+ description = "The universal prompt SDK. Fetch prompts from any PLP-compliant library and use them with any AI provider."
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ requires-python = ">=3.9"
12
+ authors = [
13
+ { name = "GoReal AI", email = "support@echostash.com" }
14
+ ]
15
+ keywords = [
16
+ "prompt",
17
+ "llm",
18
+ "ai",
19
+ "openai",
20
+ "anthropic",
21
+ "langchain",
22
+ "plp",
23
+ "echostash",
24
+ "prompt-management"
25
+ ]
26
+ classifiers = [
27
+ "Development Status :: 5 - Production/Stable",
28
+ "Intended Audience :: Developers",
29
+ "License :: OSI Approved :: MIT License",
30
+ "Programming Language :: Python :: 3",
31
+ "Programming Language :: Python :: 3.9",
32
+ "Programming Language :: Python :: 3.10",
33
+ "Programming Language :: Python :: 3.11",
34
+ "Programming Language :: Python :: 3.12",
35
+ "Topic :: Scientific/Engineering :: Artificial Intelligence",
36
+ ]
37
+ dependencies = [
38
+ "requests>=2.28.0",
39
+ "typing-extensions>=4.0.0;python_version<'3.11'",
40
+ ]
41
+
42
+ [project.optional-dependencies]
43
+ dev = [
44
+ "pytest>=7.0.0",
45
+ "pytest-asyncio>=0.21.0",
46
+ "responses>=0.23.0",
47
+ "mypy>=1.0.0",
48
+ "ruff>=0.1.0",
49
+ ]
50
+
51
+ [project.urls]
52
+ Homepage = "https://echostash.com"
53
+ Documentation = "https://echostash.com/docs/sdk"
54
+ Repository = "https://github.com/GoReal-AI/echostash-sdk"
55
+
56
+ [tool.hatch.build.targets.wheel]
57
+ packages = ["src/echostash"]
58
+
59
+ [tool.ruff]
60
+ line-length = 100
61
+ target-version = "py39"
62
+
63
+ [tool.mypy]
64
+ python_version = "3.9"
65
+ strict = true