podstack 1.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. podstack-1.2.0/LICENSE +21 -0
  2. podstack-1.2.0/PKG-INFO +299 -0
  3. podstack-1.2.0/README.md +247 -0
  4. podstack-1.2.0/podstack/__init__.py +222 -0
  5. podstack-1.2.0/podstack/annotations.py +725 -0
  6. podstack-1.2.0/podstack/client.py +322 -0
  7. podstack-1.2.0/podstack/exceptions.py +125 -0
  8. podstack-1.2.0/podstack/execution.py +291 -0
  9. podstack-1.2.0/podstack/gpu_runner.py +1141 -0
  10. podstack-1.2.0/podstack/models.py +274 -0
  11. podstack-1.2.0/podstack/notebook.py +410 -0
  12. podstack-1.2.0/podstack/registry/__init__.py +402 -0
  13. podstack-1.2.0/podstack/registry/client.py +957 -0
  14. podstack-1.2.0/podstack/registry/exceptions.py +107 -0
  15. podstack-1.2.0/podstack/registry/experiment.py +227 -0
  16. podstack-1.2.0/podstack/registry/model.py +273 -0
  17. podstack-1.2.0/podstack/registry/model_utils.py +231 -0
  18. podstack-1.2.0/podstack.egg-info/PKG-INFO +299 -0
  19. podstack-1.2.0/podstack.egg-info/SOURCES.txt +30 -0
  20. podstack-1.2.0/podstack.egg-info/dependency_links.txt +1 -0
  21. podstack-1.2.0/podstack.egg-info/requires.txt +30 -0
  22. podstack-1.2.0/podstack.egg-info/top_level.txt +2 -0
  23. podstack-1.2.0/podstack_gpu/__init__.py +126 -0
  24. podstack-1.2.0/podstack_gpu/app.py +675 -0
  25. podstack-1.2.0/podstack_gpu/exceptions.py +35 -0
  26. podstack-1.2.0/podstack_gpu/image.py +325 -0
  27. podstack-1.2.0/podstack_gpu/runner.py +746 -0
  28. podstack-1.2.0/podstack_gpu/secret.py +189 -0
  29. podstack-1.2.0/podstack_gpu/utils.py +203 -0
  30. podstack-1.2.0/podstack_gpu/volume.py +198 -0
  31. podstack-1.2.0/pyproject.toml +82 -0
  32. podstack-1.2.0/setup.cfg +4 -0
podstack-1.2.0/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024-2026 Podstack
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
podstack-1.2.0/PKG-INFO ADDED
@@ -0,0 +1,299 @@
+ Metadata-Version: 2.4
+ Name: podstack
+ Version: 1.2.0
+ Summary: Official Python SDK for Podstack GPU Notebook Platform
+ Author-email: Podstack <support@podstack.io>
+ License-Expression: MIT
+ Project-URL: Homepage, https://podstack.io
+ Project-URL: Documentation, https://docs.podstack.io
+ Project-URL: Repository, https://github.com/podstack/podstack-python
+ Project-URL: Issues, https://github.com/podstack/podstack-python/issues
+ Keywords: gpu,notebook,machine-learning,deep-learning,cloud,jupyter
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: httpx>=0.24.0
+ Requires-Dist: requests>=2.28.0
+ Provides-Extra: torch
+ Requires-Dist: torch; extra == "torch"
+ Provides-Extra: tensorflow
+ Requires-Dist: tensorflow; extra == "tensorflow"
+ Provides-Extra: sklearn
+ Requires-Dist: scikit-learn; extra == "sklearn"
+ Provides-Extra: huggingface
+ Requires-Dist: transformers; extra == "huggingface"
+ Requires-Dist: safetensors; extra == "huggingface"
+ Provides-Extra: all
+ Requires-Dist: torch; extra == "all"
+ Requires-Dist: tensorflow; extra == "all"
+ Requires-Dist: scikit-learn; extra == "all"
+ Requires-Dist: transformers; extra == "all"
+ Requires-Dist: safetensors; extra == "all"
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+ Requires-Dist: black>=23.0.0; extra == "dev"
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
+ Requires-Dist: ruff>=0.0.270; extra == "dev"
+ Dynamic: license-file
+
+ # Podstack Python SDK
+
+ Official Python SDK for the Podstack GPU Notebook Platform. Launch GPU notebooks in under 1 second and execute ML workloads with ease.
+
+ ## Installation
+
+ ```bash
+ pip install podstack
+ ```
+
+ ## Quick Start
+
+ ```python
+ import asyncio
+ from podstack import Client
+
+ async def main():
+     async with Client(api_key="your-api-key") as client:
+         # Create a GPU notebook
+         notebook = await client.notebooks.create(
+             name="my-experiment",
+             gpu_type="A100",
+             environment="pytorch"
+         )
+
+         # Execute code
+         result = await notebook.execute("""
+ import torch
+ print(f"GPU: {torch.cuda.get_device_name(0)}")
+ print(f"Memory: {torch.cuda.get_device_properties(0).total_memory / 1e9:.1f} GB")
+ """)
+
+         print(result.output)
+
+         # Save a version
+         version = await notebook.save(message="Initial experiment")
+
+         # Stop when done
+         await notebook.stop()
+
+ asyncio.run(main())
+ ```
+
+ ## Sync Usage
+
+ For simple scripts, use the sync wrappers:
+
+ ```python
+ from podstack import Client
+
+ client = Client(api_key="your-api-key")
+
+ # Create notebook
+ notebook = client.sync_create_notebook(name="quick-test", gpu_type="A10")
+
+ # Run code
+ result = client.sync_run("print('Hello GPU!')", gpu_type="A10")
+ print(result.output)
+ ```
+
+ ## Features
+
+ ### Notebooks
+
+ ```python
+ # Create with options
+ notebook = await client.notebooks.create(
+     name="training-run",
+     gpu_type="A100",
+     environment="pytorch",
+     project_id="proj_xxx",
+     idle_timeout_minutes=60,
+     auto_shutdown_enabled=True,
+     metadata={"experiment": "v2"}
+ )
+
+ # List notebooks
+ notebooks = await client.notebooks.list(status="running")
+
+ # Execute code
+ result = await notebook.execute("import torch; print(torch.cuda.device_count())")
+
+ # Access JupyterLab
+ print(f"JupyterLab: {notebook.jupyter_url}")
+
+ # Stop/Start
+ await notebook.stop()
+ await notebook.start()
+ ```
+
+ ### Serverless Executions
+
+ Run code without managing notebooks:
+
+ ```python
+ # Quick execution
+ result = await client.executions.run(
+     code="print('Hello!')",
+     gpu_type="A10",
+     environment="pytorch"
+ )
+
+ # Non-blocking execution
+ execution = await client.executions.create(
+     code=long_running_code,
+     gpu_type="H100",
+     timeout_seconds=3600
+ )
+
+ # Check status later
+ await execution.refresh()
+ if execution.is_complete:
+     print(execution.output)
+ ```
+
+ ### Notebook Versioning
+
+ Git-like versioning for notebooks:
+
+ ```python
+ # Save a version
+ version = await notebook.save(message="Added training loop")
+
+ # List versions
+ versions = await notebook.list_versions()
+
+ # Restore a version
+ await notebook.restore_version(version.id)
+
+ # Create a branch
+ await notebook.create_branch("experiment-v2", from_version_id=version.id)
+ ```
+
+ ### Projects
+
+ Organize notebooks into projects:
+
+ ```python
+ # Create project
+ project = await client.create_project(
+     name="ML Research",
+     description="Transformer experiments"
+ )
+
+ # Create notebook in project
+ notebook = await client.notebooks.create(
+     name="attention-study",
+     gpu_type="A100",
+     project_id=project.id
+ )
+
+ # List project notebooks
+ notebooks = await client.notebooks.list(project_id=project.id)
+ ```
+
+ ### Billing & Usage
+
+ ```python
+ # Check balance
+ balance = await client.get_wallet_balance()
+ print(f"Balance: ₹{balance.balance:.2f}")
+
+ # Get usage
+ usage = await client.get_usage(
+     start_date="2024-01-01",
+     end_date="2024-01-31",
+     group_by="day"
+ )
+ print(f"Total cost: ₹{usage.total_cost:.2f}")
+ ```
+
+ ### GPU Types
+
+ ```python
+ # List available GPUs
+ gpus = await client.list_gpus()
+ for gpu in gpus:
+     print(f"{gpu.type}: {gpu.memory_gb}GB, ₹{gpu.price_per_hour_paise/100:.2f}/hr")
+ ```
+
+ Available GPU types:
+ - `T4` - 16GB, budget-friendly
+ - `L4` - 24GB, inference optimized
+ - `A10` - 24GB, balanced
+ - `A100_40GB` - 40GB, training
+ - `A100_80GB` - 80GB, large models
+ - `H100` - 80GB, fastest
+
+ ### Webhooks
+
+ ```python
+ # Create webhook
+ webhook = await client.create_webhook(
+     url="https://your-server.com/webhook",
+     events=["notebook.started", "execution.completed"]
+ )
+
+ # List webhooks
+ webhooks = await client.list_webhooks()
+ ```
+
+ ## Error Handling
+
+ ```python
+ from podstack import (
+     Client,
+     PodstackError,
+     AuthenticationError,
+     GPUNotAvailableError,
+     RateLimitError,
+     ExecutionTimeoutError
+ )
+
+ try:
+     async with Client(api_key="invalid") as client:
+         await client.notebooks.create(name="test", gpu_type="A100")
+ except AuthenticationError:
+     print("Invalid API key")
+ except GPUNotAvailableError as e:
+     print(f"GPU {e.gpu_type} not available, try: {e.available_types}")
+ except RateLimitError as e:
+     print(f"Rate limited, retry after {e.retry_after}s")
+ except ExecutionTimeoutError as e:
+     print(f"Execution {e.execution_id} timed out")
+ except PodstackError as e:
+     print(f"Error [{e.code}]: {e.message}")
+ ```
+
+ ## Configuration
+
+ ```python
+ # Environment variables
+ # PODSTACK_API_KEY=psk_live_xxxxx
+ # PODSTACK_BASE_URL=https://api.podstack.io/v1
+
+ # Or pass directly
+ client = Client(
+     api_key="psk_live_xxxxx",
+     base_url="https://api.podstack.io/v1",
+     timeout=60.0,
+     max_retries=5
+ )
+ ```
+
+ ## License
+
+ MIT License - see LICENSE for details.
podstack-1.2.0/README.md ADDED
@@ -0,0 +1,247 @@
+ # Podstack Python SDK
+
+ Official Python SDK for the Podstack GPU Notebook Platform. Launch GPU notebooks in under 1 second and execute ML workloads with ease.
+
+ ## Installation
+
+ ```bash
+ pip install podstack
+ ```
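+
+ The package metadata also declares optional extras for common ML stacks (torch, tensorflow, sklearn, huggingface, all, dev); these can be pulled in with pip's extras syntax, for example `pip install "podstack[torch]"` or `pip install "podstack[all]"`.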
+
+ ## Quick Start
+
+ ```python
+ import asyncio
+ from podstack import Client
+
+ async def main():
+     async with Client(api_key="your-api-key") as client:
+         # Create a GPU notebook
+         notebook = await client.notebooks.create(
+             name="my-experiment",
+             gpu_type="A100",
+             environment="pytorch"
+         )
+
+         # Execute code
+         result = await notebook.execute("""
+ import torch
+ print(f"GPU: {torch.cuda.get_device_name(0)}")
+ print(f"Memory: {torch.cuda.get_device_properties(0).total_memory / 1e9:.1f} GB")
+ """)
+
+         print(result.output)
+
+         # Save a version
+         version = await notebook.save(message="Initial experiment")
+
+         # Stop when done
+         await notebook.stop()
+
+ asyncio.run(main())
+ ```
+
+ ## Sync Usage
+
+ For simple scripts, use the sync wrappers:
+
+ ```python
+ from podstack import Client
+
+ client = Client(api_key="your-api-key")
+
+ # Create notebook
+ notebook = client.sync_create_notebook(name="quick-test", gpu_type="A10")
+
+ # Run code
+ result = client.sync_run("print('Hello GPU!')", gpu_type="A10")
+ print(result.output)
+ ```
+
+ ## Features
+
+ ### Notebooks
+
+ ```python
+ # Create with options
+ notebook = await client.notebooks.create(
+     name="training-run",
+     gpu_type="A100",
+     environment="pytorch",
+     project_id="proj_xxx",
+     idle_timeout_minutes=60,
+     auto_shutdown_enabled=True,
+     metadata={"experiment": "v2"}
+ )
+
+ # List notebooks
+ notebooks = await client.notebooks.list(status="running")
+
+ # Execute code
+ result = await notebook.execute("import torch; print(torch.cuda.device_count())")
+
+ # Access JupyterLab
+ print(f"JupyterLab: {notebook.jupyter_url}")
+
+ # Stop/Start
+ await notebook.stop()
+ await notebook.start()
+ ```
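+
+ When a script owns a notebook's full lifecycle, it is worth guarding the calls above with `try`/`finally` so the GPU is released even if execution fails. A minimal sketch using only the calls shown above:
+
+ ```python
+ notebook = await client.notebooks.create(
+     name="training-run",
+     gpu_type="A100",
+     environment="pytorch"
+ )
+ try:
+     result = await notebook.execute("import torch; print(torch.__version__)")
+     print(result.output)
+ finally:
+     # Always stop the notebook so it does not keep accruing hourly charges after an error.
+     await notebook.stop()
+ ```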
+
+ ### Serverless Executions
+
+ Run code without managing notebooks:
+
+ ```python
+ # Quick execution
+ result = await client.executions.run(
+     code="print('Hello!')",
+     gpu_type="A10",
+     environment="pytorch"
+ )
+
+ # Non-blocking execution
+ execution = await client.executions.create(
+     code=long_running_code,
+     gpu_type="H100",
+     timeout_seconds=3600
+ )
+
+ # Check status later
+ await execution.refresh()
+ if execution.is_complete:
+     print(execution.output)
+ ```
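+
+ For long runs, the `refresh()`/`is_complete` pattern above can be wrapped in a small polling helper. A minimal sketch (the helper name, poll interval, and timeout are illustrative, not part of the SDK):
+
+ ```python
+ import asyncio
+
+ async def wait_for_completion(execution, poll_seconds=5.0, max_wait_seconds=3600):
+     """Poll a non-blocking execution until it reports completion."""
+     waited = 0.0
+     while not execution.is_complete:
+         if waited >= max_wait_seconds:
+             raise TimeoutError("gave up waiting for the execution to finish")
+         await asyncio.sleep(poll_seconds)
+         waited += poll_seconds
+         await execution.refresh()  # re-fetch the latest status from the API
+     return execution.output
+
+ output = await wait_for_completion(execution)
+ print(output)
+ ```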
+
+ ### Notebook Versioning
+
+ Git-like versioning for notebooks:
+
+ ```python
+ # Save a version
+ version = await notebook.save(message="Added training loop")
+
+ # List versions
+ versions = await notebook.list_versions()
+
+ # Restore a version
+ await notebook.restore_version(version.id)
+
+ # Create a branch
+ await notebook.create_branch("experiment-v2", from_version_id=version.id)
+ ```
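+
+ The calls above compose into a simple rollback; a minimal sketch, assuming `list_versions()` returns the newest version first (check the ordering if it matters for your use case):
+
+ ```python
+ versions = await notebook.list_versions()
+ if versions:
+     latest = versions[0]  # assumed newest-first ordering
+     await notebook.restore_version(latest.id)
+ ```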
+
+ ### Projects
+
+ Organize notebooks into projects:
+
+ ```python
+ # Create project
+ project = await client.create_project(
+     name="ML Research",
+     description="Transformer experiments"
+ )
+
+ # Create notebook in project
+ notebook = await client.notebooks.create(
+     name="attention-study",
+     gpu_type="A100",
+     project_id=project.id
+ )
+
+ # List project notebooks
+ notebooks = await client.notebooks.list(project_id=project.id)
+ ```
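+
+ Project-level filtering also makes bulk cleanup straightforward; a minimal sketch, assuming `list()` accepts the `project_id` and `status` filters together (they are only shown separately above):
+
+ ```python
+ # Stop every running notebook in a project at the end of a working session.
+ running = await client.notebooks.list(project_id=project.id, status="running")
+ for nb in running:
+     await nb.stop()
+ ```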
155
+
156
+ ### Billing & Usage
157
+
158
+ ```python
159
+ # Check balance
160
+ balance = await client.get_wallet_balance()
161
+ print(f"Balance: ₹{balance.balance:.2f}")
162
+
163
+ # Get usage
164
+ usage = await client.get_usage(
165
+ start_date="2024-01-01",
166
+ end_date="2024-01-31",
167
+ group_by="day"
168
+ )
169
+ print(f"Total cost: ₹{usage.total_cost:.2f}")
170
+ ```
171
+
172
+ ### GPU Types
173
+
174
+ ```python
175
+ # List available GPUs
176
+ gpus = await client.list_gpus()
177
+ for gpu in gpus:
178
+ print(f"{gpu.type}: {gpu.memory_gb}GB, ₹{gpu.price_per_hour_paise/100:.2f}/hr")
179
+ ```
180
+
181
+ Available GPU types:
182
+ - `T4` - 16GB, budget-friendly
183
+ - `L4` - 24GB, inference optimized
184
+ - `A10` - 24GB, balanced
185
+ - `A100_40GB` - 40GB, training
186
+ - `A100_80GB` - 80GB, large models
187
+ - `H100` - 80GB, fastest
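+
+ The same fields can drive programmatic selection and rough cost estimates. A minimal sketch, assuming each entry exposes the `type`, `memory_gb`, and `price_per_hour_paise` attributes used above (the helper itself is illustrative, not part of the SDK):
+
+ ```python
+ async def cheapest_gpu_with_memory(client, min_memory_gb):
+     """Pick the lowest-priced GPU type with at least `min_memory_gb` of memory."""
+     gpus = await client.list_gpus()
+     candidates = [g for g in gpus if g.memory_gb >= min_memory_gb]
+     if not candidates:
+         raise ValueError(f"no GPU with at least {min_memory_gb} GB is available")
+     return min(candidates, key=lambda g: g.price_per_hour_paise)
+
+ gpu = await cheapest_gpu_with_memory(client, 40)
+ # Prices are quoted in paise per hour, so divide by 100 for rupees.
+ print(f"{gpu.type}: ~₹{gpu.price_per_hour_paise / 100 * 2:.2f} for a 2-hour run")
+ ```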
+
+ ### Webhooks
+
+ ```python
+ # Create webhook
+ webhook = await client.create_webhook(
+     url="https://your-server.com/webhook",
+     events=["notebook.started", "execution.completed"]
+ )
+
+ # List webhooks
+ webhooks = await client.list_webhooks()
+ ```
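+
+ On the receiving side, any HTTP endpoint that accepts POST requests will do. A bare-bones sketch using only the standard library; the payload field name (`event`) is an assumption, so check the platform documentation for the actual schema and any signature-verification scheme:
+
+ ```python
+ import json
+ from http.server import BaseHTTPRequestHandler, HTTPServer
+
+ class WebhookHandler(BaseHTTPRequestHandler):
+     def do_POST(self):
+         length = int(self.headers.get("Content-Length", 0))
+         payload = json.loads(self.rfile.read(length) or b"{}")
+         event = payload.get("event")  # e.g. "notebook.started" (assumed field name)
+         print(f"received webhook event: {event}")
+         self.send_response(200)
+         self.end_headers()
+
+ HTTPServer(("0.0.0.0", 8000), WebhookHandler).serve_forever()
+ ```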
+
+ ## Error Handling
+
+ ```python
+ from podstack import (
+     Client,
+     PodstackError,
+     AuthenticationError,
+     GPUNotAvailableError,
+     RateLimitError,
+     ExecutionTimeoutError
+ )
+
+ try:
+     async with Client(api_key="invalid") as client:
+         await client.notebooks.create(name="test", gpu_type="A100")
+ except AuthenticationError:
+     print("Invalid API key")
+ except GPUNotAvailableError as e:
+     print(f"GPU {e.gpu_type} not available, try: {e.available_types}")
+ except RateLimitError as e:
+     print(f"Rate limited, retry after {e.retry_after}s")
+ except ExecutionTimeoutError as e:
+     print(f"Execution {e.execution_id} timed out")
+ except PodstackError as e:
+     print(f"Error [{e.code}]: {e.message}")
+ ```
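+
+ `RateLimitError.retry_after` lends itself to a small retry wrapper. A minimal sketch (the helper and its retry policy are illustrative, not part of the SDK):
+
+ ```python
+ import asyncio
+ from podstack import RateLimitError
+
+ async def with_rate_limit_retry(make_call, max_attempts=3):
+     """Re-issue an API call when rate limited, sleeping for the server-suggested delay."""
+     for attempt in range(1, max_attempts + 1):
+         try:
+             return await make_call()
+         except RateLimitError as e:
+             if attempt == max_attempts:
+                 raise
+             await asyncio.sleep(e.retry_after)
+
+ # Pass a zero-argument coroutine factory so the call can be re-issued on retry.
+ notebooks = await with_rate_limit_retry(lambda: client.notebooks.list(status="running"))
+ ```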
+
+ ## Configuration
+
+ ```python
+ # Environment variables
+ # PODSTACK_API_KEY=psk_live_xxxxx
+ # PODSTACK_BASE_URL=https://api.podstack.io/v1
+
+ # Or pass directly
+ client = Client(
+     api_key="psk_live_xxxxx",
+     base_url="https://api.podstack.io/v1",
+     timeout=60.0,
+     max_retries=5
+ )
+ ```
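+
+ If you prefer to resolve credentials yourself (for example in CI), the same environment variables can be read explicitly before constructing the client; a minimal sketch:
+
+ ```python
+ import os
+ from podstack import Client
+
+ client = Client(
+     api_key=os.environ["PODSTACK_API_KEY"],
+     base_url=os.environ.get("PODSTACK_BASE_URL", "https://api.podstack.io/v1"),
+ )
+ ```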
+
+ ## License
+
+ MIT License - see LICENSE for details.