podstack-1.2.3.tar.gz → podstack-1.3.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {podstack-1.2.3 → podstack-1.3.1}/PKG-INFO +1 -1
  2. {podstack-1.2.3 → podstack-1.3.1}/podstack/__init__.py +7 -1
  3. {podstack-1.2.3 → podstack-1.3.1}/podstack/annotations.py +109 -3
  4. {podstack-1.2.3 → podstack-1.3.1}/podstack/gpu_runner.py +70 -5
  5. {podstack-1.2.3 → podstack-1.3.1}/podstack.egg-info/PKG-INFO +1 -1
  6. {podstack-1.2.3 → podstack-1.3.1}/pyproject.toml +1 -1
  7. {podstack-1.2.3 → podstack-1.3.1}/LICENSE +0 -0
  8. {podstack-1.2.3 → podstack-1.3.1}/README.md +0 -0
  9. {podstack-1.2.3 → podstack-1.3.1}/podstack/client.py +0 -0
  10. {podstack-1.2.3 → podstack-1.3.1}/podstack/exceptions.py +0 -0
  11. {podstack-1.2.3 → podstack-1.3.1}/podstack/execution.py +0 -0
  12. {podstack-1.2.3 → podstack-1.3.1}/podstack/models.py +0 -0
  13. {podstack-1.2.3 → podstack-1.3.1}/podstack/notebook.py +0 -0
  14. {podstack-1.2.3 → podstack-1.3.1}/podstack/registry/__init__.py +0 -0
  15. {podstack-1.2.3 → podstack-1.3.1}/podstack/registry/client.py +0 -0
  16. {podstack-1.2.3 → podstack-1.3.1}/podstack/registry/exceptions.py +0 -0
  17. {podstack-1.2.3 → podstack-1.3.1}/podstack/registry/experiment.py +0 -0
  18. {podstack-1.2.3 → podstack-1.3.1}/podstack/registry/model.py +0 -0
  19. {podstack-1.2.3 → podstack-1.3.1}/podstack/registry/model_utils.py +0 -0
  20. {podstack-1.2.3 → podstack-1.3.1}/podstack.egg-info/SOURCES.txt +0 -0
  21. {podstack-1.2.3 → podstack-1.3.1}/podstack.egg-info/dependency_links.txt +0 -0
  22. {podstack-1.2.3 → podstack-1.3.1}/podstack.egg-info/requires.txt +0 -0
  23. {podstack-1.2.3 → podstack-1.3.1}/podstack.egg-info/top_level.txt +0 -0
  24. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/__init__.py +0 -0
  25. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/app.py +0 -0
  26. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/exceptions.py +0 -0
  27. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/image.py +0 -0
  28. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/runner.py +0 -0
  29. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/secret.py +0 -0
  30. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/utils.py +0 -0
  31. {podstack-1.2.3 → podstack-1.3.1}/podstack_gpu/volume.py +0 -0
  32. {podstack-1.2.3 → podstack-1.3.1}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: podstack
3
- Version: 1.2.3
3
+ Version: 1.3.1
4
4
  Summary: Official Python SDK for Podstack GPU Notebook Platform
5
5
  Author-email: Podstack <support@podstack.ai>
6
6
  License-Expression: MIT
@@ -54,7 +54,7 @@ Decorators:
54
54
  ...
55
55
  """
56
56
 
57
- __version__ = "1.2.3"
57
+ __version__ = "1.3.1"
58
58
 
59
59
  from .client import Client
60
60
  from .notebook import Notebook, NotebookStatus
@@ -84,12 +84,15 @@ from .gpu_runner import (
84
84
  GPURunner,
85
85
  GPUExecutionResult,
86
86
  run as run_on_gpu,
87
+ list_runners,
87
88
  )
88
89
 
89
90
  # Annotations module import
90
91
  from . import annotations
91
92
  from .annotations import (
92
93
  gpu,
94
+ runner,
95
+ RunnerConfig,
93
96
  environment,
94
97
  auto_shutdown,
95
98
  experiment,
@@ -207,9 +210,12 @@ __all__ = [
207
210
  "GPURunner",
208
211
  "GPUExecutionResult",
209
212
  "run_on_gpu",
213
+ "list_runners",
210
214
  # Annotations
211
215
  "annotations",
212
216
  "gpu",
217
+ "runner",
218
+ "RunnerConfig",
213
219
  "environment",
214
220
  "auto_shutdown",
215
221
  "experiment",
@@ -51,6 +51,7 @@ from .gpu_runner import (
51
51
 
52
52
  # Global state for configuration
53
53
  _current_gpu_config: Dict[str, Any] = {}
54
+ _current_runner_config: Optional[Dict[str, Any]] = None
54
55
  _current_environment: str = "pytorch"
55
56
  _auto_shutdown_minutes: int = 60
56
57
  _remote_execution_enabled: bool = True
@@ -212,6 +213,23 @@ if __podstack_result__ is not None:
212
213
  print(repr(__podstack_result__))
213
214
  """
214
215
 
216
+ # Check for active runner config from @podstack.runner() decorator
217
+ global _current_runner_config
218
+ runner_name = None
219
+ effective_pip = self.pip
220
+ effective_uv = self.uv
221
+ effective_conda = self.conda
222
+ if _current_runner_config is not None:
223
+ runner_name = _current_runner_config.get("image")
224
+ # Merge package lists from runner config
225
+ if _current_runner_config.get("pip") and not effective_pip:
226
+ effective_pip = _current_runner_config["pip"]
227
+ if _current_runner_config.get("uv") and not effective_uv:
228
+ effective_uv = _current_runner_config["uv"]
229
+ if _current_runner_config.get("conda") and not effective_conda:
230
+ effective_conda = _current_runner_config["conda"]
231
+ _current_runner_config = None # Clear after use
232
+
215
233
  # Execute on remote GPU with improved error handling
216
234
  # Auto-enable streaming in Jupyter notebooks for real-time output
217
235
  try:
@@ -222,11 +240,12 @@ if __podstack_result__ is not None:
222
240
  fraction=self.fraction,
223
241
  timeout=self.timeout,
224
242
  env=self.env,
225
- pip=self.pip,
226
- uv=self.uv,
227
- conda=self.conda,
243
+ pip=effective_pip,
244
+ uv=effective_uv,
245
+ conda=effective_conda,
228
246
  requirements=self.requirements,
229
247
  use_uv=self.use_uv,
248
+ runner=runner_name,
230
249
  wait=True,
231
250
  stream=None # Auto-detect: True in Jupyter, False otherwise
232
251
  )
@@ -402,6 +421,91 @@ def gpu(
402
421
  )
403
422
 
404
423
 
424
+ class RunnerConfig:
425
+ """
426
+ Runner configuration decorator.
427
+
428
+ Used alongside @podstack.gpu() to specify which runner image and
429
+ additional packages to use.
430
+
431
+ Usage:
432
+ @podstack.gpu(type="L40S", count=1)
433
+ @podstack.runner(image="pytorch-2.3.0-cuda-12.1-py3.11")
434
+ def my_func():
435
+ ...
436
+ """
437
+
438
+ def __init__(
439
+ self,
440
+ image: str = None,
441
+ pip: Union[str, list] = None,
442
+ uv: Union[str, list] = None,
443
+ conda: Union[str, list] = None,
444
+ ):
445
+ self.image = image
446
+ self.pip = pip
447
+ self.uv = uv
448
+ self.conda = conda
449
+
450
+ # Store in global state
451
+ global _current_runner_config
452
+ _current_runner_config = {
453
+ "image": image,
454
+ "pip": pip,
455
+ "uv": uv,
456
+ "conda": conda,
457
+ }
458
+
459
+ def __call__(self, func: Callable) -> Callable:
460
+ """Decorator usage - stores runner config for the next gpu() call."""
461
+ @functools.wraps(func)
462
+ def wrapper(*args, **kwargs):
463
+ global _current_runner_config
464
+ _current_runner_config = {
465
+ "image": self.image,
466
+ "pip": self.pip,
467
+ "uv": self.uv,
468
+ "conda": self.conda,
469
+ }
470
+ return func(*args, **kwargs)
471
+ return wrapper
472
+
473
+
474
+ def runner(
475
+ image: str = None,
476
+ pip: Union[str, list] = None,
477
+ uv: Union[str, list] = None,
478
+ conda: Union[str, list] = None,
479
+ ) -> RunnerConfig:
480
+ """
481
+ Configure which runner image and packages to use for GPU execution.
482
+
483
+ Used alongside @podstack.gpu() to select a pre-built runner image.
484
+
485
+ Examples:
486
+ @podstack.gpu(type="L40S")
487
+ @podstack.runner(image="pytorch-2.3.0-cuda-12.1-py3.11")
488
+ def train():
489
+ import torch
490
+ ...
491
+
492
+ @podstack.gpu(type="L40S")
493
+ @podstack.runner(image="pytorch-2.3.0-cuda-12.1-py3.11", pip=["transformers"])
494
+ def train_llm():
495
+ ...
496
+
497
+ Args:
498
+ image: Runner name from podstack runners (e.g. "pytorch-2.3.0-cuda-12.1-py3.11")
499
+ pip: Additional pip packages - string "pkg1,pkg2" or list ["pkg1", "pkg2"]
500
+ uv: Additional UV packages (faster than pip) - string or list
501
+ conda: Additional conda packages - string or list
502
+
503
+ Returns:
504
+ RunnerConfig instance
505
+ """
506
+ return RunnerConfig(image=image, pip=pip, uv=uv, conda=conda)
507
+
508
+
405
509
  def environment(env: str = "pytorch") -> str:
406
510
  """
407
511
  Set the runtime environment.
@@ -708,6 +812,8 @@ def get_auto_shutdown_minutes() -> int:
708
812
 
709
813
  __all__ = [
710
814
  "gpu",
815
+ "runner",
816
+ "RunnerConfig",
711
817
  "environment",
712
818
  "auto_shutdown",
713
819
  "experiment",
@@ -268,7 +268,8 @@ class GPURunner:
268
268
  uv: Union[str, list] = None,
269
269
  conda: Union[str, list] = None,
270
270
  requirements: str = None,
271
- use_uv: bool = False
271
+ use_uv: bool = False,
272
+ runner: str = None
272
273
  ) -> str:
273
274
  """Build the @podstack annotation string."""
274
275
  parts = [f"#@podstack gpu={gpu}"]
@@ -307,6 +308,10 @@ class GPURunner:
307
308
  if use_uv:
308
309
  parts.append("use_uv=true")
309
310
 
311
+ # Runner name (resolved by notebook-service via platform-service)
312
+ if runner:
313
+ parts.append(f"runner={runner}")
314
+
310
315
  return " ".join(parts)
311
316
 
312
317
  def submit(
@@ -322,7 +327,8 @@ class GPURunner:
322
327
  conda: Union[str, list] = None,
323
328
  requirements: str = None,
324
329
  use_uv: bool = False,
325
- add_annotation: bool = True
330
+ add_annotation: bool = True,
331
+ runner: str = None
326
332
  ) -> Dict[str, Any]:
327
333
  """
328
334
  Submit code for GPU execution (non-blocking).
@@ -352,7 +358,7 @@ class GPURunner:
352
358
 
353
359
  # Add annotation if not present
354
360
  if add_annotation and not code.strip().startswith("#@podstack"):
355
- annotation = self._build_annotation(gpu, count, fraction, timeout, env, pip, uv, conda, requirements, use_uv)
361
+ annotation = self._build_annotation(gpu, count, fraction, timeout, env, pip, uv, conda, requirements, use_uv, runner)
356
362
  code = f"{annotation}\n\n{code}"
357
363
 
358
364
  # Build installation code for packages
@@ -635,7 +641,8 @@ _stream_install(
635
641
  max_retries: int = 3,
636
642
  provisioning_timeout: int = 300,
637
643
  cancel_on_timeout: bool = True,
638
- stream: bool = None
644
+ stream: bool = None,
645
+ runner: str = None
639
646
  ) -> GPUExecutionResult:
640
647
  """
641
648
  Execute code on GPU and optionally wait for completion.
@@ -670,7 +677,7 @@ _stream_install(
670
677
  ValueError: If parameters are invalid
671
678
  """
672
679
  # Submit the code
673
- submission = self.submit(code, gpu, count, fraction, timeout, env, pip, uv, conda, requirements, use_uv)
680
+ submission = self.submit(code, gpu, count, fraction, timeout, env, pip, uv, conda, requirements, use_uv, runner=runner)
674
681
  execution_id = submission.get("execution_id")
675
682
 
676
683
  if not execution_id:
@@ -958,6 +965,49 @@ _stream_install(
958
965
  except:
959
966
  return {"status": "cancelled"}
960
967
 
968
+ def _get_platform_url(self) -> str:
969
+ """Derive the platform service URL from the notebook service URL."""
970
+ if os.environ.get("PODSTACK_PLATFORM_URL"):
971
+ return os.environ.get("PODSTACK_PLATFORM_URL")
972
+ # External: cloud.podstack.ai/notebooks -> cloud.podstack.ai/platform
973
+ if "/notebooks" in self.api_url:
974
+ return self.api_url.replace("/notebooks", "/platform")
975
+ # Internal K8s: notebook service URL -> platform service URL
976
+ if "notebook" in self.api_url and ".svc.cluster.local" in self.api_url:
977
+ return self.api_url.replace("notebook", "platform").replace(":8084", ":8081")
978
+ # Fallback: use the same base URL
979
+ return self.api_url
980
+
981
+ def list_runners(self) -> list:
982
+ """
983
+ List available GPU/CPU runners.
984
+
985
+ Returns:
986
+ List of runner dictionaries with name, type, description, image, libraries, scenarios.
987
+ """
988
+ platform_url = self._get_platform_url()
989
+ url = f"{platform_url}/api/v1/runners"
990
+
991
+ with httpx.Client(timeout=self.timeout) as client:
992
+ try:
993
+ response = client.get(url, headers=self._get_headers())
994
+ except httpx.ConnectError as e:
995
+ raise ConnectionError(f"Failed to connect to {url}: {e}")
996
+ except httpx.TimeoutException:
997
+ raise TimeoutError(f"Request to {url} timed out")
998
+
999
+ if response.status_code >= 400:
1000
+ try:
1001
+ error_msg = response.json().get("error", response.text)
1002
+ except Exception:
1003
+ error_msg = response.text[:500] if response.text else f"HTTP {response.status_code}"
1004
+ raise RuntimeError(f"Failed to list runners: {error_msg}")
1005
+
1006
+ try:
1007
+ return response.json()
1008
+ except Exception:
1009
+ raise RuntimeError(f"Invalid JSON response: {response.text[:200]}")
1010
+
961
1011
  def run_function(
962
1012
  self,
963
1013
  func: Callable,
@@ -1131,6 +1181,21 @@ def run(
1131
1181
  )
1132
1182
 
1133
1183
 
1184
+ def list_runners() -> list:
1185
+ """
1186
+ List available GPU/CPU runners.
1187
+
1188
+ Returns:
1189
+ List of runner dictionaries with name, type, description, image, libraries, scenarios.
1190
+
1191
+ Example:
1192
+ runners = podstack.gpu_runner.list_runners()
1193
+ for r in runners:
1194
+ print(f"{r['name']} ({r['type']}): {r['description']}")
1195
+ """
1196
+ return get_runner().list_runners()
1197
+
1198
+
1134
1199
  def stream_output(execution_id: str, show_output: bool = True) -> Iterator[Dict[str, Any]]:
1135
1200
  """
1136
1201
  Stream real-time output from a running execution.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: podstack
3
- Version: 1.2.3
3
+ Version: 1.3.1
4
4
  Summary: Official Python SDK for Podstack GPU Notebook Platform
5
5
  Author-email: Podstack <support@podstack.ai>
6
6
  License-Expression: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "podstack"
7
- version = "1.2.3"
7
+ version = "1.3.1"
8
8
  description = "Official Python SDK for Podstack GPU Notebook Platform"
9
9
  readme = "README.md"
10
10
  license = "MIT"
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes