podstack-1.3.20.tar.gz → podstack-1.4.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. {podstack-1.3.20 → podstack-1.4.0}/PKG-INFO +1 -1
  2. {podstack-1.3.20 → podstack-1.4.0}/podstack/__init__.py +66 -1
  3. {podstack-1.3.20 → podstack-1.4.0}/podstack/annotations.py +156 -0
  4. {podstack-1.3.20 → podstack-1.4.0}/podstack/gpu_runner.py +300 -11
  5. podstack-1.4.0/podstack/pipelines.py +269 -0
  6. podstack-1.4.0/podstack/storage.py +291 -0
  7. {podstack-1.3.20 → podstack-1.4.0}/podstack.egg-info/PKG-INFO +1 -1
  8. {podstack-1.3.20 → podstack-1.4.0}/podstack.egg-info/SOURCES.txt +2 -0
  9. {podstack-1.3.20 → podstack-1.4.0}/pyproject.toml +1 -1
  10. {podstack-1.3.20 → podstack-1.4.0}/LICENSE +0 -0
  11. {podstack-1.3.20 → podstack-1.4.0}/README.md +0 -0
  12. {podstack-1.3.20 → podstack-1.4.0}/podstack/client.py +0 -0
  13. {podstack-1.3.20 → podstack-1.4.0}/podstack/exceptions.py +0 -0
  14. {podstack-1.3.20 → podstack-1.4.0}/podstack/execution.py +0 -0
  15. {podstack-1.3.20 → podstack-1.4.0}/podstack/models.py +0 -0
  16. {podstack-1.3.20 → podstack-1.4.0}/podstack/notebook.py +0 -0
  17. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/__init__.py +0 -0
  18. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/autolog.py +0 -0
  19. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/client.py +0 -0
  20. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/exceptions.py +0 -0
  21. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/experiment.py +0 -0
  22. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/model.py +0 -0
  23. {podstack-1.3.20 → podstack-1.4.0}/podstack/registry/model_utils.py +0 -0
  24. {podstack-1.3.20 → podstack-1.4.0}/podstack.egg-info/dependency_links.txt +0 -0
  25. {podstack-1.3.20 → podstack-1.4.0}/podstack.egg-info/requires.txt +0 -0
  26. {podstack-1.3.20 → podstack-1.4.0}/podstack.egg-info/top_level.txt +0 -0
  27. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/__init__.py +0 -0
  28. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/app.py +0 -0
  29. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/exceptions.py +0 -0
  30. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/image.py +0 -0
  31. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/runner.py +0 -0
  32. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/secret.py +0 -0
  33. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/utils.py +0 -0
  34. {podstack-1.3.20 → podstack-1.4.0}/podstack_gpu/volume.py +0 -0
  35. {podstack-1.3.20 → podstack-1.4.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: podstack
3
- Version: 1.3.20
3
+ Version: 1.4.0
4
4
  Summary: Official Python SDK for Podstack GPU Notebook Platform
5
5
  Author-email: Podstack <support@podstack.ai>
6
6
  License-Expression: MIT
@@ -54,7 +54,7 @@ Decorators:
54
54
  ...
55
55
  """
56
56
 
57
- __version__ = "1.3.2"
57
+ __version__ = "1.4.0"
58
58
 
59
59
  from .client import Client
60
60
  from .notebook import Notebook, NotebookStatus
@@ -86,6 +86,31 @@ from .gpu_runner import (
86
86
  RunnerList,
87
87
  run as run_on_gpu,
88
88
  list_runners,
89
+ get_runner_info,
90
+ pip_install,
91
+ )
92
+
93
+ # Pipeline module import
94
+ from . import pipelines
95
+ from .pipelines import (
96
+ step,
97
+ pipeline,
98
+ StepConfig,
99
+ PipelineDef,
100
+ list_pipelines,
101
+ get_pipeline,
102
+ get_pipeline_run,
103
+ trigger_pipeline,
104
+ cancel_pipeline_run,
105
+ archive_pipeline,
106
+ )
107
+
108
+ # Storage module import
109
+ from . import storage
110
+ from .storage import (
111
+ StorageClient, BucketList, VolumeList,
112
+ create_bucket, get_bucket, list_buckets, delete_bucket,
113
+ create_volume, get_volume, list_volumes, delete_volume,
89
114
  )
90
115
 
91
116
  # Annotations module import
@@ -103,6 +128,13 @@ from .annotations import (
103
128
  get_environment,
104
129
  get_auto_shutdown_minutes,
105
130
  enable_remote_execution,
131
+ deploy,
132
+ DeployConfig,
133
+ )
134
+ from .gpu_runner import (
135
+ get_deployment,
136
+ stop_deployment,
137
+ delete_deployment,
106
138
  )
107
139
 
108
140
 
@@ -206,6 +238,31 @@ __all__ = [
206
238
  "WalletBalance",
207
239
  # Registry
208
240
  "registry",
241
+ # Pipelines
242
+ "pipelines",
243
+ "step",
244
+ "pipeline",
245
+ "StepConfig",
246
+ "PipelineDef",
247
+ "list_pipelines",
248
+ "get_pipeline",
249
+ "get_pipeline_run",
250
+ "trigger_pipeline",
251
+ "cancel_pipeline_run",
252
+ "archive_pipeline",
253
+ # Storage
254
+ "storage",
255
+ "StorageClient",
256
+ "BucketList",
257
+ "VolumeList",
258
+ "create_bucket",
259
+ "get_bucket",
260
+ "list_buckets",
261
+ "delete_bucket",
262
+ "create_volume",
263
+ "get_volume",
264
+ "list_volumes",
265
+ "delete_volume",
209
266
  # GPU Runner
210
267
  "gpu_runner",
211
268
  "GPURunner",
@@ -213,6 +270,8 @@ __all__ = [
213
270
  "RunnerList",
214
271
  "run_on_gpu",
215
272
  "list_runners",
273
+ "get_runner_info",
274
+ "pip_install",
216
275
  # Annotations
217
276
  "annotations",
218
277
  "gpu",
@@ -227,4 +286,10 @@ __all__ = [
227
286
  "get_environment",
228
287
  "get_auto_shutdown_minutes",
229
288
  "enable_remote_execution",
289
+ # Deploy
290
+ "deploy",
291
+ "DeployConfig",
292
+ "get_deployment",
293
+ "stop_deployment",
294
+ "delete_deployment",
230
295
  ]
@@ -37,6 +37,7 @@ from typing import Optional, Dict, Any, Callable, Union
37
37
  from contextlib import contextmanager
38
38
 
39
39
  from . import registry
40
+ from .pipelines import step, pipeline, StepConfig, PipelineDef
40
41
  from .gpu_runner import (
41
42
  GPURunner,
42
43
  GPUExecutionResult,
@@ -802,6 +803,154 @@ class ModelRegistry:
802
803
  model = ModelRegistry()
803
804
 
804
805
 
806
+ class DeployConfig:
807
+ """
808
+ Deploy configuration decorator.
809
+
810
+ Deploys the decorated function as a persistent REST API endpoint.
811
+
812
+ Usage:
813
+ @podstack.deploy(name="sentiment-api", gpu="A10", fraction=50)
814
+ def predict(data):
815
+ return {"sentiment": "positive", "text": data["text"]}
816
+
817
+ result = predict() # Deploys and returns endpoint info
818
+ """
819
+
820
+ def __init__(
821
+ self,
822
+ name: str,
823
+ gpu: str = "A10",
824
+ count: int = 1,
825
+ fraction: int = 100,
826
+ pip: Union[str, list] = None,
827
+ uv: Union[str, list] = None,
828
+ function: str = None,
829
+ image: str = None,
830
+ ):
831
+ self.name = name
832
+ self.gpu = gpu
833
+ self.count = count
834
+ self.fraction = fraction
835
+ self.pip = pip
836
+ self.uv = uv
837
+ self.function = function
838
+ self.image = image
839
+
840
+ def __call__(self, func: Callable) -> Callable:
841
+ @functools.wraps(func)
842
+ def wrapper(*args, **kwargs):
843
+ runner = get_runner()
844
+
845
+ # Get function source
846
+ source = inspect.getsource(func)
847
+ source = textwrap.dedent(source)
848
+
849
+ # Remove decorator lines
850
+ lines = source.split('\n')
851
+ clean_lines = []
852
+ skip_decorator = False
853
+ paren_depth = 0
854
+ for line in lines:
855
+ stripped = line.strip()
856
+ if stripped.startswith('@podstack.deploy') or stripped.startswith('@deploy'):
857
+ skip_decorator = True
858
+ paren_depth += line.count('(') - line.count(')')
859
+ if paren_depth <= 0:
860
+ skip_decorator = False
861
+ continue
862
+ if skip_decorator:
863
+ paren_depth += line.count('(') - line.count(')')
864
+ if paren_depth <= 0:
865
+ skip_decorator = False
866
+ continue
867
+ if stripped.startswith('@'):
868
+ continue
869
+ clean_lines.append(line)
870
+ source = '\n'.join(clean_lines)
871
+
872
+ function_name = self.function or func.__name__
873
+ pip_packages = self.pip
874
+ if isinstance(pip_packages, list):
875
+ pip_packages = " ".join(pip_packages)
876
+
877
+ # POST to deployment API
878
+ import httpx
879
+ headers = runner._get_headers()
880
+ url = f"{runner.api_url}/api/v1/deployments/from-source"
881
+ with httpx.Client(timeout=30.0) as client:
882
+ response = client.post(url, headers=headers, json={
883
+ "name": self.name,
884
+ "source": source,
885
+ "function_name": function_name,
886
+ "gpu_type": self.gpu,
887
+ "gpu_fraction": self.fraction,
888
+ "gpu_count": self.count,
889
+ "pip_packages": pip_packages or "",
890
+ "image": self.image or "",
891
+ })
892
+ if response.status_code >= 400:
893
+ raise PodstackError(f"Deployment failed: {response.text}")
894
+ data = response.json()
895
+
896
+ deployment_id = data.get("deployment_id")
897
+ print(f"[Podstack] Deploying '{self.name}' as REST API...")
898
+ print(f"[Podstack] Deployment ID: {deployment_id}")
899
+ print(f"[Podstack] Status: {data.get('status')}")
900
+
901
+ # Poll for active status (up to 120s)
902
+ status_url = f"{runner.api_url}/api/v1/deployments/{deployment_id}"
903
+ for _ in range(24):
904
+ time.sleep(5)
905
+ with httpx.Client(timeout=30.0) as client:
906
+ status_resp = client.get(status_url, headers=headers)
907
+ if status_resp.status_code >= 400:
908
+ continue
909
+ status_data = status_resp.json()
910
+ status = status_data.get("status")
911
+ if status == "active":
912
+ endpoint = status_data.get("service_url", "")
913
+ print(f"[Podstack] Endpoint live: POST {endpoint}/predict")
914
+ return {"deployment_id": deployment_id, "status": "active", "endpoint": f"{endpoint}/predict"}
915
+ elif status == "failed":
916
+ error = status_data.get("error_message", "Unknown error")
917
+ print(f"[Podstack] Deployment failed: {error}")
918
+ raise PodstackError(f"Deployment failed: {error}")
919
+ print(f"[Podstack] Status: {status}...")
920
+
921
+ print(f"[Podstack] Deployment still starting. Check status with:")
922
+ print(f" podstack.get_deployment('{deployment_id}')")
923
+ return {"deployment_id": deployment_id, "status": "starting"}
924
+
925
+ return wrapper
926
+
927
+
928
+ def deploy(
929
+ name: str,
930
+ gpu: str = "A10",
931
+ count: int = 1,
932
+ fraction: int = 100,
933
+ pip: Union[str, list] = None,
934
+ uv: Union[str, list] = None,
935
+ function: str = None,
936
+ image: str = None,
937
+ ) -> DeployConfig:
938
+ """Deploy a function as a persistent REST API endpoint.
939
+
940
+ Examples:
941
+ @podstack.deploy(name="sentiment-api", gpu="A10", fraction=25)
942
+ def predict(data):
943
+ return {"sentiment": "positive"}
944
+
945
+ result = predict() # Deploys and returns endpoint info
946
+ print(result["endpoint"])
947
+ """
948
+ return DeployConfig(
949
+ name=name, gpu=gpu, count=count, fraction=fraction,
950
+ pip=pip, uv=uv, function=function, image=image,
951
+ )
952
+
953
+
805
954
  def get_gpu_config() -> Dict[str, Any]:
806
955
  """Get current GPU configuration."""
807
956
  return _current_gpu_config.copy()
@@ -832,6 +981,13 @@ __all__ = [
832
981
  "enable_remote_execution",
833
982
  "is_remote_execution_enabled",
834
983
  "GPUConfig",
984
+ "deploy",
985
+ "DeployConfig",
986
+ # Pipeline
987
+ "step",
988
+ "pipeline",
989
+ "StepConfig",
990
+ "PipelineDef",
835
991
  # Exceptions
836
992
  "PodstackError",
837
993
  "PodstackTimeoutError",