llama-deploy-core 0.3.0a13__tar.gz → 0.3.0a14__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/PKG-INFO +1 -1
  2. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/pyproject.toml +1 -1
  3. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/client/manage_client.py +31 -0
  4. llama_deploy_core-0.3.0a14/src/llama_deploy/core/schema/projects.py +28 -0
  5. llama_deploy_core-0.3.0a14/src/llama_deploy/core/schema/public.py +7 -0
  6. llama_deploy_core-0.3.0a13/src/llama_deploy/core/schema/projects.py +0 -15
  7. llama_deploy_core-0.3.0a13/src/llama_deploy/core/schema/public.py +0 -5
  8. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/README.md +0 -0
  9. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/__init__.py +0 -0
  10. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/config.py +0 -0
  11. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/deployment_config.py +0 -0
  12. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/git/git_util.py +0 -0
  13. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/path_util.py +0 -0
  14. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/py.typed +0 -0
  15. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/schema/__init__.py +0 -0
  16. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/schema/base.py +0 -0
  17. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/schema/deployments.py +0 -0
  18. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/schema/git_validation.py +0 -0
  19. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/server/manage_api/__init__.py +0 -0
  20. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/server/manage_api/_abstract_deployments_service.py +0 -0
  21. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/server/manage_api/_create_deployments_router.py +0 -0
  22. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/server/manage_api/_exceptions.py +0 -0
  23. {llama_deploy_core-0.3.0a13 → llama_deploy_core-0.3.0a14}/src/llama_deploy/core/ui_build.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: llama-deploy-core
3
- Version: 0.3.0a13
3
+ Version: 0.3.0a14
4
4
  Summary: Core models and schemas for LlamaDeploy
5
5
  License: MIT
6
6
  Requires-Dist: fastapi>=0.115.0
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "llama-deploy-core"
3
- version = "0.3.0a13"
3
+ version = "0.3.0a14"
4
4
  description = "Core models and schemas for LlamaDeploy"
5
5
  readme = "README.md"
6
6
  license = { text = "MIT" }
@@ -1,3 +1,6 @@
1
+ from __future__ import annotations
2
+
3
+ from contextlib import asynccontextmanager
1
4
  from typing import AsyncIterator, Callable, List
2
5
 
3
6
  import httpx
@@ -70,6 +73,20 @@ class BaseClient:
70
73
  class ControlPlaneClient(BaseClient):
71
74
  """Unscoped client for non-project endpoints."""
72
75
 
76
+ @classmethod
77
+ @asynccontextmanager
78
+ async def ctx(
79
+ cls, base_url: str, api_key: str | None = None
80
+ ) -> AsyncIterator[ControlPlaneClient]:
81
+ client = cls(base_url, api_key)
82
+ try:
83
+ yield client
84
+ finally:
85
+ try:
86
+ await client.aclose()
87
+ except Exception:
88
+ pass
89
+
73
90
  def __init__(self, base_url: str, api_key: str | None = None) -> None:
74
91
  super().__init__(base_url, api_key)
75
92
 
@@ -86,6 +103,20 @@ class ControlPlaneClient(BaseClient):
86
103
  class ProjectClient(BaseClient):
87
104
  """Project-scoped client for deployment operations."""
88
105
 
106
+ @classmethod
107
+ @asynccontextmanager
108
+ async def ctx(
109
+ cls, base_url: str, project_id: str, api_key: str | None = None
110
+ ) -> AsyncIterator[ProjectClient]:
111
+ client = cls(base_url, project_id, api_key)
112
+ try:
113
+ yield client
114
+ finally:
115
+ try:
116
+ await client.aclose()
117
+ except Exception:
118
+ pass
119
+
89
120
  def __init__(
90
121
  self,
91
122
  base_url: str,
@@ -0,0 +1,28 @@
1
+ from typing import Any
2
+
3
+ from pydantic import model_validator
4
+
5
+ from .base import Base
6
+
7
+
8
+ class ProjectSummary(Base):
9
+ """Summary of a project with deployment count"""
10
+
11
+ project_id: str
12
+ project_name: str
13
+ deployment_count: int
14
+
15
+ @model_validator(mode="before")
16
+ @classmethod
17
+ def set_default_project_name(cls, data: Any) -> Any:
18
+ if isinstance(data, dict):
19
+ if "project_name" not in data or data.get("project_name") is None:
20
+ if "project_id" in data:
21
+ data["project_name"] = data["project_id"]
22
+ return data
23
+
24
+
25
+ class ProjectsListResponse(Base):
26
+ """Response model for listing projects with deployment counts"""
27
+
28
+ projects: list[ProjectSummary]
@@ -0,0 +1,7 @@
1
+ from .base import Base
2
+
3
+
4
+ class VersionResponse(Base):
5
+ version: str
6
+ requires_auth: bool = False
7
+ min_llamactl_version: str | None = None
@@ -1,15 +0,0 @@
1
- from .base import Base
2
-
3
-
4
- class ProjectSummary(Base):
5
- """Summary of a project with deployment count"""
6
-
7
- project_id: str
8
- project_name: str
9
- deployment_count: int
10
-
11
-
12
- class ProjectsListResponse(Base):
13
- """Response model for listing projects with deployment counts"""
14
-
15
- projects: list[ProjectSummary]
@@ -1,5 +0,0 @@
1
- from .base import Base
2
-
3
-
4
- class VersionResponse(Base):
5
- version: str