llama-deploy-core 0.2.7a1__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/PKG-INFO +9 -2
  2. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/pyproject.toml +17 -2
  3. llama_deploy_core-0.3.0/src/llama_deploy/core/client/manage_client.py +230 -0
  4. llama_deploy_core-0.3.0/src/llama_deploy/core/config.py +1 -0
  5. llama_deploy_core-0.3.0/src/llama_deploy/core/deployment_config.py +415 -0
  6. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/src/llama_deploy/core/git/git_util.py +142 -30
  7. llama_deploy_core-0.3.0/src/llama_deploy/core/iter_utils.py +196 -0
  8. llama_deploy_core-0.3.0/src/llama_deploy/core/path_util.py +24 -0
  9. llama_deploy_core-0.3.0/src/llama_deploy/core/py.typed +0 -0
  10. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/src/llama_deploy/core/schema/__init__.py +8 -4
  11. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/src/llama_deploy/core/schema/deployments.py +29 -3
  12. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/src/llama_deploy/core/schema/git_validation.py +10 -5
  13. llama_deploy_core-0.3.0/src/llama_deploy/core/schema/projects.py +28 -0
  14. llama_deploy_core-0.3.0/src/llama_deploy/core/schema/public.py +7 -0
  15. llama_deploy_core-0.3.0/src/llama_deploy/core/server/manage_api/__init__.py +14 -0
  16. llama_deploy_core-0.3.0/src/llama_deploy/core/server/manage_api/_abstract_deployments_service.py +165 -0
  17. llama_deploy_core-0.3.0/src/llama_deploy/core/server/manage_api/_create_deployments_router.py +188 -0
  18. llama_deploy_core-0.3.0/src/llama_deploy/core/server/manage_api/_exceptions.py +14 -0
  19. llama_deploy_core-0.3.0/src/llama_deploy/core/ui_build.py +44 -0
  20. llama_deploy_core-0.2.7a1/src/llama_deploy/core/config.py +0 -1
  21. llama_deploy_core-0.2.7a1/src/llama_deploy/core/schema/projects.py +0 -14
  22. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/README.md +0 -0
  23. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/src/llama_deploy/core/__init__.py +0 -0
  24. {llama_deploy_core-0.2.7a1 → llama_deploy_core-0.3.0}/src/llama_deploy/core/schema/base.py +0 -0
@@ -1,11 +1,18 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: llama-deploy-core
3
- Version: 0.2.7a1
3
+ Version: 0.3.0
4
4
  Summary: Core models and schemas for LlamaDeploy
5
5
  License: MIT
6
+ Requires-Dist: fastapi>=0.115.0
7
+ Requires-Dist: overrides>=7.7.0
6
8
  Requires-Dist: pydantic>=2.0.0
7
9
  Requires-Dist: pyyaml>=6.0.2
8
- Requires-Python: >=3.12, <4
10
+ Requires-Dist: types-pyyaml>=6.0.12.20250822
11
+ Requires-Dist: httpx>=0.24.0,<1.0.0 ; extra == 'client'
12
+ Requires-Dist: fastapi>=0.115.0 ; extra == 'server'
13
+ Requires-Python: >=3.11, <4
14
+ Provides-Extra: client
15
+ Provides-Extra: server
9
16
  Description-Content-Type: text/markdown
10
17
 
11
18
  > [!WARNING]
@@ -1,13 +1,24 @@
1
1
  [project]
2
2
  name = "llama-deploy-core"
3
- version = "0.2.7a1"
3
+ version = "0.3.0"
4
4
  description = "Core models and schemas for LlamaDeploy"
5
5
  readme = "README.md"
6
6
  license = { text = "MIT" }
7
- requires-python = ">=3.12, <4"
7
+ requires-python = ">=3.11, <4"
8
8
  dependencies = [
9
+ "fastapi>=0.115.0",
10
+ "overrides>=7.7.0",
9
11
  "pydantic>=2.0.0",
10
12
  "pyyaml>=6.0.2",
13
+ "types-pyyaml>=6.0.12.20250822",
14
+ ]
15
+
16
+ [project.optional-dependencies]
17
+ server = [
18
+ "fastapi>=0.115.0",
19
+ ]
20
+ client = [
21
+ "httpx>=0.24.0,<1.0.0",
11
22
  ]
12
23
 
13
24
  [build-system]
@@ -20,4 +31,8 @@ module-name = "llama_deploy.core"
20
31
  [dependency-groups]
21
32
  dev = [
22
33
  "pytest>=8.4.1",
34
+ "pytest-xdist>=3.8.0",
35
+ "respx>=0.22.0",
36
+ "ruff>=0.12.9",
37
+ "ty>=0.0.1a19",
23
38
  ]
@@ -0,0 +1,230 @@
1
+ from __future__ import annotations
2
+
3
+ from contextlib import asynccontextmanager
4
+ from typing import AsyncIterator, Callable, List
5
+
6
+ import httpx
7
+ from llama_deploy.core.schema import LogEvent
8
+ from llama_deploy.core.schema.deployments import (
9
+ DeploymentCreate,
10
+ DeploymentResponse,
11
+ DeploymentsListResponse,
12
+ DeploymentUpdate,
13
+ )
14
+ from llama_deploy.core.schema.git_validation import (
15
+ RepositoryValidationRequest,
16
+ RepositoryValidationResponse,
17
+ )
18
+ from llama_deploy.core.schema.projects import ProjectsListResponse, ProjectSummary
19
+ from llama_deploy.core.schema.public import VersionResponse
20
+
21
+
22
class BaseClient:
    """Common HTTP plumbing shared by control-plane clients.

    Owns two identically configured ``httpx.AsyncClient`` instances:
    ``client`` for regular JSON requests and ``hookless_client`` for
    streaming endpoints (e.g. SSE log tails).

    Args:
        base_url: Control-plane base URL; a trailing slash is stripped.
        api_key: Optional bearer token sent as an ``Authorization`` header.
        auth: Optional httpx auth hook applied to both clients.
    """

    def __init__(
        self, base_url: str, api_key: str | None = None, auth: httpx.Auth | None = None
    ) -> None:
        self.base_url = base_url.rstrip("/")
        self.api_key = api_key

        # Attach a bearer token only when an API key was supplied.
        auth_headers: dict[str, str] = (
            {"Authorization": f"Bearer {api_key}"} if api_key else {}
        )

        common_kwargs = dict(base_url=self.base_url, headers=auth_headers, auth=auth)
        self.client = httpx.AsyncClient(**common_kwargs)
        self.hookless_client = httpx.AsyncClient(**common_kwargs)

    async def aclose(self) -> None:
        """Close both underlying httpx clients."""
        for http_client in (self.client, self.hookless_client):
            await http_client.aclose()
45
+
46
+
47
class ControlPlaneClient(BaseClient):
    """Unscoped client for non-project endpoints."""

    def __init__(
        self, base_url: str, api_key: str | None = None, auth: httpx.Auth | None = None
    ) -> None:
        super().__init__(base_url, api_key, auth)

    @classmethod
    @asynccontextmanager
    async def ctx(
        cls, base_url: str, api_key: str | None = None, auth: httpx.Auth | None = None
    ) -> AsyncIterator[ControlPlaneClient]:
        """Yield a client and close it (best-effort) when the context exits."""
        instance = cls(base_url, api_key, auth)
        try:
            yield instance
        finally:
            try:
                await instance.aclose()
            except Exception:
                # Closing is best-effort; never mask the body's exception.
                pass

    async def server_version(self) -> VersionResponse:
        """Fetch the control-plane server version."""
        resp = await self.client.get("/api/v1beta1/deployments-public/version")
        resp.raise_for_status()
        return VersionResponse.model_validate(resp.json())

    async def list_projects(self) -> List[ProjectSummary]:
        """Return every project visible to the caller."""
        resp = await self.client.get("/api/v1beta1/deployments/list-projects")
        resp.raise_for_status()
        return list(ProjectsListResponse.model_validate(resp.json()).projects)
79
+
80
+
81
class ProjectClient(BaseClient):
    """Project-scoped client for deployment operations."""

    def __init__(
        self,
        base_url: str,
        project_id: str,
        api_key: str | None = None,
        auth: httpx.Auth | None = None,
    ) -> None:
        super().__init__(base_url, api_key, auth)
        self.project_id = project_id

    @classmethod
    @asynccontextmanager
    async def ctx(
        cls,
        base_url: str,
        project_id: str,
        api_key: str | None = None,
        auth: httpx.Auth | None = None,
    ) -> AsyncIterator[ProjectClient]:
        """Yield a project-scoped client and close it (best-effort) on exit."""
        instance = cls(base_url, project_id, api_key, auth)
        try:
            yield instance
        finally:
            try:
                await instance.aclose()
            except Exception:
                # Closing is best-effort; never mask the body's exception.
                pass

    async def list_deployments(self) -> List[DeploymentResponse]:
        """Return every deployment in this project."""
        resp = await self.client.get(
            "/api/v1beta1/deployments",
            params={"project_id": self.project_id},
        )
        resp.raise_for_status()
        parsed = DeploymentsListResponse.model_validate(resp.json())
        return list(parsed.deployments)

    async def get_deployment(
        self, deployment_id: str, include_events: bool = False
    ) -> DeploymentResponse:
        """Fetch one deployment, optionally including its event history."""
        resp = await self.client.get(
            f"/api/v1beta1/deployments/{deployment_id}",
            params={"project_id": self.project_id, "include_events": include_events},
        )
        resp.raise_for_status()
        return DeploymentResponse.model_validate(resp.json())

    async def create_deployment(
        self, deployment_data: DeploymentCreate
    ) -> DeploymentResponse:
        """Create a deployment from the given spec; unset fields are omitted."""
        resp = await self.client.post(
            "/api/v1beta1/deployments",
            params={"project_id": self.project_id},
            json=deployment_data.model_dump(exclude_none=True),
        )
        resp.raise_for_status()
        return DeploymentResponse.model_validate(resp.json())

    async def delete_deployment(self, deployment_id: str) -> None:
        """Delete a deployment by id."""
        resp = await self.client.delete(
            f"/api/v1beta1/deployments/{deployment_id}",
            params={"project_id": self.project_id},
        )
        resp.raise_for_status()

    async def update_deployment(
        self,
        deployment_id: str,
        update_data: DeploymentUpdate,
    ) -> DeploymentResponse:
        """Apply a partial update to a deployment and return the new state."""
        resp = await self.client.patch(
            f"/api/v1beta1/deployments/{deployment_id}",
            params={"project_id": self.project_id},
            json=update_data.model_dump(),
        )
        resp.raise_for_status()
        return DeploymentResponse.model_validate(resp.json())

    async def validate_repository(
        self,
        repo_url: str,
        deployment_id: str | None = None,
        pat: str | None = None,
    ) -> RepositoryValidationResponse:
        """Ask the server whether a git repository is reachable/usable."""
        request_body = RepositoryValidationRequest(
            repository_url=repo_url,
            deployment_id=deployment_id,
            pat=pat,
        )
        resp = await self.client.post(
            "/api/v1beta1/deployments/validate-repository",
            params={"project_id": self.project_id},
            json=request_body.model_dump(),
        )
        resp.raise_for_status()
        return RepositoryValidationResponse.model_validate(resp.json())

    async def stream_deployment_logs(
        self,
        deployment_id: str,
        *,
        include_init_containers: bool = False,
        since_seconds: int | None = None,
        tail_lines: int | None = None,
    ) -> AsyncIterator[LogEvent]:
        """Stream logs as LogEvent items from the control plane using SSE.

        Yields `LogEvent` models until the stream ends (e.g., rollout completes).
        """
        query: dict[str, object] = {
            "project_id": self.project_id,
            "include_init_containers": include_init_containers,
        }
        # Only forward optional window parameters when explicitly set.
        for key, value in (
            ("since_seconds", since_seconds),
            ("tail_lines", tail_lines),
        ):
            if value is not None:
                query[key] = value

        # The hookless client is used for streaming; timeout=None keeps the
        # long-lived SSE connection open indefinitely.
        async with self.hookless_client.stream(
            "GET",
            f"/api/v1beta1/deployments/{deployment_id}/logs",
            params=query,
            headers={"Accept": "text/event-stream"},
            timeout=None,
        ) as response:
            response.raise_for_status()

            current_event: str | None = None
            pending_data: list[str] = []
            async for raw in response.aiter_lines():
                if raw is None:
                    continue
                text = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw
                if text.startswith("event:"):
                    current_event = text[len("event:") :].strip()
                elif text.startswith("data:"):
                    pending_data.append(text[len("data:") :].lstrip())
                elif text.strip() == "":
                    # A blank line terminates an SSE message: flush what we have.
                    if current_event == "log" and pending_data:
                        payload = "\n".join(pending_data)
                        try:
                            yield LogEvent.model_validate_json(payload)
                        except Exception:
                            # Skip malformed payloads rather than abort the stream.
                            pass
                    current_event = None
                    pending_data = []
228
+
229
+
230
# Type alias for a no-argument cleanup callback (e.g. returned by setup code).
Closer = Callable[[], None]
@@ -0,0 +1 @@
1
# Default location for the deployment config — the current directory.
# NOTE(review): presumably resolved relative to the repository root by the
# consumer; confirm against llama_deploy.core.deployment_config usage.
DEFAULT_DEPLOYMENT_FILE_PATH = "."