llama-deploy-core 0.3.6__tar.gz → 0.3.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/PKG-INFO +1 -1
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/pyproject.toml +1 -1
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/client/manage_client.py +49 -9
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/README.md +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/__init__.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/config.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/deployment_config.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/git/git_util.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/iter_utils.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/path_util.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/py.typed +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/__init__.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/base.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/deployments.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/git_validation.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/projects.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/public.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/server/manage_api/__init__.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/server/manage_api/_abstract_deployments_service.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/server/manage_api/_create_deployments_router.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/server/manage_api/_exceptions.py +0 -0
- {llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/ui_build.py +0 -0
{llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/client/manage_client.py
RENAMED
|
@@ -68,16 +68,56 @@ class ControlPlaneClient(BaseClient):
|
|
|
68
68
|
|
|
69
69
|
async def server_version(self) -> VersionResponse:
    """Query the control plane for its running server version.

    Returns:
        VersionResponse: parsed payload from the public version endpoint.

    Raises:
        httpx.HTTPStatusError: when the server answers with a non-2xx status
            (raised via the module-level ``_raise_for_status`` helper, which
            enriches the message with a body snippet and request id).
    """
    response = await self.client.get("/api/v1beta1/deployments-public/version")
    _raise_for_status(response)
    return VersionResponse.model_validate(response.json())
|
|
73
73
|
|
|
74
74
|
async def list_projects(self) -> List[ProjectSummary]:
    """Return every project visible to the authenticated caller.

    Returns:
        List[ProjectSummary]: summaries parsed from the list-projects endpoint.

    Raises:
        httpx.HTTPStatusError: when the server answers with a non-2xx status.
    """
    response = await self.client.get("/api/v1beta1/deployments/list-projects")
    _raise_for_status(response)
    projects_response = ProjectsListResponse.model_validate(response.json())
    # The validated model already holds the parsed summaries; copy to a plain
    # list instead of an identity comprehension (ruff PERF402).
    return list(projects_response.projects)
|
|
79
79
|
|
|
80
80
|
|
|
81
|
+
def _raise_for_status(response: httpx.Response) -> None:
    """
    Custom raise for status that adds response body information to the error message, but still uses the httpx
    error classes

    Args:
        response: The httpx response to inspect.

    Raises:
        httpx.HTTPStatusError: for 4xx/5xx responses; the message embeds a
            trimmed body snippet and, when available, a request/correlation id
            so failures can be matched to server-side logs.
    """
    try:
        response.raise_for_status()
    except httpx.HTTPStatusError as e:
        body = _response_body_snippet(response, limit=250)
        # Prefer the request-id header; fall back to correlation-id.
        request_id = response.headers.get("x-request-id") or response.headers.get(
            "x-correlation-id"
        )
        rid = f" [request id: {request_id}]" if request_id else ""
        body_part = f" - {body}" if body else ""
        # Re-raise with an explicit cause (`from e`) so the traceback reads
        # "direct cause" rather than "during handling ... another exception".
        raise httpx.HTTPStatusError(
            f"HTTP {response.status_code} for url {response.url}{body_part}{rid}",
            request=e.request or response.request,
            response=e.response or response,
        ) from e
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def _response_body_snippet(response: httpx.Response, limit: int = 500) -> str:
|
|
103
|
+
try:
|
|
104
|
+
text = response.text
|
|
105
|
+
if not text:
|
|
106
|
+
# fallback attempt if body not read
|
|
107
|
+
try:
|
|
108
|
+
data = response.json()
|
|
109
|
+
except Exception:
|
|
110
|
+
data = None
|
|
111
|
+
if data is not None:
|
|
112
|
+
text = str(data)
|
|
113
|
+
text = (text or "").strip()
|
|
114
|
+
if len(text) > limit:
|
|
115
|
+
return text[: limit - 3] + "..."
|
|
116
|
+
return text
|
|
117
|
+
except Exception:
|
|
118
|
+
return ""
|
|
119
|
+
|
|
120
|
+
|
|
81
121
|
class ProjectClient(BaseClient):
|
|
82
122
|
"""Project-scoped client for deployment operations."""
|
|
83
123
|
|
|
@@ -114,7 +154,7 @@ class ProjectClient(BaseClient):
|
|
|
114
154
|
"/api/v1beta1/deployments",
|
|
115
155
|
params={"project_id": self.project_id},
|
|
116
156
|
)
|
|
117
|
-
response
|
|
157
|
+
_raise_for_status(response)
|
|
118
158
|
deployments_response = DeploymentsListResponse.model_validate(response.json())
|
|
119
159
|
return [deployment for deployment in deployments_response.deployments]
|
|
120
160
|
|
|
@@ -125,7 +165,7 @@ class ProjectClient(BaseClient):
|
|
|
125
165
|
f"/api/v1beta1/deployments/{deployment_id}",
|
|
126
166
|
params={"project_id": self.project_id, "include_events": include_events},
|
|
127
167
|
)
|
|
128
|
-
response
|
|
168
|
+
_raise_for_status(response)
|
|
129
169
|
return DeploymentResponse.model_validate(response.json())
|
|
130
170
|
|
|
131
171
|
async def create_deployment(
|
|
@@ -136,7 +176,7 @@ class ProjectClient(BaseClient):
|
|
|
136
176
|
params={"project_id": self.project_id},
|
|
137
177
|
json=deployment_data.model_dump(exclude_none=True),
|
|
138
178
|
)
|
|
139
|
-
response
|
|
179
|
+
_raise_for_status(response)
|
|
140
180
|
return DeploymentResponse.model_validate(response.json())
|
|
141
181
|
|
|
142
182
|
async def delete_deployment(self, deployment_id: str) -> None:
|
|
@@ -144,7 +184,7 @@ class ProjectClient(BaseClient):
|
|
|
144
184
|
f"/api/v1beta1/deployments/{deployment_id}",
|
|
145
185
|
params={"project_id": self.project_id},
|
|
146
186
|
)
|
|
147
|
-
response
|
|
187
|
+
_raise_for_status(response)
|
|
148
188
|
|
|
149
189
|
async def update_deployment(
|
|
150
190
|
self,
|
|
@@ -156,7 +196,7 @@ class ProjectClient(BaseClient):
|
|
|
156
196
|
params={"project_id": self.project_id},
|
|
157
197
|
json=update_data.model_dump(),
|
|
158
198
|
)
|
|
159
|
-
response
|
|
199
|
+
_raise_for_status(response)
|
|
160
200
|
return DeploymentResponse.model_validate(response.json())
|
|
161
201
|
|
|
162
202
|
async def validate_repository(
|
|
@@ -174,7 +214,7 @@ class ProjectClient(BaseClient):
|
|
|
174
214
|
pat=pat,
|
|
175
215
|
).model_dump(),
|
|
176
216
|
)
|
|
177
|
-
response
|
|
217
|
+
_raise_for_status(response)
|
|
178
218
|
return RepositoryValidationResponse.model_validate(response.json())
|
|
179
219
|
|
|
180
220
|
async def stream_deployment_logs(
|
|
@@ -204,7 +244,7 @@ class ProjectClient(BaseClient):
|
|
|
204
244
|
async with self.hookless_client.stream(
|
|
205
245
|
"GET", url, params=params, headers=headers, timeout=None
|
|
206
246
|
) as response:
|
|
207
|
-
response
|
|
247
|
+
_raise_for_status(response)
|
|
208
248
|
|
|
209
249
|
event_name: str | None = None
|
|
210
250
|
data_lines: list[str] = []
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/deployment_config.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
{llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/deployments.py
RENAMED
|
File without changes
|
{llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/git_validation.py
RENAMED
|
File without changes
|
{llama_deploy_core-0.3.6 → llama_deploy_core-0.3.8}/src/llama_deploy/core/schema/projects.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|