llamactl 0.3.0a7__tar.gz → 0.3.0a9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/PKG-INFO +4 -4
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/pyproject.toml +5 -5
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/__init__.py +2 -1
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/client.py +115 -16
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/commands/deployment.py +14 -4
- llamactl-0.3.0a9/src/llama_deploy/cli/commands/init.py +210 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/deployment_form.py +30 -5
- llamactl-0.3.0a9/src/llama_deploy/cli/textual/deployment_monitor.py +474 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/README.md +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/app.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/commands/aliased_group.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/commands/profile.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/commands/serve.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/config.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/debug.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/env.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/interactive_prompts/utils.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/options.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/deployment_help.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/git_validation.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/github_callback_server.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/llama_loader.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/profile_form.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/secrets_form.py +0 -0
- {llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/styles.tcss +0 -0
|
@@ -1,19 +1,19 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: llamactl
|
|
3
|
-
Version: 0.3.
|
|
3
|
+
Version: 0.3.0a9
|
|
4
4
|
Summary: A command-line interface for managing LlamaDeploy projects and deployments
|
|
5
5
|
Author: Adrian Lyjak
|
|
6
6
|
Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
|
|
7
7
|
License: MIT
|
|
8
|
-
Requires-Dist: llama-deploy-core>=0.3.
|
|
9
|
-
Requires-Dist: llama-deploy-appserver>=0.3.
|
|
8
|
+
Requires-Dist: llama-deploy-core>=0.3.0a9,<0.4.0
|
|
9
|
+
Requires-Dist: llama-deploy-appserver>=0.3.0a9,<0.4.0
|
|
10
10
|
Requires-Dist: httpx>=0.24.0
|
|
11
11
|
Requires-Dist: rich>=13.0.0
|
|
12
12
|
Requires-Dist: questionary>=2.0.0
|
|
13
13
|
Requires-Dist: click>=8.2.1
|
|
14
14
|
Requires-Dist: python-dotenv>=1.0.0
|
|
15
15
|
Requires-Dist: tenacity>=9.1.2
|
|
16
|
-
Requires-Dist: textual>=
|
|
16
|
+
Requires-Dist: textual>=5.3.0
|
|
17
17
|
Requires-Dist: aiohttp>=3.12.14
|
|
18
18
|
Requires-Dist: copier>=9.9.0
|
|
19
19
|
Requires-Python: >=3.12, <4
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[project]
|
|
2
2
|
name = "llamactl"
|
|
3
|
-
version = "0.3.
|
|
3
|
+
version = "0.3.0a9"
|
|
4
4
|
description = "A command-line interface for managing LlamaDeploy projects and deployments"
|
|
5
5
|
readme = "README.md"
|
|
6
6
|
license = { text = "MIT" }
|
|
@@ -9,17 +9,17 @@ authors = [
|
|
|
9
9
|
]
|
|
10
10
|
requires-python = ">=3.12, <4"
|
|
11
11
|
dependencies = [
|
|
12
|
-
"llama-deploy-core>=0.3.
|
|
13
|
-
"llama-deploy-appserver>=0.3.
|
|
12
|
+
"llama-deploy-core>=0.3.0a9,<0.4.0",
|
|
13
|
+
"llama-deploy-appserver>=0.3.0a9,<0.4.0",
|
|
14
14
|
"httpx>=0.24.0",
|
|
15
15
|
"rich>=13.0.0",
|
|
16
16
|
"questionary>=2.0.0",
|
|
17
17
|
"click>=8.2.1",
|
|
18
18
|
"python-dotenv>=1.0.0",
|
|
19
19
|
"tenacity>=9.1.2",
|
|
20
|
-
"textual>=
|
|
20
|
+
"textual>=5.3.0",
|
|
21
21
|
"aiohttp>=3.12.14",
|
|
22
|
-
"copier>=9.9.0"
|
|
22
|
+
"copier>=9.9.0",
|
|
23
23
|
]
|
|
24
24
|
|
|
25
25
|
[project.scripts]
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
from llama_deploy.cli.commands.deployment import deployments
|
|
2
|
+
from llama_deploy.cli.commands.init import init
|
|
2
3
|
from llama_deploy.cli.commands.profile import profiles
|
|
3
4
|
from llama_deploy.cli.commands.serve import serve
|
|
4
5
|
|
|
@@ -10,7 +11,7 @@ def main() -> None:
|
|
|
10
11
|
app()
|
|
11
12
|
|
|
12
13
|
|
|
13
|
-
__all__ = ["app", "deployments", "profiles", "serve"]
|
|
14
|
+
__all__ = ["app", "deployments", "profiles", "serve", "init"]
|
|
14
15
|
|
|
15
16
|
|
|
16
17
|
if __name__ == "__main__":
|
|
@@ -1,6 +1,8 @@
|
|
|
1
|
-
|
|
1
|
+
import contextlib
|
|
2
|
+
from typing import Iterator, List
|
|
2
3
|
|
|
3
4
|
import httpx
|
|
5
|
+
from llama_deploy.core.schema.base import LogEvent
|
|
4
6
|
from llama_deploy.core.schema.deployments import (
|
|
5
7
|
DeploymentCreate,
|
|
6
8
|
DeploymentResponse,
|
|
@@ -17,19 +19,24 @@ from rich.console import Console
|
|
|
17
19
|
from .config import config_manager
|
|
18
20
|
|
|
19
21
|
|
|
22
|
+
class ClientError(Exception):
|
|
23
|
+
"""Base class for client errors."""
|
|
24
|
+
|
|
25
|
+
def __init__(self, message: str) -> None:
|
|
26
|
+
super().__init__(message)
|
|
27
|
+
|
|
28
|
+
|
|
20
29
|
class BaseClient:
|
|
21
30
|
def __init__(self, base_url: str, console: Console) -> None:
|
|
22
31
|
self.base_url = base_url.rstrip("/")
|
|
23
32
|
self.console = console
|
|
24
33
|
self.client = httpx.Client(
|
|
25
|
-
base_url=self.base_url,
|
|
34
|
+
base_url=self.base_url,
|
|
35
|
+
event_hooks={"response": [self._handle_response]},
|
|
26
36
|
)
|
|
37
|
+
self.hookless_client = httpx.Client(base_url=self.base_url)
|
|
27
38
|
|
|
28
39
|
def _handle_response(self, response: httpx.Response) -> None:
|
|
29
|
-
if "X-Warning" in response.headers:
|
|
30
|
-
self.console.print(
|
|
31
|
-
f"[yellow]Warning: {response.headers['X-Warning']}[/yellow]"
|
|
32
|
-
)
|
|
33
40
|
try:
|
|
34
41
|
response.raise_for_status()
|
|
35
42
|
except httpx.HTTPStatusError as e:
|
|
@@ -42,9 +49,9 @@ class BaseClient:
|
|
|
42
49
|
error_message = str(error_data)
|
|
43
50
|
except (ValueError, KeyError):
|
|
44
51
|
error_message = e.response.text
|
|
45
|
-
raise
|
|
52
|
+
raise ClientError(f"HTTP {e.response.status_code}: {error_message}") from e
|
|
46
53
|
except httpx.RequestError as e:
|
|
47
|
-
raise
|
|
54
|
+
raise ClientError(f"Request failed: {e}") from e
|
|
48
55
|
|
|
49
56
|
|
|
50
57
|
class ControlPlaneClient(BaseClient):
|
|
@@ -59,7 +66,7 @@ class ControlPlaneClient(BaseClient):
|
|
|
59
66
|
return response.json()
|
|
60
67
|
|
|
61
68
|
def list_projects(self) -> List[ProjectSummary]:
|
|
62
|
-
response = self.client.get("/projects
|
|
69
|
+
response = self.client.get("/api/v1beta1/deployments/list-projects")
|
|
63
70
|
projects_response = ProjectsListResponse.model_validate(response.json())
|
|
64
71
|
return [project for project in projects_response.projects]
|
|
65
72
|
|
|
@@ -95,25 +102,37 @@ class ProjectClient(BaseClient):
|
|
|
95
102
|
self.project_id = project_id
|
|
96
103
|
|
|
97
104
|
def list_deployments(self) -> List[DeploymentResponse]:
|
|
98
|
-
response = self.client.get(
|
|
105
|
+
response = self.client.get(
|
|
106
|
+
"/api/v1beta1/deployments",
|
|
107
|
+
params={"project_id": self.project_id},
|
|
108
|
+
)
|
|
99
109
|
deployments_response = DeploymentsListResponse.model_validate(response.json())
|
|
100
110
|
return [deployment for deployment in deployments_response.deployments]
|
|
101
111
|
|
|
102
|
-
def get_deployment(
|
|
103
|
-
|
|
112
|
+
def get_deployment(
|
|
113
|
+
self, deployment_id: str, include_events: bool = False
|
|
114
|
+
) -> DeploymentResponse:
|
|
115
|
+
response = self.client.get(
|
|
116
|
+
f"/api/v1beta1/deployments/{deployment_id}",
|
|
117
|
+
params={"project_id": self.project_id, "include_events": include_events},
|
|
118
|
+
)
|
|
104
119
|
return DeploymentResponse.model_validate(response.json())
|
|
105
120
|
|
|
106
121
|
def create_deployment(
|
|
107
122
|
self, deployment_data: DeploymentCreate
|
|
108
123
|
) -> DeploymentResponse:
|
|
109
124
|
response = self.client.post(
|
|
110
|
-
|
|
125
|
+
"/api/v1beta1/deployments",
|
|
126
|
+
params={"project_id": self.project_id},
|
|
111
127
|
json=deployment_data.model_dump(exclude_none=True),
|
|
112
128
|
)
|
|
113
129
|
return DeploymentResponse.model_validate(response.json())
|
|
114
130
|
|
|
115
131
|
def delete_deployment(self, deployment_id: str) -> None:
|
|
116
|
-
self.client.delete(
|
|
132
|
+
self.client.delete(
|
|
133
|
+
f"/api/v1beta1/deployments/{deployment_id}",
|
|
134
|
+
params={"project_id": self.project_id},
|
|
135
|
+
)
|
|
117
136
|
|
|
118
137
|
def update_deployment(
|
|
119
138
|
self,
|
|
@@ -121,7 +140,8 @@ class ProjectClient(BaseClient):
|
|
|
121
140
|
update_data: DeploymentUpdate,
|
|
122
141
|
) -> DeploymentResponse:
|
|
123
142
|
response = self.client.patch(
|
|
124
|
-
f"/
|
|
143
|
+
f"/api/v1beta1/deployments/{deployment_id}",
|
|
144
|
+
params={"project_id": self.project_id},
|
|
125
145
|
json=update_data.model_dump(),
|
|
126
146
|
)
|
|
127
147
|
return DeploymentResponse.model_validate(response.json())
|
|
@@ -133,7 +153,8 @@ class ProjectClient(BaseClient):
|
|
|
133
153
|
pat: str | None = None,
|
|
134
154
|
) -> RepositoryValidationResponse:
|
|
135
155
|
response = self.client.post(
|
|
136
|
-
|
|
156
|
+
"/api/v1beta1/deployments/validate-repository",
|
|
157
|
+
params={"project_id": self.project_id},
|
|
137
158
|
json=RepositoryValidationRequest(
|
|
138
159
|
repository_url=repo_url,
|
|
139
160
|
deployment_id=deployment_id,
|
|
@@ -142,6 +163,81 @@ class ProjectClient(BaseClient):
|
|
|
142
163
|
)
|
|
143
164
|
return RepositoryValidationResponse.model_validate(response.json())
|
|
144
165
|
|
|
166
|
+
def stream_deployment_logs(
|
|
167
|
+
self,
|
|
168
|
+
deployment_id: str,
|
|
169
|
+
*,
|
|
170
|
+
include_init_containers: bool = False,
|
|
171
|
+
since_seconds: int | None = None,
|
|
172
|
+
tail_lines: int | None = None,
|
|
173
|
+
) -> tuple["Closer", Iterator[LogEvent]]:
|
|
174
|
+
"""Stream logs as LogEvent items from the control plane using SSE.
|
|
175
|
+
|
|
176
|
+
This yields `LogEvent` models until the stream ends (e.g. rollout).
|
|
177
|
+
"""
|
|
178
|
+
# Use a separate client without response hooks so we don't consume the stream
|
|
179
|
+
|
|
180
|
+
params = {
|
|
181
|
+
"project_id": self.project_id,
|
|
182
|
+
"include_init_containers": include_init_containers,
|
|
183
|
+
}
|
|
184
|
+
if since_seconds is not None:
|
|
185
|
+
params["since_seconds"] = since_seconds
|
|
186
|
+
if tail_lines is not None:
|
|
187
|
+
params["tail_lines"] = tail_lines
|
|
188
|
+
|
|
189
|
+
url = f"/api/v1beta1/deployments/{deployment_id}/logs"
|
|
190
|
+
headers = {"Accept": "text/event-stream"}
|
|
191
|
+
|
|
192
|
+
stack = contextlib.ExitStack()
|
|
193
|
+
response = stack.enter_context(
|
|
194
|
+
self.hookless_client.stream(
|
|
195
|
+
"GET", url, params=params, headers=headers, timeout=None
|
|
196
|
+
)
|
|
197
|
+
)
|
|
198
|
+
try:
|
|
199
|
+
response.raise_for_status()
|
|
200
|
+
except Exception:
|
|
201
|
+
stack.close()
|
|
202
|
+
raise
|
|
203
|
+
|
|
204
|
+
return stack.close, _iterate_log_stream(response, stack.close)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def _iterate_log_stream(
|
|
208
|
+
response: httpx.Response, closer: "Closer"
|
|
209
|
+
) -> Iterator[LogEvent]:
|
|
210
|
+
event_name: str | None = None
|
|
211
|
+
data_lines: list[str] = []
|
|
212
|
+
|
|
213
|
+
try:
|
|
214
|
+
for line in response.iter_lines():
|
|
215
|
+
if line is None:
|
|
216
|
+
continue
|
|
217
|
+
line = line.decode() if isinstance(line, (bytes, bytearray)) else line
|
|
218
|
+
print("got line", line)
|
|
219
|
+
if line.startswith("event:"):
|
|
220
|
+
event_name = line[len("event:") :].strip()
|
|
221
|
+
elif line.startswith("data:"):
|
|
222
|
+
data_lines.append(line[len("data:") :].lstrip())
|
|
223
|
+
elif line.strip() == "":
|
|
224
|
+
if event_name == "log" and data_lines:
|
|
225
|
+
data_str = "\n".join(data_lines)
|
|
226
|
+
try:
|
|
227
|
+
yield LogEvent.model_validate_json(data_str)
|
|
228
|
+
print("yielded log event", data_str)
|
|
229
|
+
except Exception:
|
|
230
|
+
# If parsing fails, skip malformed event
|
|
231
|
+
pass
|
|
232
|
+
# reset for next event
|
|
233
|
+
event_name = None
|
|
234
|
+
data_lines = []
|
|
235
|
+
finally:
|
|
236
|
+
try:
|
|
237
|
+
closer()
|
|
238
|
+
except Exception:
|
|
239
|
+
pass
|
|
240
|
+
|
|
145
241
|
|
|
146
242
|
def get_control_plane_client(base_url: str | None = None) -> ControlPlaneClient:
|
|
147
243
|
console = Console()
|
|
@@ -174,3 +270,6 @@ def get_project_client(
|
|
|
174
270
|
if not resolved_project_id:
|
|
175
271
|
raise ValueError("Project ID is required")
|
|
176
272
|
return ProjectClient(resolved_base_url, resolved_project_id, console)
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
type Closer = callable[tuple[()], None]
|
|
@@ -19,6 +19,7 @@ from ..interactive_prompts.utils import (
|
|
|
19
19
|
)
|
|
20
20
|
from ..options import global_options
|
|
21
21
|
from ..textual.deployment_form import create_deployment_form, edit_deployment_form
|
|
22
|
+
from ..textual.deployment_monitor import monitor_deployment_screen
|
|
22
23
|
|
|
23
24
|
|
|
24
25
|
@app.group(
|
|
@@ -88,7 +89,12 @@ def list_deployments() -> None:
|
|
|
88
89
|
@deployments.command("get")
|
|
89
90
|
@global_options
|
|
90
91
|
@click.argument("deployment_id", required=False)
|
|
91
|
-
|
|
92
|
+
@click.option(
|
|
93
|
+
"--non-interactive",
|
|
94
|
+
is_flag=True,
|
|
95
|
+
help="Do not open a live monitor screen showing status and streaming logs",
|
|
96
|
+
)
|
|
97
|
+
def get_deployment(deployment_id: str | None, non_interactive: bool) -> None:
|
|
92
98
|
"""Get details of a specific deployment"""
|
|
93
99
|
try:
|
|
94
100
|
client = get_project_client()
|
|
@@ -98,6 +104,10 @@ def get_deployment(deployment_id: str | None) -> None:
|
|
|
98
104
|
rprint("[yellow]No deployment selected[/yellow]")
|
|
99
105
|
return
|
|
100
106
|
|
|
107
|
+
if not non_interactive:
|
|
108
|
+
monitor_deployment_screen(deployment_id)
|
|
109
|
+
return
|
|
110
|
+
|
|
101
111
|
deployment = client.get_deployment(deployment_id)
|
|
102
112
|
|
|
103
113
|
table = Table(title=f"Deployment: {deployment.name}")
|
|
@@ -143,7 +153,7 @@ def create_deployment(
|
|
|
143
153
|
git_ref: str | None,
|
|
144
154
|
personal_access_token: str | None,
|
|
145
155
|
) -> None:
|
|
146
|
-
"""
|
|
156
|
+
"""Interactively create a new deployment"""
|
|
147
157
|
|
|
148
158
|
# Use interactive creation
|
|
149
159
|
deployment_form = create_deployment_form()
|
|
@@ -214,11 +224,11 @@ def edit_deployment(deployment_id: str | None) -> None:
|
|
|
214
224
|
raise click.Abort()
|
|
215
225
|
|
|
216
226
|
|
|
217
|
-
@deployments.command("
|
|
227
|
+
@deployments.command("update")
|
|
218
228
|
@global_options
|
|
219
229
|
@click.argument("deployment_id", required=False)
|
|
220
230
|
def refresh_deployment(deployment_id: str | None) -> None:
|
|
221
|
-
"""
|
|
231
|
+
"""Update the deployment, pulling the latest code from it's branch"""
|
|
222
232
|
try:
|
|
223
233
|
client = get_project_client()
|
|
224
234
|
|
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import shutil
|
|
3
|
+
import subprocess
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import click
|
|
8
|
+
import copier
|
|
9
|
+
import questionary
|
|
10
|
+
from click.exceptions import Exit
|
|
11
|
+
from llama_deploy.cli.app import app
|
|
12
|
+
from llama_deploy.cli.options import global_options
|
|
13
|
+
from rich import print as rprint
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass
|
|
17
|
+
class TemplateOption:
|
|
18
|
+
id: str
|
|
19
|
+
name: str
|
|
20
|
+
description: str
|
|
21
|
+
git_url: str
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
options = [
|
|
25
|
+
TemplateOption(
|
|
26
|
+
id="basic-ui",
|
|
27
|
+
name="Basic UI",
|
|
28
|
+
description="A basic starter workflow with a React Vite UI",
|
|
29
|
+
git_url="https://github.com/adrianlyjak/qs",
|
|
30
|
+
),
|
|
31
|
+
TemplateOption(
|
|
32
|
+
id="extraction-review",
|
|
33
|
+
name="Extraction Agent with Review UI",
|
|
34
|
+
description="Extract data from documents using a custom schema and Llama Cloud. Includes a UI to review and correct the results",
|
|
35
|
+
git_url="https://github.com/run-llama/template-workflow-data-extraction",
|
|
36
|
+
),
|
|
37
|
+
]
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@app.command()
|
|
41
|
+
@click.option(
|
|
42
|
+
"--update",
|
|
43
|
+
is_flag=True,
|
|
44
|
+
help="Instead of creating a new app, update the current app to the latest version. Other options will be ignored.",
|
|
45
|
+
)
|
|
46
|
+
@click.option(
|
|
47
|
+
"--template",
|
|
48
|
+
type=click.Choice([o.id for o in options]),
|
|
49
|
+
help="The template to use for the new app",
|
|
50
|
+
)
|
|
51
|
+
@click.option(
|
|
52
|
+
"--dir",
|
|
53
|
+
help="The directory to create the new app in",
|
|
54
|
+
type=click.Path(
|
|
55
|
+
file_okay=False, dir_okay=True, writable=True, resolve_path=True, path_type=Path
|
|
56
|
+
),
|
|
57
|
+
)
|
|
58
|
+
@click.option(
|
|
59
|
+
"--force",
|
|
60
|
+
is_flag=True,
|
|
61
|
+
help="Force overwrite the directory if it exists",
|
|
62
|
+
)
|
|
63
|
+
@global_options
|
|
64
|
+
def init(
|
|
65
|
+
update: bool,
|
|
66
|
+
template: str | None,
|
|
67
|
+
dir: Path | None,
|
|
68
|
+
force: bool,
|
|
69
|
+
) -> None:
|
|
70
|
+
"""Create a new app repository from a template"""
|
|
71
|
+
if update:
|
|
72
|
+
_update()
|
|
73
|
+
else:
|
|
74
|
+
_create(template, dir, force)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _create(template: str | None, dir: Path | None, force: bool) -> None:
|
|
78
|
+
if template is None:
|
|
79
|
+
template = questionary.select(
|
|
80
|
+
"Choose a template",
|
|
81
|
+
choices=[
|
|
82
|
+
questionary.Choice(title=o.name, value=o.id, description=o.description)
|
|
83
|
+
for o in options
|
|
84
|
+
],
|
|
85
|
+
).ask()
|
|
86
|
+
if template is None:
|
|
87
|
+
rprint("No template selected")
|
|
88
|
+
raise Exit(1)
|
|
89
|
+
if dir is None:
|
|
90
|
+
dir_str = questionary.text(
|
|
91
|
+
"Enter the directory to create the new app in", default=template
|
|
92
|
+
).ask()
|
|
93
|
+
if not dir_str:
|
|
94
|
+
rprint("No directory provided")
|
|
95
|
+
raise Exit(1)
|
|
96
|
+
dir = Path(dir_str)
|
|
97
|
+
resolved_template = next((o for o in options if o.id == template), None)
|
|
98
|
+
if resolved_template is None:
|
|
99
|
+
rprint(f"Template {template} not found")
|
|
100
|
+
raise Exit(1)
|
|
101
|
+
if dir.exists():
|
|
102
|
+
is_ok = (
|
|
103
|
+
force
|
|
104
|
+
or questionary.confirm("Directory exists. Overwrite?", default=False).ask()
|
|
105
|
+
)
|
|
106
|
+
if not is_ok:
|
|
107
|
+
raise Exit(1)
|
|
108
|
+
else:
|
|
109
|
+
shutil.rmtree(dir, ignore_errors=True)
|
|
110
|
+
copier.run_copy(
|
|
111
|
+
resolved_template.git_url,
|
|
112
|
+
dir,
|
|
113
|
+
quiet=True,
|
|
114
|
+
)
|
|
115
|
+
# Initialize git repository if git is available
|
|
116
|
+
is_git_initialized = False
|
|
117
|
+
try:
|
|
118
|
+
subprocess.run(["git", "--version"], check=True, capture_output=True)
|
|
119
|
+
|
|
120
|
+
# Change to the new directory and initialize git repo
|
|
121
|
+
original_cwd = Path.cwd()
|
|
122
|
+
os.chdir(dir)
|
|
123
|
+
|
|
124
|
+
try:
|
|
125
|
+
subprocess.run(["git", "init"], check=True, capture_output=True)
|
|
126
|
+
subprocess.run(["git", "add", "."], check=True, capture_output=True)
|
|
127
|
+
subprocess.run(
|
|
128
|
+
["git", "commit", "-m", "Initial commit"],
|
|
129
|
+
check=True,
|
|
130
|
+
capture_output=True,
|
|
131
|
+
)
|
|
132
|
+
is_git_initialized = True
|
|
133
|
+
finally:
|
|
134
|
+
os.chdir(original_cwd)
|
|
135
|
+
|
|
136
|
+
except (subprocess.CalledProcessError, FileNotFoundError):
|
|
137
|
+
# Git not available or failed - continue without git initialization
|
|
138
|
+
pass
|
|
139
|
+
|
|
140
|
+
rprint(
|
|
141
|
+
f"Successfully created [blue]{dir}[/] using the [blue]{resolved_template.name}[/] template! 🎉 🦙 💾"
|
|
142
|
+
)
|
|
143
|
+
rprint("")
|
|
144
|
+
rprint("[bold]To run locally:[/]")
|
|
145
|
+
rprint(f" [orange3]cd[/] {dir}")
|
|
146
|
+
rprint(" [orange3]uvx[/] llamactl serve")
|
|
147
|
+
rprint("")
|
|
148
|
+
rprint("[bold]To deploy:[/]")
|
|
149
|
+
if not is_git_initialized:
|
|
150
|
+
rprint(" [orange3]git[/] init")
|
|
151
|
+
rprint(" [orange3]git[/] add .")
|
|
152
|
+
rprint(" [orange3]git[/] commit -m 'Initial commit'")
|
|
153
|
+
rprint("")
|
|
154
|
+
rprint("[dim](Create a new repo and add it as a remote)[/]")
|
|
155
|
+
rprint("")
|
|
156
|
+
rprint(" [orange3]git[/] remote add origin <your-repo-url>")
|
|
157
|
+
rprint(" [orange3]git[/] push -u origin main")
|
|
158
|
+
rprint("")
|
|
159
|
+
# rprint(" [orange3]uvx[/] llamactl login")
|
|
160
|
+
rprint(" [orange3]uvx[/] llamactl deploy")
|
|
161
|
+
rprint("")
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _update():
|
|
165
|
+
"""Update the app to the latest version"""
|
|
166
|
+
try:
|
|
167
|
+
copier.run_update(
|
|
168
|
+
overwrite=True,
|
|
169
|
+
skip_answered=True,
|
|
170
|
+
quiet=True,
|
|
171
|
+
)
|
|
172
|
+
except copier.UserMessageError as e:
|
|
173
|
+
rprint(f"{e}")
|
|
174
|
+
raise Exit(1)
|
|
175
|
+
|
|
176
|
+
# Check git status and warn about conflicts
|
|
177
|
+
try:
|
|
178
|
+
result = subprocess.run(
|
|
179
|
+
["git", "status", "--porcelain"],
|
|
180
|
+
check=True,
|
|
181
|
+
capture_output=True,
|
|
182
|
+
text=True,
|
|
183
|
+
)
|
|
184
|
+
|
|
185
|
+
if result.stdout.strip():
|
|
186
|
+
conflicted_files = []
|
|
187
|
+
modified_files = []
|
|
188
|
+
|
|
189
|
+
for line in result.stdout.strip().split("\n"):
|
|
190
|
+
status = line[:2]
|
|
191
|
+
filename = line[3:]
|
|
192
|
+
|
|
193
|
+
if "UU" in status or "AA" in status or "DD" in status:
|
|
194
|
+
conflicted_files.append(filename)
|
|
195
|
+
elif status.strip():
|
|
196
|
+
modified_files.append(filename)
|
|
197
|
+
|
|
198
|
+
if conflicted_files:
|
|
199
|
+
rprint("")
|
|
200
|
+
rprint("⚠️ [bold]Files with conflicts detected:[/]")
|
|
201
|
+
for file in conflicted_files:
|
|
202
|
+
rprint(f" {file}")
|
|
203
|
+
rprint("")
|
|
204
|
+
rprint(
|
|
205
|
+
"Please manually resolve conflicts with a merge editor before proceeding."
|
|
206
|
+
)
|
|
207
|
+
|
|
208
|
+
except (subprocess.CalledProcessError, FileNotFoundError):
|
|
209
|
+
# Git not available or not in a git repo - continue silently
|
|
210
|
+
pass
|
|
@@ -13,6 +13,10 @@ from llama_deploy.cli.textual.deployment_help import (
|
|
|
13
13
|
DeploymentHelpBackMessage,
|
|
14
14
|
DeploymentHelpWidget,
|
|
15
15
|
)
|
|
16
|
+
from llama_deploy.cli.textual.deployment_monitor import (
|
|
17
|
+
DeploymentMonitorWidget,
|
|
18
|
+
MonitorCloseMessage,
|
|
19
|
+
)
|
|
16
20
|
from llama_deploy.cli.textual.git_validation import (
|
|
17
21
|
GitValidationWidget,
|
|
18
22
|
ValidationCancelMessage,
|
|
@@ -334,10 +338,11 @@ class DeploymentEditApp(App[DeploymentResponse | None]):
|
|
|
334
338
|
|
|
335
339
|
CSS_PATH = Path(__file__).parent / "styles.tcss"
|
|
336
340
|
|
|
337
|
-
# App states: 'form', 'validation', or '
|
|
341
|
+
# App states: 'form', 'validation', 'help', or 'monitor'
|
|
338
342
|
current_state: reactive[str] = reactive("form", recompose=True)
|
|
339
343
|
form_data: reactive[DeploymentForm] = reactive(DeploymentForm())
|
|
340
344
|
save_error: reactive[str] = reactive("", recompose=True)
|
|
345
|
+
saved_deployment = reactive[DeploymentResponse | None](None, recompose=True)
|
|
341
346
|
|
|
342
347
|
def __init__(self, initial_data: DeploymentForm):
|
|
343
348
|
super().__init__()
|
|
@@ -350,10 +355,14 @@ class DeploymentEditApp(App[DeploymentResponse | None]):
|
|
|
350
355
|
def on_key(self, event) -> None:
|
|
351
356
|
"""Handle key events, including Ctrl+C"""
|
|
352
357
|
if event.key == "ctrl+c":
|
|
353
|
-
self.
|
|
358
|
+
if self.current_state == "monitor" and self.saved_deployment is not None:
|
|
359
|
+
self.exit(self.saved_deployment)
|
|
360
|
+
else:
|
|
361
|
+
self.exit(None)
|
|
354
362
|
|
|
355
363
|
def compose(self) -> ComposeResult:
|
|
356
|
-
|
|
364
|
+
is_slim = self.current_state != "monitor"
|
|
365
|
+
with Container(classes="form-container" if is_slim else ""):
|
|
357
366
|
if self.current_state == "form":
|
|
358
367
|
yield DeploymentFormWidget(self.form_data, self.save_error)
|
|
359
368
|
elif self.current_state == "validation":
|
|
@@ -368,6 +377,11 @@ class DeploymentEditApp(App[DeploymentResponse | None]):
|
|
|
368
377
|
)
|
|
369
378
|
elif self.current_state == "help":
|
|
370
379
|
yield DeploymentHelpWidget()
|
|
380
|
+
elif self.current_state == "monitor":
|
|
381
|
+
deployment_id = (
|
|
382
|
+
self.saved_deployment.id if self.saved_deployment else ""
|
|
383
|
+
)
|
|
384
|
+
yield DeploymentMonitorWidget(deployment_id)
|
|
371
385
|
else:
|
|
372
386
|
yield Static("Unknown state: " + self.current_state)
|
|
373
387
|
|
|
@@ -432,8 +446,15 @@ class DeploymentEditApp(App[DeploymentResponse | None]):
|
|
|
432
446
|
)
|
|
433
447
|
else:
|
|
434
448
|
update_deployment = client.create_deployment(result.to_create())
|
|
435
|
-
#
|
|
436
|
-
self.
|
|
449
|
+
# Save and navigate to embedded monitor screen
|
|
450
|
+
self.saved_deployment = update_deployment
|
|
451
|
+
# Ensure form_data carries the new ID for any subsequent operations
|
|
452
|
+
if not result.is_editing and update_deployment.id:
|
|
453
|
+
updated_form = dataclasses.replace(self.form_data)
|
|
454
|
+
updated_form.id = update_deployment.id
|
|
455
|
+
updated_form.is_editing = True
|
|
456
|
+
self.form_data = updated_form
|
|
457
|
+
self.current_state = "monitor"
|
|
437
458
|
except Exception as e:
|
|
438
459
|
# Return to form and show error
|
|
439
460
|
self.save_error = f"Error saving deployment: {e}"
|
|
@@ -447,6 +468,10 @@ class DeploymentEditApp(App[DeploymentResponse | None]):
|
|
|
447
468
|
"""Handle cancel message from form widget"""
|
|
448
469
|
self.exit(None)
|
|
449
470
|
|
|
471
|
+
def on_monitor_close_message(self, _: MonitorCloseMessage) -> None:
|
|
472
|
+
"""Handle close from embedded monitor by exiting with saved deployment."""
|
|
473
|
+
self.exit(self.saved_deployment)
|
|
474
|
+
|
|
450
475
|
|
|
451
476
|
def edit_deployment_form(
|
|
452
477
|
deployment: DeploymentResponse,
|
|
@@ -0,0 +1,474 @@
|
|
|
1
|
+
"""Textual component to monitor a deployment and stream its logs."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import hashlib
|
|
7
|
+
import threading
|
|
8
|
+
import time
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Iterator
|
|
11
|
+
|
|
12
|
+
from llama_deploy.cli.client import Closer
|
|
13
|
+
from llama_deploy.cli.client import get_project_client as get_client
|
|
14
|
+
from llama_deploy.core.schema.base import LogEvent
|
|
15
|
+
from llama_deploy.core.schema.deployments import DeploymentResponse
|
|
16
|
+
from rich.text import Text
|
|
17
|
+
from textual.app import App, ComposeResult
|
|
18
|
+
from textual.containers import Container, HorizontalGroup, Widget
|
|
19
|
+
from textual.content import Content
|
|
20
|
+
from textual.message import Message
|
|
21
|
+
from textual.reactive import reactive
|
|
22
|
+
from textual.widgets import Button, RichLog, Static
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class DeploymentMonitorWidget(Widget):
|
|
26
|
+
"""Widget that fetches deployment details once and streams logs.
|
|
27
|
+
|
|
28
|
+
Notes:
|
|
29
|
+
- Status is polled periodically
|
|
30
|
+
- Log stream is started with init container logs included on first connect
|
|
31
|
+
- If the stream ends or hangs, we reconnect with duration-aware backoff
|
|
32
|
+
"""
|
|
33
|
+
|
|
34
|
+
DEFAULT_CSS = """
|
|
35
|
+
DeploymentMonitorWidget {
|
|
36
|
+
layout: vertical;
|
|
37
|
+
width: 1fr;
|
|
38
|
+
height: 1fr;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
.monitor-container {
|
|
42
|
+
width: 1fr;
|
|
43
|
+
height: 1fr;
|
|
44
|
+
padding: 0;
|
|
45
|
+
margin: 0;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
.details-grid {
|
|
49
|
+
layout: grid;
|
|
50
|
+
grid-size: 2;
|
|
51
|
+
grid-columns: auto 1fr;
|
|
52
|
+
grid-gutter: 0 1;
|
|
53
|
+
grid-rows: auto;
|
|
54
|
+
height: auto;
|
|
55
|
+
width: 1fr;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
.log-header {
|
|
59
|
+
margin-top: 1;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
.status-line .status-main {
|
|
63
|
+
width: auto;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
.status-line .status-right {
|
|
67
|
+
width: 1fr;
|
|
68
|
+
text-align: right;
|
|
69
|
+
min-width: 12;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
"""
|
|
74
|
+
|
|
75
|
+
deployment_id: str
|
|
76
|
+
deployment = reactive[DeploymentResponse | None](None, recompose=False)
|
|
77
|
+
error_message = reactive("", recompose=False)
|
|
78
|
+
wrap_enabled = reactive(False, recompose=False)
|
|
79
|
+
autoscroll_enabled = reactive(True, recompose=False)
|
|
80
|
+
stream_closer: Closer | None = None
|
|
81
|
+
|
|
82
|
+
def __init__(self, deployment_id: str) -> None:
|
|
83
|
+
super().__init__()
|
|
84
|
+
self.deployment_id = deployment_id
|
|
85
|
+
self._stop_stream = threading.Event()
|
|
86
|
+
# Persist content written to the RichLog across recomposes
|
|
87
|
+
self._log_buffer: list[Text] = []
|
|
88
|
+
|
|
89
|
+
def on_mount(self) -> None:
|
|
90
|
+
# Kick off initial fetch and start logs stream in background
|
|
91
|
+
self.run_worker(self._fetch_deployment(), exclusive=True)
|
|
92
|
+
self.run_worker(self._stream_logs, exclusive=False, thread=True)
|
|
93
|
+
# Start periodic polling of deployment status
|
|
94
|
+
self.run_worker(self._poll_deployment_status(), exclusive=False)
|
|
95
|
+
|
|
96
|
+
def compose(self) -> ComposeResult:
|
|
97
|
+
yield Static("Deployment Status", classes="primary-message")
|
|
98
|
+
yield Static("", classes="error-message", id="error_message")
|
|
99
|
+
|
|
100
|
+
# Single-line status bar with colored icon and deployment ID
|
|
101
|
+
with HorizontalGroup(classes="status-line"):
|
|
102
|
+
yield Static(
|
|
103
|
+
self._render_status_line(), classes="status-main", id="status_line"
|
|
104
|
+
)
|
|
105
|
+
yield Static("", classes="status-right", id="last_event_status")
|
|
106
|
+
yield Static("", classes="last-event mb-1", id="last_event_details")
|
|
107
|
+
|
|
108
|
+
yield Static("Logs", classes="secondary-message log-header")
|
|
109
|
+
yield RichLog(
|
|
110
|
+
id="log_view",
|
|
111
|
+
classes="log-view mb-1",
|
|
112
|
+
auto_scroll=self.autoscroll_enabled,
|
|
113
|
+
wrap=self.wrap_enabled,
|
|
114
|
+
highlight=True,
|
|
115
|
+
)
|
|
116
|
+
|
|
117
|
+
with HorizontalGroup(classes="button-row"):
|
|
118
|
+
wrap_label = "Wrap: On" if self.wrap_enabled else "Wrap: Off"
|
|
119
|
+
auto_label = (
|
|
120
|
+
"Auto-scroll: On" if self.autoscroll_enabled else "Auto-scroll: Off"
|
|
121
|
+
)
|
|
122
|
+
yield Button(wrap_label, id="toggle_wrap", variant="default", compact=True)
|
|
123
|
+
yield Button(
|
|
124
|
+
auto_label, id="toggle_autoscroll", variant="default", compact=True
|
|
125
|
+
)
|
|
126
|
+
yield Button("Copy", id="copy_log", variant="default", compact=True)
|
|
127
|
+
yield Button("Close", id="close", variant="default", compact=True)
|
|
128
|
+
|
|
129
|
+
def on_button_pressed(self, event: Button.Pressed) -> None:
    """Dispatch button presses to the matching action."""
    button_id = event.button.id
    if button_id == "close":
        # Ask the parent app to tear the monitor down.
        self.post_message(MonitorCloseMessage())
        return
    if button_id == "toggle_wrap":
        self.wrap_enabled = not self.wrap_enabled
        return
    if button_id == "toggle_autoscroll":
        self.autoscroll_enabled = not self.autoscroll_enabled
        return
    if button_id == "copy_log":
        # Copy the whole buffered log as plain text to the clipboard.
        plain = "\n".join([str(line) for line in self._log_buffer])
        self.app.copy_to_clipboard(plain)
|
|
140
|
+
|
|
141
|
+
async def _fetch_deployment(self) -> None:
    """Fetch the deployment (with events) once, recording any failure."""
    try:
        client = get_client()
        self.deployment = client.get_deployment(
            self.deployment_id, include_events=True
        )
    except Exception as exc:  # pragma: no cover - network errors
        self.error_message = f"Failed to fetch deployment: {exc}"
    else:
        # A successful fetch clears any stale error banner.
        self.error_message = ""
|
|
151
|
+
|
|
152
|
+
def _stream_logs(self) -> None:
    """Consume the blocking log iterator in a single worker thread.

    Cooperative cancellation uses `self._stop_stream` to exit cleanly.
    Reconnects forever with duration-aware exponential backoff: when a
    connection lived longer than the current backoff window, the delay
    resets to base so a healthy stream reconnects almost immediately.
    """
    client = get_client()

    def _sleep_with_cancel(total_seconds: float) -> None:
        # Sleep in small slices so a stop request is noticed within ~200ms.
        step = 0.2
        remaining = total_seconds
        while remaining > 0 and not self._stop_stream.is_set():
            time.sleep(min(step, remaining))
            remaining -= step

    base_backoff_seconds = 0.2
    backoff_seconds = base_backoff_seconds
    max_backoff_seconds = 30.0

    while not self._stop_stream.is_set():
        try:
            connect_started_at = time.monotonic()
            closer, stream = client.stream_deployment_logs(
                self.deployment_id,
                include_init_containers=True,
            )
            # On any (re)connect, clear existing content
            self.app.call_from_thread(self._reset_log_view_for_reconnect)

            buffered_stream = _buffer_log_lines(stream)

            def close_stream():
                # Best-effort close; the underlying connection may already
                # be gone when shutdown races with streaming.
                try:
                    closer()
                except Exception:
                    pass

            self.stream_closer = close_stream
            # Stream connected; consume until end
            for events in buffered_stream:
                if self._stop_stream.is_set():
                    break
                # Marshal UI updates back to the main thread via the App
                self.app.call_from_thread(self._handle_log_events, events)
            if self._stop_stream.is_set():
                break
            # Stream ended without explicit error; attempt reconnect
            self.app.call_from_thread(
                self._set_error_message, "Log stream disconnected. Reconnecting..."
            )
        except Exception as e:
            if self._stop_stream.is_set():
                break
            # Surface the error to the UI and attempt reconnect with backoff
            self.app.call_from_thread(
                self._set_error_message, f"Log stream failed: {e}. Reconnecting..."
            )

        # Duration-aware backoff: subtract how long the last connection lived
        connection_lifetime = 0.0
        try:
            connection_lifetime = max(0.0, time.monotonic() - connect_started_at)
        except Exception:
            # `connect_started_at` is unbound if the very first statement of
            # the try above raised; treat that as a zero-length connection.
            connection_lifetime = 0.0

        # If the connection lived longer than the current backoff window,
        # reset to base so the next reconnect is immediate.
        if connection_lifetime >= backoff_seconds:
            backoff_seconds = base_backoff_seconds
        else:
            backoff_seconds = min(backoff_seconds * 2.0, max_backoff_seconds)

        delay = max(0.0, backoff_seconds - connection_lifetime)
        if delay > 0:
            _sleep_with_cancel(delay)
|
|
226
|
+
|
|
227
|
+
def _reset_log_view_for_reconnect(self) -> None:
    """Clear UI and buffers so new stream replaces previous content.

    Also clears the in-memory line buffer: the reconnected stream replays
    logs from the start, and without this the Copy button and the
    wrap-toggle redraw would resurrect lines from the previous connection.
    """
    # Fix: the docstring promised "buffers" but only the widget was
    # previously cleared, leaving stale lines in `self._log_buffer`.
    self._log_buffer.clear()
    try:
        log_widget = self.query_one("#log_view", RichLog)
    except Exception:
        # Widget may not be mounted yet; nothing on screen to clear.
        log_widget = None
    if log_widget is not None:
        log_widget.clear()
|
|
235
|
+
|
|
236
|
+
def _set_error_message(self, message: str) -> None:
    # Thread-safe target for `App.call_from_thread`; assigning the
    # reactive attribute triggers `watch_error_message` on the UI thread.
    self.error_message = message
|
|
238
|
+
|
|
239
|
+
def _handle_log_events(self, events: list[LogEvent]) -> None:
    """Render a batch of log events into the RichLog and buffer them."""
    if not events:
        return

    rendered: list[Text] = []
    for event in events:
        line = Text()
        # Prefix each line with its container name in a stable color.
        line.append(
            f"[{event.container}] ", style=self._container_style(event.container)
        )
        line.append(event.text)
        rendered.append(line)

    log_widget = self.query_one("#log_view", RichLog)
    for line in rendered:
        log_widget.write(line)
        # Keep a copy so Copy / wrap-redraw can replay the full history.
        self._log_buffer.append(line)
    # Clear any previous error once we successfully receive logs
    if self.error_message:
        self.error_message = ""
|
|
259
|
+
|
|
260
|
+
def _container_style(self, container_name: str) -> str:
|
|
261
|
+
palette = [
|
|
262
|
+
"bold magenta",
|
|
263
|
+
"bold cyan",
|
|
264
|
+
"bold blue",
|
|
265
|
+
"bold green",
|
|
266
|
+
"bold red",
|
|
267
|
+
"bold bright_blue",
|
|
268
|
+
]
|
|
269
|
+
# Stable hash to pick a color per container name
|
|
270
|
+
h = int(hashlib.sha256(container_name.encode()).hexdigest(), 16)
|
|
271
|
+
return palette[h % len(palette)]
|
|
272
|
+
|
|
273
|
+
def _status_icon_and_style(self, phase: str) -> tuple[str, str]:
|
|
274
|
+
# Map deployment phase to a colored icon
|
|
275
|
+
phase = phase or "-"
|
|
276
|
+
green = "bold green"
|
|
277
|
+
yellow = "bold yellow"
|
|
278
|
+
red = "bold red"
|
|
279
|
+
gray = "grey50"
|
|
280
|
+
if phase in {"Running", "Succeeded"}:
|
|
281
|
+
return "●", green
|
|
282
|
+
if phase in {"Pending", "Syncing", "RollingOut"}:
|
|
283
|
+
return "●", yellow
|
|
284
|
+
if phase in {"Failed", "RolloutFailed"}:
|
|
285
|
+
return "●", red
|
|
286
|
+
return "●", gray
|
|
287
|
+
|
|
288
|
+
def _render_status_line(self) -> Text:
    """Compose the one-line summary: colored icon, phase, deployment ID."""
    phase = self.deployment.status if self.deployment else "Unknown"
    icon, style = self._status_icon_and_style(phase)
    summary = Text()
    summary.append(icon, style=style)
    summary.append(" ")
    summary.append(f"Status: {phase} — Deployment ID: {self.deployment_id or '-'}")
    return summary
|
|
296
|
+
|
|
297
|
+
def _render_last_event_details(self) -> Content:
    """Render a dim one-line message for the newest event, if any."""
    events = self.deployment.events if self.deployment else None
    if not events:
        return Content()
    newest = events[-1]
    return Content.from_rich_text(Text(f" {newest.message}", style="dim"))
|
|
303
|
+
|
|
304
|
+
def _render_last_event_status(self) -> Content:
    """Render "<Type/Reason> <timestamp>" for the newest deployment event.

    Returns empty Content when there is no deployment or no events.
    """
    if not self.deployment or not self.deployment.events:
        return Content()
    txt = Text()
    # Pick the most recent event (list order; newest last).
    latest = self.deployment.events[-1]
    # Fix: removed a dead `ts = None` that was immediately overwritten.
    # NOTE(review): assumes at least one of the timestamps is set —
    # `.strftime` would raise on None; confirm against the API schema.
    ts = (latest.last_timestamp or latest.first_timestamp).strftime(
        "%Y-%m-%d %H:%M:%S"
    )
    parts = [part for part in (latest.type, latest.reason) if part]
    if parts:
        txt.append(f"{'/'.join(parts)} ", style="medium_purple3")
    txt.append(f"{ts}", style="dim")
    return Content.from_rich_text(txt)
|
|
324
|
+
|
|
325
|
+
def on_unmount(self) -> None:
    """Stop background streaming when the widget leaves the DOM."""
    # Signal the streaming worker to exit its loop.
    self._stop_stream.set()
    # Detach the closer before calling it so it runs at most once.
    closer, self.stream_closer = self.stream_closer, None
    if closer is not None:
        closer()
|
|
331
|
+
|
|
332
|
+
# Reactive watchers to update widgets in place instead of recomposing
|
|
333
|
+
def watch_error_message(self, message: str) -> None:
    """Reflect the reactive error message into its banner widget."""
    try:
        banner = self.query_one("#error_message", Static)
    except Exception:
        # Not mounted yet; the value will show on a later update.
        return
    banner.update(message)
    # Hide the banner entirely when there is nothing to report.
    banner.display = bool(message)
|
|
340
|
+
|
|
341
|
+
def watch_deployment(self, deployment: DeploymentResponse | None) -> None:
    """Refresh the status line and event widgets when deployment changes."""
    if deployment is None:
        return

    # Resolve all targets first so a missing widget aborts before any
    # partial update is applied.
    status_widget = self.query_one("#status_line", Static)
    event_summary = self.query_one("#last_event_status", Static)
    event_details = self.query_one("#last_event_details", Static)

    status_widget.update(self._render_status_line())
    event_summary.update(self._render_last_event_status())
    event_details.update(self._render_last_event_details())
    event_details.display = bool(self.deployment and self.deployment.events)
|
|
354
|
+
|
|
355
|
+
def watch_wrap_enabled(self, enabled: bool) -> None:
    """Apply the wrap setting to the log view and sync the button label."""
    try:
        log_widget = self.query_one("#log_view", RichLog)
        log_widget.wrap = enabled
        # Re-write the buffered history so the new wrap mode applies to
        # lines that are already on screen, not just future ones.
        log_widget.clear()
        for line in self._log_buffer:
            log_widget.write(line)
    except Exception:
        # Best effort: widget may not be mounted yet.
        pass
    try:
        self.query_one("#toggle_wrap", Button).label = (
            "Wrap: On" if enabled else "Wrap: Off"
        )
    except Exception:
        pass
|
|
370
|
+
|
|
371
|
+
def watch_autoscroll_enabled(self, enabled: bool) -> None:
    """Apply the auto-scroll setting and sync the toggle button label."""
    try:
        self.query_one("#log_view", RichLog).auto_scroll = enabled
    except Exception:
        # Best effort: widget may not be mounted yet.
        pass
    try:
        self.query_one("#toggle_autoscroll", Button).label = (
            "Auto-scroll: On" if enabled else "Auto-scroll: Off"
        )
    except Exception:
        pass
|
|
382
|
+
|
|
383
|
+
async def _poll_deployment_status(self) -> None:
    """Periodically refresh deployment status to reflect updates in the UI."""
    client = get_client()
    while not self._stop_stream.is_set():
        try:
            self.deployment = client.get_deployment(
                self.deployment_id, include_events=True
            )
            # A successful refresh clears any stale error banner.
            if self.error_message:
                self.error_message = ""
        except Exception as exc:  # pragma: no cover - network errors
            # Non-fatal; the next poll may succeed.
            self.error_message = f"Failed to refresh status: {exc}"
        await asyncio.sleep(5)
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
class MonitorCloseMessage(Message):
    """Posted by the monitor widget to ask its host app to close it."""
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
class DeploymentMonitorApp(App[None]):
    """Standalone app wrapper around the monitor widget.

    This allows easy reuse in other flows by embedding the widget.
    """

    CSS_PATH = Path(__file__).parent / "styles.tcss"

    def __init__(self, deployment_id: str) -> None:
        super().__init__()
        # Deployment to monitor; forwarded to the embedded widget.
        self.deployment_id = deployment_id

    def on_mount(self) -> None:
        self.theme = "tokyo-night"

    def compose(self) -> ComposeResult:
        with Container():
            yield DeploymentMonitorWidget(self.deployment_id)

    def on_monitor_close_message(self, _: MonitorCloseMessage) -> None:
        # The embedded widget asked to close; exit the whole app.
        self.exit(None)

    def on_key(self, event) -> None:
        # Support Ctrl+C to exit, consistent with other screens and terminals
        if event.key != "ctrl+c":
            return
        self.exit(None)
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
def monitor_deployment_screen(deployment_id: str) -> None:
    """Launch the standalone deployment monitor screen (blocks until closed)."""
    DeploymentMonitorApp(deployment_id).run()
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
def _buffer_log_lines(iter: Iterator[LogEvent]) -> Iterator[list[LogEvent]]:
    """Batch log events into small lists using a background reader.

    A daemon thread drains the (blocking) source iterator into a shared
    buffer while this generator yields snapshots roughly twice per second.
    This reduces UI churn while still reacting quickly. On shutdown we
    absorb stream read errors that are expected when the connection is
    closed from another thread; any error that terminated the source is
    re-raised to the consumer after the final batch is delivered.
    """
    # NOTE(review): the parameter name shadows the builtin `iter`; it is
    # kept for interface compatibility but aliased for clarity below.
    source = iter
    buffer: list[LogEvent] = []
    bg_error: Exception | None = None
    done = threading.Event()

    def pump() -> None:
        # Background thread: drain the blocking iterator into `buffer`.
        nonlocal bg_error
        try:
            for event in source:
                buffer.append(event)
        except Exception as e:
            bg_error = e
        finally:
            done.set()

    reader = threading.Thread(target=pump, daemon=True)
    reader.start()
    try:
        while not done.is_set():
            if buffer:
                # Yield a snapshot and clear in-place to avoid reallocating list
                yield list(buffer)
                buffer.clear()
            time.sleep(0.5)
        # Fix: flush events that arrived between the last snapshot and
        # `done` being set — the old code silently dropped this final batch.
        if buffer:
            yield list(buffer)
            buffer.clear()
        if bg_error is not None:
            raise bg_error
    finally:
        try:
            reader.join(timeout=0.1)
        except Exception:
            pass
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{llamactl-0.3.0a7 → llamactl-0.3.0a9}/src/llama_deploy/cli/textual/github_callback_server.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|