llamactl 0.3.0a6__py3-none-any.whl → 0.3.0a8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_deploy/cli/__init__.py +2 -1
- llama_deploy/cli/client.py +112 -15
- llama_deploy/cli/commands/deployment.py +14 -4
- llama_deploy/cli/commands/init.py +210 -0
- llama_deploy/cli/commands/serve.py +8 -2
- llama_deploy/cli/config.py +1 -1
- llama_deploy/cli/interactive_prompts/utils.py +0 -2
- llama_deploy/cli/textual/deployment_form.py +59 -24
- llama_deploy/cli/textual/deployment_help.py +2 -2
- llama_deploy/cli/textual/deployment_monitor.py +429 -0
- llama_deploy/cli/textual/github_callback_server.py +12 -9
- llama_deploy/cli/textual/profile_form.py +0 -1
- llama_deploy/cli/textual/secrets_form.py +1 -1
- {llamactl-0.3.0a6.dist-info → llamactl-0.3.0a8.dist-info}/METADATA +4 -4
- llamactl-0.3.0a8.dist-info/RECORD +26 -0
- llamactl-0.3.0a6.dist-info/RECORD +0 -24
- {llamactl-0.3.0a6.dist-info → llamactl-0.3.0a8.dist-info}/WHEEL +0 -0
- {llamactl-0.3.0a6.dist-info → llamactl-0.3.0a8.dist-info}/entry_points.txt +0 -0
llama_deploy/cli/__init__.py
CHANGED
@@ -1,4 +1,5 @@
 from llama_deploy.cli.commands.deployment import deployments
+from llama_deploy.cli.commands.init import init
 from llama_deploy.cli.commands.profile import profiles
 from llama_deploy.cli.commands.serve import serve

@@ -10,7 +11,7 @@ def main() -> None:
     app()


-__all__ = ["app", "deployments", "profiles", "serve"]
+__all__ = ["app", "deployments", "profiles", "serve", "init"]


 if __name__ == "__main__":
llama_deploy/cli/client.py
CHANGED
@@ -1,6 +1,8 @@
-
+import contextlib
+from typing import Iterator, List

 import httpx
+from llama_deploy.core.schema.base import LogEvent
 from llama_deploy.core.schema.deployments import (
     DeploymentCreate,
     DeploymentResponse,
@@ -17,19 +19,24 @@ from rich.console import Console
 from .config import config_manager


+class ClientError(Exception):
+    """Base class for client errors."""
+
+    def __init__(self, message: str) -> None:
+        super().__init__(message)
+
+
 class BaseClient:
     def __init__(self, base_url: str, console: Console) -> None:
         self.base_url = base_url.rstrip("/")
         self.console = console
         self.client = httpx.Client(
-            base_url=self.base_url,
+            base_url=self.base_url,
+            event_hooks={"response": [self._handle_response]},
         )
+        self.hookless_client = httpx.Client(base_url=self.base_url)

     def _handle_response(self, response: httpx.Response) -> None:
-        if "X-Warning" in response.headers:
-            self.console.print(
-                f"[yellow]Warning: {response.headers['X-Warning']}[/yellow]"
-            )
         try:
             response.raise_for_status()
         except httpx.HTTPStatusError as e:
@@ -42,9 +49,9 @@ class BaseClient:
                 error_message = str(error_data)
             except (ValueError, KeyError):
                 error_message = e.response.text
-            raise
+            raise ClientError(f"HTTP {e.response.status_code}: {error_message}") from e
         except httpx.RequestError as e:
-            raise
+            raise ClientError(f"Request failed: {e}") from e


 class ControlPlaneClient(BaseClient):
@@ -59,7 +66,7 @@ class ControlPlaneClient(BaseClient):
         return response.json()

     def list_projects(self) -> List[ProjectSummary]:
-        response = self.client.get("/projects
+        response = self.client.get("/api/v1beta1/deployments/list-projects")
         projects_response = ProjectsListResponse.model_validate(response.json())
         return [project for project in projects_response.projects]

@@ -95,25 +102,35 @@ class ProjectClient(BaseClient):
         self.project_id = project_id

     def list_deployments(self) -> List[DeploymentResponse]:
-        response = self.client.get(
+        response = self.client.get(
+            "/api/v1beta1/deployments",
+            params={"project_id": self.project_id},
+        )
         deployments_response = DeploymentsListResponse.model_validate(response.json())
         return [deployment for deployment in deployments_response.deployments]

     def get_deployment(self, deployment_id: str) -> DeploymentResponse:
-        response = self.client.get(
+        response = self.client.get(
+            f"/api/v1beta1/deployments/{deployment_id}",
+            params={"project_id": self.project_id},
+        )
         return DeploymentResponse.model_validate(response.json())

     def create_deployment(
         self, deployment_data: DeploymentCreate
     ) -> DeploymentResponse:
         response = self.client.post(
-
+            "/api/v1beta1/deployments",
+            params={"project_id": self.project_id},
             json=deployment_data.model_dump(exclude_none=True),
         )
         return DeploymentResponse.model_validate(response.json())

     def delete_deployment(self, deployment_id: str) -> None:
-        self.client.delete(
+        self.client.delete(
+            f"/api/v1beta1/deployments/{deployment_id}",
+            params={"project_id": self.project_id},
+        )

     def update_deployment(
         self,
@@ -121,7 +138,8 @@ class ProjectClient(BaseClient):
         update_data: DeploymentUpdate,
     ) -> DeploymentResponse:
         response = self.client.patch(
-            f"/
+            f"/api/v1beta1/deployments/{deployment_id}",
+            params={"project_id": self.project_id},
             json=update_data.model_dump(),
         )
         return DeploymentResponse.model_validate(response.json())
@@ -133,7 +151,8 @@ class ProjectClient(BaseClient):
         pat: str | None = None,
     ) -> RepositoryValidationResponse:
         response = self.client.post(
-
+            "/api/v1beta1/deployments/validate-repository",
+            params={"project_id": self.project_id},
             json=RepositoryValidationRequest(
                 repository_url=repo_url,
                 deployment_id=deployment_id,
@@ -142,6 +161,81 @@ class ProjectClient(BaseClient):
         )
         return RepositoryValidationResponse.model_validate(response.json())

+    def stream_deployment_logs(
+        self,
+        deployment_id: str,
+        *,
+        include_init_containers: bool = False,
+        since_seconds: int | None = None,
+        tail_lines: int | None = None,
+    ) -> tuple["Closer", Iterator[LogEvent]]:
+        """Stream logs as LogEvent items from the control plane using SSE.
+
+        This yields `LogEvent` models until the stream ends (e.g. rollout).
+        """
+        # Use a separate client without response hooks so we don't consume the stream
+
+        params = {
+            "project_id": self.project_id,
+            "include_init_containers": include_init_containers,
+        }
+        if since_seconds is not None:
+            params["since_seconds"] = since_seconds
+        if tail_lines is not None:
+            params["tail_lines"] = tail_lines
+
+        url = f"/api/v1beta1/deployments/{deployment_id}/logs"
+        headers = {"Accept": "text/event-stream"}
+
+        stack = contextlib.ExitStack()
+        response = stack.enter_context(
+            self.hookless_client.stream(
+                "GET", url, params=params, headers=headers, timeout=None
+            )
+        )
+        try:
+            response.raise_for_status()
+        except Exception:
+            stack.close()
+            raise
+
+        return stack.close, _iterate_log_stream(response, stack.close)
+
+
+def _iterate_log_stream(
+    response: httpx.Response, closer: "Closer"
+) -> Iterator[LogEvent]:
+    event_name: str | None = None
+    data_lines: list[str] = []
+
+    try:
+        for line in response.iter_lines():
+            if line is None:
+                continue
+            line = line.decode() if isinstance(line, (bytes, bytearray)) else line
+            print("got line", line)
+            if line.startswith("event:"):
+                event_name = line[len("event:") :].strip()
+            elif line.startswith("data:"):
+                data_lines.append(line[len("data:") :].lstrip())
+            elif line.strip() == "":
+                if event_name == "log" and data_lines:
+                    data_str = "\n".join(data_lines)
+                    try:
+                        yield LogEvent.model_validate_json(data_str)
+                        print("yielded log event", data_str)
+                    except Exception:
+                        # If parsing fails, skip malformed event
+                        pass
+                # reset for next event
+                event_name = None
+                data_lines = []
+    finally:
+        try:
+            closer()
+        except Exception:
+            pass
+

 def get_control_plane_client(base_url: str | None = None) -> ControlPlaneClient:
     console = Console()
@@ -174,3 +268,6 @@ def get_project_client(
     if not resolved_project_id:
         raise ValueError("Project ID is required")
     return ProjectClient(resolved_base_url, resolved_project_id, console)
+
+
+type Closer = callable[tuple[()], None]
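A minimal consumption sketch for the new SSE log-streaming API added above. It assumes a configured profile (so `get_project_client()` can resolve a project) and uses a hypothetical deployment id:

```python
# Minimal sketch, assuming a configured profile and a hypothetical deployment id.
from llama_deploy.cli.client import get_project_client

client = get_project_client()
close, events = client.stream_deployment_logs("my-deployment-id", tail_lines=100)
try:
    # The iterator yields LogEvent models parsed from "event: log" SSE frames.
    for event in events:
        print(event)
finally:
    # The returned closer shuts down the underlying httpx stream; the iterator
    # also closes it once the stream ends.
    close()
```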
llama_deploy/cli/commands/deployment.py
CHANGED
@@ -19,6 +19,7 @@ from ..interactive_prompts.utils import (
 )
 from ..options import global_options
 from ..textual.deployment_form import create_deployment_form, edit_deployment_form
+from ..textual.deployment_monitor import monitor_deployment_screen


 @app.group(
@@ -88,7 +89,12 @@ def list_deployments() -> None:
 @deployments.command("get")
 @global_options
 @click.argument("deployment_id", required=False)
-def get_deployment(deployment_id: str | None) -> None:
+@click.option(
+    "--non-interactive",
+    is_flag=True,
+    help="Do not open a live monitor screen showing status and streaming logs",
+)
+def get_deployment(deployment_id: str | None, non_interactive: bool) -> None:
     """Get details of a specific deployment"""
     try:
         client = get_project_client()
@@ -98,6 +104,10 @@ def get_deployment(deployment_id: str | None) -> None:
            rprint("[yellow]No deployment selected[/yellow]")
            return

+        if not non_interactive:
+            monitor_deployment_screen(deployment_id)
+            return
+
        deployment = client.get_deployment(deployment_id)

        table = Table(title=f"Deployment: {deployment.name}")
@@ -143,7 +153,7 @@ def create_deployment(
     git_ref: str | None,
     personal_access_token: str | None,
 ) -> None:
-    """
+    """Interactively create a new deployment"""

     # Use interactive creation
     deployment_form = create_deployment_form()
@@ -214,11 +224,11 @@ def edit_deployment(deployment_id: str | None) -> None:
         raise click.Abort()


-@deployments.command("
+@deployments.command("update")
 @global_options
 @click.argument("deployment_id", required=False)
 def refresh_deployment(deployment_id: str | None) -> None:
-    """
+    """Update the deployment, pulling the latest code from it's branch"""
     try:
         client = get_project_client()

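A hedged sketch of exercising the new `--non-interactive` flag through click's test runner; the deployment id is hypothetical, and this assumes the `deployments` group can be invoked standalone with a configured profile:

```python
# Sketch only: hypothetical deployment id, assumes a configured profile.
from click.testing import CliRunner

from llama_deploy.cli.commands.deployment import deployments

runner = CliRunner()
# Without --non-interactive, `deployments get` now opens the live monitor screen;
# with the flag it falls back to the plain table output.
result = runner.invoke(deployments, ["get", "my-deployment-id", "--non-interactive"])
print(result.output)
```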
llama_deploy/cli/commands/init.py
ADDED
@@ -0,0 +1,210 @@
+import os
+import shutil
+import subprocess
+from dataclasses import dataclass
+from pathlib import Path
+
+import click
+import copier
+import questionary
+from click.exceptions import Exit
+from llama_deploy.cli.app import app
+from llama_deploy.cli.options import global_options
+from rich import print as rprint
+
+
+@dataclass
+class TemplateOption:
+    id: str
+    name: str
+    description: str
+    git_url: str
+
+
+options = [
+    TemplateOption(
+        id="basic-ui",
+        name="Basic UI",
+        description="A basic starter workflow with a React Vite UI",
+        git_url="https://github.com/adrianlyjak/qs",
+    ),
+    TemplateOption(
+        id="extraction-review",
+        name="Extraction Agent with Review UI",
+        description="Extract data from documents using a custom schema and Llama Cloud. Includes a UI to review and correct the results",
+        git_url="https://github.com/run-llama/template-workflow-data-extraction",
+    ),
+]
+
+
+@app.command()
+@click.option(
+    "--update",
+    is_flag=True,
+    help="Instead of creating a new app, update the current app to the latest version. Other options will be ignored.",
+)
+@click.option(
+    "--template",
+    type=click.Choice([o.id for o in options]),
+    help="The template to use for the new app",
+)
+@click.option(
+    "--dir",
+    help="The directory to create the new app in",
+    type=click.Path(
+        file_okay=False, dir_okay=True, writable=True, resolve_path=True, path_type=Path
+    ),
+)
+@click.option(
+    "--force",
+    is_flag=True,
+    help="Force overwrite the directory if it exists",
+)
+@global_options
+def init(
+    update: bool,
+    template: str | None,
+    dir: Path | None,
+    force: bool,
+) -> None:
+    """Create a new app repository from a template"""
+    if update:
+        _update()
+    else:
+        _create(template, dir, force)
+
+
+def _create(template: str | None, dir: Path | None, force: bool) -> None:
+    if template is None:
+        template = questionary.select(
+            "Choose a template",
+            choices=[
+                questionary.Choice(title=o.name, value=o.id, description=o.description)
+                for o in options
+            ],
+        ).ask()
+        if template is None:
+            rprint("No template selected")
+            raise Exit(1)
+    if dir is None:
+        dir_str = questionary.text(
+            "Enter the directory to create the new app in", default=template
+        ).ask()
+        if not dir_str:
+            rprint("No directory provided")
+            raise Exit(1)
+        dir = Path(dir_str)
+    resolved_template = next((o for o in options if o.id == template), None)
+    if resolved_template is None:
+        rprint(f"Template {template} not found")
+        raise Exit(1)
+    if dir.exists():
+        is_ok = (
+            force
+            or questionary.confirm("Directory exists. Overwrite?", default=False).ask()
+        )
+        if not is_ok:
+            raise Exit(1)
+        else:
+            shutil.rmtree(dir, ignore_errors=True)
+    copier.run_copy(
+        resolved_template.git_url,
+        dir,
+        quiet=True,
+    )
+    # Initialize git repository if git is available
+    is_git_initialized = False
+    try:
+        subprocess.run(["git", "--version"], check=True, capture_output=True)
+
+        # Change to the new directory and initialize git repo
+        original_cwd = Path.cwd()
+        os.chdir(dir)
+
+        try:
+            subprocess.run(["git", "init"], check=True, capture_output=True)
+            subprocess.run(["git", "add", "."], check=True, capture_output=True)
+            subprocess.run(
+                ["git", "commit", "-m", "Initial commit"],
+                check=True,
+                capture_output=True,
+            )
+            is_git_initialized = True
+        finally:
+            os.chdir(original_cwd)
+
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        # Git not available or failed - continue without git initialization
+        pass
+
+    rprint(
+        f"Successfully created [blue]{dir}[/] using the [blue]{resolved_template.name}[/] template! 🎉 🦙 💾"
+    )
+    rprint("")
+    rprint("[bold]To run locally:[/]")
+    rprint(f" [orange3]cd[/] {dir}")
+    rprint(" [orange3]uvx[/] llamactl serve")
+    rprint("")
+    rprint("[bold]To deploy:[/]")
+    if not is_git_initialized:
+        rprint(" [orange3]git[/] init")
+        rprint(" [orange3]git[/] add .")
+        rprint(" [orange3]git[/] commit -m 'Initial commit'")
+        rprint("")
+    rprint("[dim](Create a new repo and add it as a remote)[/]")
+    rprint("")
+    rprint(" [orange3]git[/] remote add origin <your-repo-url>")
+    rprint(" [orange3]git[/] push -u origin main")
+    rprint("")
+    # rprint(" [orange3]uvx[/] llamactl login")
+    rprint(" [orange3]uvx[/] llamactl deploy")
+    rprint("")
+
+
+def _update():
+    """Update the app to the latest version"""
+    try:
+        copier.run_update(
+            overwrite=True,
+            skip_answered=True,
+            quiet=True,
+        )
+    except copier.UserMessageError as e:
+        rprint(f"{e}")
+        raise Exit(1)
+
+    # Check git status and warn about conflicts
+    try:
+        result = subprocess.run(
+            ["git", "status", "--porcelain"],
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+
+        if result.stdout.strip():
+            conflicted_files = []
+            modified_files = []
+
+            for line in result.stdout.strip().split("\n"):
+                status = line[:2]
+                filename = line[3:]
+
+                if "UU" in status or "AA" in status or "DD" in status:
+                    conflicted_files.append(filename)
+                elif status.strip():
+                    modified_files.append(filename)
+
+            if conflicted_files:
+                rprint("")
+                rprint("⚠️ [bold]Files with conflicts detected:[/]")
+                for file in conflicted_files:
+                    rprint(f" {file}")
+                rprint("")
+                rprint(
+                    "Please manually resolve conflicts with a merge editor before proceeding."
+                )
+
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        # Git not available or not in a git repo - continue silently
+        pass
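The new `init` command is essentially a thin wrapper around `copier`; a minimal sketch of the equivalent programmatic call, using one of the template URLs defined above and a hypothetical destination directory:

```python
# Minimal sketch of what `llamactl init` does under the hood (hypothetical destination).
from pathlib import Path

import copier

copier.run_copy(
    "https://github.com/run-llama/template-workflow-data-extraction",  # template git_url from the options above
    Path("my-extraction-app"),  # hypothetical target directory
    quiet=True,
)
```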
llama_deploy/cli/commands/serve.py
CHANGED
@@ -20,7 +20,7 @@ from ..options import global_options
     "deployment_file",
     required=False,
     default=DEFAULT_DEPLOYMENT_FILE_PATH,
-    type=click.Path(dir_okay=
+    type=click.Path(dir_okay=True, resolve_path=True, path_type=Path),
 )
 @click.option(
     "--no-install", is_flag=True, help="Skip installing python and js dependencies"
@@ -34,6 +34,8 @@ from ..options import global_options
     is_flag=True,
     help="Preview mode pre-builds the UI to static files, like a production build",
 )
+@click.option("--port", type=int, help="The port to run the API server on")
+@click.option("--ui-port", type=int, help="The port to run the UI proxy server on")
 @global_options
 def serve(
     deployment_file: Path,
@@ -41,8 +43,10 @@ def serve(
     no_reload: bool,
     no_open_browser: bool,
     preview: bool,
+    port: int | None = None,
+    ui_port: int | None = None,
 ) -> None:
-    """Run llama_deploy API Server in the foreground.
+    """Run llama_deploy API Server in the foreground. Reads the deployment configuration from the current directory. Can optionally specify a deployment file path."""
     if not deployment_file.exists():
         rprint(f"[red]Deployment file '{deployment_file}' not found[/red]")
         raise click.Abort()
@@ -58,6 +62,8 @@ def serve(
             proxy_ui=not preview,
             reload=not no_reload,
             open_browser=not no_open_browser,
+            port=port,
+            ui_port=ui_port,
         )

     except KeyboardInterrupt:
llama_deploy/cli/config.py
CHANGED