llamactl 0.3.0a1__py3-none-any.whl → 0.3.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_deploy/cli/__init__.py +4 -1
- llama_deploy/cli/client.py +78 -70
- llama_deploy/cli/commands.py +36 -8
- llama_deploy/cli/interactive_prompts/utils.py +1 -1
- llama_deploy/cli/textual/deployment_form.py +1 -1
- llama_deploy/cli/textual/git_validation.py +1 -1
- {llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/METADATA +4 -3
- {llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/RECORD +10 -10
- {llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/WHEEL +0 -0
- {llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/entry_points.txt +0 -0
llama_deploy/cli/__init__.py
CHANGED
@@ -1,5 +1,5 @@
 import click
-from .commands import projects, deployments, profile, health_check, serve
+from .commands import projects, deployments, profile, health_check, serve, version
 from .options import global_options
 
 
@@ -22,6 +22,9 @@ app.add_command(health_check, name="health")
 # Add serve command at root level
 app.add_command(serve, name="serve")
 
+# Add version command at root level
+app.add_command(version, name="version")
+
 
 # Main entry point function (called by the script)
 def main() -> None:
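With this registration in place, `version` is reachable directly from the root group as `llamactl version`. A minimal, hedged smoke test of that wiring is sketched below; it assumes the root group is importable as `app` from `llama_deploy.cli`, as the module above suggests, and uses click's standard test runner:

```python
# Sketch only: checks that the new root-level "version" command is registered.
# Assumes `app` is the click group defined in llama_deploy/cli/__init__.py above.
from click.testing import CliRunner

from llama_deploy.cli import app


def test_version_is_registered_at_root() -> None:
    result = CliRunner().invoke(app, ["version"])
    # A missing registration would produce click's "No such command" usage error.
    assert "No such command" not in result.output
```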
llama_deploy/cli/client.py
CHANGED
@@ -1,4 +1,3 @@
-import logging
 from typing import List, Optional
 
 import httpx
@@ -18,109 +17,102 @@ from rich.console import Console
 from .config import config_manager
 
 
-class LlamaDeployClient:
-
-
-
-        self, base_url: Optional[str] = None, project_id: Optional[str] = None
-    ):
-        """Initialize the client with a configured profile"""
-        self.console = Console()
-
-        # Get profile data
-        profile = config_manager.get_current_profile()
-        if not profile:
-            self.console.print("\n[bold red]No profile configured![/bold red]")
-            self.console.print("\nTo get started, create a profile with:")
-            self.console.print("[cyan]llamactl profile create[/cyan]")
-            raise SystemExit(1)
-
-        # Use profile data with optional overrides
-        self.base_url = base_url or profile.api_url
-        self.project_id = project_id or profile.active_project_id
-
-        if not self.base_url:
-            raise ValueError("API URL is required")
-
-        if not self.project_id:
-            raise ValueError("Project ID is required")
-
-        self.base_url = self.base_url.rstrip("/")
-
-        # Create persistent client with event hooks
+class BaseClient:
+    def __init__(self, base_url: str, console: Console) -> None:
+        self.base_url = base_url.rstrip("/")
+        self.console = console
         self.client = httpx.Client(
             base_url=self.base_url, event_hooks={"response": [self._handle_response]}
         )
 
     def _handle_response(self, response: httpx.Response) -> None:
-        """Handle response middleware - warnings and error conversion"""
-        # Check for warnings in response headers
         if "X-Warning" in response.headers:
             self.console.print(
                 f"[yellow]Warning: {response.headers['X-Warning']}[/yellow]"
             )
-
-        # Convert httpx errors to our current exception format
         try:
             response.raise_for_status()
         except httpx.HTTPStatusError as e:
-            # Try to parse JSON error response
             try:
-                response.read()
+                response.read()
                 error_data = e.response.json()
                 if isinstance(error_data, dict) and "detail" in error_data:
                     error_message = error_data["detail"]
                 else:
                     error_message = str(error_data)
             except (ValueError, KeyError):
-                # Fallback to raw response text
                 error_message = e.response.text
-
             raise Exception(f"HTTP {e.response.status_code}: {error_message}") from e
         except httpx.RequestError as e:
             raise Exception(f"Request failed: {e}") from e
 
-
+
+class ControlPlaneClient(BaseClient):
+    """Unscoped client for non-project endpoints."""
+
     def health_check(self) -> dict:
-        """Check if the API server is healthy"""
         response = self.client.get("/health")
         return response.json()
 
-
+    def server_version(self) -> dict:
+        response = self.client.get("/version")
+        return response.json()
+
     def list_projects(self) -> List[ProjectSummary]:
-        """List all projects with deployment counts"""
         response = self.client.get("/projects/")
         projects_response = ProjectsListResponse.model_validate(response.json())
         return [project for project in projects_response.projects]
 
-
+
+
+class ProjectClient(BaseClient):
+    """Project-scoped client for deployment operations."""
+
+    def __init__(
+        self,
+        base_url: Optional[str] = None,
+        project_id: Optional[str] = None,
+        console: Optional[Console] = None,
+    ) -> None:
+        # Allow default construction using active profile (for tests and convenience)
+        if base_url is None or project_id is None:
+            profile = config_manager.get_current_profile()
+            if not profile:
+                # Match previous behavior for missing profiles
+                (console or Console()).print(
+                    "\n[bold red]No profile configured![/bold red]"
+                )
+                (console or Console()).print("\nTo get started, create a profile with:")
+                (console or Console()).print("[cyan]llamactl profile create[/cyan]")
+                raise SystemExit(1)
+            base_url = base_url or profile.api_url or ""
+            project_id = project_id or profile.active_project_id
+        if not base_url:
+            raise ValueError("API URL is required")
+        if not project_id:
+            raise ValueError("Project ID is required")
+        resolved_console = console or Console()
+        super().__init__(base_url, resolved_console)
+        self.project_id = project_id
+
     def list_deployments(self) -> List[DeploymentResponse]:
-        """List deployments for the configured project"""
         response = self.client.get(f"/{self.project_id}/deployments/")
         deployments_response = DeploymentsListResponse.model_validate(response.json())
         return [deployment for deployment in deployments_response.deployments]
 
     def get_deployment(self, deployment_id: str) -> DeploymentResponse:
-        """Get a specific deployment"""
         response = self.client.get(f"/{self.project_id}/deployments/{deployment_id}")
-
-        return deployment
+        return DeploymentResponse.model_validate(response.json())
 
     def create_deployment(
-        self,
-        deployment_data: DeploymentCreate,
+        self, deployment_data: DeploymentCreate
     ) -> DeploymentResponse:
-        """Create a new deployment"""
-
         response = self.client.post(
             f"/{self.project_id}/deployments/",
             json=deployment_data.model_dump(exclude_none=True),
         )
-
-        return deployment
+        return DeploymentResponse.model_validate(response.json())
 
     def delete_deployment(self, deployment_id: str) -> None:
-        """Delete a deployment"""
         self.client.delete(f"/{self.project_id}/deployments/{deployment_id}")
 
     def update_deployment(
@@ -129,19 +121,15 @@ class LlamaDeployClient:
         update_data: DeploymentUpdate,
         force_git_sha_update: bool = False,
     ) -> DeploymentResponse:
-        """Update an existing deployment"""
-
         params = {}
         if force_git_sha_update:
             params["force_git_sha_update"] = True
-
         response = self.client.patch(
             f"/{self.project_id}/deployments/{deployment_id}",
             json=update_data.model_dump(),
             params=params,
        )
-
-        return deployment
+        return DeploymentResponse.model_validate(response.json())
 
     def validate_repository(
         self,
@@ -149,10 +137,6 @@ class LlamaDeployClient:
         deployment_id: str | None = None,
         pat: str | None = None,
     ) -> RepositoryValidationResponse:
-        """Validate a repository URL"""
-        logging.info(
-            f"Validating repository with params: {repo_url}, {deployment_id}, {pat}"
-        )
         response = self.client.post(
             f"/{self.project_id}/deployments/validate-repository",
             json=RepositoryValidationRequest(
@@ -161,13 +145,37 @@ class LlamaDeployClient:
                 pat=pat,
             ).model_dump(),
         )
-        logging.info(f"Response: {response.json()}")
         return RepositoryValidationResponse.model_validate(response.json())
 
 
-
-
+def get_control_plane_client(base_url: Optional[str] = None) -> ControlPlaneClient:
+    console = Console()
+    profile = config_manager.get_current_profile()
+    if not profile and not base_url:
+        console.print("\n[bold red]No profile configured![/bold red]")
+        console.print("\nTo get started, create a profile with:")
+        console.print("[cyan]llamactl profile create[/cyan]")
+        raise SystemExit(1)
+    resolved_base_url = (base_url or (profile.api_url if profile else "")).rstrip("/")
+    if not resolved_base_url:
+        raise ValueError("API URL is required")
+    return ControlPlaneClient(resolved_base_url, console)
+
+
+def get_project_client(
     base_url: Optional[str] = None, project_id: Optional[str] = None
-) ->
-
-
+) -> ProjectClient:
+    console = Console()
+    profile = config_manager.get_current_profile()
+    if not profile:
+        console.print("\n[bold red]No profile configured![/bold red]")
+        console.print("\nTo get started, create a profile with:")
+        console.print("[cyan]llamactl profile create[/cyan]")
+        raise SystemExit(1)
+    resolved_base_url = (base_url or profile.api_url or "").rstrip("/")
+    if not resolved_base_url:
+        raise ValueError("API URL is required")
+    resolved_project_id = project_id or profile.active_project_id
+    if not resolved_project_id:
+        raise ValueError("Project ID is required")
+    return ProjectClient(resolved_base_url, resolved_project_id, console)
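Taken together, the refactor replaces the single `LlamaDeployClient` with a shared `BaseClient`, an unscoped `ControlPlaneClient` (health, server version, project listing) and a project-scoped `ProjectClient` (deployment operations), each obtained through a factory that resolves the active profile. A rough usage sketch, using only names introduced in this diff (profile setup and error handling omitted):

```python
# Hedged usage sketch of the new client split; all names below come from the diff above.
from llama_deploy.cli.client import get_control_plane_client, get_project_client


def show_overview() -> None:
    cp = get_control_plane_client()      # unscoped; exits with a hint if no profile is configured
    print(cp.health_check())             # GET /health
    print(cp.server_version())           # GET /version (new endpoint used by `llamactl version`)
    for project in cp.list_projects():   # GET /projects/
        print(project)

    pc = get_project_client()            # bound to profile.api_url and active_project_id
    for deployment in pc.list_deployments():  # GET /{project_id}/deployments/
        print(deployment)
```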
llama_deploy/cli/commands.py
CHANGED
@@ -9,7 +9,7 @@ from rich import print as rprint
 from rich.console import Console
 from rich.table import Table
 
-from .client import get_client
+from .client import get_project_client, get_control_plane_client
 from .config import config_manager
 from .interactive_prompts.utils import (
     confirm_action,
@@ -217,7 +217,7 @@ def edit_profile(name: Optional[str]) -> None:
 def list_projects() -> None:
     """List all projects with deployment counts"""
     try:
-        client = get_client()
+        client = get_control_plane_client()
         projects = client.list_projects()
 
         if not projects:
@@ -246,7 +246,7 @@ def list_projects() -> None:
 def health_check() -> None:
     """Check if the API server is healthy"""
     try:
-        client = get_client()
+        client = get_control_plane_client()
         health = client.health_check()
 
         status = health.get("status", "unknown")
@@ -266,7 +266,7 @@ def health_check() -> None:
 def list_deployments() -> None:
     """List deployments for the configured project"""
     try:
-        client = get_client()
+        client = get_project_client()
         deployments = client.list_deployments()
 
         if not deployments:
@@ -320,7 +320,7 @@ def list_deployments() -> None:
 def get_deployment(deployment_id: Optional[str]) -> None:
     """Get details of a specific deployment"""
     try:
-        client = get_client()
+        client = get_project_client()
 
         deployment_id = select_deployment(deployment_id)
         if not deployment_id:
@@ -392,7 +392,7 @@ def create_deployment(
 def delete_deployment(deployment_id: Optional[str], confirm: bool) -> None:
     """Delete a deployment"""
     try:
-        client = get_client()
+        client = get_project_client()
 
         deployment_id = select_deployment(deployment_id)
         if not deployment_id:
@@ -418,7 +418,7 @@ def delete_deployment(deployment_id: Optional[str], confirm: bool) -> None:
 def edit_deployment(deployment_id: Optional[str]) -> None:
     """Interactively edit a deployment"""
     try:
-        client = get_client()
+        client = get_project_client()
 
         deployment_id = select_deployment(deployment_id)
         if not deployment_id:
@@ -449,7 +449,7 @@ def edit_deployment(deployment_id: Optional[str]) -> None:
 def refresh_deployment(deployment_id: Optional[str]) -> None:
     """Refresh a deployment with the latest code from its git reference"""
     try:
-        client = get_client()
+        client = get_project_client()
 
         deployment_id = select_deployment(deployment_id)
         if not deployment_id:
@@ -532,3 +532,31 @@ def serve(
     except Exception as e:
         rprint(f"[red]Error: {e}[/red]")
         raise click.Abort()
+
+
+@click.command("version")
+@global_options
+def version() -> None:
+    """Print the version of llama_deploy"""
+    try:
+        from importlib.metadata import PackageNotFoundError, version as pkg_version
+
+        ver = pkg_version("llamactl")
+        rprint(f"client version: {ver}")
+
+        # If there is an active profile, attempt to query server version
+        profile = config_manager.get_current_profile()
+        if profile and profile.api_url:
+            try:
+                cp_client = get_control_plane_client()
+                data = cp_client.server_version()
+                server_ver = data.get("version", "unknown")
+                rprint(f"server version: {server_ver}")
+            except Exception as e:
+                rprint(f"server version: [yellow]unavailable[/yellow] ({e})")
+    except PackageNotFoundError:
+        rprint("[red]Package 'llamactl' not found[/red]")
+        raise click.Abort()
+    except Exception as e:
+        rprint(f"[red]Error: {e}[/red]")
+        raise click.Abort()
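The new `version` command prints the installed client version and, only when a profile with an `api_url` is configured, makes a best-effort query for the server version. Based on the `data.get("version", "unknown")` call above, the implied `/version` payload is a small JSON object; a hedged sketch of that handling (the real server may return additional fields):

```python
# Sketch of the /version payload handling implied by the command above.
example_payload = {"version": "0.3.0a2"}  # assumed response shape; only "version" is read
server_ver = example_payload.get("version", "unknown")
print(f"server version: {server_ver}")  # -> server version: 0.3.0a2
```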
llama_deploy/cli/textual/deployment_form.py
CHANGED
@@ -21,7 +21,7 @@ from textual.containers import Container, HorizontalGroup, Widget
 from textual.validation import Length
 from textual.widgets import Button, Input, Label, Static
 from textual.reactive import reactive
-from llama_deploy.cli.client import get_client
+from llama_deploy.cli.client import get_project_client as get_client
 from textual.message import Message
 
 
llama_deploy/cli/textual/git_validation.py
CHANGED
@@ -11,7 +11,7 @@ from textual.message import Message
 from textual.content import Content
 from textual.reactive import reactive
 
-from llama_deploy.cli.client import get_client
+from llama_deploy.cli.client import get_project_client as get_client
 from llama_deploy.core.schema.git_validation import RepositoryValidationResponse
 from llama_deploy.cli.textual.llama_loader import PixelLlamaLoader
 from llama_deploy.cli.textual.github_callback_server import GitHubCallbackServer
{llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/METADATA
CHANGED
@@ -1,12 +1,12 @@
 Metadata-Version: 2.3
 Name: llamactl
-Version: 0.3.0a1
+Version: 0.3.0a2
 Summary: A command-line interface for managing LlamaDeploy projects and deployments
 Author: Adrian Lyjak
 Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
 License: MIT
-Requires-Dist: llama-deploy-core>=0.3.
-Requires-Dist: llama-deploy-appserver>=0.3.
+Requires-Dist: llama-deploy-core>=0.3.0a2,<0.4.0
+Requires-Dist: llama-deploy-appserver>=0.3.0a2,<0.4.0
 Requires-Dist: httpx>=0.24.0
 Requires-Dist: rich>=13.0.0
 Requires-Dist: questionary>=2.0.0
@@ -15,6 +15,7 @@ Requires-Dist: python-dotenv>=1.0.0
 Requires-Dist: tenacity>=9.1.2
 Requires-Dist: textual>=4.0.0
 Requires-Dist: aiohttp>=3.12.14
+Requires-Dist: copier>=9.9.0
 Requires-Python: >=3.12, <4
 Description-Content-Type: text/markdown
 
{llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-llama_deploy/cli/__init__.py,sha256=
-llama_deploy/cli/client.py,sha256=
-llama_deploy/cli/commands.py,sha256=
+llama_deploy/cli/__init__.py,sha256=7f5381a1f1f3631a53e8d264830f591e36e9a509eae01b114fdfcd0213ff0c21,827
+llama_deploy/cli/client.py,sha256=1e033b3f620439e67d5e5cceb3e5821039449431f2dbbc22bcff76416c638fb7,7103
+llama_deploy/cli/commands.py,sha256=802b74da9b857b1d547b02044de3e750e928524188bd6f9c8775cea0ae4d8ad7,18102
 llama_deploy/cli/config.py,sha256=b339d95fceb7a15a183663032396aaeb2afffe1ddf06494416a6a0183a6658ca,6275
 llama_deploy/cli/debug.py,sha256=e85a72d473bbe1645eb31772f7349bde703d45704166f767385895c440afc762,496
 llama_deploy/cli/env.py,sha256=bb1dcde428c779796ad2b39b58d84f08df75a15031afca577aca0db5ce9a9ea0,1015
-llama_deploy/cli/interactive_prompts/utils.py,sha256=
+llama_deploy/cli/interactive_prompts/utils.py,sha256=894d0cec574a5fa98de267cdc7a4f4f95fdbb4dbddf06635b2ec1d723db18940,2504
 llama_deploy/cli/options.py,sha256=78b6e36e39fa88f0587146995e2cb66418b67d16f945f0b7570dab37cf5fc673,576
-llama_deploy/cli/textual/deployment_form.py,sha256=
-llama_deploy/cli/textual/git_validation.py,sha256=
+llama_deploy/cli/textual/deployment_form.py,sha256=261ce5102b84b71b1bf1e34518516aa4d3b45b3821ce874c93b19ebb80a957f5,15076
+llama_deploy/cli/textual/git_validation.py,sha256=d83cb37aa452d9ebbe2cfec0d37580655c87a6a2352e66999586ecd7479f223f,13301
 llama_deploy/cli/textual/github_callback_server.py,sha256=a74b1f5741bdaa682086771fd73a145e1e22359601f16f036f72a87e64b0a152,7444
 llama_deploy/cli/textual/llama_loader.py,sha256=dfef7118eb42d0fec033731b3f3b16ed4dbf4c551f4059c36e290e73c9aa5d13,1244
 llama_deploy/cli/textual/profile_form.py,sha256=2c6ca4690c22b499cc327b117c97e7914d4243b73faa92c0f5ac9cfdcf59b3d7,6015
 llama_deploy/cli/textual/secrets_form.py,sha256=1fd47a5a5ee9dfa0fd2a86f5888894820897c55fbb0cd30e60d6bc08570288b5,6303
 llama_deploy/cli/textual/styles.tcss,sha256=72338c5634bae0547384669382c4e06deec1380ef7bbc31099b1dca8ce49b2d0,2711
-llamactl-0.3.
-llamactl-0.3.
-llamactl-0.3.
-llamactl-0.3.
+llamactl-0.3.0a2.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
+llamactl-0.3.0a2.dist-info/entry_points.txt,sha256=b67e1eb64305058751a651a80f2d2268b5f7046732268421e796f64d4697f83c,52
+llamactl-0.3.0a2.dist-info/METADATA,sha256=1ac25c584163c3b51d2a2e625be9eab23042804a583f6a7d8b2e22bd8fd2e057,3166
+llamactl-0.3.0a2.dist-info/RECORD,,
{llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/WHEEL
File without changes

{llamactl-0.3.0a1.dist-info → llamactl-0.3.0a2.dist-info}/entry_points.txt
File without changes