llamactl 0.3.0a1__tar.gz → 0.3.0a3__tar.gz

This diff shows the content changes between two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
@@ -1,12 +1,12 @@
  Metadata-Version: 2.3
  Name: llamactl
- Version: 0.3.0a1
+ Version: 0.3.0a3
  Summary: A command-line interface for managing LlamaDeploy projects and deployments
  Author: Adrian Lyjak
  Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
  License: MIT
- Requires-Dist: llama-deploy-core>=0.3.0a1,<0.4.0
- Requires-Dist: llama-deploy-appserver>=0.3.0a1,<0.4.0
+ Requires-Dist: llama-deploy-core>=0.3.0a3,<0.4.0
+ Requires-Dist: llama-deploy-appserver>=0.3.0a3,<0.4.0
  Requires-Dist: httpx>=0.24.0
  Requires-Dist: rich>=13.0.0
  Requires-Dist: questionary>=2.0.0
@@ -15,6 +15,7 @@ Requires-Dist: python-dotenv>=1.0.0
  Requires-Dist: tenacity>=9.1.2
  Requires-Dist: textual>=4.0.0
  Requires-Dist: aiohttp>=3.12.14
+ Requires-Dist: copier>=9.9.0
  Requires-Python: >=3.12, <4
  Description-Content-Type: text/markdown

@@ -1,6 +1,6 @@
  [project]
  name = "llamactl"
- version = "0.3.0a1"
+ version = "0.3.0a3"
  description = "A command-line interface for managing LlamaDeploy projects and deployments"
  readme = "README.md"
  license = { text = "MIT" }
@@ -9,8 +9,8 @@ authors = [
  ]
  requires-python = ">=3.12, <4"
  dependencies = [
-     "llama-deploy-core>=0.3.0a1,<0.4.0",
-     "llama-deploy-appserver>=0.3.0a1,<0.4.0",
+     "llama-deploy-core>=0.3.0a3,<0.4.0",
+     "llama-deploy-appserver>=0.3.0a3,<0.4.0",
      "httpx>=0.24.0",
      "rich>=13.0.0",
      "questionary>=2.0.0",
@@ -19,6 +19,7 @@ dependencies = [
      "tenacity>=9.1.2",
      "textual>=4.0.0",
      "aiohttp>=3.12.14",
+     "copier>=9.9.0",
  ]

  [project.scripts]
@@ -1,5 +1,5 @@
  import click
- from .commands import projects, deployments, profile, health_check, serve
+ from .commands import projects, deployments, profile, health_check, serve, version
  from .options import global_options


@@ -22,6 +22,9 @@ app.add_command(health_check, name="health")
  # Add serve command at root level
  app.add_command(serve, name="serve")

+ # Add version command at root level
+ app.add_command(version, name="version")
+

  # Main entry point function (called by the script)
  def main() -> None:
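
For readers unfamiliar with the pattern these hunks extend: commands are registered on a standard click group at the root level. The following is a minimal, self-contained sketch of that registration pattern, not the package's actual module; the group body, the absence of the real decorators, and the placeholder version string are assumptions for illustration only.

# Minimal sketch of the registration pattern shown in the hunks above.
# Not the real llamactl module: the group body and the placeholder version
# string are assumptions.
import click


@click.group()
def app() -> None:
    """Root CLI group (stand-in for the real llamactl group)."""


@click.command("version")
def version() -> None:
    """Print a version string (placeholder body)."""
    click.echo("client version: 0.0.0")


# Root-level registration, matching the new line in the diff.
app.add_command(version, name="version")


def main() -> None:
    app()


if __name__ == "__main__":
    main()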
@@ -1,4 +1,3 @@
- import logging
  from typing import List, Optional

  import httpx
@@ -18,109 +17,102 @@ from rich.console import Console
  from .config import config_manager


- class LlamaDeployClient:
-     """HTTP client for communicating with the LlamaDeploy control plane API"""
-
-     def __init__(
-         self, base_url: Optional[str] = None, project_id: Optional[str] = None
-     ):
-         """Initialize the client with a configured profile"""
-         self.console = Console()
-
-         # Get profile data
-         profile = config_manager.get_current_profile()
-         if not profile:
-             self.console.print("\n[bold red]No profile configured![/bold red]")
-             self.console.print("\nTo get started, create a profile with:")
-             self.console.print("[cyan]llamactl profile create[/cyan]")
-             raise SystemExit(1)
-
-         # Use profile data with optional overrides
-         self.base_url = base_url or profile.api_url
-         self.project_id = project_id or profile.active_project_id
-
-         if not self.base_url:
-             raise ValueError("API URL is required")
-
-         if not self.project_id:
-             raise ValueError("Project ID is required")
-
-         self.base_url = self.base_url.rstrip("/")
-
-         # Create persistent client with event hooks
+ class BaseClient:
+     def __init__(self, base_url: str, console: Console) -> None:
+         self.base_url = base_url.rstrip("/")
+         self.console = console
          self.client = httpx.Client(
              base_url=self.base_url, event_hooks={"response": [self._handle_response]}
          )

      def _handle_response(self, response: httpx.Response) -> None:
-         """Handle response middleware - warnings and error conversion"""
-         # Check for warnings in response headers
          if "X-Warning" in response.headers:
              self.console.print(
                  f"[yellow]Warning: {response.headers['X-Warning']}[/yellow]"
              )
-
-         # Convert httpx errors to our current exception format
          try:
              response.raise_for_status()
          except httpx.HTTPStatusError as e:
-             # Try to parse JSON error response
              try:
-                 response.read()  # need to collect streaming data before calling json
+                 response.read()
                  error_data = e.response.json()
                  if isinstance(error_data, dict) and "detail" in error_data:
                      error_message = error_data["detail"]
                  else:
                      error_message = str(error_data)
              except (ValueError, KeyError):
-                 # Fallback to raw response text
                  error_message = e.response.text
-
              raise Exception(f"HTTP {e.response.status_code}: {error_message}") from e
          except httpx.RequestError as e:
              raise Exception(f"Request failed: {e}") from e

-     # Health check
+
+ class ControlPlaneClient(BaseClient):
+     """Unscoped client for non-project endpoints."""
+
      def health_check(self) -> dict:
-         """Check if the API server is healthy"""
          response = self.client.get("/health")
          return response.json()

-     # Projects
+     def server_version(self) -> dict:
+         response = self.client.get("/version")
+         return response.json()
+
      def list_projects(self) -> List[ProjectSummary]:
-         """List all projects with deployment counts"""
          response = self.client.get("/projects/")
          projects_response = ProjectsListResponse.model_validate(response.json())
          return [project for project in projects_response.projects]

-     # Deployments
+
+ class ProjectClient(BaseClient):
+     """Project-scoped client for deployment operations."""
+
+     def __init__(
+         self,
+         base_url: Optional[str] = None,
+         project_id: Optional[str] = None,
+         console: Optional[Console] = None,
+     ) -> None:
+         # Allow default construction using active profile (for tests and convenience)
+         if base_url is None or project_id is None:
+             profile = config_manager.get_current_profile()
+             if not profile:
+                 # Match previous behavior for missing profiles
+                 (console or Console()).print(
+                     "\n[bold red]No profile configured![/bold red]"
+                 )
+                 (console or Console()).print("\nTo get started, create a profile with:")
+                 (console or Console()).print("[cyan]llamactl profile create[/cyan]")
+                 raise SystemExit(1)
+             base_url = base_url or profile.api_url or ""
+             project_id = project_id or profile.active_project_id
+         if not base_url:
+             raise ValueError("API URL is required")
+         if not project_id:
+             raise ValueError("Project ID is required")
+         resolved_console = console or Console()
+         super().__init__(base_url, resolved_console)
+         self.project_id = project_id
+
      def list_deployments(self) -> List[DeploymentResponse]:
-         """List deployments for the configured project"""
          response = self.client.get(f"/{self.project_id}/deployments/")
          deployments_response = DeploymentsListResponse.model_validate(response.json())
          return [deployment for deployment in deployments_response.deployments]

      def get_deployment(self, deployment_id: str) -> DeploymentResponse:
-         """Get a specific deployment"""
          response = self.client.get(f"/{self.project_id}/deployments/{deployment_id}")
-         deployment = DeploymentResponse.model_validate(response.json())
-         return deployment
+         return DeploymentResponse.model_validate(response.json())

      def create_deployment(
-         self,
-         deployment_data: DeploymentCreate,
+         self, deployment_data: DeploymentCreate
      ) -> DeploymentResponse:
-         """Create a new deployment"""
-
          response = self.client.post(
              f"/{self.project_id}/deployments/",
              json=deployment_data.model_dump(exclude_none=True),
          )
-         deployment = DeploymentResponse.model_validate(response.json())
-         return deployment
+         return DeploymentResponse.model_validate(response.json())

      def delete_deployment(self, deployment_id: str) -> None:
-         """Delete a deployment"""
          self.client.delete(f"/{self.project_id}/deployments/{deployment_id}")

      def update_deployment(
@@ -129,19 +121,15 @@ class LlamaDeployClient:
          update_data: DeploymentUpdate,
          force_git_sha_update: bool = False,
      ) -> DeploymentResponse:
-         """Update an existing deployment"""
-
          params = {}
          if force_git_sha_update:
              params["force_git_sha_update"] = True
-
          response = self.client.patch(
              f"/{self.project_id}/deployments/{deployment_id}",
              json=update_data.model_dump(),
              params=params,
          )
-         deployment = DeploymentResponse.model_validate(response.json())
-         return deployment
+         return DeploymentResponse.model_validate(response.json())

      def validate_repository(
          self,
@@ -149,10 +137,6 @@ class LlamaDeployClient:
          deployment_id: str | None = None,
          pat: str | None = None,
      ) -> RepositoryValidationResponse:
-         """Validate a repository URL"""
-         logging.info(
-             f"Validating repository with params: {repo_url}, {deployment_id}, {pat}"
-         )
          response = self.client.post(
              f"/{self.project_id}/deployments/validate-repository",
              json=RepositoryValidationRequest(
@@ -161,13 +145,37 @@ class LlamaDeployClient:
                  pat=pat,
              ).model_dump(),
          )
-         logging.info(f"Response: {response.json()}")
          return RepositoryValidationResponse.model_validate(response.json())


- # Global client factory function
- def get_client(
+ def get_control_plane_client(base_url: Optional[str] = None) -> ControlPlaneClient:
+     console = Console()
+     profile = config_manager.get_current_profile()
+     if not profile and not base_url:
+         console.print("\n[bold red]No profile configured![/bold red]")
+         console.print("\nTo get started, create a profile with:")
+         console.print("[cyan]llamactl profile create[/cyan]")
+         raise SystemExit(1)
+     resolved_base_url = (base_url or (profile.api_url if profile else "")).rstrip("/")
+     if not resolved_base_url:
+         raise ValueError("API URL is required")
+     return ControlPlaneClient(resolved_base_url, console)
+
+
+ def get_project_client(
      base_url: Optional[str] = None, project_id: Optional[str] = None
- ) -> LlamaDeployClient:
-     """Get a client instance with optional overrides"""
-     return LlamaDeployClient(base_url=base_url, project_id=project_id)
+ ) -> ProjectClient:
+     console = Console()
+     profile = config_manager.get_current_profile()
+     if not profile:
+         console.print("\n[bold red]No profile configured![/bold red]")
+         console.print("\nTo get started, create a profile with:")
+         console.print("[cyan]llamactl profile create[/cyan]")
+         raise SystemExit(1)
+     resolved_base_url = (base_url or profile.api_url or "").rstrip("/")
+     if not resolved_base_url:
+         raise ValueError("API URL is required")
+     resolved_project_id = project_id or profile.active_project_id
+     if not resolved_project_id:
+         raise ValueError("Project ID is required")
+     return ProjectClient(resolved_base_url, resolved_project_id, console)
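
The net effect of this refactor is that callers now pick a client scope explicitly. Below is a hedged usage sketch: the import path llama_deploy.cli.client is taken from other hunks in this diff, an active llamactl profile is assumed (both factories print a hint and exit when none is configured), and the script itself is illustrative rather than part of the package.

# Hedged sketch of using the new factory functions introduced above.
from llama_deploy.cli.client import get_control_plane_client, get_project_client

# Unscoped operations: health and server version come from the control plane client.
cp = get_control_plane_client()
print(cp.health_check())
print(cp.server_version())

# Project-scoped operations: deployments need a project id, resolved from the
# active profile unless passed explicitly.
project = get_project_client()
for deployment in project.list_deployments():
    print(deployment)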
@@ -7,9 +7,10 @@ from llama_deploy.core.config import DEFAULT_DEPLOYMENT_FILE_PATH
  from llama_deploy.core.schema.deployments import DeploymentUpdate
  from rich import print as rprint
  from rich.console import Console
+ from rich.text import Text
  from rich.table import Table

- from .client import get_client
+ from .client import get_project_client, get_control_plane_client
  from .config import config_manager
  from .interactive_prompts.utils import (
      confirm_action,
@@ -21,7 +22,7 @@ from .textual.deployment_form import create_deployment_form, edit_deployment_for
  from .textual.profile_form import create_profile_form, edit_profile_form

  RETRY_WAIT_SECONDS = 1
- console = Console()
+ console = Console(highlight=False)


  # Create sub-applications for organizing commands
@@ -217,7 +218,7 @@ def edit_profile(name: Optional[str]) -> None:
  def list_projects() -> None:
      """List all projects with deployment counts"""
      try:
-         client = get_client()
+         client = get_control_plane_client()
          projects = client.list_projects()

          if not projects:
@@ -246,7 +247,7 @@ def list_projects() -> None:
  def health_check() -> None:
      """Check if the API server is healthy"""
      try:
-         client = get_client()
+         client = get_control_plane_client()
          health = client.health_check()

          status = health.get("status", "unknown")
@@ -266,7 +267,7 @@ def health_check() -> None:
  def list_deployments() -> None:
      """List deployments for the configured project"""
      try:
-         client = get_client()
+         client = get_project_client()
          deployments = client.list_deployments()

          if not deployments:
@@ -320,7 +321,7 @@ def list_deployments() -> None:
  def get_deployment(deployment_id: Optional[str]) -> None:
      """Get details of a specific deployment"""
      try:
-         client = get_client()
+         client = get_project_client()

          deployment_id = select_deployment(deployment_id)
          if not deployment_id:
@@ -392,7 +393,7 @@ def create_deployment(
  def delete_deployment(deployment_id: Optional[str], confirm: bool) -> None:
      """Delete a deployment"""
      try:
-         client = get_client()
+         client = get_project_client()

          deployment_id = select_deployment(deployment_id)
          if not deployment_id:
@@ -418,7 +419,7 @@ def delete_deployment(deployment_id: Optional[str], confirm: bool) -> None:
  def edit_deployment(deployment_id: Optional[str]) -> None:
      """Interactively edit a deployment"""
      try:
-         client = get_client()
+         client = get_project_client()

          deployment_id = select_deployment(deployment_id)
          if not deployment_id:
@@ -449,7 +450,7 @@ def edit_deployment(deployment_id: Optional[str]) -> None:
  def refresh_deployment(deployment_id: Optional[str]) -> None:
      """Refresh a deployment with the latest code from its git reference"""
      try:
-         client = get_client()
+         client = get_project_client()

          deployment_id = select_deployment(deployment_id)
          if not deployment_id:
@@ -532,3 +533,45 @@ def serve(
      except Exception as e:
          rprint(f"[red]Error: {e}[/red]")
          raise click.Abort()
+
+
+ @click.command("version")
+ @global_options
+ def version() -> None:
+     """Print the version of llama_deploy"""
+     try:
+         from importlib.metadata import PackageNotFoundError, version as pkg_version
+
+         ver = pkg_version("llamactl")
+         console.print(Text.assemble("client version: ", (ver, "green")))
+
+         # If there is an active profile, attempt to query server version
+         profile = config_manager.get_current_profile()
+         if profile and profile.api_url:
+             try:
+                 cp_client = get_control_plane_client()
+                 data = cp_client.server_version()
+                 server_ver = data.get("version")
+                 console.print(
+                     Text.assemble(
+                         "server version: ",
+                         (
+                             server_ver or "unknown",
+                             "bright_yellow" if server_ver is None else "green",
+                         ),
+                     )
+                 )
+             except Exception as e:
+                 console.print(
+                     Text.assemble(
+                         "server version: ",
+                         ("unavailable", "bright_yellow"),
+                         (f" - {e}", "dim"),
+                     )
+                 )
+     except PackageNotFoundError:
+         rprint("[red]Package 'llamactl' not found[/red]")
+         raise click.Abort()
+     except Exception as e:
+         rprint(f"[red]Error: {e}[/red]")
+         raise click.Abort()
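
One way to exercise the new command without installing the console script is click's test runner. This is an illustrative sketch: the module path llama_deploy.cli.commands is an assumption inferred from the relative imports in this diff, and the output depends on the locally installed llamactl metadata and the active profile.

# Hedged sketch: invoke the new "version" command with click's CliRunner.
# The import path is assumed from the relative imports shown in this diff
# (from .commands import ... version); adjust it if the package layout differs.
from click.testing import CliRunner

from llama_deploy.cli.commands import version

runner = CliRunner()
result = runner.invoke(version, [])
print(result.exit_code)  # 0 when "llamactl" metadata is resolvable
print(result.output)     # "client version: ..." plus a server line if a profile is set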
@@ -6,7 +6,7 @@ import questionary
  from rich import print as rprint
  from rich.console import Console

- from ..client import get_client
+ from ..client import get_project_client as get_client
  from ..config import config_manager

  console = Console()
@@ -21,7 +21,7 @@ from textual.containers import Container, HorizontalGroup, Widget
  from textual.validation import Length
  from textual.widgets import Button, Input, Label, Static
  from textual.reactive import reactive
- from llama_deploy.cli.client import get_client
+ from llama_deploy.cli.client import get_project_client as get_client
  from textual.message import Message


@@ -11,7 +11,7 @@ from textual.message import Message
  from textual.content import Content
  from textual.reactive import reactive

- from llama_deploy.cli.client import get_client
+ from llama_deploy.cli.client import get_project_client as get_client
  from llama_deploy.core.schema.git_validation import RepositoryValidationResponse
  from llama_deploy.cli.textual.llama_loader import PixelLlamaLoader
  from llama_deploy.cli.textual.github_callback_server import GitHubCallbackServer
File without changes