llamactl 0.3.16__tar.gz → 0.3.18__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {llamactl-0.3.16 → llamactl-0.3.18}/PKG-INFO +3 -3
  2. {llamactl-0.3.16 → llamactl-0.3.18}/pyproject.toml +3 -3
  3. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/__init__.py +2 -1
  4. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/commands/aliased_group.py +2 -2
  5. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/commands/auth.py +2 -0
  6. llamactl-0.3.18/src/llama_deploy/cli/commands/pkg.py +122 -0
  7. llamactl-0.3.18/src/llama_deploy/cli/pkg/__init__.py +10 -0
  8. llamactl-0.3.18/src/llama_deploy/cli/pkg/defaults.py +11 -0
  9. llamactl-0.3.18/src/llama_deploy/cli/pkg/options.py +84 -0
  10. llamactl-0.3.18/src/llama_deploy/cli/pkg/utils.py +46 -0
  11. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/deployment_form.py +19 -7
  12. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/deployment_monitor.py +3 -1
  13. {llamactl-0.3.16 → llamactl-0.3.18}/README.md +0 -0
  14. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/app.py +0 -0
  15. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/auth/client.py +0 -0
  16. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/client.py +0 -0
  17. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/commands/deployment.py +0 -0
  18. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/commands/env.py +0 -0
  19. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/commands/init.py +0 -0
  20. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/commands/serve.py +0 -0
  21. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/_config.py +0 -0
  22. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/_migrations.py +0 -0
  23. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/auth_service.py +0 -0
  24. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/env_service.py +0 -0
  25. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/migrations/0001_init.sql +0 -0
  26. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/migrations/0002_add_auth_fields.sql +0 -0
  27. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/migrations/__init__.py +0 -0
  28. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/config/schema.py +0 -0
  29. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/debug.py +0 -0
  30. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/env.py +0 -0
  31. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/interactive_prompts/session_utils.py +0 -0
  32. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/interactive_prompts/utils.py +0 -0
  33. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/options.py +0 -0
  34. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/py.typed +0 -0
  35. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/styles.py +0 -0
  36. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/deployment_help.py +0 -0
  37. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/git_validation.py +0 -0
  38. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/github_callback_server.py +0 -0
  39. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/llama_loader.py +0 -0
  40. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/secrets_form.py +0 -0
  41. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/textual/styles.tcss +0 -0
  42. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/utils/env_inject.py +0 -0
  43. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/utils/redact.py +0 -0
  44. {llamactl-0.3.16 → llamactl-0.3.18}/src/llama_deploy/cli/utils/version.py +0 -0
@@ -1,12 +1,12 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: llamactl
3
- Version: 0.3.16
3
+ Version: 0.3.18
4
4
  Summary: A command-line interface for managing LlamaDeploy projects and deployments
5
5
  Author: Adrian Lyjak
6
6
  Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
7
7
  License: MIT
8
- Requires-Dist: llama-deploy-core[client]>=0.3.16,<0.4.0
9
- Requires-Dist: llama-deploy-appserver>=0.3.16,<0.4.0
8
+ Requires-Dist: llama-deploy-core[client]>=0.3.18,<0.4.0
9
+ Requires-Dist: llama-deploy-appserver>=0.3.18,<0.4.0
10
10
  Requires-Dist: vibe-llama-core>=0.1.0
11
11
  Requires-Dist: rich>=13.0.0
12
12
  Requires-Dist: questionary>=2.0.0
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "llamactl"
3
- version = "0.3.16"
3
+ version = "0.3.18"
4
4
  description = "A command-line interface for managing LlamaDeploy projects and deployments"
5
5
  readme = "README.md"
6
6
  license = { text = "MIT" }
@@ -9,8 +9,8 @@ authors = [
9
9
  ]
10
10
  requires-python = ">=3.11, <4"
11
11
  dependencies = [
12
- "llama-deploy-core[client]>=0.3.16,<0.4.0",
13
- "llama-deploy-appserver>=0.3.16,<0.4.0",
12
+ "llama-deploy-core[client]>=0.3.18,<0.4.0",
13
+ "llama-deploy-appserver>=0.3.18,<0.4.0",
14
14
  "vibe-llama-core>=0.1.0",
15
15
  "rich>=13.0.0",
16
16
  "questionary>=2.0.0",
@@ -4,6 +4,7 @@ from llama_deploy.cli.commands.auth import auth
4
4
  from llama_deploy.cli.commands.deployment import deployments
5
5
  from llama_deploy.cli.commands.env import env_group
6
6
  from llama_deploy.cli.commands.init import init
7
+ from llama_deploy.cli.commands.pkg import pkg
7
8
  from llama_deploy.cli.commands.serve import serve
8
9
 
9
10
  from .app import app
@@ -22,7 +23,7 @@ def main() -> None:
22
23
  app()
23
24
 
24
25
 
25
- __all__ = ["app", "deployments", "auth", "serve", "init", "env_group"]
26
+ __all__ = ["app", "deployments", "auth", "serve", "init", "env_group", "pkg"]
26
27
 
27
28
 
28
29
  if __name__ == "__main__":
@@ -27,7 +27,7 @@ class AliasedGroup(click.Group):
27
27
 
28
28
  def resolve_command(
29
29
  self, ctx: click.Context, args: list[str]
30
- ) -> tuple[str, click.Command, list[str]]:
30
+ ) -> tuple[str | None, click.Command | None, list[str]]:
31
31
  # always return the full command name
32
32
  _, cmd, args = super().resolve_command(ctx, args)
33
- return cmd.name, cmd, args
33
+ return cmd.name if cmd else None, cmd, args
@@ -501,6 +501,8 @@ async def _run_device_authentication(base_url: str) -> DeviceOIDC:
501
501
  raise click.ClickException(
502
502
  "Device flow failed: token response missing access_token"
503
503
  )
504
+ if not provider.jwks_uri:
505
+ raise click.ClickException("Provider does not expose jwks_uri")
504
506
  claims = await decode_jwt_claims(token.id_token, provider.jwks_uri)
505
507
  email = claims.get("email")
506
508
  if not email:
@@ -0,0 +1,122 @@
1
+ from pathlib import Path
2
+
3
+ import click
4
+ from llama_deploy.cli.pkg import (
5
+ DEFAULT_DOCKER_IGNORE,
6
+ build_dockerfile_content,
7
+ infer_python_version,
8
+ pkg_container_options,
9
+ )
10
+ from llama_deploy.core.deployment_config import (
11
+ read_deployment_config_from_git_root_or_cwd,
12
+ )
13
+ from rich import print as rprint
14
+
15
+ from ..app import app
16
+
17
# Packaging targets this command group can generate build files for.
SUPPORTED_FORMATS = ["Docker", "Podman"]
# Pre-joined form used in the group's --help text.
SUPPORTED_FORMATS_STR = ", ".join(SUPPORTED_FORMATS)
19
+
20
+
21
@app.group(
    help=f"Package your application in different formats. Currently supported: {SUPPORTED_FORMATS_STR}",
    no_args_is_help=True,
    context_settings={"max_content_width": None},
)
def pkg() -> None:
    """Package application in different formats (Dockerfile, Podman config, Nixpack...)"""
    # Pure command group: subcommands carry all behavior.
29
+
30
+
31
@pkg.command(
    "container",
    help="Generate a minimal, build-ready file to containerize your workflows through Docker or Podman (currently frontend is not supported).",
)
@pkg_container_options
def create_container_file(
    deployment_file: Path,
    python_version: str | None = None,
    port: int = 4501,
    exclude: tuple[str, ...] | None = None,
    output_file: str = "Dockerfile",
    dockerignore_path: str = ".dockerignore",
    overwrite: bool = False,
):
    """Thin CLI entry point: forward every option to the implementation helper."""
    _create_file_for_container(
        deployment_file=deployment_file,
        output_file=output_file,
        dockerignore_path=dockerignore_path,
        python_version=python_version,
        port=port,
        exclude=exclude,
        overwrite=overwrite,
    )
54
+
55
+
56
def _check_deployment_config(deployment_file: Path) -> Path:
    """Validate the project before generating container build files.

    Checks, in order: the deployment file exists; its directory contains a
    pyproject.toml (the generated image runs ``uv sync``, which needs one);
    a deployment config can be parsed; and no UI is configured (UI container
    builds are unsupported).

    Args:
        deployment_file: path to the deployment config file or its directory.

    Returns:
        The directory containing the deployment config.

    Raises:
        click.Abort: if any check fails (the reason is printed first).
    """
    if not deployment_file.exists():
        rprint(f"[red]Deployment file '{deployment_file}' not found[/red]")
        raise click.Abort()

    # Early check: appserver requires a pyproject.toml in the config directory
    config_dir = deployment_file if deployment_file.is_dir() else deployment_file.parent
    if not (config_dir / "pyproject.toml").exists():
        rprint(
            "[red]No pyproject.toml found at[/red] "
            f"[bold]{config_dir}[/bold].\n"
            # The original message said to re-run 'llamactl serve', which is
            # a different command than the one the user just invoked.
            "Add a pyproject.toml to your project and re-run 'llamactl pkg container'."
        )
        raise click.Abort()

    try:
        config = read_deployment_config_from_git_root_or_cwd(
            Path.cwd(), deployment_file
        )
    except Exception as err:
        rprint(
            "[red]Error: Could not read a deployment config. This doesn't appear to be a valid llama-deploy project.[/red]"
        )
        # Chain the cause so --debug tracebacks show the parse failure.
        raise click.Abort() from err
    if config.ui:
        rprint(
            "[bold red]Containerized UI builds are currently not supported. Please remove the UI configuration from your deployment file if you wish to proceed.[/]"
        )
        raise click.Abort()
    return config_dir
86
+
87
+
88
def _create_file_for_container(
    deployment_file: Path,
    output_file: str = "Dockerfile",
    python_version: str | None = None,
    port: int = 4501,
    exclude: tuple[str, ...] | None = None,
    dockerignore_path: str = ".dockerignore",
    overwrite: bool = False,
) -> None:
    """Generate the Dockerfile and .dockerignore for a deployment.

    Validates the deployment config, infers the Python version when not
    given, then writes both files. Both destinations are checked for
    collisions *before* anything is written, so a refused overwrite never
    leaves a partial result on disk (previously the Dockerfile was written
    before the .dockerignore collision check ran).

    Raises:
        click.Abort: on validation failure, or when a destination already
            exists and ``overwrite`` is False.
    """
    config_dir = _check_deployment_config(deployment_file=deployment_file)

    if not python_version:
        python_version = infer_python_version(config_dir)

    dockerignore_content = DEFAULT_DOCKER_IGNORE
    if exclude:
        # User-supplied --exclude patterns are appended after the defaults.
        for item in exclude:
            dockerignore_content += "\n" + item

    dockerfile_content = build_dockerfile_content(python_version, port)

    # Check every destination up front so we write all files or none.
    for destination in (output_file, dockerignore_path):
        if Path(destination).exists() and not overwrite:
            rprint(
                f"[red bold]Error: {destination} already exists. If you wish to overwrite the file, pass `--overwrite` as a flag to the command.[/]"
            )
            raise click.Abort()

    Path(output_file).write_text(dockerfile_content)
    Path(dockerignore_path).write_text(dockerignore_content)
@@ -0,0 +1,10 @@
1
+ from .defaults import DEFAULT_DOCKER_IGNORE
2
+ from .options import pkg_container_options
3
+ from .utils import build_dockerfile_content, infer_python_version
4
+
5
+ __all__ = [
6
+ "infer_python_version",
7
+ "build_dockerfile_content",
8
+ "DEFAULT_DOCKER_IGNORE",
9
+ "pkg_container_options",
10
+ ]
@@ -0,0 +1,11 @@
1
# Baseline .dockerignore entries written for containerized builds; any
# user-supplied --exclude patterns are appended after these.
_DEFAULT_IGNORE_ENTRIES = (
    ".venv/",
    ".git/",
    "__pycache__/",
    "*.py[oc]",
    "build/",
    "dist/",
    "wheels/",
    "*.egg-info",
    ".env",
)

# Leading and trailing newlines match the original triple-quoted literal.
DEFAULT_DOCKER_IGNORE = "\n" + "\n".join(_DEFAULT_IGNORE_ENTRIES) + "\n"
@@ -0,0 +1,84 @@
1
+ from pathlib import Path
2
+ from typing import Callable, ParamSpec, TypeVar
3
+
4
+ import click
5
+ from llama_deploy.core.config import DEFAULT_DEPLOYMENT_FILE_PATH
6
+
7
+ P = ParamSpec("P")
8
+ R = TypeVar("R")
9
+
10
+
11
+ def _deployment_file_option(f: Callable[P, R]) -> Callable[P, R]:
12
+ return click.argument(
13
+ "deployment_file",
14
+ required=False,
15
+ default=DEFAULT_DEPLOYMENT_FILE_PATH,
16
+ type=click.Path(dir_okay=True, resolve_path=True, path_type=Path),
17
+ )(f)
18
+
19
+
20
+ def _python_version_option(f: Callable[P, R]) -> Callable[P, R]:
21
+ return click.option(
22
+ "--python-version",
23
+ help="Python version for the base image. Default is inferred from the uv project configuration (.python-version or pyproject.toml). If no version can be inferred, python 3.12 is used.",
24
+ required=False,
25
+ default=None,
26
+ )(f)
27
+
28
+
29
+ def _port_option(f: Callable[P, R]) -> Callable[P, R]:
30
+ return click.option(
31
+ "--port",
32
+ help="The port to run the API server on. Defaults to 4501.",
33
+ required=False,
34
+ default=4501,
35
+ type=int,
36
+ )(f)
37
+
38
+
39
+ def _dockerignore_path_option(f: Callable[P, R]) -> Callable[P, R]:
40
+ return click.option(
41
+ "--dockerignore-path",
42
+ help="Path for the output .dockerignore file. Defaults to .dockerignore",
43
+ required=False,
44
+ default=".dockerignore",
45
+ )(f)
46
+
47
+
48
+ def _output_file_option(f: Callable[P, R]) -> Callable[P, R]:
49
+ return click.option(
50
+ "--output-file",
51
+ help="Path for the output file to build the image. Defaults to Dockerfile",
52
+ required=False,
53
+ default="Dockerfile",
54
+ )(f)
55
+
56
+
57
+ def _overwrite_option(f: Callable[P, R]) -> Callable[P, R]:
58
+ return click.option(
59
+ "--overwrite",
60
+ help="Overwrite output files",
61
+ is_flag=True,
62
+ )(f)
63
+
64
+
65
+ def _exclude_option(f: Callable[P, R]) -> Callable[P, R]:
66
+ return click.option(
67
+ "--exclude",
68
+ help="Path to exclude from the build (will be appended to .dockerignore). Can be used multiple times.",
69
+ multiple=True,
70
+ required=False,
71
+ default=None,
72
+ )(f)
73
+
74
+
75
+ def pkg_container_options(f: Callable[P, R]) -> Callable[P, R]:
76
+ return _deployment_file_option(
77
+ _python_version_option(
78
+ _port_option(
79
+ _dockerignore_path_option(
80
+ _overwrite_option(_exclude_option(_output_file_option(f)))
81
+ )
82
+ )
83
+ )
84
+ )
@@ -0,0 +1,46 @@
1
+ from pathlib import Path
2
+ from tomllib import load as load_toml
3
+
4
+
5
+ def _get_min_py_version(requires_python: str):
6
+ min_v = requires_python.split(",")[0].strip()
7
+ return (
8
+ min_v.replace("=", "")
9
+ .replace(">", "")
10
+ .replace("<", "")
11
+ .replace("~", "")
12
+ .strip()
13
+ )
14
+
15
+
16
+ def infer_python_version(config_dir: Path):
17
+ if (config_dir / ".python-version").exists():
18
+ with open(config_dir / ".python-version", "r") as f:
19
+ content = f.read()
20
+ if content.strip():
21
+ py_version = content.strip()
22
+ return py_version
23
+ with open(config_dir / "pyproject.toml", "rb") as f:
24
+ data = load_toml(f)
25
+ return _get_min_py_version(data.get("project", {}).get("requires-python", "3.12"))
26
+
27
+
28
+ def build_dockerfile_content(python_version: str | None = None, port: int = 4501):
29
+ return f"""
30
+ FROM python:{python_version}-slim-trixie
31
+
32
+ COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
33
+
34
+ WORKDIR /app
35
+
36
+ COPY . /app/
37
+
38
+ ENV PATH=/root/.local/bin:$PATH
39
+
40
+ RUN uv sync --locked
41
+ RUN uv tool install llamactl
42
+
43
+ EXPOSE {port}
44
+
45
+ ENTRYPOINT [ "uv", "run", "llamactl", "serve", "--host", "0.0.0.0", "--port", "{port}" ]
46
+ """
@@ -6,6 +6,7 @@ import re
6
6
  from dataclasses import dataclass, field
7
7
  from pathlib import Path
8
8
  from textwrap import dedent
9
+ from typing import cast
9
10
  from urllib.parse import urlsplit
10
11
 
11
12
  from llama_deploy.cli.client import get_project_client as get_client
@@ -117,7 +118,11 @@ class DeploymentForm:
117
118
  def to_update(self) -> DeploymentUpdate:
118
119
  """Convert form data to API format"""
119
120
 
120
- secrets: dict[str, str | None] = self.secrets.copy()
121
+ secrets: dict[str, str | None] = cast(
122
+ # dict isn't covariant, so whatever, make it work
123
+ dict[str, str | None],
124
+ self.secrets.copy(),
125
+ )
121
126
  for secret in self.removed_secrets:
122
127
  secrets[secret] = None
123
128
 
@@ -287,7 +292,11 @@ class DeploymentFormWidget(Widget):
287
292
  or existing_version
288
293
  or installed_version
289
294
  )
290
- is_upgrade = Version(installed_version) > Version(existing_version)
295
+ is_upgrade = (
296
+ installed_version
297
+ and existing_version
298
+ and Version(installed_version) > Version(existing_version)
299
+ )
291
300
  label = "Upgrade" if is_upgrade else "Downgrade"
292
301
  yield Select(
293
302
  [
@@ -563,11 +572,14 @@ class DeploymentEditApp(App[DeploymentResponse | None]):
563
572
  result = self.form_data
564
573
  client = get_client()
565
574
  try:
566
- update_deployment = (
567
- await client.update_deployment(result.id, result.to_update())
568
- if result.is_editing
569
- else await client.create_deployment(result.to_create())
570
- )
575
+ if result.is_editing:
576
+ if not result.id:
577
+ raise ValueError("Deployment ID is required for update")
578
+ update_deployment = await client.update_deployment(
579
+ result.id, result.to_update()
580
+ )
581
+ else:
582
+ update_deployment = await client.create_deployment(result.to_create())
571
583
  # Save and navigate to embedded monitor screen
572
584
  self.saved_deployment = update_deployment
573
585
  # Ensure form_data carries the new ID for any subsequent operations
@@ -365,7 +365,9 @@ class DeploymentMonitorWidget(Widget):
365
365
  ev_details_widget = self.query_one("#last_event_details", Static)
366
366
  deployment_link_button = self.query_one("#deployment_link_button", Button)
367
367
  widget.update(self._render_status_line())
368
- deployment_link_button.label = f"{str(self.deployment.apiserver_url or '')}"
368
+ deployment_link_button.label = (
369
+ f"{str(self.deployment.apiserver_url or '') if self.deployment else ''}"
370
+ )
369
371
  # Update last event line
370
372
  ev_widget.update(self._render_last_event_status())
371
373
  ev_details_widget.update(self._render_last_event_details())
File without changes