llamactl 0.3.20__py3-none-any.whl → 0.3.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_deploy/cli/commands/dev.py +31 -39
- llama_deploy/cli/textual/deployment_monitor.py +4 -1
- {llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/METADATA +3 -3
- {llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/RECORD +6 -6
- {llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/WHEEL +0 -0
- {llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/entry_points.txt +0 -0
llama_deploy/cli/commands/dev.py
CHANGED
@@ -3,15 +3,14 @@ from __future__ import annotations
 import os
 import subprocess
 from pathlib import Path
-from typing import Iterable
 
 import click
 from click.exceptions import Abort, Exit
+from llama_deploy.appserver.app import prepare_server, start_preflight_in_target_venv
 from llama_deploy.appserver.deployment_config_parser import get_deployment_config
 from llama_deploy.appserver.settings import configure_settings, settings
 from llama_deploy.appserver.workflow_loader import (
     load_environment_variables,
-    load_workflows,
     parse_environment_variables,
     validate_required_env_vars,
 )
@@ -62,29 +61,28 @@ def validate_command(deployment_file: Path, interactive: bool) -> None:
     config_dir = _ensure_project_layout(
         deployment_file, command_name="llamactl dev validate"
     )
-
-
-
-
-        workflows = load_workflows(config)
-        errors: list[tuple[str, Exception]] = _run_validations(workflows.values())
+    # Ensure cloud credentials/env are available to the subprocess (if required)
+    _maybe_inject_llama_cloud_credentials(
+        deployment_file, interactive, require_cloud=False
+    )
 
-
-
-
+    prepare_server(
+        deployment_file=deployment_file,
+        install=True,
+        build=False,
+        install_ui_deps=False,
+    )
 
-
-
-
-        )
-    except
-
-
-
-
-
-        raise click.Abort()
+    # Delegate venv-targeted invocation to the appserver helper (mirrors start_server_in_target_venv)
+
+    try:
+        start_preflight_in_target_venv(cwd=Path.cwd(), deployment_file=deployment_file)
+    except subprocess.CalledProcessError as exc:
+        rprint("[red]Workflow validation failed. See errors above.[/red]")
+        raise Exit(exc.returncode)
+
+    _print_connection_summary()
+    rprint(f"[green]Validated workflows in {config_dir} successfully.[/green]")
 
 
 @dev.command(
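For context on the hunk above: workflow validation no longer runs in-process; the CLI now prepares the server environment and delegates to a preflight step executed inside the project's target virtual environment, so a failing subprocess exit code becomes the command's exit code. Below is a minimal sketch of that pattern only; the "-m llama_deploy.appserver --preflight" invocation is an assumed stand-in, not the real interface of start_preflight_in_target_venv.

# Sketch of the subprocess-preflight pattern, under assumed invocation details.
import subprocess
import sys
from pathlib import Path


def run_preflight(deployment_file: Path, venv_python: str | None = None) -> None:
    # check=True surfaces a failed preflight as CalledProcessError, which the
    # CLI above converts into click's Exit(exc.returncode).
    subprocess.run(
        [
            venv_python or sys.executable,
            "-m",
            "llama_deploy.appserver",   # hypothetical entry point for illustration
            "--preflight",              # hypothetical flag for illustration
            str(deployment_file),
        ],
        check=True,
        cwd=Path.cwd(),
    )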
@@ -103,11 +101,18 @@ def validate_command(deployment_file: Path, interactive: bool) -> None:
     type=click.Path(dir_okay=True, resolve_path=True, path_type=Path),
     help="The deployment file to use for the command",
 )
+@click.option(
+    "no_auth",
+    "--no-auth",
+    is_flag=True,
+    help="Do not inject/authenticate with Llama Cloud credentials",
+)
 @interactive_option
 @click.argument("cmd", nargs=-1, type=click.UNPROCESSED)
-def run_command(
+def run_command(
+    deployment_file: Path, no_auth: bool, interactive: bool, cmd: tuple[str, ...]
+) -> None:  # type: ignore
     """Execute COMMAND with deployment environment variables applied."""
-    _ensure_project_layout(deployment_file, command_name="llamactl dev run")
     if not cmd:
         raise click.ClickException(
             "No command provided. Use '--' before the command arguments if needed."
@@ -115,7 +120,7 @@ def run_command(deployment_file: Path, interactive: bool, cmd: tuple[str, ...])
 
     try:
         config, config_parent = _prepare_environment(
-            deployment_file, interactive, require_cloud=
+            deployment_file, interactive, require_cloud=not no_auth
         )
         env_overrides = parse_environment_variables(config, config_parent)
         env = os.environ.copy()
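The new --no-auth flag simply inverts into require_cloud, as the two hunks above show. A self-contained illustration of the same click wiring follows; the "demo" command is hypothetical and not part of llamactl.

import click


@click.command()
@click.option("no_auth", "--no-auth", is_flag=True, help="Skip Llama Cloud credential injection")
def demo(no_auth: bool) -> None:
    # Mirrors _prepare_environment(..., require_cloud=not no_auth) in dev.py.
    require_cloud = not no_auth
    click.echo(f"require_cloud={require_cloud}")


if __name__ == "__main__":
    demo()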
@@ -168,17 +173,4 @@ def _prepare_environment(
     return config, config_parent
 
 
-def _run_validations(workflows: Iterable[object]) -> list[tuple[str, Exception]]:
-    errors: list[tuple[str, Exception]] = []
-    for workflow in workflows:
-        workflow_name = getattr(workflow, "name", "workflow")
-        try:
-            validate_method = getattr(workflow, "_validate", None)
-            if callable(validate_method):
-                validate_method()
-        except Exception as exc:
-            errors.append((str(workflow_name), exc))
-    return errors
-
-
 __all__ = ["dev", "validate_command", "run_command"]
llama_deploy/cli/textual/deployment_monitor.py
CHANGED
@@ -390,7 +390,10 @@ class DeploymentMonitorWidget(Widget):
         deployment_link_button.label = (
             f"{str(self.deployment.apiserver_url or '') if self.deployment else ''}"
         )
-
+        if self.deployment:
+            last_commit_button.label = f"{(str(self.deployment.git_sha or '-'))[:7]}"
+        else:
+            last_commit_button.label = "-"
         # Update last event line
         ev_widget.update(self._render_last_event_status())
         ev_details_widget.update(self._render_last_event_details())
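A small note on the label change above: slicing after the "or '-'" fallback keeps the short-SHA label safe when git_sha is unset, for example:

# Illustrative values only; in the widget the value comes from self.deployment.git_sha.
full_sha = "f14680cdd0d913a7b5e85056"
print((str(full_sha or "-"))[:7])  # -> f14680c
print((str(None or "-"))[:7])      # -> -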
{llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/METADATA
CHANGED
@@ -1,12 +1,12 @@
 Metadata-Version: 2.3
 Name: llamactl
-Version: 0.3.20
+Version: 0.3.21
 Summary: A command-line interface for managing LlamaDeploy projects and deployments
 Author: Adrian Lyjak
 Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
 License: MIT
-Requires-Dist: llama-deploy-core[client]>=0.3.
-Requires-Dist: llama-deploy-appserver>=0.3.
+Requires-Dist: llama-deploy-core[client]>=0.3.21,<0.4.0
+Requires-Dist: llama-deploy-appserver>=0.3.21,<0.4.0
 Requires-Dist: vibe-llama-core>=0.1.0
 Requires-Dist: rich>=13.0.0
 Requires-Dist: questionary>=2.0.0
{llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/RECORD
CHANGED
@@ -5,7 +5,7 @@ llama_deploy/cli/client.py,sha256=f4053b5183224cff55c1393e78887d1af2597219135379
 llama_deploy/cli/commands/aliased_group.py,sha256=101fe7733802dfb448198331818123184523b54cb80a27f166d4ff7010a76e49,1097
 llama_deploy/cli/commands/auth.py,sha256=48c4cc786e8c4e0fb8c0caaba690cef359cddac9b7fbb0b88505111323c07667,24754
 llama_deploy/cli/commands/deployment.py,sha256=2571aa7f220930adc47f17e9aa33147c13156f81acfb911f3455f3800c742720,14611
-llama_deploy/cli/commands/dev.py,sha256=
+llama_deploy/cli/commands/dev.py,sha256=e4f17c6fdb13dd370ba594f0d4084ec8a454f41f932ecba1b6222e4e3c676409,5875
 llama_deploy/cli/commands/env.py,sha256=36cb1b0abb9e3d1c5546d3e8a3c4c7839c4d6c2abf75763e39efb08376b3eae9,6808
 llama_deploy/cli/commands/init.py,sha256=7254c9ad1824dc546fe33f8592034ef044700ffbaf843b9ee7dc49495ca9166e,16137
 llama_deploy/cli/commands/pkg.py,sha256=31049a8266fba71a45920187ef983988bb5ba3b9ad81ab4b7bca6042a071a810,4068
@@ -31,7 +31,7 @@ llama_deploy/cli/py.typed,sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934
 llama_deploy/cli/styles.py,sha256=15901fb567b0d10470f56a06d863819c4ed00a9f90b2a8c46b4bc2fb1dbdf6c3,307
 llama_deploy/cli/textual/deployment_form.py,sha256=a7f6d8fffe2ae066241a397bf920c109aed7503faf5bcbf4c8b5bb46ac27cb6f,29762
 llama_deploy/cli/textual/deployment_help.py,sha256=991d8cdcc61ae0cf79ddd27715db5452c9902d343ce20775f8651252056eca77,2859
-llama_deploy/cli/textual/deployment_monitor.py,sha256=
+llama_deploy/cli/textual/deployment_monitor.py,sha256=f14680cdd0d913a7b5e850560abb3923122d4f86ebd71e6efc358ad357c08387,17815
 llama_deploy/cli/textual/git_validation.py,sha256=94c95b61d0cbc490566a406b4886c9c12e1d1793dc14038a5be37119223c9568,13419
 llama_deploy/cli/textual/github_callback_server.py,sha256=3111cc45b3ff2632255a37e4472c85084670c94bcea25ec428f06b0761dd27bf,7584
 llama_deploy/cli/textual/llama_loader.py,sha256=33cb32a46dd40bcf889c553e44f2672c410e26bd1d4b17aa6cca6d0a5d59c2c4,1468
@@ -40,7 +40,7 @@ llama_deploy/cli/textual/styles.tcss,sha256=2536f52ea1a654ae1f8990a25d45c845cb31
 llama_deploy/cli/utils/env_inject.py,sha256=01911758bcc3cf22aad0db0d1ade56aece48d6ad6bdb7186ea213337c67f5a89,688
 llama_deploy/cli/utils/redact.py,sha256=1e768d76b4a6708230c34f7ce8a5a82ab52795bb3d6ab0387071ab4e8d7e7934,863
 llama_deploy/cli/utils/version.py,sha256=bf01a6dda948b868cc08c93701ed44cd36b487402404af8451d4c0996a2edb31,364
-llamactl-0.3.
-llamactl-0.3.
-llamactl-0.3.
-llamactl-0.3.
+llamactl-0.3.21.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
+llamactl-0.3.21.dist-info/entry_points.txt,sha256=b67e1eb64305058751a651a80f2d2268b5f7046732268421e796f64d4697f83c,52
+llamactl-0.3.21.dist-info/METADATA,sha256=aa46592d9763fa8e9476cd6456fe51fb628c0b34f8bfef64be958b04addff872,3217
+llamactl-0.3.21.dist-info/RECORD,,
{llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/WHEEL
File without changes
{llamactl-0.3.20.dist-info → llamactl-0.3.21.dist-info}/entry_points.txt
File without changes