llamactl 0.3.0a18__tar.gz → 0.3.0a19__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/PKG-INFO +3 -3
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/pyproject.toml +3 -3
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/commands/auth.py +1 -1
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/commands/deployment.py +4 -4
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/commands/serve.py +2 -3
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/deployment_help.py +1 -1
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/README.md +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/__init__.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/app.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/client.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/commands/aliased_group.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/commands/env.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/commands/init.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/config/_config.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/config/auth_service.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/config/env_service.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/config/schema.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/debug.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/env.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/interactive_prompts/session_utils.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/interactive_prompts/utils.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/options.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/platform_client.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/py.typed +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/deployment_form.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/deployment_monitor.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/git_validation.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/github_callback_server.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/llama_loader.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/secrets_form.py +0 -0
- {llamactl-0.3.0a18 → llamactl-0.3.0a19}/src/llama_deploy/cli/textual/styles.tcss +0 -0
PKG-INFO

@@ -1,12 +1,12 @@
 Metadata-Version: 2.3
 Name: llamactl
-Version: 0.3.0a18
+Version: 0.3.0a19
 Summary: A command-line interface for managing LlamaDeploy projects and deployments
 Author: Adrian Lyjak
 Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
 License: MIT
-Requires-Dist: llama-deploy-core[client]>=0.3.0a18,<0.4.0
-Requires-Dist: llama-deploy-appserver>=0.3.0a18,<0.4.0
+Requires-Dist: llama-deploy-core[client]>=0.3.0a19,<0.4.0
+Requires-Dist: llama-deploy-appserver>=0.3.0a19,<0.4.0
 Requires-Dist: httpx>=0.24.0,<1.0.0
 Requires-Dist: rich>=13.0.0
 Requires-Dist: questionary>=2.0.0
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "llamactl"
-version = "0.3.0a18"
+version = "0.3.0a19"
 description = "A command-line interface for managing LlamaDeploy projects and deployments"
 readme = "README.md"
 license = { text = "MIT" }
@@ -9,8 +9,8 @@ authors = [
 ]
 requires-python = ">=3.11, <4"
 dependencies = [
-    "llama-deploy-core[client]>=0.3.0a18,<0.4.0",
-    "llama-deploy-appserver>=0.3.0a18,<0.4.0",
+    "llama-deploy-core[client]>=0.3.0a19,<0.4.0",
+    "llama-deploy-appserver>=0.3.0a19,<0.4.0",
     "httpx>=0.24.0,<1.0.0",
     "rich>=13.0.0",
     "questionary>=2.0.0",
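Both manifests bump the package version and the lower bounds on llama-deploy-core and llama-deploy-appserver to 0.3.0a19 while keeping the <0.4.0 ceiling. For readers unfamiliar with how pre-release specifiers resolve, here is a small sketch using the `packaging` library (used purely for illustration, not a dependency declared in this diff) that evaluates the new constraint:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

# The constraint this release applies to llama-deploy-core / llama-deploy-appserver.
spec = SpecifierSet(">=0.3.0a19,<0.4.0")

# Pre-releases must be allowed explicitly when checking candidate versions.
print(spec.contains(Version("0.3.0a18"), prereleases=True))  # False: below the new floor
print(spec.contains(Version("0.3.0a19"), prereleases=True))  # True
print(spec.contains(Version("0.4.0"), prereleases=True))     # False: excluded by the ceiling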
src/llama_deploy/cli/commands/auth.py

@@ -276,7 +276,7 @@ def validate_authenticated_profile(interactive: bool) -> Auth:


 def _prompt_for_api_key() -> str:
-    entered = questionary.password("Enter API key token").ask()
+    entered = questionary.password("Enter API key token to login").ask()
     if entered:
         return entered.strip()
     raise click.ClickException("No API key entered")
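The only change here is the prompt wording. For context, a self-contained sketch of the prompt-and-validate pattern this function uses (recomposed from the hunk above and simplified; not the module's full code):

import click
import questionary


def prompt_for_api_key() -> str:
    # questionary.password hides the typed input; .ask() returns None on cancel.
    entered = questionary.password("Enter API key token to login").ask()
    if entered:
        return entered.strip()
    # ClickException lets the CLI print the message and exit with a non-zero status.
    raise click.ClickException("No API key entered")


if __name__ == "__main__":
    print(f"Got a key of length {len(prompt_for_api_key())}")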
src/llama_deploy/cli/commands/deployment.py

@@ -225,15 +225,15 @@ def refresh_deployment(deployment_id: str | None, interactive: bool) -> None:
     """Update the deployment, pulling the latest code from it's branch"""
     validate_authenticated_profile(interactive)
     try:
-        client = get_project_client()
-
         deployment_id = select_deployment(deployment_id)
         if not deployment_id:
             rprint("[yellow]No deployment selected[/yellow]")
             return

         # Get current deployment details to show what we're refreshing
-        current_deployment = asyncio.run(client.get_deployment(deployment_id))
+        current_deployment = asyncio.run(
+            get_project_client().get_deployment(deployment_id)
+        )
         deployment_name = current_deployment.name
         old_git_sha = current_deployment.git_sha or ""

@@ -241,7 +241,7 @@ def refresh_deployment(deployment_id: str | None, interactive: bool) -> None:
         with console.status(f"Refreshing {deployment_name}..."):
             deployment_update = DeploymentUpdate()
             updated_deployment = asyncio.run(
-                client.update_deployment(
+                get_project_client().update_deployment(
                     deployment_id,
                     deployment_update,
                 )
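The refactor above drops the cached `client` variable and instead calls `get_project_client()` inside each `asyncio.run(...)` invocation. One plausible motivation, not stated in the diff, is that every `asyncio.run` call creates and closes its own event loop, so an async client built once and reused across several `asyncio.run` calls can end up holding resources bound to a closed loop. A minimal, generic sketch of that failure mode using `httpx.AsyncClient` as a stand-in (not the project's actual client):

import asyncio

import httpx


async def fetch(client: httpx.AsyncClient, url: str) -> int:
    resp = await client.get(url)
    return resp.status_code


# Fragile: the shared client opens connections on the first event loop,
# which asyncio.run() closes when it returns.
shared = httpx.AsyncClient()
print(asyncio.run(fetch(shared, "https://example.com")))
# A second asyncio.run() with the same client may fail with
# "RuntimeError: Event loop is closed" when it reuses pooled connections.
# print(asyncio.run(fetch(shared, "https://example.com")))


# Safer: build (and close) the client inside the coroutine each call runs.
async def fetch_fresh(url: str) -> int:
    async with httpx.AsyncClient() as client:
        resp = await client.get(url)
        return resp.status_code


print(asyncio.run(fetch_fresh("https://example.com")))
print(asyncio.run(fetch_fresh("https://example.com")))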
src/llama_deploy/cli/commands/serve.py

@@ -117,7 +117,6 @@ def _set_env_vars_from_env(env_vars: dict[str, str]):


 def _set_env_vars(key: str, url: str):
-    print(f"Setting env vars: {key}, {url}")
     os.environ["LLAMA_CLOUD_API_KEY"] = key
     os.environ["LLAMA_CLOUD_BASE_URL"] = url
     # kludge for common web servers to inject local auth key
@@ -176,7 +175,7 @@ def _maybe_inject_llama_cloud_credentials(
     # No key available; consider prompting if interactive
     if interactive:
         should_login = questionary.confirm(
-            "This deployment requires Llama Cloud. Login now to inject credentials?",
+            "This deployment requires Llama Cloud. Login now to inject credentials? Otherwise the app may not work.",
             default=True,
         ).ask()
         if should_login:
@@ -195,5 +194,5 @@ def _maybe_inject_llama_cloud_credentials(

     # Non-interactive session
     rprint(
-        "[yellow]Warning: LLAMA_CLOUD_API_KEY is not set and no logged-in profile was found. The
+        "[yellow]Warning: LLAMA_CLOUD_API_KEY is not set and no logged-in profile was found. The app may not work.[/yellow]"
     )
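Taken together, these hunks remove a debug print and soften the messaging around Llama Cloud credential injection. A compressed, hypothetical sketch of the prompt-then-set-environment flow they touch (the function name and signature are illustrative; only the prompt strings, warning text, and environment variable names come from the hunks in this diff):

import os

import questionary
from rich import print as rprint


def inject_credentials(key: str | None, url: str, interactive: bool) -> None:
    """Illustrative recomposition of the serve-time credential flow."""
    if key is None and interactive:
        should_login = questionary.confirm(
            "This deployment requires Llama Cloud. Login now to inject credentials?"
            " Otherwise the app may not work.",
            default=True,
        ).ask()
        if should_login:
            # Mirrors the API-key prompt wording changed in auth.py.
            key = questionary.password("Enter API key token to login").ask()
    if key:
        # Mirrors _set_env_vars in serve.py.
        os.environ["LLAMA_CLOUD_API_KEY"] = key
        os.environ["LLAMA_CLOUD_BASE_URL"] = url
    else:
        rprint(
            "[yellow]Warning: LLAMA_CLOUD_API_KEY is not set and no logged-in"
            " profile was found. The app may not work.[/yellow]"
        )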
src/llama_deploy/cli/textual/deployment_help.py

@@ -34,7 +34,7 @@ class DeploymentHelpWidget(Widget):
 A git repository URL to pull code from. If not publically accessible, you will be prompted to install the llama deploy github app. If code is on another platform, either provide a Personal Access Token (basic access credentials) instead.

 [b]Git Ref[/b]
-The git ref to deploy. This can be a branch, tag, or commit hash. If this is a branch, after deploying, run a `[slategrey reverse]llamactl deploy
+The git ref to deploy. This can be a branch, tag, or commit hash. If this is a branch, after deploying, run a `[slategrey reverse]llamactl deploy update[/]` to update the deployment to the latest git ref after you make updates.

 [b]Config File[/b]
 Path to a directory or file containing a `[slategrey reverse]pyproject.toml[/]` or `[slategrey reverse]llama_deploy.yaml[/]` containing the llama deploy configuration. Only necessary if you have the configuration not at the root of the repo, or you have an unconventional configuration file.