llamactl 0.3.0a5__py3-none-any.whl → 0.3.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_deploy/cli/__init__.py +2 -2
- llama_deploy/cli/client.py +1 -1
- llama_deploy/cli/commands/profile.py +7 -7
- llama_deploy/cli/commands/serve.py +8 -2
- llama_deploy/cli/config.py +1 -1
- llama_deploy/cli/interactive_prompts/utils.py +0 -2
- llama_deploy/cli/textual/deployment_form.py +29 -19
- llama_deploy/cli/textual/deployment_help.py +2 -2
- llama_deploy/cli/textual/github_callback_server.py +12 -9
- llama_deploy/cli/textual/profile_form.py +0 -1
- llama_deploy/cli/textual/secrets_form.py +1 -1
- {llamactl-0.3.0a5.dist-info → llamactl-0.3.0a7.dist-info}/METADATA +3 -3
- llamactl-0.3.0a7.dist-info/RECORD +24 -0
- llamactl-0.3.0a5.dist-info/RECORD +0 -24
- {llamactl-0.3.0a5.dist-info → llamactl-0.3.0a7.dist-info}/WHEEL +0 -0
- {llamactl-0.3.0a5.dist-info → llamactl-0.3.0a7.dist-info}/entry_points.txt +0 -0
llama_deploy/cli/__init__.py
CHANGED
@@ -1,5 +1,5 @@
 from llama_deploy.cli.commands.deployment import deployments
-from llama_deploy.cli.commands.profile import
+from llama_deploy.cli.commands.profile import profiles
 from llama_deploy.cli.commands.serve import serve

 from .app import app
@@ -10,7 +10,7 @@ def main() -> None:
     app()


-__all__ = ["app", "deployments", "
+__all__ = ["app", "deployments", "profiles", "serve"]


 if __name__ == "__main__":
llama_deploy/cli/client.py
CHANGED

llama_deploy/cli/commands/profile.py
CHANGED

@@ -18,13 +18,13 @@ from ..textual.profile_form import create_profile_form, edit_profile_form
     no_args_is_help=True,
 )
 @global_options
-def
+def profiles() -> None:
     """Manage profiles"""
     pass


 # Profile commands
-@
+@profiles.command("create")
 @global_options
 @click.option("--name", help="Profile name")
 @click.option("--api-url", help="API server URL")
@@ -69,7 +69,7 @@ def create_profile(
         raise click.Abort()


-@
+@profiles.command("list")
 @global_options
 def list_profiles() -> None:
     """List all profiles"""
@@ -100,7 +100,7 @@ def list_profiles() -> None:
         raise click.Abort()


-@
+@profiles.command("switch")
 @global_options
 @click.argument("name", required=False)
 def switch_profile(name: str | None) -> None:
@@ -124,7 +124,7 @@ def switch_profile(name: str | None) -> None:
         raise click.Abort()


-@
+@profiles.command("delete")
 @global_options
 @click.argument("name", required=False)
 def delete_profile(name: str | None) -> None:
@@ -150,7 +150,7 @@ def delete_profile(name: str | None) -> None:
         raise click.Abort()


-@
+@profiles.command("edit")
 @global_options
 @click.argument("name", required=False)
 def edit_profile(name: str | None) -> None:
@@ -189,7 +189,7 @@ def edit_profile(name: str | None) -> None:


 # Projects commands
-@
+@profiles.command("list-projects")
 @global_options
 def list_projects() -> None:
     """List all projects with deployment counts"""
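
Note: the rename above follows the standard click group/subcommand pattern. A minimal, self-contained sketch of that pattern (the names mirror the diff, but the bodies and option handling here are illustrative only, not the package's actual implementation):

import click


@click.group(no_args_is_help=True)
def profiles() -> None:
    """Manage profiles"""


@profiles.command("list")
def list_profiles() -> None:
    """List all profiles"""
    # Placeholder body; the real command renders the configured profiles.
    click.echo("no profiles configured")


@profiles.command("switch")
@click.argument("name", required=False)
def switch_profile(name: str | None) -> None:
    """Switch the active profile"""
    click.echo(f"switching to {name or '<interactive selection>'}")


if __name__ == "__main__":
    profiles()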

llama_deploy/cli/commands/serve.py
CHANGED

@@ -20,7 +20,7 @@ from ..options import global_options
     "deployment_file",
     required=False,
     default=DEFAULT_DEPLOYMENT_FILE_PATH,
-    type=click.Path(dir_okay=
+    type=click.Path(dir_okay=True, resolve_path=True, path_type=Path),
 )
 @click.option(
     "--no-install", is_flag=True, help="Skip installing python and js dependencies"
@@ -34,6 +34,8 @@ from ..options import global_options
     is_flag=True,
     help="Preview mode pre-builds the UI to static files, like a production build",
 )
+@click.option("--port", type=int, help="The port to run the API server on")
+@click.option("--ui-port", type=int, help="The port to run the UI proxy server on")
 @global_options
 def serve(
     deployment_file: Path,
@@ -41,8 +43,10 @@ def serve(
     no_reload: bool,
     no_open_browser: bool,
     preview: bool,
+    port: int | None = None,
+    ui_port: int | None = None,
 ) -> None:
-    """Run llama_deploy API Server in the foreground.
+    """Run llama_deploy API Server in the foreground. Reads the deployment configuration from the current directory. Can optionally specify a deployment file path."""
     if not deployment_file.exists():
         rprint(f"[red]Deployment file '{deployment_file}' not found[/red]")
         raise click.Abort()
@@ -58,6 +62,8 @@ def serve(
             proxy_ui=not preview,
             reload=not no_reload,
             open_browser=not no_open_browser,
+            port=port,
+            ui_port=ui_port,
         )

     except KeyboardInterrupt:
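
Note: the two new flags above are plain optional integers. A standalone sketch of how such click options behave (illustrative only; the real command passes the values through to the app server, presumably falling back to its own defaults when they are None):

import click


@click.command()
@click.option("--port", type=int, help="The port to run the API server on")
@click.option("--ui-port", type=int, help="The port to run the UI proxy server on")
def serve(port: int | None, ui_port: int | None) -> None:
    # When the flags are omitted, click passes None for both values.
    click.echo(f"api port={port!r}, ui proxy port={ui_port!r}")


if __name__ == "__main__":
    serve()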
llama_deploy/cli/config.py
CHANGED

llama_deploy/cli/textual/deployment_form.py
CHANGED

@@ -2,7 +2,6 @@

 import dataclasses
 import logging
-import os
 import re
 from dataclasses import dataclass, field
 from pathlib import Path
@@ -20,8 +19,16 @@ from llama_deploy.cli.textual.git_validation import (
     ValidationResultMessage,
 )
 from llama_deploy.cli.textual.secrets_form import SecretsWidget
-from llama_deploy.core.deployment_config import
-
+from llama_deploy.core.deployment_config import (
+    DEFAULT_DEPLOYMENT_NAME,
+    read_deployment_config,
+)
+from llama_deploy.core.git.git_util import (
+    get_current_branch,
+    get_git_root,
+    is_git_repo,
+    list_remotes,
+)
 from llama_deploy.core.schema.deployments import (
     DeploymentCreate,
     DeploymentResponse,
@@ -45,7 +52,7 @@ class DeploymentForm:
     id: str | None = None
     repo_url: str = ""
     git_ref: str = "main"
-    deployment_file_path: str = "
+    deployment_file_path: str = ""
     personal_access_token: str = ""
     # indicates if the deployment has a personal access token (value is unknown)
     has_existing_pat: bool = False
@@ -67,7 +74,7 @@ class DeploymentForm:
             id=deployment.id,
             repo_url=deployment.repo_url,
             git_ref=deployment.git_ref or "main",
-            deployment_file_path=deployment.deployment_file_path
+            deployment_file_path=deployment.deployment_file_path,
             personal_access_token="", # Always start empty for security
             has_existing_pat=deployment.has_personal_access_token,
             secrets={},
@@ -85,7 +92,7 @@ class DeploymentForm:
         data = DeploymentUpdate(
             repo_url=self.repo_url,
             git_ref=self.git_ref or "main",
-            deployment_file_path=self.deployment_file_path or
+            deployment_file_path=self.deployment_file_path or None,
             personal_access_token=(
                 ""
                 if self.personal_access_token is None and not self.has_existing_pat
@@ -102,7 +109,7 @@ class DeploymentForm:
         return DeploymentCreate(
             name=self.name,
             repo_url=self.repo_url,
-            deployment_file_path=self.deployment_file_path or
+            deployment_file_path=self.deployment_file_path or None,
             git_ref=self.git_ref or "main",
             personal_access_token=self.personal_access_token,
             secrets=self.secrets,
@@ -175,10 +182,10 @@ class DeploymentFormWidget(Widget):
             compact=True,
         )

-        yield Label("
+        yield Label("Config File:", classes="form-label", shrink=True)
         yield Input(
             value=self.form_data.deployment_file_path,
-            placeholder="
+            placeholder="Optional path to config dir/file",
             id="deployment_file_path",
             compact=True,
         )
@@ -283,8 +290,7 @@ class DeploymentFormWidget(Widget):
             id=self.form_data.id,
             repo_url=repo_url_input.value.strip(),
             git_ref=git_ref_input.value.strip() or "main",
-            deployment_file_path=deployment_file_input.value.strip()
-            or "llama_deploy.yaml",
+            deployment_file_path=deployment_file_input.value.strip(),
             personal_access_token=pat_value,
             secrets=updated_secrets,
             initial_secrets=self.original_form_data.initial_secrets,
@@ -467,6 +473,7 @@ def _initialize_deployment_data() -> DeploymentForm:
     git_ref: str | None = None
     secrets: dict[str, str] = {}
     name: str | None = None
+    config_file_path: str | None = None
     if is_git_repo():
         seen = set[str]()
         remotes = list_remotes()
@@ -482,22 +489,25 @@ def _initialize_deployment_data() -> DeploymentForm:
         if preferred_origin:
             repo_url = preferred_origin[0]
             git_ref = get_current_branch()
+        root = get_git_root()
+        if root != Path.cwd():
+            config_file_path = str(Path.cwd().relative_to(root))
+
     if Path(".env").exists():
         secrets = load_env_secrets_from_string(Path(".env").read_text())
-
-
-
-
-
-
-    except Exception:
-        pass
+    try:
+        config = read_deployment_config(Path("."), Path("."))
+        if config.name != DEFAULT_DEPLOYMENT_NAME:
+            name = config.name
+    except Exception:
+        pass

     form = DeploymentForm(
         name=name or "",
         repo_url=repo_url or "",
         git_ref=git_ref or "main",
         secrets=secrets,
+        deployment_file_path=config_file_path or "",
     )
     return form
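
Note: the new pre-fill logic above derives the Config File value from the directory llamactl is run in, relative to the git root. A tiny standalone illustration of that Path arithmetic (the directory names are made up):

from pathlib import Path

# Hypothetical layout, for illustration only.
root = Path("/home/user/my-repo")                 # what get_git_root() would return
cwd = Path("/home/user/my-repo/services/agent")   # where llamactl is run from

# Mirrors the diff: only pre-fill when not running at the repo root.
config_file_path = "" if cwd == root else str(cwd.relative_to(root))
print(config_file_path)  # -> services/agent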

llama_deploy/cli/textual/deployment_help.py
CHANGED

@@ -36,8 +36,8 @@ class DeploymentHelpWidget(Widget):
 [b]Git Ref[/b]
 The git ref to deploy. This can be a branch, tag, or commit hash. If this is a branch, after deploying, run a `[slategrey reverse]llamactl deploy refresh[/]` to update the deployment to the latest git ref after you make updates.

-[b]
-
+[b]Config File[/b]
+Path to a directory or file containing a `[slategrey reverse]pyproject.toml[/]` or `[slategrey reverse]llama_deploy.yaml[/]` containing the llama deploy configuration. Only necessary if you have the configuration not at the root of the repo, or you have an unconventional configuration file.

 [b]Secrets[/b]
 Secrets to add as environment variables to the deployment. e.g. to access a database or an API. Supports adding in `[slategrey reverse].env[/]` file format.

llama_deploy/cli/textual/github_callback_server.py
CHANGED

@@ -6,7 +6,10 @@ import webbrowser
 from textwrap import dedent
 from typing import Any, Dict, cast

-from aiohttp import
+from aiohttp.web_app import Application
+from aiohttp.web_request import Request
+from aiohttp.web_response import Response
+from aiohttp.web_runner import AppRunner, TCPSite

 logger = logging.getLogger(__name__)

@@ -18,9 +21,9 @@ class GitHubCallbackServer:
         self.port = port
         self.callback_data: Dict[str, Any] = {}
         self.callback_received = asyncio.Event()
-        self.app:
-        self.runner:
-        self.site:
+        self.app: Application | None = None
+        self.runner: AppRunner | None = None
+        self.site: TCPSite | None = None

     async def start_and_wait(self, timeout: float = 300) -> Dict[str, Any]:
         """Start the server and wait for a callback with timeout"""
@@ -38,19 +41,19 @@ class GitHubCallbackServer:

     async def _start_server(self) -> None:
         """Start the aiohttp server"""
-        self.app =
+        self.app = Application()
         self.app.router.add_get("/", self._handle_callback)
         self.app.router.add_get("/{path:.*}", self._handle_callback)

-        self.runner =
+        self.runner = AppRunner(self.app, logger=None) # Suppress server logs
         await self.runner.setup()

-        self.site =
+        self.site = TCPSite(self.runner, "localhost", self.port)
         await self.site.start()

         logger.debug(f"GitHub callback server started on port {self.port}")

-    async def _handle_callback(self, request:
+    async def _handle_callback(self, request: Request) -> Response:
         """Handle the GitHub callback"""
         # Capture query parameters
         query_params: dict[str, str] = dict(cast(Any, request.query))
@@ -62,7 +65,7 @@ class GitHubCallbackServer:

         # Return success page
         html_response = self._get_success_html()
-        return
+        return Response(text=html_response, content_type="text/html")

     async def stop(self) -> None:
         """Stop the server and cleanup"""
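
Note: for readers unfamiliar with aiohttp's low-level server API, here is a minimal, self-contained sketch of the same Application/AppRunner/TCPSite pattern (using the equivalent aiohttp.web namespace rather than the submodule imports above; the port and handler body are illustrative):

import asyncio

from aiohttp import web


async def main() -> None:
    async def handle(request: web.Request) -> web.Response:
        # Echo the query string back as a tiny HTML page.
        return web.Response(
            text=f"<h1>received: {request.query_string}</h1>",
            content_type="text/html",
        )

    app = web.Application()
    app.router.add_get("/", handle)

    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8777)
    await site.start()

    await asyncio.sleep(30)  # keep serving briefly, then clean up
    await runner.cleanup()


asyncio.run(main())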

{llamactl-0.3.0a5.dist-info → llamactl-0.3.0a7.dist-info}/METADATA
CHANGED

@@ -1,12 +1,12 @@
 Metadata-Version: 2.3
 Name: llamactl
-Version: 0.3.0a5
+Version: 0.3.0a7
 Summary: A command-line interface for managing LlamaDeploy projects and deployments
 Author: Adrian Lyjak
 Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
 License: MIT
-Requires-Dist: llama-deploy-core>=0.3.
-Requires-Dist: llama-deploy-appserver>=0.3.
+Requires-Dist: llama-deploy-core>=0.3.0a7,<0.4.0
+Requires-Dist: llama-deploy-appserver>=0.3.0a7,<0.4.0
 Requires-Dist: httpx>=0.24.0
 Requires-Dist: rich>=13.0.0
 Requires-Dist: questionary>=2.0.0

llamactl-0.3.0a7.dist-info/RECORD
ADDED

@@ -0,0 +1,24 @@
+llama_deploy/cli/__init__.py,sha256=a7ac1d286680f8b95d046ed3f116c54d6301cf524ba39f0c19b39f9c95978d22,366
+llama_deploy/cli/app.py,sha256=5200b4ac01b0ad0c405ce841fc01a12ed32f7b6474472f00a7d6c75fe274ea45,2324
+llama_deploy/cli/client.py,sha256=3074f269c6c29b7af9822cc1fa2e647f699e7985901058d0ac534d38cb761817,6901
+llama_deploy/cli/commands/aliased_group.py,sha256=6e2457cdea51de83bb7f02b37abb77cb9b5bff0a61bdddd66c43240b66b13f13,986
+llama_deploy/cli/commands/deployment.py,sha256=d1aacc8c6cbe4d73b3284d04e805fb044af591f9b323cc5c4acb2c07b0ad649c,8463
+llama_deploy/cli/commands/profile.py,sha256=933d7a434c2684c7b47bfbd7340a09e4b34d56d20624886e15fdb4e0af97ce0b,6765
+llama_deploy/cli/commands/serve.py,sha256=22227f383bb5a9d43de7c788139c685c7370e24f495c9b1929faae80b87d4ded,2232
+llama_deploy/cli/config.py,sha256=ebec8cf9e2112378ee6ecd626166711f3fba8cfa27cd1c931fe899c0b2a047b3,6241
+llama_deploy/cli/debug.py,sha256=e85a72d473bbe1645eb31772f7349bde703d45704166f767385895c440afc762,496
+llama_deploy/cli/env.py,sha256=6ebc24579815b3787829c81fd5bb9f31698a06e62c0128a788559f962b33a7af,1016
+llama_deploy/cli/interactive_prompts/utils.py,sha256=db78eba78bf347738feb89ac3eeb77a1d11f4003980f81cf3c13842f8d41afeb,2463
+llama_deploy/cli/options.py,sha256=38bb4a231ad0436d8b910c98ff659c0736f619efdf56c402d60bb3f755df38e0,598
+llama_deploy/cli/textual/deployment_form.py,sha256=7cdef6df6b39fcb3bf0110e6339ca98fe213f44862123f4fbfe6f4df08ad1cf3,19578
+llama_deploy/cli/textual/deployment_help.py,sha256=d43e9ff29db71a842cf8b491545763d581ede3132b8af518c73af85a40950046,2464
+llama_deploy/cli/textual/git_validation.py,sha256=44e359d16aa879f4566a0077d025fdd799f500862a8462b5ed3586e528f7a273,13300
+llama_deploy/cli/textual/github_callback_server.py,sha256=dc74c510f8a98ef6ffaab0f6d11c7ea86ee77ca5adbc7725a2a29112bae24191,7556
+llama_deploy/cli/textual/llama_loader.py,sha256=468213a504057f21838b01f48d51f52e60aa622d6f0fe5bb800d76ced846cea9,1245
+llama_deploy/cli/textual/profile_form.py,sha256=4410678edbd59b014f937ce760bafa51ae86f6dd58bec88f048a9eda273446aa,5956
+llama_deploy/cli/textual/secrets_form.py,sha256=a43fbd81aad034d0d60906bfd917c107f9ace414648b0f63ac0b29eeba4050db,7061
+llama_deploy/cli/textual/styles.tcss,sha256=536cec7627d2a16dd03bf25bb9b6e4d53f1e0d18272b07ec0dc3bf76b0a7c2e0,3056
+llamactl-0.3.0a7.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
+llamactl-0.3.0a7.dist-info/entry_points.txt,sha256=b67e1eb64305058751a651a80f2d2268b5f7046732268421e796f64d4697f83c,52
+llamactl-0.3.0a7.dist-info/METADATA,sha256=72f7abfbe8472f252debd8a0aef3ea5ca26a9ad0aaeef6e9ffdbae6a1b54fb35,3166
+llamactl-0.3.0a7.dist-info/RECORD,,

llamactl-0.3.0a5.dist-info/RECORD
DELETED

@@ -1,24 +0,0 @@
-llama_deploy/cli/__init__.py,sha256=c244b77c393cba921326d3188ff3e538703b9d2f97ea5662ce9940ff7c7f7948,364
-llama_deploy/cli/app.py,sha256=5200b4ac01b0ad0c405ce841fc01a12ed32f7b6474472f00a7d6c75fe274ea45,2324
-llama_deploy/cli/client.py,sha256=b0a1f002bd3feab188459c6e56f75a995048016d135666a17e53c4588beea9cc,6911
-llama_deploy/cli/commands/aliased_group.py,sha256=6e2457cdea51de83bb7f02b37abb77cb9b5bff0a61bdddd66c43240b66b13f13,986
-llama_deploy/cli/commands/deployment.py,sha256=d1aacc8c6cbe4d73b3284d04e805fb044af591f9b323cc5c4acb2c07b0ad649c,8463
-llama_deploy/cli/commands/profile.py,sha256=00f52683dbd86fab4ff8750cf8861d47ba13c056310c183c9979184147c471da,6758
-llama_deploy/cli/commands/serve.py,sha256=38e29816529af97bf00b8abea3ba6eb39e9a51e6b03616c2d8611b557079ddf2,1945
-llama_deploy/cli/config.py,sha256=1759e502ee77e72cce5358a4a85296f3e4182dd1495d21a3d9f308f92690e702,6251
-llama_deploy/cli/debug.py,sha256=e85a72d473bbe1645eb31772f7349bde703d45704166f767385895c440afc762,496
-llama_deploy/cli/env.py,sha256=6ebc24579815b3787829c81fd5bb9f31698a06e62c0128a788559f962b33a7af,1016
-llama_deploy/cli/interactive_prompts/utils.py,sha256=4ecab983c3f4869e7b8fcbb385e3ea91fde3bf86e8db7afe72d2febfd45d551e,2492
-llama_deploy/cli/options.py,sha256=38bb4a231ad0436d8b910c98ff659c0736f619efdf56c402d60bb3f755df38e0,598
-llama_deploy/cli/textual/deployment_form.py,sha256=9a7a364ab9735585db83d5955f4bb46faf0776b050c3477e2e7e5cfca5b11b2d,19465
-llama_deploy/cli/textual/deployment_help.py,sha256=7dae54ffbdaea201362928883cc206b496b1386023fb7f1c78d168132543e592,2339
-llama_deploy/cli/textual/git_validation.py,sha256=44e359d16aa879f4566a0077d025fdd799f500862a8462b5ed3586e528f7a273,13300
-llama_deploy/cli/textual/github_callback_server.py,sha256=031929592f448c5005e11cea19268fd696d60d92573c385a0397cfabacb06eb7,7444
-llama_deploy/cli/textual/llama_loader.py,sha256=468213a504057f21838b01f48d51f52e60aa622d6f0fe5bb800d76ced846cea9,1245
-llama_deploy/cli/textual/profile_form.py,sha256=6c2d55b7c0a1712796eebc8f05bb0597e955cf1dd6f96d37a6f0dce7865a2554,5984
-llama_deploy/cli/textual/secrets_form.py,sha256=b46b0e5999cd7c92433b91dcfb560bb5f8de9e5c4e96abbad63ee68544c387be,7082
-llama_deploy/cli/textual/styles.tcss,sha256=536cec7627d2a16dd03bf25bb9b6e4d53f1e0d18272b07ec0dc3bf76b0a7c2e0,3056
-llamactl-0.3.0a5.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
-llamactl-0.3.0a5.dist-info/entry_points.txt,sha256=b67e1eb64305058751a651a80f2d2268b5f7046732268421e796f64d4697f83c,52
-llamactl-0.3.0a5.dist-info/METADATA,sha256=d572c107e7dc848cd3f211bf032b11a50b0ed4af613ef0e63dee8a4556fff1af,3166
-llamactl-0.3.0a5.dist-info/RECORD,,

{llamactl-0.3.0a5.dist-info → llamactl-0.3.0a7.dist-info}/WHEEL
File without changes

{llamactl-0.3.0a5.dist-info → llamactl-0.3.0a7.dist-info}/entry_points.txt
File without changes