llamactl 0.3.24__py3-none-any.whl → 0.3.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -79,6 +79,69 @@ def validate_command(deployment_file: Path, interactive: bool) -> None:
     rprint(f"[green]Validated workflows in {config_dir} successfully.[/green]")
 
 
+@dev.command(
+    "export-json-graph",
+    help="Produce a JSON graph representation of registered workflows",
+    hidden=True,  # perhaps expose if we have a built in visualization (mermaid, etc.)
+)
+@click.argument(
+    "deployment_file",
+    required=False,
+    default=DEFAULT_DEPLOYMENT_FILE_PATH,
+    type=_ClickPath(dir_okay=True, resolve_path=True, path_type=Path),
+)
+@click.option(
+    "--output",
+    help=(
+        "File where output JSON graph will be saved. "
+        "Defaults to workflows.json in the current directory."
+    ),
+    required=False,
+    default=None,
+    type=_ClickPath(dir_okay=True, resolve_path=True, path_type=Path),
+)
+@interactive_option
+@global_options
+def export_json_graph_command(
+    deployment_file: Path,
+    output: Path | None,
+    interactive: bool,
+) -> None:
+    """Export the configured workflows to a JSON document that may be used for graph visualization."""
+    if not deployment_file.exists():
+        rprint(f"[red]Deployment file '{deployment_file}' does not exist[/red]")
+        raise click.Abort()
+
+    _ensure_project_layout(
+        deployment_file, command_name="llamactl dev export-json-graph"
+    )
+    _maybe_inject_llama_cloud_credentials(
+        deployment_file, interactive, require_cloud=False
+    )
+
+    prepare_server(
+        deployment_file=deployment_file,
+        install=True,
+        build=False,
+        install_ui_deps=False,
+    )
+
+    wd = Path.cwd()
+    if output is None:
+        output = wd / "workflows.json"
+
+    try:
+        start_export_json_graph_in_target_venv(
+            cwd=wd,
+            deployment_file=deployment_file,
+            output=output,
+        )
+    except subprocess.CalledProcessError as exc:
+        rprint("[red]Workflow JSON graph export failed. See errors above.[/red]")
+        raise Exit(exc.returncode)
+    rprint(f"[green]Exported workflow JSON graph to {output}[/green]")
+
+
 @dev.command(
     "run",
     help=(
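The new `export-json-graph` command writes a plain JSON document, by default `workflows.json` in the current directory, so its output can be inspected with the standard library alone. A minimal sketch, assuming the command has already been run; the graph's internal schema is not shown in this diff, so only the document's top-level shape is printed:

```python
import json
from pathlib import Path

# Path matches the command's default output; pass --output to change it.
graph_path = Path("workflows.json")

with graph_path.open() as fh:
    graph = json.load(fh)

# The exported schema is not documented in this diff, so avoid assuming
# specific keys and just report the overall shape of the document.
if isinstance(graph, dict):
    print("top-level keys:", sorted(graph))
else:
    print("top-level type:", type(graph).__name__)
```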
@@ -199,6 +262,21 @@ def start_preflight_in_target_venv(*, cwd: Path, deployment_file: Path) -> None:
     _start_preflight_in_target_venv(cwd=cwd, deployment_file=deployment_file)
 
 
+def start_export_json_graph_in_target_venv(
+    *, cwd: Path, deployment_file: Path, output: Path
+) -> None:
+    """Thin wrapper so tests can monkeypatch `dev.start_export_json_graph_in_target_venv`."""
+    from llama_deploy.appserver.app import (
+        start_export_json_graph_in_target_venv as _start_export_json_graph_in_target_venv,
+    )
+
+    _start_export_json_graph_in_target_venv(
+        cwd=cwd,
+        deployment_file=deployment_file,
+        output=output,
+    )
+
+
 def parse_environment_variables(
     config: DeploymentConfig, config_parent: Path
 ) -> dict[str, str]:
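The wrapper above exists so tests can replace `dev.start_export_json_graph_in_target_venv` without importing the appserver. A hedged pytest sketch of that seam, assuming `pytest` and `click.testing.CliRunner`, that the command's other helpers (`_ensure_project_layout`, `_maybe_inject_llama_cloud_credentials`, `prepare_server`) can be stubbed on the `dev` module the same way, and that the options added by `@interactive_option` and `@global_options` all have defaults; the fixture usage and assertions are illustrative, not taken from the package's own test suite:

```python
from pathlib import Path

from click.testing import CliRunner

from llama_deploy.cli.commands import dev


def test_export_json_graph_uses_monkeypatched_seam(monkeypatch, tmp_path: Path) -> None:
    # Sketch: exercise the monkeypatch seam named in the wrapper's docstring.
    deployment_file = tmp_path / "deployment.yaml"
    deployment_file.write_text("name: demo\n")  # contents irrelevant once helpers are stubbed

    # Stub the heavyweight steps so only the command's own control flow runs.
    monkeypatch.setattr(dev, "_ensure_project_layout", lambda *args, **kwargs: None)
    monkeypatch.setattr(dev, "_maybe_inject_llama_cloud_credentials", lambda *args, **kwargs: None)
    monkeypatch.setattr(dev, "prepare_server", lambda *args, **kwargs: None)

    recorded = {}
    monkeypatch.setattr(
        dev,
        "start_export_json_graph_in_target_venv",
        lambda *, cwd, deployment_file, output: recorded.update(output=output),
    )

    result = CliRunner().invoke(
        dev.export_json_graph_command,
        [str(deployment_file), "--output", str(tmp_path / "graph.json")],
    )

    assert result.exit_code == 0, result.output
    assert recorded["output"] == tmp_path / "graph.json"
```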
@@ -210,4 +288,4 @@ def parse_environment_variables(
     return _parse_environment_variables(config, config_parent)
 
 
-__all__ = ["dev", "validate_command", "run_command"]
+__all__ = ["dev", "validate_command", "run_command", "export_json_graph_command"]
@@ -76,7 +76,8 @@ class ConfigManager:
 
         with sqlite3.connect(self.db_path) as conn:
             # Apply ad-hoc SQL migrations based on PRAGMA user_version
-            run_migrations(conn)
+            # Pass db_path to enable file-based locking across processes
+            run_migrations(conn, self.db_path)
 
             conn.commit()
 
@@ -6,10 +6,14 @@ Inspired by https://eskerda.com/sqlite-schema-migrations-python/
 from __future__ import annotations
 
 import logging
+import os
 import re
 import sqlite3
 import sys
+from contextlib import contextmanager
 from importlib import import_module, resources
+from pathlib import Path
+from typing import Generator
 
 if sys.version_info >= (3, 11):
     from importlib.resources.abc import Traversable
@@ -23,6 +27,42 @@ _MIGRATIONS_PKG = "llama_deploy.cli.config.migrations"
 _USER_VERSION_PATTERN = re.compile(r"pragma\s+user_version\s*=\s*(\d+)", re.IGNORECASE)
 
 
+def _lock_file_unix(fd: int) -> None:
+    """Acquire exclusive lock on Unix using fcntl."""
+    import fcntl
+
+    fcntl.flock(fd, fcntl.LOCK_EX)
+
+
+def _unlock_file_unix(fd: int) -> None:
+    """Release lock on Unix using fcntl."""
+    import fcntl
+
+    fcntl.flock(fd, fcntl.LOCK_UN)
+
+
+@contextmanager
+def _file_lock(lock_path: Path) -> Generator[None, None, None]:
+    """File lock to serialize migrations across processes.
+
+    Uses fcntl.flock on Unix. On Windows, SQLite's built-in locking provides
+    sufficient protection for typical CLI usage patterns.
+    """
+    if os.name == "nt":
+        # On Windows, rely on SQLite's own file locking
+        yield
+        return
+
+    lock_path.parent.mkdir(parents=True, exist_ok=True)
+    lock_file = open(lock_path, "w")  # noqa: SIM115
+    try:
+        _lock_file_unix(lock_file.fileno())
+        yield
+    finally:
+        _unlock_file_unix(lock_file.fileno())
+        lock_file.close()
+
+
 def _iter_migration_files() -> list[Traversable]:
     """Yield packaged SQL migration files in lexicographic order."""
     pkg = import_module(_MIGRATIONS_PKG)
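For context on what the new helper buys at runtime: anything executed under `_file_lock` is serialized across processes on Unix, because each process blocks in `flock` until the current holder releases the lock. A minimal sketch, importing the private helper purely for illustration; the lock path shown is a stand-in, the real one is derived from the config database path:

```python
from pathlib import Path

# Illustration only: _file_lock is a private helper of the migrations module.
from llama_deploy.cli.config._migrations import _file_lock

lock_path = Path("/tmp/llamactl-demo/config.db.lock")  # stand-in path

with _file_lock(lock_path):
    # On Unix, a second process entering this block waits here until the
    # first one leaves the `with` and releases the flock. On Windows the
    # context manager is a no-op and SQLite's own locking applies.
    print("holding the migration lock")
```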
@@ -40,12 +80,8 @@ def _parse_target_version(sql_text: str) -> int | None:
     return int(match.group(1)) if match else None
 
 
-def run_migrations(conn: sqlite3.Connection) -> None:
-    """Apply pending migrations found under the migrations package.
-
-    Each migration file should start with a `PRAGMA user_version=N;` line.
-    Files are applied in lexicographic order and only when N > current_version.
-    """
+def _apply_pending_migrations(conn: sqlite3.Connection) -> None:
+    """Apply pending migrations (internal, assumes lock is held)."""
     cur = conn.cursor()
     current_version_row = cur.execute("PRAGMA user_version").fetchone()
     current_version = int(current_version_row[0]) if current_version_row else 0
@@ -68,3 +104,20 @@ def run_migrations(conn: sqlite3.Connection) -> None:
         else:
             cur.execute("COMMIT")
             current_version = target_version
+
+
+def run_migrations(conn: sqlite3.Connection, db_path: Path | None = None) -> None:
+    """Apply pending migrations found under the migrations package.
+
+    Each migration file should start with a `PRAGMA user_version=N;` line.
+    Files are applied in lexicographic order and only when N > current_version.
+
+    Uses a file lock to prevent concurrent migrations across processes when
+    db_path is provided.
+    """
+    if db_path is not None:
+        lock_path = db_path.with_suffix(".db.lock")
+        with _file_lock(lock_path):
+            _apply_pending_migrations(conn)
+    else:
+        _apply_pending_migrations(conn)
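Putting the pieces together, the updated entry point mirrors what `ConfigManager` now does: pass the database path so migrations take the adjacent `.db.lock` file before applying SQL. A minimal sketch; the database location is a stand-in:

```python
import sqlite3
from pathlib import Path

from llama_deploy.cli.config._migrations import run_migrations

db_path = Path("/tmp/llamactl-demo/config.db")  # stand-in location
db_path.parent.mkdir(parents=True, exist_ok=True)

with sqlite3.connect(db_path) as conn:
    # With db_path supplied, run_migrations serializes on config.db.lock
    # before applying pending migrations; without it, no file lock is taken.
    run_migrations(conn, db_path)
    conn.commit()
    print("schema version:", conn.execute("PRAGMA user_version").fetchone()[0])
```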
@@ -1,12 +1,12 @@
 Metadata-Version: 2.3
 Name: llamactl
-Version: 0.3.24
+Version: 0.3.26
 Summary: A command-line interface for managing LlamaDeploy projects and deployments
 Author: Adrian Lyjak
 Author-email: Adrian Lyjak <adrianlyjak@gmail.com>
 License: MIT
-Requires-Dist: llama-deploy-core[client]>=0.3.24,<0.4.0
-Requires-Dist: llama-deploy-appserver>=0.3.24,<0.4.0
+Requires-Dist: llama-deploy-core[client]>=0.3.26,<0.4.0
+Requires-Dist: llama-deploy-appserver>=0.3.26,<0.4.0
 Requires-Dist: vibe-llama-core>=0.1.0
 Requires-Dist: rich>=13.0.0
 Requires-Dist: questionary>=2.0.0
@@ -5,13 +5,13 @@ llama_deploy/cli/client.py,sha256=61c04ff808374913bf2fc1fc5838c498b4f8c779d4e056
 llama_deploy/cli/commands/aliased_group.py,sha256=364d846d9ceec465e6f2f47051ad06e1ccfbea1d7526654c1ffbd7b7ab7e6af0,1302
 llama_deploy/cli/commands/auth.py,sha256=c8b94de8c0647e241b0083782b8e241225c0c68ee2d32f85d54c29ae0d7dcb1b,26891
 llama_deploy/cli/commands/deployment.py,sha256=dc5d039224409708446b91db482c20da648eba720c1527cfdb2952a1bb07ad3e,15567
-llama_deploy/cli/commands/dev.py,sha256=6e137aa10ab5f175b6c139d7294776d03d284c3b1b503f54d07c6a5ae5f572cc,7035
+llama_deploy/cli/commands/dev.py,sha256=10f394bc91ea71c3d1f23d280919482794ddf918e5676e7a3305f49d2a71f646,9436
 llama_deploy/cli/commands/env.py,sha256=ae8f94eb2651a10615bac37afa16447ad1d78cb78c83ad8b8ae75e878733d323,7478
 llama_deploy/cli/commands/init.py,sha256=afdb65b5e70cfaf3bdbc923d13db1a31ad23c14605e5bcd55ddaab8fff6e69a4,17514
 llama_deploy/cli/commands/pkg.py,sha256=f91a87220c1d527e02a183dac5ca52c58608128e29bedf664362af3d2d31c461,4084
 llama_deploy/cli/commands/serve.py,sha256=c66fed8b3117c278eca2eca86eb7933ba434442d4f315c1505449580f59bfdca,12974
-llama_deploy/cli/config/_config.py,sha256=66ba1869d620ef2f31fa3b7d7ba38c3e3718c3dabdbe90f7ea49cbdcdfc4a951,14262
-llama_deploy/cli/config/_migrations.py,sha256=9b5e3b9eec692a3edb58c1f99a03d04a63bfc29dbebfc1719a5daf5a9d024738,2448
+llama_deploy/cli/config/_config.py,sha256=47c7b24f9d644c1b37345f1e5b754424ac924a13c24cd16ef50e6ceb6f4c606e,14349
+llama_deploy/cli/config/_migrations.py,sha256=3c5caec002e7eb226dfc451a60af8a820b4e48ccf0ce53f45f5a04a7070fc703,3969
 llama_deploy/cli/config/auth_service.py,sha256=0cf47ad032aceefc27283ce2760432e915554d544876ce471746a58692cb9249,5150
 llama_deploy/cli/config/env_service.py,sha256=cd51a68f1e9aad0bdd49cd76351cd54cea612a7f669512484c42e2876fea0458,2650
 llama_deploy/cli/config/migrations/0001_init.sql,sha256=dc9ee7439d8d79262e41f2d9f1a9306ae5b0812cc4b454167d556330f0cc578c,1092
@@ -41,7 +41,7 @@ llama_deploy/cli/utils/env_inject.py,sha256=01911758bcc3cf22aad0db0d1ade56aece48
 llama_deploy/cli/utils/redact.py,sha256=1e768d76b4a6708230c34f7ce8a5a82ab52795bb3d6ab0387071ab4e8d7e7934,863
 llama_deploy/cli/utils/retry.py,sha256=62ca6f286cb4de38cc5efcef3f376b12c2e6eb9b3e5ebe555d2a60aeb0957c19,1526
 llama_deploy/cli/utils/version.py,sha256=bf01a6dda948b868cc08c93701ed44cd36b487402404af8451d4c0996a2edb31,364
-llamactl-0.3.24.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
-llamactl-0.3.24.dist-info/entry_points.txt,sha256=b67e1eb64305058751a651a80f2d2268b5f7046732268421e796f64d4697f83c,52
-llamactl-0.3.24.dist-info/METADATA,sha256=f7aa418942122514687264d8a966765462b20934e1c19776b3bf203a05cc61a0,3329
-llamactl-0.3.24.dist-info/RECORD,,
+llamactl-0.3.26.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
+llamactl-0.3.26.dist-info/entry_points.txt,sha256=b67e1eb64305058751a651a80f2d2268b5f7046732268421e796f64d4697f83c,52
+llamactl-0.3.26.dist-info/METADATA,sha256=54d1a2167b0c29ddc7801a935e3f337a3c3d783382e8e976332580597751cc91,3329
+llamactl-0.3.26.dist-info/RECORD,,