llmboost-hub 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
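The two hunks below add `list` and `login` subcommands built on click. For orientation, here is a minimal sketch of how such commands are typically registered on a click group; the group itself is not part of this diff, and the module paths (`llmboost_hub.commands.list`, `llmboost_hub.commands.login`) and the entry-point name are assumptions inferred from the imports in the hunks.

# Hypothetical wiring sketch; the actual entry point is not included in this diff.
import click

from llmboost_hub.commands.list import list_models  # module paths assumed
from llmboost_hub.commands.login import login


@click.group()
@click.option("--verbose", is_flag=True, default=False, help="Enable verbose output.")
@click.pass_context
def cli(ctx: click.Context, verbose: bool):
    # Both subcommands read ctx.obj["VERBOSE"], so the group must populate ctx.obj.
    ctx.ensure_object(dict)
    ctx.obj["VERBOSE"] = verbose


cli.add_command(list_models)  # registered as `list` via @click.command(name="list")
cli.add_command(login)


if __name__ == "__main__":
    cli()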
@@ -0,0 +1,283 @@
+ import click
+ import subprocess
+ from typing import List
+ import re
+ import pandas as pd
+ import tabulate
+
+ from llmboost_hub.commands.search import do_search
+ from llmboost_hub.utils.config import config
+ from llmboost_hub.utils import gpu_info
+ from llmboost_hub.utils.container_utils import (
+     container_name_for_model,
+     is_container_running,
+     is_model_initializing,
+     is_model_ready2serve,
+     is_model_tuning,
+ )
+ from llmboost_hub.utils.model_utils import is_model_downloaded
+ import os
+
+
+ def _get_local_images() -> List[str]:
+     """
+     Return a list of local docker images in the format 'repository:tag'.
+
+     Notes:
+         Best-effort; falls back to an empty list on errors.
+     """
+     try:
+         out = subprocess.check_output(
+             ["docker", "images", "--format", "{{.Repository}}:{{.Tag}}"], text=True
+         )
+         lines = [l.strip() for l in out.splitlines() if l.strip()]
+         return lines
+     except Exception:
+         return []
+
+
+ def _get_installed_models(models_dir: str) -> List[str]:
+     """
+     Return a list of installed model names (normalized).
+     Roughly equivalent to `grep -LrE '^ "(_name_or_path|architectures)": ' "{config.LBH_MODELS}/**/*.json" | xargs dirname | sort | uniq`.
+
+     The staging dir (config.LBH_MODELS_STAGING) is ignored.
+     """
+     models: List[str] = []
+     try:
+         staging_dir_basename = os.path.basename(config.LBH_MODELS_STAGING)
+
+         for repo in os.listdir(models_dir):
+             repo_path = os.path.join(models_dir, repo)
+             if not os.path.isdir(repo_path):
+                 continue
+             # skip staging directory entirely
+             if repo == staging_dir_basename:
+                 continue
+
+             # collect second-level model directories under each repo
+             subdirs = [
+                 d for d in os.listdir(repo_path) if os.path.isdir(os.path.join(repo_path, d))
+             ]
+             if subdirs:
+                 models.extend(subdirs)
+             else:
+                 # fallback: treat top-level dir as model (legacy layouts)
+                 models.append(repo)
+     except Exception:
+         pass
+     # deduplicate while preserving order
+     seen = set()
+     uniq_models = []
+     for m in models:
+         if m not in seen:
+             seen.add(m)
+             uniq_models.append(m)
+     return uniq_models
+
+
+ def _resolve_model_path(models_root: str, model_name: str) -> str:
+     """
+     Best-effort: resolve absolute path for a model_name under LBH_MODELS by scanning
+     <repo>/<model> while ignoring the staging dir. Falls back to <models_root>/<model_name>.
+     """
+     try:
+         staging_dir = getattr(config, "LBH_MODELS_STAGING", None)
+         staging_dir = os.path.abspath(staging_dir) if staging_dir else None
+
+         for repo in os.listdir(models_root):
+             repo_path = os.path.join(models_root, repo)
+             if not os.path.isdir(repo_path):
+                 continue
+             # skip staging directory
+             if staging_dir and os.path.abspath(repo_path) == staging_dir:
+                 continue
+
+             candidate = os.path.join(repo_path, model_name)
+             if os.path.isdir(candidate):
+                 return candidate
+     except Exception:
+         pass
+     # fallback
+     return os.path.join(models_root, model_name)
+
+
+ def do_list(query: str = r".*", local_only: bool = True, verbose: bool = False) -> dict:
+     """
+     Aggregate local docker images, installed model dirs, GPU info, and join with lookup.
+
+     Args:
+         query: Optional LIKE filter passed to search for narrowing models.
+         local_only: If True, search only uses local cache (no network).
+         verbose: If True, emit warnings about ambiguous GPUs.
+
+     Returns:
+         Dict:
+             - images: List[str] local images after joining
+             - installed_models: List[str] under `config.LBH_MODELS`
+             - gpus: List[str] detected GPU names
+             - images_df: pd.DataFrame joined on `docker_image` (may include status)
+             - lookup_df: pd.DataFrame of filtered lookup rows (pre-join)
+     """
+     images = _get_local_images()
+     models_dir = config.LBH_MODELS
+     installed_models = _get_installed_models(models_dir)
+
+     # Prepare local images DataFrame
+     local_df = pd.DataFrame({"docker_image": [str(i) for i in images]})
+
+     # Load lookup (filtered via do_search over the query and local GPUs)
+     cache_df = pd.DataFrame()
+     try:
+         cache_df = do_search(query=query, verbose=verbose, local_only=local_only)
+         # Normalize column names
+         cache_df.columns = [str(c).strip().lower() for c in cache_df.columns]
+     except Exception:
+         cache_df = pd.DataFrame(columns=["model", "gpu", "docker_image"])
+
+     # Inner join on docker_image to keep only known images and get model,gpu columns
+     if "docker_image" in cache_df.columns:
+         merged_df = (
+             local_df.merge(
+                 cache_df[["model", "gpu", "docker_image"]], on="docker_image", how="inner"
+             )
+             # NOTE: do not drop duplicates per docker_image — show all models
+             .reset_index(drop=True)
+         )
+     else:
+         merged_df = pd.DataFrame(columns=["model", "gpu", "docker_image"])
+
+     # Add GPU match indicator column based on local GPU families
+     local_gpus = gpu_info.get_gpus()
+     local_families = {gpu_info.gpu_name2family(g) for g in local_gpus if g}
+     if not merged_df.empty:
+         merged_df = merged_df.assign(
+             _gpu_family=merged_df["gpu"].apply(gpu_info.gpu_name2family),
+             matches_local_gpu=lambda df: df["_gpu_family"].isin(local_families),
+         ).drop(columns=["_gpu_family"])
+
+     # Derive status column based on local presence and container/process state
+     if not merged_df.empty:
+         statuses: List[str] = []
+         for _, row in merged_df.iterrows():
+             model_id = str(row.get("model", "") or "")
+             downloaded = is_model_downloaded(models_dir, model_id)
+             cname = container_name_for_model(model_id) if model_id else ""
+             if not downloaded:
+                 statuses.append("pending")
+                 continue
+             if cname and is_container_running(cname):
+                 # Priority: tuning > serving > initializing > running
+                 if is_model_tuning(cname):
+                     statuses.append("tuning")
+                 elif is_model_ready2serve(cname):
+                     statuses.append("serving")
+                 elif is_model_initializing(cname):
+                     statuses.append("initializing")
+                 else:
+                     statuses.append("running")
+             else:
+                 statuses.append("stopped")
+         merged_df = merged_df.assign(status=statuses)
+
+     # Filter images list to those present in the joined frame
+     images = merged_df["docker_image"].tolist()
+
+     # GPUs via utility (standardized)
+     gpus: List[str] = gpu_info.get_gpus()
+
+     # Optional note about multiple GPUs (can affect matching)
+     if len(set(gpus)) > 1 and verbose:
+         click.echo("Warning: Multiple GPUs detected.")
+
+     return {
+         "images": images,
+         "installed_models": installed_models,
+         "gpus": gpus,
+         "images_df": merged_df,
+         "lookup_df": cache_df,  # include full filtered lookup for consumers like prep
+     }
+
+
+ @click.command(name="list", context_settings={"help_option_names": ["-h", "--help"]})
+ @click.argument("query", required=False, default="")
+ @click.pass_context
+ def list_models(ctx: click.Context, query):
+     """
+     List supported models, their docker images, and statuses.
+
+     \b
+     Statuses:
+       - pending: Model not yet downloaded/installed locally.
+       - stopped: Model downloaded but container not running.
+       - initializing: Container starting up (model loading in progress).
+       - running: Container running but not serving requests.
+       - serving: Container running and ready to serve requests.
+       - tuning: Container running and performing model tuning.
+     """
+     verbose = ctx.obj.get("VERBOSE", False)
+     data = do_list(query=query, verbose=verbose)
+
+     # Prefer joined DataFrame with model,gpu,docker_image (+ matches_local_gpu)
+     df = data.get("images_df") if isinstance(data.get("images_df"), pd.DataFrame) else None
+     if df is None or df.empty:
+         # Fallback to docker_image-only display if join is empty
+         df = pd.DataFrame({"docker_image": data["images"]}).reset_index(drop=True)
+
+     click.echo(f"Found {len(df)} images")
+     if df.empty:
+         return
+
+     # Ensure desired column ordering if available
+     desired_cols = [
+         c
+         for c in ["status", "model", "gpu", "docker_image", "matches_local_gpu"]
+         if c in df.columns
+     ]
+     if desired_cols:
+         df = df[desired_cols]
+     df.index += 1  # start index at 1
+
+     click.echo(
+         tabulate.tabulate(
+             df.values.tolist(),
+             headers=list(df.columns),
+             showindex=list(df.index),
+             tablefmt="psql",
+         )
+     )
+
+     # Extra details in verbose mode
+     if not verbose:
+         return
+
+     # Tabulate installed HF models with their absolute paths
+     click.echo("\nInstalled HF models (LBH_MODELS):")
+     if data["installed_models"]:
+         models_dir = config.LBH_MODELS
+         models_list = data["installed_models"]
+         models_df = pd.DataFrame(
+             {
+                 "model": models_list,  # model_name only
+                 "path": [_resolve_model_path(models_dir, m) for m in models_list],
+             }
+         )
+         models_df = models_df.sort_values(by="model").reset_index(drop=True)
+         models_df.index += 1
+         click.echo(
+             tabulate.tabulate(
+                 models_df.values.tolist(),
+                 headers=list(models_df.columns),
+                 showindex=list(models_df.index),
+                 tablefmt="psql",
+             )
+         )
+     else:
+         click.echo(" (no installed models found)")
+
+     click.echo("\nDetected GPUs (best-effort):")
+     if data["gpus"]:
+         for g in set(data["gpus"]):
+             click.echo(f" - {g} ({gpu_info.gpu_name2family(g)})")
+     else:
+         click.echo(" (unable to detect GPUs)")
@@ -0,0 +1,72 @@
+ import click
+ import os
+ from llmboost_hub.utils.config import config
+ from llmboost_hub.utils.license_wrapper import is_license_valid, save_license
+
+
+ def do_login(license_file: str | None, verbose: bool = False) -> dict:
+     """
+     Validate and register an LLMBoost license.
+
+     Flow:
+       - If an existing license file validates, return success immediately.
+       - Otherwise, prompt interactively for a key (hidden input), write to target path, and re-validate.
+       - On failure, remove the newly written file and return an error.
+
+     Args:
+         license_file: Optional override for the license file save path; defaults to `config.LBH_LICENSE_PATH`.
+         verbose: Reserved for future detailed logging.
+
+     Returns:
+         Dict with keys:
+             validated: Whether validation succeeded.
+             path: Path to the saved/validated license file (if any).
+             error: Error string on failure; None on success.
+     """
+     license_path = config.LBH_LICENSE_PATH
+
+     # If license exists and is valid, short-circuit.
+     if os.path.exists(license_path) and is_license_valid():
+         return {"validated": True, "path": license_path, "error": None}
+
+     # Prompt for license key (hidden input); normalize whitespace.
+     try:
+         key = click.prompt("Enter your LLMBoost license key", hide_input=True).strip()
+     except Exception:
+         return {"validated": False, "path": None, "error": "No license key entered."}
+     if not key:
+         return {"validated": False, "path": None, "error": "No license key entered."}
+
+     target_path = license_file or license_path
+     saved_path = save_license(target_path, key)
+
+     # Re-validate after saving; remove file on failure.
+     if is_license_valid():
+         return {"validated": True, "path": saved_path, "error": None}
+     else:
+         try:
+             os.remove(saved_path)
+         except Exception:
+             pass
+         return {
+             "validated": False,
+             "path": saved_path,
+             "error": "License validation failed after saving.",
+         }
+
+
+ @click.command(context_settings={"help_option_names": ["-h", "--help"]})
+ @click.option("--license-file", type=click.Path(), help="Path to license file to save (optional).")
+ @click.pass_context
+ def login(ctx: click.Context, license_file):
+     """
+     Validate and register the LLMBoost license.
+     """
+     verbose = ctx.obj.get("VERBOSE", False)
+     res = do_login(license_file, verbose=verbose)
+
+     if res["validated"]:
+         click.echo(f"License validated and saved to {res['path']}")
+         return
+
+     raise click.ClickException(res["error"] or "License validation failed")
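A short usage sketch for the login flow, based only on the result dict documented in `do_login`; the module path is again an assumption, since this diff does not show file names.

# Illustrative only; not part of the package contents shown above.
from llmboost_hub.commands.login import do_login  # module path assumed

res = do_login(license_file=None)  # prompts for a key unless a valid license already exists
if res["validated"]:
    print(f"License OK at {res['path']}")
else:
    print(f"Login failed: {res['error']}")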