bithub 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
bithub/cli.py ADDED
@@ -0,0 +1,401 @@
1
+ """CLI entry point for bithub."""
2
+
3
+ import click
4
+ from rich.console import Console
5
+ from rich.panel import Panel
6
+ from rich.table import Table
7
+
8
+ from bithub import __version__
9
+ from bithub.config import get_default_threads
10
+ from bithub.registry import list_available_models, get_model_info
11
+
12
+ console = Console()
13
+
14
+ # Auto-detect a sensible thread count
15
+ _DEFAULT_THREADS = get_default_threads()
16
+
17
+
18
def _suggest_model(model_name: str) -> None:
    """Print an 'Unknown model' error and suggest close registry matches.

    Matching strategy, cheapest first:
      1. case-insensitive substring match of the query inside registry names;
      2. the reverse direction (registry name inside the query);
      3. difflib fuzzy matching, so small typos (e.g. 'falcon-3B' for
         'falcon3-3B') that substring matching misses still get a hint.
    """
    import difflib

    available = list(list_available_models().keys())

    # Simple substring match
    suggestions = [m for m in available if model_name.lower() in m.lower()]
    if not suggestions:
        # Try the other direction
        suggestions = [m for m in available if m.lower() in model_name.lower()]
    if not suggestions:
        # Fuzzy fallback for typos; cutoff=0.5 keeps wildly-off guesses out.
        suggestions = difflib.get_close_matches(model_name, available, n=1, cutoff=0.5)

    console.print(f"[red]Unknown model:[/red] {model_name}")
    if suggestions:
        console.print(f" Did you mean: [bold cyan]{suggestions[0]}[/bold cyan]?")
    console.print(" Run [bold]bithub models[/bold] to see all available models.")
32
+
33
+
34
def _ensure_engine_ready() -> bool:
    """Check if the engine is built. If not, offer to set it up."""
    from bithub.builder import is_bitnet_cpp_built

    if is_bitnet_cpp_built():
        return True

    console.print("[yellow]The bitnet.cpp engine is not built yet.[/yellow]")

    # Declined: point at the manual command instead of building now.
    if not click.confirm(" Would you like to set it up now?", default=True):
        console.print(" Run [bold]bithub setup[/bold] when you're ready.")
        return False

    from bithub.builder import setup_bitnet_cpp
    return setup_bitnet_cpp()
49
+
50
+
51
def _ensure_model_ready(model_name: str) -> bool:
    """Check if a model is downloaded. If not, offer to pull it."""
    from bithub.downloader import is_model_downloaded

    if is_model_downloaded(model_name):
        return True

    info = get_model_info(model_name)
    if not info:
        _suggest_model(model_name)
        return False

    console.print(f"[yellow]Model {model_name} is not downloaded yet.[/yellow]")
    console.print(f" ({info['name']}, {info['parameters']} params, ~{info['size_mb']}MB)")

    # Declined: tell the user how to pull it later.
    if not click.confirm(" Would you like to download it now?", default=True):
        console.print(f" Run [bold]bithub pull {model_name}[/bold] when you're ready.")
        return False

    from bithub.downloader import download_model
    try:
        download_model(model_name)
    except SystemExit:
        # The downloader signals fatal errors via SystemExit; report "not ready".
        return False
    return True
76
+
77
+
78
+ # ──────────────────────────────────────────────────────────────
79
+ # Main CLI group
80
+ # ──────────────────────────────────────────────────────────────
81
+
82
+
83
@click.group()
@click.version_option(version=__version__)
@click.option("--debug", is_flag=True, hidden=True, help="Enable debug logging")
@click.option("--verbose", "-v", is_flag=True, help="Enable verbose output")
@click.pass_context
def cli(ctx, debug, verbose):
    """bithub — Ollama for 1-bit LLMs.

    Download, manage, and serve BitNet models with a single command.

    \b
    Quick start:
      bithub setup # one-time engine build
      bithub pull 2B-4T # download a model
      bithub serve 2B-4T # start OpenAI-compatible API
      bithub run 2B-4T # chat in terminal
    """
    from bithub.logging_setup import setup_logging

    # Stash the flags on the click context so subcommands can read them.
    ctx.ensure_object(dict)
    ctx.obj.update(debug=debug, verbose=verbose)
    setup_logging(debug=debug, verbose=verbose)
105
+
106
+
107
+ # ──────────────────────────────────────────────────────────────
108
+ # Model management
109
+ # ──────────────────────────────────────────────────────────────
110
+
111
+
112
@cli.command()
@click.argument("model_name")
@click.option("--force", is_flag=True, help="Re-download even if already present")
@click.option("--name", default=None, help="Short name for direct HF pulls")
def pull(model_name, force, name):
    """Download a BitNet model from HuggingFace.

    \b
    Examples:
      bithub pull 2B-4T # from registry
      bithub pull falcon3-1B --force # re-download
      bithub pull hf:microsoft/BitNet-b1.58-2B-4T-gguf # direct from HF
      bithub pull hf:user/custom-model --name mymodel # with custom name
    """
    from bithub.downloader import (
        download_direct_hf,
        download_model,
        is_direct_hf_pull,
        parse_hf_uri,
    )

    # Direct hf: pulls bypass the registry entirely.
    if is_direct_hf_pull(model_name):
        repo_id, default_name = parse_hf_uri(model_name)
        download_direct_hf(repo_id, name=name or default_name, force=force)
        return

    # Registry pull: validate the short name before downloading.
    if not get_model_info(model_name):
        _suggest_model(model_name)
        raise SystemExit(1)

    download_model(model_name, force=force)
140
+
141
+
142
@cli.command()
@click.option("--force", is_flag=True, help="Re-clone and rebuild from scratch")
def setup(force):
    """Clone and build bitnet.cpp (the inference engine).

    Only needs to be done once. Downloads Microsoft's bitnet.cpp and
    compiles it for your system.

    \b
    Requirements: git, cmake, clang
      macOS: brew install cmake llvm git
      Ubuntu: sudo apt install cmake clang git
    """
    from bithub.builder import setup_bitnet_cpp

    # A failed build exits non-zero so scripts can detect it.
    if not setup_bitnet_cpp(force=force):
        raise SystemExit(1)

    console.print("\n[bold green]You're all set![/bold green] Next steps:")
    console.print(" 1. [bold]bithub pull 2B-4T[/bold] — download a model")
    console.print(" 2. [bold]bithub serve 2B-4T[/bold] — start the API server")
    console.print(" 3. [bold]bithub run 2B-4T[/bold] — chat in terminal")
164
+
165
+
166
+ # ──────────────────────────────────────────────────────────────
167
+ # Serve and run
168
+ # ──────────────────────────────────────────────────────────────
169
+
170
+
171
@cli.command()
@click.argument("model_names", nargs=-1, required=True)
@click.option("--host", default="127.0.0.1", help="Host to bind to")
@click.option("--port", default=8080, help="Port to listen on")
@click.option("--threads", "-t", default=_DEFAULT_THREADS, show_default=True,
              help="Number of CPU threads per model")
@click.option("--context-size", "-c", default=2048, show_default=True,
              help="Context window size")
@click.option("--lazy", is_flag=True, help="Only load models on first request")
def serve(model_names, host, port, threads, context_size, lazy):
    """Start an OpenAI-compatible API server.

    Accepts one or more model names. Requests are routed by the 'model'
    field in the API request.

    \b
    Examples:
      bithub serve 2B-4T
      bithub serve 2B-4T falcon3-3B
      bithub serve 2B-4T falcon3-3B --lazy
    """
    if not _ensure_engine_ready():
        raise SystemExit(1)

    # any() short-circuits, so we stop prompting at the first missing model,
    # exactly as the equivalent for-loop would.
    if any(not _ensure_model_ready(name) for name in model_names):
        raise SystemExit(1)

    from bithub.server import start_server
    start_server(
        model_names=list(model_names),
        host=host,
        port=port,
        threads=threads,
        context_size=context_size,
        lazy=lazy,
    )
206
+
207
+
208
@cli.command()
@click.argument("model_name")
@click.option("--threads", "-t", default=_DEFAULT_THREADS, show_default=True,
              help="Number of CPU threads")
@click.option("--context-size", "-c", default=2048, show_default=True,
              help="Context window size")
@click.option("--port", default=8081, hidden=True, help="API server port for REPL backend")
def run(model_name, threads, context_size, port):
    """Chat with a model in your terminal.

    Starts a local API server in the background and opens an interactive
    chat session with markdown rendering, history, and slash commands.

    \b
    Examples:
      bithub run 2B-4T
      bithub run falcon3-3B -t 4

    \b
    Commands in chat:
      /help Show commands
      /clear Clear history
      /system Set system prompt
      /export Save conversation
      /quit Exit
    """
    # Engine first, then model — `and` short-circuits in the same order
    # as the original back-to-back guards.
    if not (_ensure_engine_ready() and _ensure_model_ready(model_name)):
        raise SystemExit(1)

    from bithub.server import start_background_server, wait_for_server

    api_url = f"http://127.0.0.1:{port}"

    console.print("[dim]Starting local API server...[/dim]")
    start_background_server(
        model_name,
        host="127.0.0.1",
        port=port,
        threads=threads,
        context_size=context_size,
    )

    if not wait_for_server(api_url):
        console.print("[red]Server failed to start. Run bithub status for diagnostics.[/red]")
        raise SystemExit(1)

    console.print("[dim]Server ready.[/dim]\n")

    from bithub.repl import start_repl
    start_repl(model=model_name, api_url=api_url)
257
+
258
+
259
+ # ──────────────────────────────────────────────────────────────
260
+ # Model listing and status
261
+ # ──────────────────────────────────────────────────────────────
262
+
263
+
264
+ @cli.command("list")
265
+ def list_models():
266
+ """Show downloaded models."""
267
+ from bithub.downloader import get_downloaded_models
268
+
269
+ downloaded = get_downloaded_models()
270
+ if not downloaded:
271
+ console.print("[yellow]No models downloaded yet.[/yellow]\n")
272
+ console.print(" Get started:")
273
+ console.print(" [bold]bithub pull 2B-4T[/bold] — Microsoft's flagship (recommended)")
274
+ console.print(" [bold]bithub pull 700M[/bold] — smallest, great for testing")
275
+ console.print(" [bold]bithub models[/bold] — see all available models")
276
+ return
277
+
278
+ table = Table(title="Downloaded Models")
279
+ table.add_column("Name", style="bold cyan")
280
+ table.add_column("Size", justify="right")
281
+ table.add_column("Path", style="dim")
282
+
283
+ for m in downloaded:
284
+ table.add_row(m["name"], f"{m['size_mb']} MB", m["path"])
285
+
286
+ console.print(table)
287
+
288
+
289
@cli.command()
def models():
    """Show all available BitNet models in the registry."""
    from bithub.downloader import is_model_downloaded

    table = Table(title="Available BitNet Models")
    table.add_column("Name", style="bold cyan")
    table.add_column("Parameters", justify="right")
    table.add_column("Size", justify="right")
    table.add_column("Status", justify="center")
    table.add_column("Description")

    for short_name, info in list_available_models().items():
        # One row per registry entry, flagged when already on disk.
        if is_model_downloaded(short_name):
            status = "[green]installed[/green]"
        else:
            status = "[dim]—[/dim]"
        table.add_row(
            short_name,
            info["parameters"],
            f"~{info['size_mb']}MB",
            status,
            info["description"],
        )

    console.print(table)
    console.print("\n [dim]Tip: bithub pull <name> to download a model[/dim]")
314
+
315
+
316
@cli.command()
@click.argument("model_name")
@click.option("--yes", "-y", is_flag=True, help="Skip confirmation")
def rm(model_name, yes):
    """Remove a downloaded model.

    \b
    Examples:
      bithub rm 2B-4T
      bithub rm falcon3-3B -y # skip confirmation
    """
    from bithub.downloader import get_model_gguf_path, is_model_downloaded, remove_model

    if not is_model_downloaded(model_name):
        console.print(f"[yellow]Model {model_name} is not downloaded.[/yellow]")
        return

    # Show what will be removed
    gguf = get_model_gguf_path(model_name)
    if gguf:
        megabytes = gguf.stat().st_size / (1024 * 1024)
        console.print(f" Model: [bold]{model_name}[/bold]")
        console.print(f" File: {gguf.name}")
        console.print(f" Size: {megabytes:.0f} MB")

    if not yes and not click.confirm("\n Remove this model?", default=False):
        console.print(" [dim]Cancelled.[/dim]")
        return

    if remove_model(model_name):
        console.print(f"[green]Removed {model_name}.[/green]")
    else:
        console.print(f"[red]Failed to remove {model_name}.[/red]")
349
+
350
+
351
@cli.command()
def status():
    """Show the current state of bithub."""
    from bithub.builder import get_inference_binary, get_server_binary, is_bitnet_cpp_built
    from bithub.config import BITHUB_HOME, BITNET_CPP_DIR, MODELS_DIR, get_system_info
    from bithub.downloader import get_downloaded_models

    sys_info = get_system_info()

    # Header: version plus a one-line platform summary.
    header = (
        f"[bold]bithub[/bold] v{__version__}\n"
        f"[dim]{sys_info['os']} {sys_info['arch']} / "
        f"Python {sys_info['python']} / "
        f"{sys_info['cpu_cores']} CPU cores[/dim]"
    )
    console.print(Panel(header, border_style="blue"))

    # Paths
    console.print(f" Home: [dim]{BITHUB_HOME}[/dim]")
    console.print(f" Models: [dim]{MODELS_DIR}[/dim]")
    console.print(f" Engine: [dim]{BITNET_CPP_DIR}[/dim]")

    # Engine status
    if is_bitnet_cpp_built():
        cli_bin = get_inference_binary()
        srv_bin = get_server_binary()
        console.print("\n Engine: [green]Built[/green]")
        if cli_bin:
            console.print(f" CLI: [dim]{cli_bin}[/dim]")
        if srv_bin:
            console.print(f" Server: [dim]{srv_bin}[/dim]")
    else:
        console.print("\n Engine: [yellow]Not built[/yellow]")
        console.print(" Run [bold]bithub setup[/bold] to get started.")

    # Downloaded models
    downloaded = get_downloaded_models()
    total_size = sum(entry["size_mb"] for entry in downloaded)
    console.print(f"\n Models: {len(downloaded)} downloaded ({total_size} MB total)")
    for entry in downloaded:
        console.print(f" [cyan]{entry['name']}[/cyan] ({entry['size_mb']} MB)")

    if not downloaded:
        console.print(" [dim]Run bithub pull <name> to download a model[/dim]")

    console.print()
398
+
399
+
400
+ if __name__ == "__main__":
401
+ cli()
bithub/config.py ADDED
@@ -0,0 +1,102 @@
1
+ """Paths and configuration for bithub."""
2
+
3
+ import os
4
+ import platform
5
+ from pathlib import Path
6
+
7
# Default home directory: ~/.bithub (relocatable via the BITHUB_HOME env var)
BITHUB_HOME = Path(os.environ.get("BITHUB_HOME", Path.home() / ".bithub"))
MODELS_DIR = BITHUB_HOME / "models"          # where downloaded models live
BITNET_CPP_DIR = BITHUB_HOME / "bitnet.cpp"  # where the engine is cloned/built
DB_PATH = BITHUB_HOME / "models.json"        # NOTE(review): presumably model metadata — confirm against downloader
LOG_PATH = BITHUB_HOME / "bithub.log"

# Pre-built binaries installed by Docker/Homebrew/install script
PREBUILT_DIR = Path(os.environ.get("BITHUB_PREBUILT_DIR", BITHUB_HOME / "prebuilt"))

# Server defaults (loopback only unless overridden)
DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 8080
20
+
21
+
22
def get_default_threads() -> int:
    """Auto-detect a sensible default thread count based on CPU cores.

    Uses half the available cores (leaving room for the OS and other work),
    with a minimum of 2 and a max of 8 for safety.

    Returns:
        int: thread count in the inclusive range [2, 8].
    """
    # os.cpu_count() returns None (never raises) when the count is
    # undetermined, so the previous try/except wrapper was dead code.
    cores = os.cpu_count() or 4
    return max(2, min(cores // 2, 8))
34
+
35
+
36
def get_system_info() -> dict:
    """Gather system info for diagnostics."""
    core_count = os.cpu_count()
    return {
        "os": platform.system(),
        "arch": platform.machine(),
        "python": platform.python_version(),
        # cpu_count() yields None when undetermined; report a readable fallback.
        "cpu_cores": core_count if core_count else "unknown",
        "home": str(BITHUB_HOME),
    }
45
+
46
+
47
def ensure_dirs():
    """Create required directories if they don't exist."""
    for directory in (BITHUB_HOME, MODELS_DIR):
        directory.mkdir(parents=True, exist_ok=True)
51
+
52
+
53
# NOTE(review): these imports sit mid-file (PEP 8 wants them at the top of
# the module); left in place here to keep this edit free of side-effect
# reordering — consider consolidating them with the header imports.
import sys
import copy
import logging

# tomllib is stdlib from Python 3.11; on older interpreters fall back to the
# 'tomli' backport, or disable config-file parsing entirely if it's absent.
if sys.version_info >= (3, 11):
    import tomllib
else:
    try:
        import tomli as tomllib
    except ImportError:
        tomllib = None  # type: ignore[assignment]

# Baseline configuration. load_config() deep-copies this dict and overlays
# user values from config.toml section by section, so mutating the result of
# load_config() never touches these defaults.
_DEFAULT_CONFIG: dict = {
    "server": {
        "port": DEFAULT_PORT,
        "host": DEFAULT_HOST,
        "threads": get_default_threads(),
    },
    "models": {
        "default": None,
        "directory": str(MODELS_DIR),
    },
    "download": {
        "check_disk_space": True,
        "min_free_gb": 5,
    },
}
80
+
81
+
82
def load_config() -> dict:
    """Load config from ~/.bithub/config.toml, merged over defaults.

    Returns defaults if file missing, unreadable, or tomli not installed.
    """
    merged = copy.deepcopy(_DEFAULT_CONFIG)
    config_path = BITHUB_HOME / "config.toml"

    if not config_path.exists():
        return merged
    if tomllib is None:
        logging.warning("Install 'tomli' for config file support on Python <3.11")
        return merged

    try:
        with config_path.open("rb") as fh:
            user_config = tomllib.load(fh)
        # Shallow per-section overlay: only known sections are merged, and
        # user keys within a section win over defaults.
        for section, values in user_config.items():
            if isinstance(values, dict) and section in merged:
                merged[section].update(values)
    except Exception:
        # Best-effort: a broken config file must never block the CLI.
        logging.warning("Failed to parse %s, using defaults", config_path)
    return merged
@@ -0,0 +1,50 @@
1
+ """Dashboard-specific API endpoints for bithub web UI."""
2
+
3
+ from typing import Optional
4
+
5
+ from fastapi import APIRouter, HTTPException
6
+
7
+ from bithub.config import load_config
8
+ from bithub.downloader import get_downloaded_models, remove_model
9
+ from bithub.model_manager import ModelManager
10
+ from bithub.registry import list_available_models
11
+
12
# All dashboard endpoints live under the /api prefix.
router = APIRouter(prefix="/api", tags=["dashboard"])

# Shared ModelManager; injected via init_dashboard() before requests arrive.
_manager: Optional[ModelManager] = None
15
+
16
+
17
def init_dashboard(manager: ModelManager) -> APIRouter:
    """Store the shared ModelManager for the endpoints and return the router."""
    global _manager
    _manager = manager
    return router
21
+
22
+
23
+ @router.get("/stats")
24
+ async def get_stats():
25
+ if _manager is None:
26
+ return {"error": "Not initialized"}
27
+ return _manager.get_stats()
28
+
29
+
30
+ @router.get("/config")
31
+ async def get_config():
32
+ return load_config()
33
+
34
+
35
+ @router.get("/models/downloaded")
36
+ async def list_downloaded():
37
+ return get_downloaded_models()
38
+
39
+
40
+ @router.get("/models/registry")
41
+ async def list_registry():
42
+ return list_available_models()
43
+
44
+
45
+ @router.delete("/models/{model_name}")
46
+ async def delete_model(model_name: str):
47
+ success = remove_model(model_name)
48
+ if not success:
49
+ raise HTTPException(status_code=404, detail=f"Model {model_name} not found")
50
+ return {"removed": True, "model": model_name}