hypercli-cli 1.0.3__tar.gz → 1.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24) hide show
  1. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/PKG-INFO +4 -1
  2. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/claw.py +29 -6
  3. hypercli_cli-1.0.5/hypercli_cli/embed.py +142 -0
  4. hypercli_cli-1.0.5/hypercli_cli/stt.py +105 -0
  5. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/pyproject.toml +5 -1
  6. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/.gitignore +0 -0
  7. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/README.md +0 -0
  8. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/__init__.py +0 -0
  9. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/agents.py +0 -0
  10. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/billing.py +0 -0
  11. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/cli.py +0 -0
  12. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/comfyui.py +0 -0
  13. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/flow.py +0 -0
  14. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/instances.py +0 -0
  15. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/jobs.py +0 -0
  16. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/keys.py +0 -0
  17. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/onboard.py +0 -0
  18. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/output.py +0 -0
  19. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/renders.py +0 -0
  20. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/tui/__init__.py +0 -0
  21. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/tui/job_monitor.py +0 -0
  22. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/user.py +0 -0
  23. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/voice.py +0 -0
  24. {hypercli_cli-1.0.3 → hypercli_cli-1.0.5}/hypercli_cli/wallet.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: hypercli-cli
3
- Version: 1.0.3
3
+ Version: 1.0.5
4
4
  Summary: CLI for HyperCLI - GPU orchestration and LLM API
5
5
  Project-URL: Homepage, https://hypercli.com
6
6
  Project-URL: Documentation, https://docs.hypercli.com
@@ -18,6 +18,7 @@ Requires-Dist: websocket-client>=1.6.0
18
18
  Provides-Extra: all
19
19
  Requires-Dist: argon2-cffi>=25.0.0; extra == 'all'
20
20
  Requires-Dist: eth-account>=0.13.0; extra == 'all'
21
+ Requires-Dist: faster-whisper>=1.1.0; extra == 'all'
21
22
  Requires-Dist: hypercli-sdk[comfyui]>=1.0.0; extra == 'all'
22
23
  Requires-Dist: web3>=7.0.0; extra == 'all'
23
24
  Requires-Dist: x402[evm,httpx]>=2.0.0; extra == 'all'
@@ -26,6 +27,8 @@ Requires-Dist: hypercli-sdk[comfyui]>=1.0.0; extra == 'comfyui'
26
27
  Provides-Extra: dev
27
28
  Requires-Dist: pytest>=8.0.0; extra == 'dev'
28
29
  Requires-Dist: ruff>=0.3.0; extra == 'dev'
30
+ Provides-Extra: stt
31
+ Requires-Dist: faster-whisper>=1.1.0; extra == 'stt'
29
32
  Provides-Extra: wallet
30
33
  Requires-Dist: argon2-cffi>=25.0.0; extra == 'wallet'
31
34
  Requires-Dist: eth-account>=0.13.0; extra == 'wallet'
@@ -9,6 +9,8 @@ from rich.table import Table
9
9
 
10
10
  from .onboard import onboard as _onboard_fn
11
11
  from .voice import app as voice_app
12
+ from .stt import app as stt_app
13
+ from .embed import app as embed_app
12
14
 
13
15
  app = typer.Typer(help="HyperClaw inference commands")
14
16
  console = Console()
@@ -16,6 +18,8 @@ console = Console()
16
18
  # Register subcommands
17
19
  app.command("onboard")(_onboard_fn)
18
20
  app.add_typer(voice_app, name="voice")
21
+ app.add_typer(stt_app, name="stt")
22
+ app.add_typer(embed_app, name="embed")
19
23
 
20
24
  # Check if wallet dependencies are available
21
25
  try:
@@ -582,7 +586,7 @@ def openclaw_setup(
582
586
  # Fetch current model list from LiteLLM via API
583
587
  models = fetch_models(api_key)
584
588
 
585
- # Patch only models.providers.hyperclaw
589
+ # Patch models.providers.hyperclaw + embedding config
586
590
  config.setdefault("models", {}).setdefault("providers", {})
587
591
  config["models"]["providers"]["hyperclaw"] = {
588
592
  "baseUrl": "https://api.hyperclaw.app/v1",
@@ -591,9 +595,20 @@ def openclaw_setup(
591
595
  "models": models,
592
596
  }
593
597
 
598
+ # Always set embedding provider (reuses same API key)
599
+ config.setdefault("agents", {}).setdefault("defaults", {})
600
+ config["agents"]["defaults"]["memorySearch"] = {
601
+ "provider": "openai",
602
+ "model": "qwen3-embedding-4b",
603
+ "remote": {
604
+ "baseUrl": "https://api.hyperclaw.app/v1/",
605
+ "apiKey": api_key,
606
+ }
607
+ }
608
+
594
609
  # Optionally set default model
595
610
  if default:
596
- config.setdefault("agents", {}).setdefault("defaults", {}).setdefault("model", {})
611
+ config["agents"]["defaults"].setdefault("model", {})
597
612
  config["agents"]["defaults"]["model"]["primary"] = f"hyperclaw/{models[0]['id']}"
598
613
 
599
614
  # Write back
@@ -630,14 +645,14 @@ def _resolve_api_key(key: str | None) -> str:
630
645
  raise typer.Exit(1)
631
646
 
632
647
 
633
- def _config_openclaw(api_key: str, models: list[dict]) -> dict:
634
- """OpenClaw openclaw.json provider snippet."""
648
+ def _config_openclaw(api_key: str, models: list[dict], api_base: str = PROD_API_BASE) -> dict:
649
+ """OpenClaw openclaw.json provider snippet (LLM + embeddings)."""
635
650
  return {
636
651
  "models": {
637
652
  "mode": "merge",
638
653
  "providers": {
639
654
  "hyperclaw": {
640
- "baseUrl": "https://api.hyperclaw.app/v1",
655
+ "baseUrl": f"{api_base}/v1",
641
656
  "apiKey": api_key,
642
657
  "api": "openai-completions",
643
658
  "models": models,
@@ -648,6 +663,14 @@ def _config_openclaw(api_key: str, models: list[dict]) -> dict:
648
663
  "defaults": {
649
664
  "models": {
650
665
  **{f"hyperclaw/{m['id']}": {"alias": m['id'].split('-')[0]} for m in models}
666
+ },
667
+ "memorySearch": {
668
+ "provider": "openai",
669
+ "model": "qwen3-embedding-4b",
670
+ "remote": {
671
+ "baseUrl": f"{api_base}/v1/",
672
+ "apiKey": api_key,
673
+ }
651
674
  }
652
675
  }
653
676
  }
@@ -725,7 +748,7 @@ def config_cmd(
725
748
 
726
749
  for fmt in formats:
727
750
  if fmt == "openclaw":
728
- snippet = _config_openclaw(api_key, models)
751
+ snippet = _config_openclaw(api_key, models, api_base)
729
752
  _show_snippet("OpenClaw", "~/.openclaw/openclaw.json", snippet, apply, OPENCLAW_CONFIG_PATH)
730
753
  elif fmt == "opencode":
731
754
  snippet = _config_opencode(api_key, models)
@@ -0,0 +1,142 @@
1
+ """HyperClaw Embed — text embeddings via HyperClaw API."""
2
+ import json
3
+ from pathlib import Path
4
+
5
+ import httpx
6
+ import typer
7
+ from rich.console import Console
8
+
9
+ app = typer.Typer(help="Text embeddings via HyperClaw API (qwen3-embedding-4b)")
10
+ console = Console()
11
+
12
+ HYPERCLI_DIR = Path.home() / ".hypercli"
13
+ CLAW_KEY_PATH = HYPERCLI_DIR / "claw-key.json"
14
+ PROD_API_BASE = "https://api.hyperclaw.app"
15
+ DEV_API_BASE = "https://api.dev.hyperclaw.app"
16
+
17
+
18
+ def _get_api_key(key: str | None) -> str:
19
+ if key:
20
+ return key
21
+ if CLAW_KEY_PATH.exists():
22
+ with open(CLAW_KEY_PATH) as f:
23
+ k = json.load(f).get("key", "")
24
+ if k:
25
+ return k
26
+ console.print("[red]❌ No API key found.[/red]")
27
+ console.print("Pass [bold]--key sk-...[/bold] or run [bold]hyper claw subscribe[/bold]")
28
+ raise typer.Exit(1)
29
+
30
+
31
@app.command("text")
def embed_text(
    text: str = typer.Argument(..., help="Text to embed"),
    model: str = typer.Option("qwen3-embedding-4b", "--model", "-m", help="Embedding model"),
    key: str = typer.Option(None, "--key", "-k", help="API key (sk-...)"),
    dev: bool = typer.Option(False, "--dev", help="Use dev API"),
    json_output: bool = typer.Option(False, "--json", help="Output full JSON response"),
    output: Path = typer.Option(None, "--output", "-o", help="Write embeddings to file"),
):
    """Generate embeddings for text.

    Posts to the OpenAI-compatible ``/v1/embeddings`` endpoint and either
    prints a summary, the full JSON response (``--json``), or writes the
    result to ``--output``.

    Examples:
        hyper claw embed text "Hello world"
        hyper claw embed text "Test" --json
        hyper claw embed text "Document chunk" -o embedding.json
    """
    api_key = _get_api_key(key)
    api_base = DEV_API_BASE if dev else PROD_API_BASE
    url = f"{api_base}/v1/embeddings"

    try:
        resp = httpx.post(
            url,
            json={"model": model, "input": text},
            headers={"Authorization": f"Bearer {api_key}"},
            timeout=30.0,
        )
        resp.raise_for_status()
        data = resp.json()
    except httpx.HTTPError as e:
        console.print(f"[red]❌ Embedding request failed: {e}[/red]")
        raise typer.Exit(1) from e

    # Guard against a malformed response body — without this, a server-side
    # shape change surfaces as a raw KeyError/IndexError traceback.
    try:
        embedding = data["data"][0]["embedding"]
    except (KeyError, IndexError, TypeError) as e:
        console.print(f"[red]❌ Unexpected response shape (missing data[0].embedding): {e!r}[/red]")
        raise typer.Exit(1) from e

    dims = len(embedding)
    usage = data.get("usage", {})
    tokens = usage.get("total_tokens", 0)

    if json_output:
        result = json.dumps(data, indent=2)
        if output:
            output.write_text(result, encoding="utf-8")
            console.print(f"[green]✅ Written to {output} ({dims} dimensions, {tokens} tokens)[/green]")
        else:
            # Plain print so the JSON can be piped without rich markup.
            print(result)
    else:
        console.print(f"[green]✅ Embedded ({dims} dimensions, {tokens} tokens)[/green]")
        console.print(f"[dim]Model: {data.get('model', model)}[/dim]")
        console.print(f"[dim]First 5: {embedding[:5]}[/dim]")
        if output:
            # Without --json, only the raw embedding vector is saved.
            output.write_text(json.dumps(embedding), encoding="utf-8")
            console.print(f"[green]Saved to {output}[/green]")
83
+
84
+
85
@app.command("test")
def embed_test(
    key: str = typer.Option(None, "--key", "-k", help="API key (sk-...)"),
    dev: bool = typer.Option(False, "--dev", help="Use dev API"),
):
    """Quick test to verify embedding endpoint works.

    Embeds two unrelated sentences, checks the response shape, and reports
    dimensions, token usage, and their cosine similarity.

    Examples:
        hyper claw embed test
        hyper claw embed test --dev
    """
    import math

    api_key = _get_api_key(key)
    api_base = DEV_API_BASE if dev else PROD_API_BASE
    url = f"{api_base}/v1/embeddings"

    test_texts = [
        "The quick brown fox jumps over the lazy dog.",
        "GPU orchestration and cloud computing infrastructure.",
    ]

    console.print(f"[bold]Testing embedding endpoint: {url}[/bold]\n")

    try:
        resp = httpx.post(
            url,
            json={"model": "qwen3-embedding-4b", "input": test_texts},
            headers={"Authorization": f"Bearer {api_key}"},
            timeout=30.0,
        )
        resp.raise_for_status()
        data = resp.json()
    except httpx.HTTPError as e:
        console.print(f"[red]❌ FAIL: {e}[/red]")
        raise typer.Exit(1)

    embeddings = data.get("data", [])
    if len(embeddings) != 2:
        console.print(f"[red]❌ Expected 2 embeddings, got {len(embeddings)}[/red]")
        raise typer.Exit(1)

    dims = len(embeddings[0]["embedding"])
    usage = data.get("usage", {})

    # Cosine similarity between the two vectors (dot / |v1||v2|).
    v1 = embeddings[0]["embedding"]
    v2 = embeddings[1]["embedding"]
    dot = sum(a * b for a, b in zip(v1, v2))
    mag1 = math.sqrt(sum(a * a for a in v1))
    mag2 = math.sqrt(sum(b * b for b in v2))
    # Zero-magnitude guard keeps a degenerate response from dividing by zero.
    cosine_sim = dot / (mag1 * mag2) if mag1 and mag2 else 0

    console.print("[green]✅ PASS[/green]")
    console.print(f"  Model: {data.get('model', 'qwen3-embedding-4b')}")
    console.print(f"  Dimensions: {dims}")
    console.print(f"  Tokens: {usage.get('total_tokens', '?')}")
    console.print(f"  Cosine similarity: {cosine_sim:.4f}")
    console.print("  (Two unrelated sentences should be < 0.9)")
@@ -0,0 +1,105 @@
1
+ """HyperClaw STT — local speech-to-text via faster-whisper."""
2
+ import sys
3
+ from pathlib import Path
4
+
5
+ import typer
6
+ from rich.console import Console
7
+
8
+ app = typer.Typer(help="Speech-to-text via faster-whisper (local, no API key)")
9
+ console = Console()
10
+
11
+ # Lazy import to avoid crashing when faster-whisper isn't installed
12
+ _model_cache = {}
13
+
14
+
15
+ def _get_model(model_size: str, device: str, compute_type: str):
16
+ """Get or create a cached whisper model."""
17
+ key = (model_size, device, compute_type)
18
+ if key not in _model_cache:
19
+ try:
20
+ from faster_whisper import WhisperModel
21
+ except ImportError:
22
+ console.print("[red]❌ faster-whisper not installed.[/red]")
23
+ console.print("Install with: [bold]pip install 'hypercli-cli[stt]'[/bold]")
24
+ console.print("Or: [bold]pip install 'hypercli-cli[all]'[/bold]")
25
+ raise typer.Exit(1)
26
+ _model_cache[key] = WhisperModel(model_size, device=device, compute_type=compute_type)
27
+ return _model_cache[key]
28
+
29
+
30
@app.command("transcribe")
def transcribe(
    audio_file: Path = typer.Argument(..., help="Audio file to transcribe (wav, mp3, ogg, m4a, etc.)"),
    model: str = typer.Option("turbo", "--model", "-m", help="Whisper model: tiny, base, small, medium, large-v3, turbo"),
    language: str = typer.Option(None, "--language", "-l", help="Language code (e.g. en, de, fr). Auto-detect if omitted."),
    device: str = typer.Option("auto", "--device", "-d", help="Device: auto, cpu, cuda"),
    compute_type: str = typer.Option("auto", "--compute", help="Compute type: auto, int8, float16, float32"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON with timestamps"),
    output: Path = typer.Option(None, "--output", "-o", help="Write transcript to file"),
):
    """Transcribe audio to text using faster-whisper (runs locally).

    Prints the transcript to stdout (pipe-friendly) or writes it to
    ``--output``; ``--json`` includes per-segment timestamps.

    Examples:
        hyper claw stt transcribe voice.ogg
        hyper claw stt transcribe meeting.mp3 --model large-v3 --language en
        hyper claw stt transcribe audio.wav --json -o transcript.json
    """
    if not audio_file.exists():
        console.print(f"[red]❌ File not found: {audio_file}[/red]")
        raise typer.Exit(1)

    # Auto-select compute type: int8 is the safe CPU default, float16 needs CUDA.
    if compute_type == "auto":
        compute_type = "int8" if device == "cpu" else "float16"
    if device == "auto":
        try:
            import torch
            device = "cuda" if torch.cuda.is_available() else "cpu"
        except ImportError:
            # No torch → no reliable CUDA probe; fall back to CPU.
            device = "cpu"
    # Device may only now have resolved to cpu, where float16 is unsupported.
    if device == "cpu" and compute_type == "float16":
        compute_type = "int8"

    console.print(f"[dim]Model: {model} | Device: {device} | Compute: {compute_type}[/dim]")
    console.print(f"[dim]File: {audio_file} ({audio_file.stat().st_size / 1024:.1f} KB)[/dim]")

    whisper_model = _get_model(model, device, compute_type)

    kwargs = {}
    if language:
        kwargs["language"] = language

    segments, info = whisper_model.transcribe(str(audio_file), **kwargs)

    if not language:
        console.print(f"[dim]Detected language: {info.language} (p={info.language_probability:.2f})[/dim]")

    if json_output:
        import json
        results = []
        for seg in segments:
            results.append({
                "start": round(seg.start, 3),
                "end": round(seg.end, 3),
                "text": seg.text.strip(),
            })
        output_text = json.dumps({
            "language": info.language,
            "language_probability": round(info.language_probability, 3),
            "duration": round(info.duration, 3),
            "segments": results,
            "text": " ".join(r["text"] for r in results),
        }, indent=2, ensure_ascii=False)
    else:
        parts = []
        for seg in segments:
            parts.append(seg.text.strip())
        output_text = " ".join(parts)

    if output:
        output.parent.mkdir(parents=True, exist_ok=True)
        # Transcripts are routinely non-ASCII (and --json uses ensure_ascii=False);
        # pin UTF-8 so the write can't fail under a non-UTF-8 locale (e.g. cp1252).
        output.write_text(output_text, encoding="utf-8")
        console.print(f"[green]✅ Written to {output}[/green]")
    else:
        # Print to stdout (useful for piping)
        print(output_text)
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "hypercli-cli"
7
- version = "1.0.3"
7
+ version = "1.0.5"
8
8
  description = "CLI for HyperCLI - GPU orchestration and LLM API"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.10"
@@ -33,12 +33,16 @@ wallet = [
33
33
  "argon2-cffi>=25.0.0",
34
34
  "qrcode[pil]>=7.4.0",
35
35
  ]
36
+ stt = [
37
+ "faster-whisper>=1.1.0",
38
+ ]
36
39
  all = [
37
40
  "hypercli-sdk[comfyui]>=1.0.0",
38
41
  "x402[httpx,evm]>=2.0.0",
39
42
  "eth-account>=0.13.0",
40
43
  "web3>=7.0.0",
41
44
  "argon2-cffi>=25.0.0",
45
+ "faster-whisper>=1.1.0",
42
46
  ]
43
47
  dev = [
44
48
  "pytest>=8.0.0",
File without changes
File without changes