hypercli-cli 0.4.0 (tar.gz)
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as published in their respective public registries.
- hypercli_cli-0.4.0/.gitignore +51 -0
- hypercli_cli-0.4.0/PKG-INFO +124 -0
- hypercli_cli-0.4.0/README.md +105 -0
- hypercli_cli-0.4.0/c3cli/__init__.py +1 -0
- hypercli_cli-0.4.0/c3cli/billing.py +60 -0
- hypercli_cli-0.4.0/c3cli/cli.py +183 -0
- hypercli_cli-0.4.0/c3cli/comfyui.py +823 -0
- hypercli_cli-0.4.0/c3cli/instances.py +193 -0
- hypercli_cli-0.4.0/c3cli/jobs.py +239 -0
- hypercli_cli-0.4.0/c3cli/llm.py +263 -0
- hypercli_cli-0.4.0/c3cli/output.py +78 -0
- hypercli_cli-0.4.0/c3cli/renders.py +192 -0
- hypercli_cli-0.4.0/c3cli/tui/__init__.py +4 -0
- hypercli_cli-0.4.0/c3cli/tui/job_monitor.py +335 -0
- hypercli_cli-0.4.0/c3cli/user.py +19 -0
- hypercli_cli-0.4.0/pyproject.toml +40 -0

hypercli_cli-0.4.0/.gitignore
@@ -0,0 +1,51 @@
+# Dependencies
+node_modules/
+.pnp
+.pnp.js
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+venv/
+env/
+ENV/
+.venv/
+*.egg-info/
+
+# Build outputs
+.next
+out
+dist
+build
+
+# Testing
+coverage
+
+# Misc
+.DS_Store
+*.pem
+
+# Debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# Local env files
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+# Vercel
+.vercel
+
+# TypeScript
+*.tsbuildinfo
+next-env.d.ts
+
+# Turbo
+.turbo

hypercli_cli-0.4.0/PKG-INFO
@@ -0,0 +1,124 @@
+Metadata-Version: 2.4
+Name: hypercli-cli
+Version: 0.4.0
+Summary: CLI for HyperCLI - GPU orchestration and LLM API
+Project-URL: Homepage, https://hypercli.com
+Project-URL: Documentation, https://docs.hypercli.com
+Author-email: HyperCLI <support@hypercli.com>
+License-Expression: MIT
+Requires-Python: >=3.10
+Requires-Dist: hypercli-sdk[comfyui]>=0.4.0
+Requires-Dist: openai>=2.8.1
+Requires-Dist: rich>=14.2.0
+Requires-Dist: typer>=0.20.0
+Requires-Dist: websocket-client>=1.6.0
+Provides-Extra: dev
+Requires-Dist: pytest>=8.0.0; extra == 'dev'
+Requires-Dist: ruff>=0.3.0; extra == 'dev'
+Description-Content-Type: text/markdown
+
+# C3 CLI
+
+Command-line interface for [HyperCLI](https://hypercli.com) - GPU orchestration and LLM API.
+
+## Installation
+
+```bash
+pip install c3-cli
+```
+
+This also installs the `c3-sdk` as a dependency.
+
+## Setup
+
+Configure your API key:
+
+```bash
+c3 configure
+```
+
+Or set via environment:
+
+```bash
+export C3_API_KEY=your_key
+```
+
+Or create `~/.c3/config`:
+
+```
+C3_API_KEY=your_key
+```
+
+Get your API key at [hypercli.com/dashboard](https://hypercli.com/dashboard)
+
+## Usage
+
+### Billing
+
+```bash
+c3 billing balance
+c3 billing transactions
+c3 billing balance -o json
+```
+
+### LLM
+
+```bash
+# List models
+c3 llm models
+
+# Quick chat
+c3 llm chat deepseek-v3.1 "Explain quantum computing"
+
+# Interactive chat
+c3 llm chat deepseek-v3.1
+
+# With system prompt
+c3 llm chat deepseek-v3.1 "Write a haiku" -s "You are a poet"
+```
+
+### Jobs
+
+```bash
+# List jobs
+c3 jobs list
+c3 jobs list -s running
+
+# Create a job
+c3 jobs create nvidia/cuda:12.0 -g l40s -c "python train.py"
+
+# Create and follow logs with TUI
+c3 jobs create nvidia/cuda:12.0 -g h100 -n 8 -c "torchrun train.py" -f
+
+# Get job details
+c3 jobs get <job_id>
+
+# Stream logs
+c3 jobs logs <job_id> -f
+
+# Watch GPU metrics
+c3 jobs metrics <job_id> -w
+
+# Cancel
+c3 jobs cancel <job_id>
+
+# Extend runtime
+c3 jobs extend <job_id> 7200
+```
+
+### User
+
+```bash
+c3 user
+```
+
+## Output Formats
+
+```bash
+c3 jobs list -o json
+c3 billing balance -o table
+```
+
+## License
+
+MIT
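
The metadata above pins `hypercli-sdk` (imported as `c3` in the modules later in this diff) alongside `typer` and `rich`. For orientation, here is a minimal sketch of calling that SDK directly; it uses only the client calls and attributes that appear in `c3cli/billing.py` and `c3cli/cli.py` below, and everything else (the printed strings, the environment assumptions) is illustrative:

```python
# Sketch only: exercises the SDK the same way c3cli/billing.py does.
# Assumes C3_API_KEY is configured as described in the README's Setup section.
from c3 import C3, APIError

try:
    client = C3()
    bal = client.billing.balance()  # object with .total / .available / .rewards
    print(f"balance: ${bal.total} (available: ${bal.available})")
    txs = client.billing.transactions(limit=5, page=1)
    print(txs)
except APIError as e:
    # APIError exposes .status_code and .detail, as handled in cli.py's error path
    print(f"API error {e.status_code}: {e.detail}")
```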

hypercli_cli-0.4.0/README.md
@@ -0,0 +1,105 @@
+# C3 CLI
+
+Command-line interface for [HyperCLI](https://hypercli.com) - GPU orchestration and LLM API.
+
+## Installation
+
+```bash
+pip install c3-cli
+```
+
+This also installs the `c3-sdk` as a dependency.
+
+## Setup
+
+Configure your API key:
+
+```bash
+c3 configure
+```
+
+Or set via environment:
+
+```bash
+export C3_API_KEY=your_key
+```
+
+Or create `~/.c3/config`:
+
+```
+C3_API_KEY=your_key
+```
+
+Get your API key at [hypercli.com/dashboard](https://hypercli.com/dashboard)
+
+## Usage
+
+### Billing
+
+```bash
+c3 billing balance
+c3 billing transactions
+c3 billing balance -o json
+```
+
+### LLM
+
+```bash
+# List models
+c3 llm models
+
+# Quick chat
+c3 llm chat deepseek-v3.1 "Explain quantum computing"
+
+# Interactive chat
+c3 llm chat deepseek-v3.1
+
+# With system prompt
+c3 llm chat deepseek-v3.1 "Write a haiku" -s "You are a poet"
+```
+
+### Jobs
+
+```bash
+# List jobs
+c3 jobs list
+c3 jobs list -s running
+
+# Create a job
+c3 jobs create nvidia/cuda:12.0 -g l40s -c "python train.py"
+
+# Create and follow logs with TUI
+c3 jobs create nvidia/cuda:12.0 -g h100 -n 8 -c "torchrun train.py" -f
+
+# Get job details
+c3 jobs get <job_id>
+
+# Stream logs
+c3 jobs logs <job_id> -f
+
+# Watch GPU metrics
+c3 jobs metrics <job_id> -w
+
+# Cancel
+c3 jobs cancel <job_id>
+
+# Extend runtime
+c3 jobs extend <job_id> 7200
+```
+
+### User
+
+```bash
+c3 user
+```
+
+## Output Formats
+
+```bash
+c3 jobs list -o json
+c3 billing balance -o table
+```
+
+## License
+
+MIT

hypercli_cli-0.4.0/c3cli/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.1.0"

hypercli_cli-0.4.0/c3cli/billing.py
@@ -0,0 +1,60 @@
+"""c3 billing commands"""
+import typer
+from c3 import C3
+from .output import output, console, spinner
+
+app = typer.Typer(help="Billing and balance commands")
+
+
+def get_client() -> C3:
+    return C3()
+
+
+@app.command("balance")
+def balance(
+    fmt: str = typer.Option("table", "--output", "-o", help="Output format: table|json"),
+):
+    """Get account balance"""
+    c3 = get_client()
+    with spinner("Fetching balance..."):
+        bal = c3.billing.balance()
+
+    if fmt == "table":
+        console.print()
+        console.print("[bold]Account Balance[/bold]")
+        console.print()
+        console.print(f" Balance: [bold green]${bal.total}[/bold green]")
+        console.print(f" Available: ${bal.available}")
+        if bal.rewards != "0.000000" and bal.rewards != "0":
+            console.print(f" [dim](Rewards: ${bal.rewards})[/dim]")
+        console.print()
+    else:
+        output(bal, fmt)
+
+
+@app.command("transactions")
+def transactions(
+    limit: int = typer.Option(20, "--limit", "-n", help="Number of transactions"),
+    page: int = typer.Option(1, "--page", "-p", help="Page number"),
+    fmt: str = typer.Option("table", "--output", "-o", help="Output format: table|json"),
+):
+    """List transactions"""
+    c3 = get_client()
+    with spinner("Fetching transactions..."):
+        txs = c3.billing.transactions(limit=limit, page=page)
+
+    if fmt == "json":
+        output(txs, fmt)
+    else:
+        output(txs, "table", ["id", "transaction_type", "amount_usd", "status", "created_at"])
+
+
+@app.command("invoices")
+def invoices(
+    limit: int = typer.Option(20, "--limit", "-n", help="Number of invoices"),
+    fmt: str = typer.Option("table", "--output", "-o", help="Output format: table|json"),
+):
+    """List invoices"""
+    c3 = get_client()
+    # TODO: Add invoices to SDK
+    console.print("[dim]Invoices endpoint not yet implemented in SDK[/dim]")
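
Because the `dev` extra pulls in `pytest`, the Typer sub-apps above can be exercised in-process with `typer.testing.CliRunner`. A hedged test sketch against the `balance` command as defined above; the `C3` client is not stubbed here, so it assumes a configured `C3_API_KEY` and network access, and the asserted substring is only a guess at what the JSON output contains:

```python
# Sketch of an integration-style test for the billing sub-app above.
# Requires a valid C3_API_KEY; the C3 client is not mocked.
from typer.testing import CliRunner

from c3cli import billing

runner = CliRunner()


def test_balance_json_output():
    result = runner.invoke(billing.app, ["balance", "-o", "json"])
    assert result.exit_code == 0
    assert "total" in result.output  # assumed key in the JSON-rendered balance
```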

hypercli_cli-0.4.0/c3cli/cli.py
@@ -0,0 +1,183 @@
+"""C3 CLI - Main entry point"""
+import sys
+import typer
+from rich.console import Console
+from rich.prompt import Prompt
+
+from c3 import C3, APIError, configure
+from c3.config import CONFIG_FILE
+
+from . import billing, comfyui, instances, jobs, llm, renders, user
+
+console = Console()
+
+
+def fuzzy_match(input_str: str, options: list[str], threshold: float = 0.5) -> list[str]:
+    """Find similar strings using multiple heuristics"""
+    def similarity(a: str, b: str) -> float:
+        a, b = a.lower(), b.lower()
+        if a == b:
+            return 1.0
+
+        # Exact substring match
+        if a in b or b in a:
+            return 0.9
+
+        # Same characters (handles transpositions like rtx6000pro vs rtxpro6000)
+        if sorted(a) == sorted(b):
+            return 0.95
+
+        # Character set overlap
+        set_a, set_b = set(a), set(b)
+        common = set_a & set_b
+        jaccard = len(common) / len(set_a | set_b) if set_a | set_b else 0
+
+        # Prefix match bonus
+        prefix_len = 0
+        for ca, cb in zip(a, b):
+            if ca == cb:
+                prefix_len += 1
+            else:
+                break
+        prefix_bonus = prefix_len / max(len(a), len(b)) * 0.3
+
+        return jaccard + prefix_bonus
+
+    matches = [(opt, similarity(input_str, opt)) for opt in options]
+    matches = [(opt, score) for opt, score in matches if score >= threshold]
+    matches.sort(key=lambda x: x[1], reverse=True)
+    return [opt for opt, _ in matches[:3]]
+
+app = typer.Typer(
+    name="c3",
+    help="HyperCLI CLI - GPU orchestration and LLM API",
+    no_args_is_help=True,
+    rich_markup_mode="rich",
+)
+
+# Register subcommands
+app.add_typer(billing.app, name="billing")
+app.add_typer(comfyui.app, name="comfyui")
+app.add_typer(instances.app, name="instances")
+app.add_typer(jobs.app, name="jobs")
+app.add_typer(llm.app, name="llm")
+app.add_typer(renders.app, name="renders")
+app.add_typer(user.app, name="user")
+
+
+@app.command("configure")
+def configure_cmd():
+    """Configure C3 CLI with your API key and API URL"""
+    import getpass
+    from c3.config import get_api_key, get_api_url, DEFAULT_API_URL
+
+    console.print("\n[bold cyan]C3 CLI Configuration[/bold cyan]\n")
+
+    # Show current config
+    current_key = get_api_key()
+    current_url = get_api_url()
+
+    if current_key:
+        key_preview = current_key[:4] + "..." + current_key[-4:] if len(current_key) > 8 else "****"
+        console.print(f"Current API key: [dim]{key_preview}[/dim]")
+    if current_url and current_url != DEFAULT_API_URL:
+        console.print(f"Current API URL: [dim]{current_url}[/dim]")
+
+    console.print()
+    console.print("Get your API key at [link=https://hypercli.com/dashboard]hypercli.com/dashboard[/link]\n")
+
+    # API Key
+    api_key = getpass.getpass("API key (enter to keep current): ") if current_key else getpass.getpass("API key: ")
+    api_key = api_key.strip() if api_key else None
+
+    if not api_key and not current_key:
+        console.print("[red]No API key provided[/red]")
+        raise typer.Exit(1)
+
+    # API URL
+    url_prompt = f"API URL (enter for default, current: {current_url}): " if current_url != DEFAULT_API_URL else "API URL (enter for default): "
+    api_url = Prompt.ask(url_prompt, default="")
+    api_url = api_url.strip() if api_url else None
+
+    # Only update what changed
+    final_key = api_key or current_key
+    final_url = api_url if api_url else (current_url if current_url != DEFAULT_API_URL else None)
+
+    configure(final_key, final_url)
+
+    console.print(f"\n[green]✓[/green] Config saved to {CONFIG_FILE}")
+    if api_key:
+        preview = api_key[:4] + "..." + api_key[-4:] if len(api_key) > 8 else "****"
+        console.print(f" API key: {preview}")
+    if final_url:
+        console.print(f" API URL: {final_url}")
+    console.print("\nTest your setup with: [cyan]c3 billing balance[/cyan]\n")
+
+
+@app.callback()
+def main(
+    version: bool = typer.Option(False, "--version", "-v", help="Show version"),
+):
+    """
+    [bold cyan]C3 CLI[/bold cyan] - HyperCLI GPU orchestration and LLM API
+
+    Set your API key: [green]c3 configure[/green]
+
+    Get started:
+      c3 instances list      Browse available GPUs
+      c3 instances launch    Launch a GPU instance
+      c3 jobs list           View your running jobs
+      c3 llm chat -i         Start a chat
+    """
+    if version:
+        from . import __version__
+        console.print(f"c3 version {__version__}")
+        raise typer.Exit()
+
+
+def cli():
+    """Entry point with error handling"""
+    try:
+        app()
+    except APIError as e:
+        detail = e.detail or str(e)
+
+        # Check for GPU type errors and suggest corrections
+        if "GPU type" in detail and "not found" in detail and "Available:" in detail:
+            # Extract the invalid GPU type and available options
+            import re
+            match = re.search(r"GPU type '([^']+)' not found\. Available: \[([^\]]+)\]", detail)
+            if match:
+                invalid_type = match.group(1)
+                available_str = match.group(2)
+                available = [s.strip().strip("'") for s in available_str.split(",")]
+
+                console.print(f"[bold red]Error:[/bold red] Unknown GPU type '[yellow]{invalid_type}[/yellow]'")
+
+                # Find similar GPU types
+                suggestions = fuzzy_match(invalid_type, available)
+                if suggestions:
+                    console.print(f"\n[dim]Did you mean:[/dim]")
+                    for s in suggestions:
+                        console.print(f" [green]{s}[/green]")
+
+                console.print(f"\n[dim]Available GPU types:[/dim] {', '.join(available)}")
+                sys.exit(1)
+
+        # Check for region errors
+        if "region" in detail.lower() and "not found" in detail.lower():
+            console.print(f"[bold red]Error:[/bold red] {detail}")
+            console.print("\n[dim]Tip: Use 'c3 jobs regions' to see available regions[/dim]")
+            sys.exit(1)
+
+        # Generic API error
+        console.print(f"[bold red]API Error ({e.status_code}):[/bold red] {detail}")
+        sys.exit(1)
+
+    except KeyboardInterrupt:
+        console.print("\n[dim]Interrupted[/dim]")
+        sys.exit(130)
+
+
+if __name__ == "__main__":
+    cli()
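
The `fuzzy_match` helper above powers the "Did you mean" hint in the GPU-type error path. A short illustrative call, assuming the package is installed and importable as `c3cli`; the expected result follows from the sorted-characters branch, which scores transposed names like `rtx6000pro` vs `rtxpro6000` at 0.95:

```python
# Illustrative use of the fuzzy_match heuristic defined in c3cli/cli.py.
from c3cli.cli import fuzzy_match

# "rtx6000pro" and "rtxpro6000" contain the same characters, so the
# anagram check fires and only the real GPU name clears the 0.5 threshold.
print(fuzzy_match("rtx6000pro", ["rtxpro6000", "h100", "l40s"]))
# expected: ['rtxpro6000']
```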