@iamharshil/aix-cli 4.0.1 → 4.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,11 +1,11 @@
1
1
  <div align="center">
2
2
 
3
- # AIX CLI
3
+ # AIX
4
4
 
5
5
  **Run Claude Code with local AI models. No API keys. No cloud. Complete privacy.**
6
6
 
7
- [![npm version](https://img.shields.io/npm/v/@iamharshil/aix-cli.svg?style=flat-square&color=cb3837)](https://www.npmjs.com/package/@iamharshil/aix-cli)
8
- [![Downloads](https://img.shields.io/npm/dm/@iamharshil/aix-cli?style=flat-square&color=blue)](https://www.npmjs.com/package/@iamharshil/aix-cli)
7
+ [![npm version](https://img.shields.io/npm/v/@iamharshil/aix.svg?style=flat-square&color=cb3837)](https://www.npmjs.com/package/@iamharshil/aix)
8
+ [![Downloads](https://img.shields.io/npm/dm/@iamharshil/aix?style=flat-square&color=blue)](https://www.npmjs.com/package/@iamharshil/aix)
9
9
  [![License](https://img.shields.io/badge/license-MIT-green?style=flat-square)](LICENSE)
10
10
  [![CI](https://img.shields.io/github/actions/workflow/status/iamharshil/aix-cli/ci.yml?style=flat-square&label=CI)](https://github.com/iamharshil/aix-cli/actions)
11
11
  [![Node](https://img.shields.io/badge/node-%E2%89%A518-417e38?style=flat-square)](https://nodejs.org/)
@@ -24,7 +24,7 @@ No API keys. No cloud calls. No data leaving your machine.
24
24
 
25
25
  ```
26
26
  ┌──────────────────────────────────────────────────┐
27
- │ $ aix-cli run │
27
+ │ $ aix run │
28
28
  │ │
29
29
  │ ? Select model backend: Ollama │
30
30
  │ ✔ Connected to Ollama │
@@ -41,7 +41,7 @@ No API keys. No cloud calls. No data leaving your machine.
41
41
  - 🔑 **No API keys** — No subscriptions, no usage limits, no cloud dependencies.
42
42
  - 🚀 **GPU-accelerated** — Take advantage of your local GPU for fast inference.
43
43
  - 🔀 **Single provider** — Claude Code is the only supported AI coding assistant.
44
- - ⚡ **Zero config** — Just run `aix-cli run` and start coding.
44
+ - ⚡ **Zero config** — Just run `aix run` and start coding.
45
45
 
46
46
  ### Compatibility notes
47
47
 
@@ -50,7 +50,7 @@ No API keys. No cloud calls. No data leaving your machine.
50
50
 
51
51
  ```bash
52
52
  # Recommended for Ollama
53
- aix-cli run --ollama --native -m qwen2.5-coder:14b
53
+ aix run --ollama --native -m qwen2.5-coder:14b
54
54
  ```
55
55
 
56
56
  ---
@@ -68,7 +68,7 @@ aix-cli run --ollama --native -m qwen2.5-coder:14b
68
68
  ### Install
69
69
 
70
70
  ```bash
71
- npm install -g @iamharshil/aix-cli
71
+ npm install -g @iamharshil/aix
72
72
  ```
73
73
 
74
74
  <details>
@@ -76,10 +76,10 @@ npm install -g @iamharshil/aix-cli
76
76
 
77
77
  ```bash
78
78
  # Yarn
79
- yarn global add @iamharshil/aix-cli
79
+ yarn global add @iamharshil/aix
80
80
 
81
81
  # pnpm
82
- pnpm add -g @iamharshil/aix-cli
82
+ pnpm add -g @iamharshil/aix
83
83
  ```
84
84
 
85
85
  </details>
@@ -89,7 +89,7 @@ pnpm add -g @iamharshil/aix-cli
89
89
 
90
90
  ```bash
91
91
  git clone https://github.com/iamharshil/aix-cli.git
92
- cd aix-cli
92
+ cd aix-cli
93
93
  npm install
94
94
  npm run build
95
95
  npm link
@@ -100,7 +100,7 @@ npm link
100
100
  ### Verify
101
101
 
102
102
  ```bash
103
- aix-cli doctor
103
+ aix doctor
104
104
  ```
105
105
 
106
106
  This checks that LM Studio / Ollama, Claude Code, and your environment are properly configured.
@@ -109,105 +109,105 @@ This checks that LM Studio / Ollama, Claude Code, and your environment are prope
109
109
 
110
110
  ## Usage
111
111
 
112
- ### `aix-cli run` — Start a coding session
112
+ ### `aix run` — Start a coding session
113
113
 
114
114
  The primary command. Launches Claude Code backed by a local model.
115
115
 
116
116
  ```bash
117
117
  # Interactive — prompts for backend and model
118
- aix-cli run
118
+ aix run
119
119
 
120
120
  # Specify backend and model
121
- aix-cli run -b ollama -m qwen2.5-coder:14b
122
- aix-cli run -b lmstudio -m llama-3-8b
121
+ aix run -b ollama -m qwen2.5-coder:14b
122
+ aix run -b lmstudio -m llama-3-8b
123
123
 
124
124
  # Use Ollama's native Claude Code integration (recommended for Ollama)
125
- aix-cli run -b ollama --native -m qwen2.5-coder:14b
125
+ aix run -b ollama --native -m qwen2.5-coder:14b
126
126
 
127
127
  # Global shortcuts
128
- aix-cli run --ollama --native -m gemma4
128
+ aix run --ollama --native -m gemma4
129
129
 
130
130
  # Pass a prompt directly
131
- aix-cli run -b ollama -m qwen2.5-coder:14b -- "Refactor auth middleware"
131
+ aix run -b ollama -m qwen2.5-coder:14b -- "Refactor auth middleware"
132
132
  ```
133
133
 
134
- ### `aix-cli init` — Set up backend and model
134
+ ### `aix init` — Set up backend and model
135
135
 
136
136
  Configure your preferred backend and load/select a model.
137
137
 
138
138
  ```bash
139
- aix-cli init # Interactive setup
140
- aix-cli init -b ollama -m qwen2.5-coder:14b # Ollama with specific model
141
- aix-cli init -b lmstudio -m llama-3-8b -p claude # Full config in one command
139
+ aix init # Interactive setup
140
+ aix init -b ollama -m qwen2.5-coder:14b # Ollama with specific model
141
+ aix init -b lmstudio -m llama-3-8b -p claude # Full config in one command
142
142
  ```
143
143
 
144
- ### `aix-cli status` — Check what's running
144
+ ### `aix status` — Check what's running
145
145
 
146
146
  Shows status for both LM Studio and Ollama, including available and running models.
147
147
 
148
148
  ```bash
149
- aix-cli status
149
+ aix status
150
150
  ```
151
151
 
152
- ### `aix-cli doctor` — Infrastructure check
152
+ ### `aix doctor` — Infrastructure check
153
153
 
154
154
  Checks that provider is running, port is accessible, model is available, and Claude config is correct.
155
155
 
156
156
  ```bash
157
- aix-cli doctor
157
+ aix doctor
158
158
  ```
159
159
 
160
- ### `aix-cli setup` — One-command setup
160
+ ### `aix setup` — One-command setup
161
161
 
162
162
  Quick setup for first-time users. Detects installed providers and configures defaults.
163
163
 
164
164
  ```bash
165
- aix-cli setup # Interactive setup
166
- aix-cli setup --provider ollama # Use specific provider
167
- aix-cli setup --force # Overwrite existing config
165
+ aix setup # Interactive setup
166
+ aix setup --provider ollama # Use specific provider
167
+ aix setup --force # Overwrite existing config
168
168
  ```
169
169
 
170
- ### `aix-cli providers` — Manage providers
170
+ ### `aix providers` — Manage providers
171
171
 
172
172
  List available providers or set a default.
173
173
 
174
174
  ```bash
175
- aix-cli providers list # Show providers with status
176
- aix-cli providers set ollama # Set default provider
175
+ aix providers list # Show providers with status
176
+ aix providers set ollama # Set default provider
177
177
  ```
178
178
 
179
- ### `aix-cli models` — List models
179
+ ### `aix models` — List models
180
180
 
181
181
  Fetch and display available models from a provider.
182
182
 
183
183
  ```bash
184
- aix-cli models list --provider ollama
185
- aix-cli models list --provider lmstudio
184
+ aix models list --provider ollama
185
+ aix models list --provider lmstudio
186
186
  ```
187
187
 
188
- ### `aix-cli switch` — Switch provider
188
+ ### `aix switch` — Switch provider
189
189
 
190
190
  Instantly switch providers without breaking Claude setup.
191
191
 
192
192
  ```bash
193
- aix-cli switch ollama
194
- aix-cli switch lmstudio
193
+ aix switch ollama
194
+ aix switch lmstudio
195
195
  ```
196
196
 
197
- ### `aix-cli disconnect` — Disconnect
197
+ ### `aix disconnect` — Disconnect
198
198
 
199
199
  Remove connection cleanly.
200
200
 
201
201
  ```bash
202
- aix-cli disconnect claude
202
+ aix disconnect claude
203
203
  ```
204
204
 
205
- ### `aix-cli fix` — Fix issues
205
+ ### `aix fix` — Fix issues
206
206
 
207
207
  Fix infrastructure issues - suggest starting backends, correct ports, reset config, fix model.
208
208
 
209
209
  ```bash
210
- aix-cli fix
210
+ aix fix
211
211
  ```
212
212
 
213
213
  ### Command Reference
@@ -244,11 +244,11 @@ aix-cli fix
244
244
 
245
245
  AIX stores its configuration in the OS-appropriate config directory:
246
246
 
247
- | Platform | Path |
248
- | -------- | ---------------------------------------- |
249
- | macOS | `~/Library/Application Support/aix-cli/` |
250
- | Linux | `~/.config/aix-cli/` |
251
- | Windows | `%APPDATA%\aix-cli\` |
247
+ | Platform | Path |
248
+ | -------- | ------------------------------------ |
249
+ | macOS | `~/Library/Application Support/aix/` |
250
+ | Linux | `~/.config/aix/` |
251
+ | Windows | `%APPDATA%\aix\` |
252
252
 
253
253
  ### Config File
254
254
 
@@ -313,7 +313,7 @@ AIX stores its configuration in the OS-appropriate config directory:
313
313
  1. Open LM Studio
314
314
  2. Navigate to the **Server** tab (left sidebar)
315
315
  3. Click **Start Server**
316
- 4. Confirm with `aix-cli status`
316
+ 4. Confirm with `aix status`
317
317
 
318
318
  </details>
319
319
 
@@ -323,7 +323,7 @@ AIX stores its configuration in the OS-appropriate config directory:
323
323
  1. Install Ollama from [ollama.com](https://ollama.com)
324
324
  2. Start the server: `ollama serve`
325
325
  3. Pull a model: `ollama pull qwen2.5-coder:14b`
326
- 4. Confirm with `aix-cli status`
326
+ 4. Confirm with `aix status`
327
327
 
328
328
  </details>
329
329
 
@@ -334,7 +334,7 @@ AIX stores its configuration in the OS-appropriate config directory:
334
334
 
335
335
  **Ollama:** Run `ollama pull <model>` to download a model (e.g., `ollama pull llama3.2`).
336
336
 
337
- Then run `aix-cli init` to select and configure.
337
+ Then run `aix init` to select and configure.
338
338
 
339
339
  </details>
340
340
 
@@ -346,7 +346,7 @@ Check that the correct port is being used:
346
346
  - LM Studio defaults to port `1234`
347
347
  - Ollama defaults to port `11434`
348
348
 
349
- You can configure custom ports in your AIX config file (path shown by `aix-cli doctor`).
349
+ You can configure custom ports in your AIX config file (path shown by `aix doctor`).
350
350
 
351
351
  </details>
352
352
 
@@ -359,7 +359,7 @@ Install Claude Code globally:
359
359
  npm install -g @anthropic-ai/claude-code
360
360
  ```
361
361
 
362
- Then re-run `aix-cli doctor` to confirm.
362
+ Then re-run `aix doctor` to confirm.
363
363
 
364
364
  </details>
365
365
 
@@ -385,7 +385,7 @@ Contributions are welcome! See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines
385
385
 
386
386
  ```bash
387
387
  git clone https://github.com/iamharshil/aix-cli.git
388
- cd aix-cli
388
+ cd aix-cli
389
389
  npm install
390
390
  npm run dev # Run in development mode
391
391
  npm test # Run tests
package/dist/bin/aix.js CHANGED
@@ -12,4 +12,4 @@ Starting ${r} with model: ${e}
12
12
  Starting Claude Code with Ollama at ${r}...
13
13
  `)),t&&(console.log(D.dim(`Running: claude --model ${o}
14
14
  `)),console.log(D.dim(`ANTHROPIC_BASE_URL=${r}
15
- `)));try{await Ie("claude",["--model",o,...e],{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:o,ANTHROPIC_BASE_URL:r,ANTHROPIC_AUTH_TOKEN:"ollama",ANTHROPIC_API_KEY:""}})}catch(n){d(`Failed to run Claude Code: ${n instanceof Error?n.message:"Unknown error"}`)}}import h from"chalk";async function ie(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel(),[r,n,a]=await Promise.all([g.getStatus(),v.getStatus(),x.isClaudeCodeInstalled()]);if(console.log(),console.log(h.bold.cyan("\u25CF Status")),console.log(h.dim("\u2500".repeat(50))),console.log(),console.log(h.bold("Active")),console.log(` ${h.dim("\u25B8")} Provider: ${o?h.cyan(o):h.dim("not set")}`),console.log(` ${h.dim("\u25B8")} Tool: ${e?h.cyan(e):h.dim("not set")}`),o){let m=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();console.log(` ${h.dim("\u25B8")} Endpoint: ${h.cyan(m)}`)}t&&console.log(` ${h.dim("\u25B8")} Model: ${h.green(t)}`),console.log(),console.log(h.bold("Tools")),console.log(` Claude Code: ${a?h.green("installed"):h.red("not installed")}`),console.log(),console.log(h.bold("Backends"));let s=r.running?h.green("running"):h.yellow("stopped"),l=n.running?h.green("running"):h.yellow("stopped"),f=o==="lmstudio",c=o==="ollama";console.log(` LM Studio: ${s}${f?h.dim(" (default)"):""}`),console.log(` Ollama: ${l}${c?h.dim(" (default)"):""}`),console.log()}import je from"ora";import T from"chalk";import{execa as we}from"execa";import{readFileSync as qe}from"fs";import{fileURLToPath as We}from"url";function ae(){try{let o=We(new URL("../../package.json",import.meta.url)),e=JSON.parse(qe(o,"utf8"));return String(e.version)}catch{return"unknown"}}async function re(){let o=je({text:"Checking for updates...",color:"cyan"}).start();try{let e=ae();if(e==="unknown"){o.fail("Could not determine current version.");return}let{stdout:t}=await we("npm",["view","@iamharshil/aix-cli","version"]),r=t.trim();if(e===r){o.succeed(`You're already on the latest version: 
${T.green(`v${e}`)}`);return}o.text=`Updating: ${T.yellow(`v${e}`)} \u2192 ${T.green(`v${r}`)}...`,await we("npm",["install","-g","@iamharshil/aix-cli@latest"]),o.succeed(`Successfully updated to ${T.green(`v${r}`)}! \u{1F680}`),M(`Restart your terminal or run ${T.cyan("aix-cli --help")} to see what's new.`)}catch(e){o.fail("Failed to update."),Z(e instanceof Error?e.message:String(e))}}import k from"chalk";import Ge from"inquirer";async function se(o,e,t){if(o==="reset"){let{confirm:n}=await Ge.prompt([{type:"confirm",name:"confirm",message:"Are you sure you want to completely reset all configuration to defaults?",default:!1}]);n?(i.reset(),u("Configuration has been reset to defaults.")):M("Reset cancelled.");return}if(o==="set"&&e&&t){let n=t;t==="true"?n=!0:t==="false"?n=!1:Number.isNaN(Number(t))||(n=Number(t)),i.set(e,n),u(`Set ${k.cyan(e)} to ${k.green(t)}`);return}if(o==="unset"&&e){i.delete(e),u(`Unset configuration key ${k.cyan(e)}`);return}console.log(),console.log(k.bold.cyan("\u2699\uFE0F AIX CLI Configuration")),console.log(k.dim("\u2500".repeat(40))),["defaultBackend","defaultProvider","model","lmStudioUrl","lmStudioPort","lmStudioContextLength","ollamaUrl","ollamaPort","defaultTimeout","autoStartServer"].forEach(n=>{let a=i.get(n);console.log(a!==void 0?` ${k.bold(n)}: ${k.green(a)}`:` ${k.bold(n)}: ${k.dim("not set")}`)}),console.log(),console.log(k.dim("Commands:")),console.log(k.dim(" aix-cli config set <key> <value>")),console.log(k.dim(" aix-cli config unset <key>")),console.log(k.dim(" aix-cli config reset")),console.log()}import Ke from"inquirer";async function le(o){let e=o.force??!1,t=o.provider?.toLowerCase();if(!e&&i.getDefaultBackend()){M("Already configured. Use --force to reconfigure.");return}let[r,n]=await Promise.all([g.checkStatus(),v.checkStatus()]),a=[];r&&a.push({name:"lmstudio",label:"LM Studio"}),n&&a.push({name:"ollama",label:"Ollama"});let s;if(t)t==="lmstudio"&&!r&&d("LM Studio is not running. 
Please start it first."),t==="ollama"&&!n&&d("Ollama is not running. Please start it first."),t!=="lmstudio"&&t!=="ollama"&&d(`Unknown provider: ${t}. Use 'lmstudio' or 'ollama'.`),s=t,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`);else if(a.length===0&&d('No backends detected. Please start LM Studio or run "ollama serve" first.'),a.length===1)s=a[0].name,u(`Detected ${a[0].label} running`),u(`Selected provider: ${a[0].label}`);else{let{backendSelection:c}=await Ke.prompt([{type:"list",name:"backendSelection",message:"Which backend would you like to use?",choices:a.map(m=>({name:m.label,value:m.name}))}]);s=c,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`)}let l=s==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();if(M(`Configured endpoint: ${l}`),i.setDefaultBackend(s),i.setDefaultProvider("claude"),!await x.isClaudeCodeInstalled()){U('Claude Code is not installed. Install it to use "aix run".');return}u("Claude connected successfully"),u("Test request passed"),console.log(),M('Setup complete! Run "aix run" to start coding.')}import C from"chalk";async function He(){let[o,e]=await Promise.all([g.checkStatus(),v.checkStatus()]),t=i.getDefaultBackend();console.log(C.bold.cyan("Providers")),console.log(C.dim("\u2500".repeat(40))),console.log(` LM Studio: ${o?C.green("running"):C.yellow("stopped")} ${t==="lmstudio"?C.dim("(default)"):""}`),console.log(` Ollama: ${e?C.green("running"):C.yellow("stopped")} ${t==="ollama"?C.dim("(default)"):""}`),console.log()}async function Ve(o){let e=o.toLowerCase(),[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="lmstudio"){t||d("LM Studio is not running. Please start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Default provider set to LM Studio");return}if(e==="ollama"){r||d("Ollama is not running. Please start it first."),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Default provider set to Ollama");return}d(`Unknown provider: ${o}. 
Use 'lmstudio' or 'ollama'.`)}async function de(o){await He()}async function G(o){o.default||d("Provider name is required. Use: aix providers set <provider_name>"),await Ve(o.default)}import b from"chalk";import ye from"ora";async function ce(o={}){let e=o.provider?.toLowerCase();if(e||(e=i.getDefaultBackend()),e||d('No provider specified. Use --provider flag or run "aix setup" first.'),e==="ollama"){let t=ye({text:"Fetching Ollama models...",color:"cyan"}).start();await v.checkStatus()||(t.fail("Ollama is not running."),d("Start Ollama with: ollama serve"));let n=await v.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Pull a model: ollama pull <model>"));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size);console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}`)}console.log();return}if(e==="lmstudio"){let t=ye({text:"Fetching LM Studio models...",color:"cyan"}).start();await g.checkStatus()||(t.fail("LM Studio is not running."),d("Start LM Studio and enable the local server."));let n=await g.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Download models in LM Studio."));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size),l=a.quantization?` ${b.dim(a.quantization)}`:"";console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}${l}`)}console.log();return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import P from"chalk";import{execa as Je}from"execa";async function me(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel();console.log(),console.log(P.bold.cyan("\u{1F527} Doctor")),console.log(P.dim("\u2500".repeat(40)));let r=[],n=!0;if((!o||!e)&&(n=!1,r.push('No default provider configured. 
Run "aix setup" first.')),o==="lmstudio"){let s=await g.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" LM Studio running")):(n=!1,r.push("LM Studio is not running")),s){let l=await g.getAvailableModels();l.length===0?(n=!1,r.push("No models available in LM Studio")):t&&(l.some(c=>c.id===t||c.name===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}if(o==="ollama"){let s=await v.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" Ollama running")):(n=!1,r.push("Ollama is not running")),s){let l=await v.getAvailableModels();l.length===0?(n=!1,r.push("No models available in Ollama")):t&&(l.some(c=>c.id===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}let a=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();try{(await fetch(`${a}/api/tags`,{method:"GET",signal:AbortSignal.timeout(3e3)})).ok?console.log(P.green("\u2713")+` Port accessible (${a})`):(n=!1,r.push(`Port not accessible at ${a}`))}catch{n=!1,r.push(`Cannot reach ${a}`)}if(o)try{(await Je("claude",["--version"])).exitCode===0&&console.log(P.green("\u2713")+" Claude Code installed")}catch{n=!1,r.push("Claude Code not installed")}if(r.length>0){console.log(),console.log(P.bold.red("Issues found:"));for(let s of r)console.log(P.red("\u2717")+" "+s)}console.log(),n&&r.length===0?(console.log(P.green("\u2713 All checks passed")),console.log()):d("Doctor check failed. Fix the issues above and try again.")}import ue from"chalk";import L from"inquirer";async function Xe(){let o=i.get("lmStudioPort"),e=i.get("ollamaPort"),t=await g.checkStatus(),r=await v.checkStatus();if(!t&&r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`LM Studio not running. Ollama is running on port ${e}. 
Use Ollama instead?`,default:!0}]);n&&(i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama as default"))}if(t&&!r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`Ollama not running. LM Studio is running on port ${o}. Use LM Studio instead?`,default:!0}]);n&&(i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio as default"))}if(!t&&!r){let{backend:n}=await L.prompt([{type:"list",name:"backend",message:"No backends running. Which would you like to start?",choices:[{name:"LM Studio",value:"lmstudio"},{name:"Ollama",value:"ollama"}]}]);n==="lmstudio"?(M("Starting LM Studio..."),await g.startServer(),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("LM Studio started and configured")):(M("To start Ollama, run: ollama serve"),U('After starting Ollama, run "aix fix" again'))}}async function Ye(){let{reset:o}=await L.prompt([{type:"confirm",name:"reset",message:"Reset all configuration? This will clear default provider and model settings.",default:!1}]);o&&(i.reset(),u("Configuration reset"))}async function fe(){console.log(),console.log(ue.bold.cyan("\u{1F527} Fix")),console.log(ue.dim("\u2500".repeat(40)));let o=i.getDefaultBackend(),e=i.getLastUsedModel();if(await Xe(),o==="lmstudio"&&await g.checkStatus()){let n=await g.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}if(o==="ollama"&&await v.checkStatus()){let n=await v.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}console.log(),console.log(ue.bold("Options:")),console.log(" 1. 
Reset config (clear all settings)");let{option:t}=await L.prompt([{type:"list",name:"option",message:"What would you like to do?",choices:[{name:"Done / Exit",value:"done"},{name:"Reset config",value:"reset"}]}]);t==="reset"&&await Ye(),console.log(),u('Fix complete. Run "aix status" to verify.')}import K from"chalk";async function H(o={}){let e=o.provider?.toLowerCase();e||d('Provider name required. Use "aix switch ollama" or "aix switch lmstudio".');let[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="ollama"){r||d("Ollama is not running. Start it with: ollama serve"),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama"),console.log(K.dim(` Endpoint: ${i.getOllamaUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}if(e==="lmstudio"){t||d("LM Studio is not running. Start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio"),console.log(K.dim(` Endpoint: ${i.getLMStudioUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import ke from"chalk";async function ge(o={}){let e=o.provider?.toLowerCase();e&&e!=="claude"&&d(`Unknown provider: ${e}. 
Use 'claude'.`);let t=i.getDefaultBackend(),r=i.getDefaultProvider(),n=i.getLastUsedModel();!t&&!r&&!n&&d("No provider connected."),i.setModel(void 0),i.setDefaultBackend(void 0),i.setDefaultProvider(void 0),u("Disconnected from Claude Code"),n&&console.log(ke.dim(` Last model: ${n}`)),console.log(),console.log(ke.green('Run "aix setup" to connect again.'))}var S=new Qe;S.name("aix-cli").description("Run Claude Code with local AI models from LM Studio or Ollama").version(ae(),"-v, --version").option("-h, --help","Display help").option("--ollama","Shortcut to use Ollama backend").option("--lmstudio","Shortcut to use LM Studio backend");function _(o=0){console.log(),console.log(be.dim(o===0?"\u{1F44B} Goodbye!":"\u274C Cancelled.")),process.exit(o)}process.on("SIGINT",()=>_(0));process.on("SIGTERM",()=>_(0));process.on("uncaughtException",o=>{o.message?.includes("ExitPromptError")||o.message?.includes("User force closed")||o.message?.includes("prompt")?_(0):(console.error(be.red("Error:"),o.message),process.exit(1))});process.on("unhandledRejection",o=>{let e=String(o);(e.includes("ExitPromptError")||e.includes("User force closed")||e.includes("prompt"))&&_(0)});S.command("init",{isDefault:!1}).aliases(["i","load"]).description("Select a backend, load a model, and configure your provider").option("-m, --model <name>","Model name or ID to load").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").action(o=>{let e=S.opts();return e.ollama&&(o.backend="ollama"),e.lmstudio&&(o.backend="lmstudio"),ee(o)});S.command("run",{isDefault:!1}).aliases(["r"]).description("Run Claude Code with a model from LM Studio or Ollama").option("-m, --model <name>","Model name or ID to use").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").option("-v, --verbose","Show verbose 
output").argument("[args...]","Additional arguments for the provider").action(async(o,e)=>{let t=S.opts();t.ollama&&(e.backend="ollama"),t.lmstudio&&(e.backend="lmstudio"),await ne({...e,args:o})});S.command("status",{isDefault:!1}).aliases(["s","stats"]).description("Show LM Studio and Ollama status and available models").action(ie);S.command("doctor",{isDefault:!1}).aliases(["d","check"]).description("Check infrastructure status").action(me);S.command("update",{isDefault:!1}).aliases(["upgrade","u"]).description("Update AIX CLI to the latest version").action(re);S.command("config [action] [key] [value]",{isDefault:!1}).aliases(["c","settings"]).description("View, set, or reset AIX CLI configuration constraints").action(se);S.command("setup",{isDefault:!1}).description("One-command default setup for first-time users").option("--provider <name>","Provider to use (lmstudio or ollama)").option("--force","Overwrite existing configuration").hook("preAction",o=>{(process.argv.includes("-h")||process.argv.includes("--help"))&&o.help()}).action(o=>{let e=S.opts();return e.ollama&&(o.provider="ollama"),e.lmstudio&&(o.provider="lmstudio"),le(o)});function B(){return process.argv.includes("-h")||process.argv.includes("--help")}var V=S.command("providers",{isDefault:!1}).description("List or set default provider");V.command("list",{isDefault:!1}).description("List available providers").action(()=>B()?V.help():de());V.command("set",{isDefault:!1}).description("Set default provider").argument("[name]","Provider name (lmstudio or ollama)").action(o=>B()?V.commands.find(e=>e.name()==="set")?.help():o?G({default:o}):G({default:""}));var Me=S.command("models",{isDefault:!1}).description("List available models");Me.command("list",{isDefault:!1}).description("List models from a provider").option("--provider <name>","Provider to use (ollama or lmstudio)").action(o=>B()?Me.help():ce(o));S.command("fix",{isDefault:!1}).description("Fix infrastructure 
issues").action(()=>B()?S.commands.find(o=>o.name()==="fix")?.help():fe());S.command("switch",{isDefault:!1}).description("Switch to a different provider").argument("[provider]","Provider name (ollama or lmstudio)").action(o=>B()?S.commands.find(e=>e.name()==="switch")?.help():o?H({provider:o}):H({provider:""}));S.command("disconnect",{isDefault:!1}).description("Disconnect from a provider").argument("[provider]","Provider to disconnect (claude)").action(o=>B()?S.commands.find(e=>e.name()==="disconnect")?.help():ge({provider:o}));S.parse();
15
+ `)));try{await Ie("claude",["--model",o,...e],{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:o,ANTHROPIC_BASE_URL:r,ANTHROPIC_AUTH_TOKEN:"ollama",ANTHROPIC_API_KEY:""}})}catch(n){d(`Failed to run Claude Code: ${n instanceof Error?n.message:"Unknown error"}`)}}import h from"chalk";async function ie(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel(),[r,n,a]=await Promise.all([g.getStatus(),v.getStatus(),x.isClaudeCodeInstalled()]);if(console.log(),console.log(h.bold.cyan("\u25CF Status")),console.log(h.dim("\u2500".repeat(50))),console.log(),console.log(h.bold("Active")),console.log(` ${h.dim("\u25B8")} Provider: ${o?h.cyan(o):h.dim("not set")}`),console.log(` ${h.dim("\u25B8")} Tool: ${e?h.cyan(e):h.dim("not set")}`),o){let m=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();console.log(` ${h.dim("\u25B8")} Endpoint: ${h.cyan(m)}`)}t&&console.log(` ${h.dim("\u25B8")} Model: ${h.green(t)}`),console.log(),console.log(h.bold("Tools")),console.log(` Claude Code: ${a?h.green("installed"):h.red("not installed")}`),console.log(),console.log(h.bold("Backends"));let s=r.running?h.green("running"):h.yellow("stopped"),l=n.running?h.green("running"):h.yellow("stopped"),f=o==="lmstudio",c=o==="ollama";console.log(` LM Studio: ${s}${f?h.dim(" (default)"):""}`),console.log(` Ollama: ${l}${c?h.dim(" (default)"):""}`),console.log()}import je from"ora";import T from"chalk";import{execa as we}from"execa";import{readFileSync as qe}from"fs";import{fileURLToPath as We}from"url";function ae(){try{let o=We(new URL("../../package.json",import.meta.url)),e=JSON.parse(qe(o,"utf8"));return String(e.version)}catch{return"unknown"}}async function re(){let o=je({text:"Checking for updates...",color:"cyan"}).start();try{let e=ae();if(e==="unknown"){o.fail("Could not determine current version.");return}let{stdout:t}=await we("npm",["view","@iamharshil/aix-cli","version"]),r=t.trim();if(e===r){o.succeed(`You're already on the latest version: 
${T.green(`v${e}`)}`);return}o.text=`Updating: ${T.yellow(`v${e}`)} \u2192 ${T.green(`v${r}`)}...`,await we("npm",["install","-g","@iamharshil/aix-cli@latest"]),o.succeed(`Successfully updated to ${T.green(`v${r}`)}! \u{1F680}`),M(`Restart your terminal or run ${T.cyan("aix-cli --help")} to see what's new.`)}catch(e){o.fail("Failed to update."),Z(e instanceof Error?e.message:String(e))}}import k from"chalk";import Ge from"inquirer";async function se(o,e,t){if(o==="reset"){let{confirm:n}=await Ge.prompt([{type:"confirm",name:"confirm",message:"Are you sure you want to completely reset all configuration to defaults?",default:!1}]);n?(i.reset(),u("Configuration has been reset to defaults.")):M("Reset cancelled.");return}if(o==="set"&&e&&t){let n=t;t==="true"?n=!0:t==="false"?n=!1:Number.isNaN(Number(t))||(n=Number(t)),i.set(e,n),u(`Set ${k.cyan(e)} to ${k.green(t)}`);return}if(o==="unset"&&e){i.delete(e),u(`Unset configuration key ${k.cyan(e)}`);return}console.log(),console.log(k.bold.cyan("\u2699\uFE0F AIX CLI Configuration")),console.log(k.dim("\u2500".repeat(40))),["defaultBackend","defaultProvider","model","lmStudioUrl","lmStudioPort","lmStudioContextLength","ollamaUrl","ollamaPort","defaultTimeout","autoStartServer"].forEach(n=>{let a=i.get(n);console.log(a!==void 0?` ${k.bold(n)}: ${k.green(a)}`:` ${k.bold(n)}: ${k.dim("not set")}`)}),console.log(),console.log(k.dim("Commands:")),console.log(k.dim(" aix-cli config set <key> <value>")),console.log(k.dim(" aix-cli config unset <key>")),console.log(k.dim(" aix-cli config reset")),console.log()}import Ke from"inquirer";async function le(o){let e=o.force??!1,t=o.provider?.toLowerCase();if(!e&&i.getDefaultBackend()){M("Already configured. Use --force to reconfigure.");return}let[r,n]=await Promise.all([g.checkStatus(),v.checkStatus()]),a=[];r&&a.push({name:"lmstudio",label:"LM Studio"}),n&&a.push({name:"ollama",label:"Ollama"});let s;if(t)t==="lmstudio"&&!r&&d("LM Studio is not running. 
Please start it first."),t==="ollama"&&!n&&d("Ollama is not running. Please start it first."),t!=="lmstudio"&&t!=="ollama"&&d(`Unknown provider: ${t}. Use 'lmstudio' or 'ollama'.`),s=t,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`);else if(a.length===0&&d('No backends detected. Please start LM Studio or run "ollama serve" first.'),a.length===1)s=a[0].name,u(`Detected ${a[0].label} running`),u(`Selected provider: ${a[0].label}`);else{let{backendSelection:c}=await Ke.prompt([{type:"list",name:"backendSelection",message:"Which backend would you like to use?",choices:a.map(m=>({name:m.label,value:m.name}))}]);s=c,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`)}let l=s==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();if(M(`Configured endpoint: ${l}`),i.setDefaultBackend(s),i.setDefaultProvider("claude"),!await x.isClaudeCodeInstalled()){U('Claude Code is not installed. Install it to use "aix run".');return}u("Claude connected successfully"),u("Test request passed"),console.log(),M('Setup complete! Run "aix run" to start coding.')}import C from"chalk";async function He(){let[o,e]=await Promise.all([g.checkStatus(),v.checkStatus()]),t=i.getDefaultBackend();console.log(C.bold.cyan("Providers")),console.log(C.dim("\u2500".repeat(40))),console.log(` LM Studio: ${o?C.green("running"):C.yellow("stopped")} ${t==="lmstudio"?C.dim("(default)"):""}`),console.log(` Ollama: ${e?C.green("running"):C.yellow("stopped")} ${t==="ollama"?C.dim("(default)"):""}`),console.log()}async function Ve(o){let e=o.toLowerCase(),[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="lmstudio"){t||d("LM Studio is not running. Please start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Default provider set to LM Studio");return}if(e==="ollama"){r||d("Ollama is not running. Please start it first."),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Default provider set to Ollama");return}d(`Unknown provider: ${o}. 
Use 'lmstudio' or 'ollama'.`)}async function de(o){await He()}async function G(o){o.default||d("Provider name is required. Use: aix providers set <provider_name>"),await Ve(o.default)}import b from"chalk";import ye from"ora";async function ce(o={}){let e=o.provider?.toLowerCase();if(e||(e=i.getDefaultBackend()),e||d('No provider specified. Use --provider flag or run "aix setup" first.'),e==="ollama"){let t=ye({text:"Fetching Ollama models...",color:"cyan"}).start();await v.checkStatus()||(t.fail("Ollama is not running."),d("Start Ollama with: ollama serve"));let n=await v.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Pull a model: ollama pull <model>"));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size);console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}`)}console.log();return}if(e==="lmstudio"){let t=ye({text:"Fetching LM Studio models...",color:"cyan"}).start();await g.checkStatus()||(t.fail("LM Studio is not running."),d("Start LM Studio and enable the local server."));let n=await g.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Download models in LM Studio."));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size),l=a.quantization?` ${b.dim(a.quantization)}`:"";console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}${l}`)}console.log();return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import P from"chalk";import{execa as Je}from"execa";async function me(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel();console.log(),console.log(P.bold.cyan("\u{1F527} Doctor")),console.log(P.dim("\u2500".repeat(40)));let r=[],n=!0;if((!o||!e)&&(n=!1,r.push('No default provider configured. 
Run "aix setup" first.')),o==="lmstudio"){let s=await g.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" LM Studio running")):(n=!1,r.push("LM Studio is not running")),s){let l=await g.getAvailableModels();l.length===0?(n=!1,r.push("No models available in LM Studio")):t&&(l.some(c=>c.id===t||c.name===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}if(o==="ollama"){let s=await v.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" Ollama running")):(n=!1,r.push("Ollama is not running")),s){let l=await v.getAvailableModels();l.length===0?(n=!1,r.push("No models available in Ollama")):t&&(l.some(c=>c.id===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}let a=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();try{(await fetch(`${a}/api/tags`,{method:"GET",signal:AbortSignal.timeout(3e3)})).ok?console.log(P.green("\u2713")+` Port accessible (${a})`):(n=!1,r.push(`Port not accessible at ${a}`))}catch{n=!1,r.push(`Cannot reach ${a}`)}if(o)try{(await Je("claude",["--version"])).exitCode===0&&console.log(P.green("\u2713")+" Claude Code installed")}catch{n=!1,r.push("Claude Code not installed")}if(r.length>0){console.log(),console.log(P.bold.red("Issues found:"));for(let s of r)console.log(P.red("\u2717")+" "+s)}console.log(),n&&r.length===0?(console.log(P.green("\u2713 All checks passed")),console.log()):d("Doctor check failed. Fix the issues above and try again.")}import ue from"chalk";import L from"inquirer";async function Xe(){let o=i.get("lmStudioPort"),e=i.get("ollamaPort"),t=await g.checkStatus(),r=await v.checkStatus();if(!t&&r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`LM Studio not running. Ollama is running on port ${e}. 
Use Ollama instead?`,default:!0}]);n&&(i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama as default"))}if(t&&!r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`Ollama not running. LM Studio is running on port ${o}. Use LM Studio instead?`,default:!0}]);n&&(i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio as default"))}if(!t&&!r){let{backend:n}=await L.prompt([{type:"list",name:"backend",message:"No backends running. Which would you like to start?",choices:[{name:"LM Studio",value:"lmstudio"},{name:"Ollama",value:"ollama"}]}]);n==="lmstudio"?(M("Starting LM Studio..."),await g.startServer(),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("LM Studio started and configured")):(M("To start Ollama, run: ollama serve"),U('After starting Ollama, run "aix fix" again'))}}async function Ye(){let{reset:o}=await L.prompt([{type:"confirm",name:"reset",message:"Reset all configuration? This will clear default provider and model settings.",default:!1}]);o&&(i.reset(),u("Configuration reset"))}async function fe(){console.log(),console.log(ue.bold.cyan("\u{1F527} Fix")),console.log(ue.dim("\u2500".repeat(40)));let o=i.getDefaultBackend(),e=i.getLastUsedModel();if(await Xe(),o==="lmstudio"&&await g.checkStatus()){let n=await g.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}if(o==="ollama"&&await v.checkStatus()){let n=await v.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}console.log(),console.log(ue.bold("Options:")),console.log(" 1. 
Reset config (clear all settings)");let{option:t}=await L.prompt([{type:"list",name:"option",message:"What would you like to do?",choices:[{name:"Done / Exit",value:"done"},{name:"Reset config",value:"reset"}]}]);t==="reset"&&await Ye(),console.log(),u('Fix complete. Run "aix status" to verify.')}import K from"chalk";async function H(o={}){let e=o.provider?.toLowerCase();e||d('Provider name required. Use "aix switch ollama" or "aix switch lmstudio".');let[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="ollama"){r||d("Ollama is not running. Start it with: ollama serve"),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama"),console.log(K.dim(` Endpoint: ${i.getOllamaUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}if(e==="lmstudio"){t||d("LM Studio is not running. Start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio"),console.log(K.dim(` Endpoint: ${i.getLMStudioUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import ke from"chalk";async function ge(o={}){let e=o.provider?.toLowerCase();e&&e!=="claude"&&d(`Unknown provider: ${e}. 
Use 'claude'.`);let t=i.getDefaultBackend(),r=i.getDefaultProvider(),n=i.getLastUsedModel();!t&&!r&&!n&&d("No provider connected."),i.setModel(void 0),i.setDefaultBackend(void 0),i.setDefaultProvider(void 0),u("Disconnected from Claude Code"),n&&console.log(ke.dim(` Last model: ${n}`)),console.log(),console.log(ke.green('Run "aix setup" to connect again.'))}var S=new Qe;S.name("aix").description("Run Claude Code with local AI models from LM Studio or Ollama").version(ae(),"-v, --version").option("-h, --help","Display help").option("--ollama","Shortcut to use Ollama backend").option("--lmstudio","Shortcut to use LM Studio backend");function _(o=0){console.log(),console.log(be.dim(o===0?"\u{1F44B} Goodbye!":"\u274C Cancelled.")),process.exit(o)}process.on("SIGINT",()=>_(0));process.on("SIGTERM",()=>_(0));process.on("uncaughtException",o=>{o.message?.includes("ExitPromptError")||o.message?.includes("User force closed")||o.message?.includes("prompt")?_(0):(console.error(be.red("Error:"),o.message),process.exit(1))});process.on("unhandledRejection",o=>{let e=String(o);(e.includes("ExitPromptError")||e.includes("User force closed")||e.includes("prompt"))&&_(0)});S.command("init",{isDefault:!1}).aliases(["i","load"]).description("Select a backend, load a model, and configure your provider").option("-m, --model <name>","Model name or ID to load").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").action(o=>{let e=S.opts();return e.ollama&&(o.backend="ollama"),e.lmstudio&&(o.backend="lmstudio"),ee(o)});S.command("run",{isDefault:!1}).aliases(["r"]).description("Run Claude Code with a model from LM Studio or Ollama").option("-m, --model <name>","Model name or ID to use").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").option("-v, --verbose","Show verbose 
output").argument("[args...]","Additional arguments for the provider").action(async(o,e)=>{let t=S.opts();t.ollama&&(e.backend="ollama"),t.lmstudio&&(e.backend="lmstudio"),await ne({...e,args:o})});S.command("status",{isDefault:!1}).aliases(["s","stats"]).description("Show LM Studio and Ollama status and available models").action(ie);S.command("doctor",{isDefault:!1}).aliases(["d","check"]).description("Check infrastructure status").action(me);S.command("update",{isDefault:!1}).aliases(["upgrade","u"]).description("Update AIX CLI to the latest version").action(re);S.command("config [action] [key] [value]",{isDefault:!1}).aliases(["c","settings"]).description("View, set, or reset AIX CLI configuration constraints").action(se);S.command("setup",{isDefault:!1}).description("One-command default setup for first-time users").option("--provider <name>","Provider to use (lmstudio or ollama)").option("--force","Overwrite existing configuration").hook("preAction",o=>{(process.argv.includes("-h")||process.argv.includes("--help"))&&o.help()}).action(o=>{let e=S.opts();return e.ollama&&(o.provider="ollama"),e.lmstudio&&(o.provider="lmstudio"),le(o)});function B(){return process.argv.includes("-h")||process.argv.includes("--help")}var V=S.command("providers",{isDefault:!1}).description("List or set default provider");V.command("list",{isDefault:!1}).description("List available providers").action(()=>B()?V.help():de());V.command("set",{isDefault:!1}).description("Set default provider").argument("[name]","Provider name (lmstudio or ollama)").action(o=>B()?V.commands.find(e=>e.name()==="set")?.help():o?G({default:o}):G({default:""}));var Me=S.command("models",{isDefault:!1}).description("List available models");Me.command("list",{isDefault:!1}).description("List models from a provider").option("--provider <name>","Provider to use (ollama or lmstudio)").action(o=>B()?Me.help():ce(o));S.command("fix",{isDefault:!1}).description("Fix infrastructure 
issues").action(()=>B()?S.commands.find(o=>o.name()==="fix")?.help():fe());S.command("switch",{isDefault:!1}).description("Switch to a different provider").argument("[provider]","Provider name (ollama or lmstudio)").action(o=>B()?S.commands.find(e=>e.name()==="switch")?.help():o?H({provider:o}):H({provider:""}));S.command("disconnect",{isDefault:!1}).description("Disconnect from a provider").argument("[provider]","Provider to disconnect (claude)").action(o=>B()?S.commands.find(e=>e.name()==="disconnect")?.help():ge({provider:o}));S.parse();
package/dist/index.js CHANGED
@@ -12,4 +12,4 @@ Starting ${r} with model: ${e}
12
12
  Starting Claude Code with Ollama at ${r}...
13
13
  `)),t&&(console.log(D.dim(`Running: claude --model ${o}
14
14
  `)),console.log(D.dim(`ANTHROPIC_BASE_URL=${r}
15
- `)));try{await Ie("claude",["--model",o,...e],{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:o,ANTHROPIC_BASE_URL:r,ANTHROPIC_AUTH_TOKEN:"ollama",ANTHROPIC_API_KEY:""}})}catch(n){d(`Failed to run Claude Code: ${n instanceof Error?n.message:"Unknown error"}`)}}import h from"chalk";async function ie(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel(),[r,n,a]=await Promise.all([g.getStatus(),v.getStatus(),x.isClaudeCodeInstalled()]);if(console.log(),console.log(h.bold.cyan("\u25CF Status")),console.log(h.dim("\u2500".repeat(50))),console.log(),console.log(h.bold("Active")),console.log(` ${h.dim("\u25B8")} Provider: ${o?h.cyan(o):h.dim("not set")}`),console.log(` ${h.dim("\u25B8")} Tool: ${e?h.cyan(e):h.dim("not set")}`),o){let m=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();console.log(` ${h.dim("\u25B8")} Endpoint: ${h.cyan(m)}`)}t&&console.log(` ${h.dim("\u25B8")} Model: ${h.green(t)}`),console.log(),console.log(h.bold("Tools")),console.log(` Claude Code: ${a?h.green("installed"):h.red("not installed")}`),console.log(),console.log(h.bold("Backends"));let s=r.running?h.green("running"):h.yellow("stopped"),l=n.running?h.green("running"):h.yellow("stopped"),f=o==="lmstudio",c=o==="ollama";console.log(` LM Studio: ${s}${f?h.dim(" (default)"):""}`),console.log(` Ollama: ${l}${c?h.dim(" (default)"):""}`),console.log()}import je from"ora";import T from"chalk";import{execa as we}from"execa";import{readFileSync as qe}from"fs";import{fileURLToPath as We}from"url";function ae(){try{let o=We(new URL("../../package.json",import.meta.url)),e=JSON.parse(qe(o,"utf8"));return String(e.version)}catch{return"unknown"}}async function re(){let o=je({text:"Checking for updates...",color:"cyan"}).start();try{let e=ae();if(e==="unknown"){o.fail("Could not determine current version.");return}let{stdout:t}=await we("npm",["view","@iamharshil/aix-cli","version"]),r=t.trim();if(e===r){o.succeed(`You're already on the latest version: 
${T.green(`v${e}`)}`);return}o.text=`Updating: ${T.yellow(`v${e}`)} \u2192 ${T.green(`v${r}`)}...`,await we("npm",["install","-g","@iamharshil/aix-cli@latest"]),o.succeed(`Successfully updated to ${T.green(`v${r}`)}! \u{1F680}`),M(`Restart your terminal or run ${T.cyan("aix-cli --help")} to see what's new.`)}catch(e){o.fail("Failed to update."),Z(e instanceof Error?e.message:String(e))}}import k from"chalk";import Ge from"inquirer";async function se(o,e,t){if(o==="reset"){let{confirm:n}=await Ge.prompt([{type:"confirm",name:"confirm",message:"Are you sure you want to completely reset all configuration to defaults?",default:!1}]);n?(i.reset(),u("Configuration has been reset to defaults.")):M("Reset cancelled.");return}if(o==="set"&&e&&t){let n=t;t==="true"?n=!0:t==="false"?n=!1:Number.isNaN(Number(t))||(n=Number(t)),i.set(e,n),u(`Set ${k.cyan(e)} to ${k.green(t)}`);return}if(o==="unset"&&e){i.delete(e),u(`Unset configuration key ${k.cyan(e)}`);return}console.log(),console.log(k.bold.cyan("\u2699\uFE0F AIX CLI Configuration")),console.log(k.dim("\u2500".repeat(40))),["defaultBackend","defaultProvider","model","lmStudioUrl","lmStudioPort","lmStudioContextLength","ollamaUrl","ollamaPort","defaultTimeout","autoStartServer"].forEach(n=>{let a=i.get(n);console.log(a!==void 0?` ${k.bold(n)}: ${k.green(a)}`:` ${k.bold(n)}: ${k.dim("not set")}`)}),console.log(),console.log(k.dim("Commands:")),console.log(k.dim(" aix-cli config set <key> <value>")),console.log(k.dim(" aix-cli config unset <key>")),console.log(k.dim(" aix-cli config reset")),console.log()}import Ke from"inquirer";async function le(o){let e=o.force??!1,t=o.provider?.toLowerCase();if(!e&&i.getDefaultBackend()){M("Already configured. Use --force to reconfigure.");return}let[r,n]=await Promise.all([g.checkStatus(),v.checkStatus()]),a=[];r&&a.push({name:"lmstudio",label:"LM Studio"}),n&&a.push({name:"ollama",label:"Ollama"});let s;if(t)t==="lmstudio"&&!r&&d("LM Studio is not running. 
Please start it first."),t==="ollama"&&!n&&d("Ollama is not running. Please start it first."),t!=="lmstudio"&&t!=="ollama"&&d(`Unknown provider: ${t}. Use 'lmstudio' or 'ollama'.`),s=t,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`);else if(a.length===0&&d('No backends detected. Please start LM Studio or run "ollama serve" first.'),a.length===1)s=a[0].name,u(`Detected ${a[0].label} running`),u(`Selected provider: ${a[0].label}`);else{let{backendSelection:c}=await Ke.prompt([{type:"list",name:"backendSelection",message:"Which backend would you like to use?",choices:a.map(m=>({name:m.label,value:m.name}))}]);s=c,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`)}let l=s==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();if(M(`Configured endpoint: ${l}`),i.setDefaultBackend(s),i.setDefaultProvider("claude"),!await x.isClaudeCodeInstalled()){U('Claude Code is not installed. Install it to use "aix run".');return}u("Claude connected successfully"),u("Test request passed"),console.log(),M('Setup complete! Run "aix run" to start coding.')}import C from"chalk";async function He(){let[o,e]=await Promise.all([g.checkStatus(),v.checkStatus()]),t=i.getDefaultBackend();console.log(C.bold.cyan("Providers")),console.log(C.dim("\u2500".repeat(40))),console.log(` LM Studio: ${o?C.green("running"):C.yellow("stopped")} ${t==="lmstudio"?C.dim("(default)"):""}`),console.log(` Ollama: ${e?C.green("running"):C.yellow("stopped")} ${t==="ollama"?C.dim("(default)"):""}`),console.log()}async function Ve(o){let e=o.toLowerCase(),[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="lmstudio"){t||d("LM Studio is not running. Please start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Default provider set to LM Studio");return}if(e==="ollama"){r||d("Ollama is not running. Please start it first."),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Default provider set to Ollama");return}d(`Unknown provider: ${o}. 
Use 'lmstudio' or 'ollama'.`)}async function de(o){await He()}async function G(o){o.default||d("Provider name is required. Use: aix providers set <provider_name>"),await Ve(o.default)}import b from"chalk";import ye from"ora";async function ce(o={}){let e=o.provider?.toLowerCase();if(e||(e=i.getDefaultBackend()),e||d('No provider specified. Use --provider flag or run "aix setup" first.'),e==="ollama"){let t=ye({text:"Fetching Ollama models...",color:"cyan"}).start();await v.checkStatus()||(t.fail("Ollama is not running."),d("Start Ollama with: ollama serve"));let n=await v.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Pull a model: ollama pull <model>"));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size);console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}`)}console.log();return}if(e==="lmstudio"){let t=ye({text:"Fetching LM Studio models...",color:"cyan"}).start();await g.checkStatus()||(t.fail("LM Studio is not running."),d("Start LM Studio and enable the local server."));let n=await g.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Download models in LM Studio."));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size),l=a.quantization?` ${b.dim(a.quantization)}`:"";console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}${l}`)}console.log();return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import P from"chalk";import{execa as Je}from"execa";async function me(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel();console.log(),console.log(P.bold.cyan("\u{1F527} Doctor")),console.log(P.dim("\u2500".repeat(40)));let r=[],n=!0;if((!o||!e)&&(n=!1,r.push('No default provider configured. 
Run "aix setup" first.')),o==="lmstudio"){let s=await g.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" LM Studio running")):(n=!1,r.push("LM Studio is not running")),s){let l=await g.getAvailableModels();l.length===0?(n=!1,r.push("No models available in LM Studio")):t&&(l.some(c=>c.id===t||c.name===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}if(o==="ollama"){let s=await v.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" Ollama running")):(n=!1,r.push("Ollama is not running")),s){let l=await v.getAvailableModels();l.length===0?(n=!1,r.push("No models available in Ollama")):t&&(l.some(c=>c.id===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}let a=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();try{(await fetch(`${a}/api/tags`,{method:"GET",signal:AbortSignal.timeout(3e3)})).ok?console.log(P.green("\u2713")+` Port accessible (${a})`):(n=!1,r.push(`Port not accessible at ${a}`))}catch{n=!1,r.push(`Cannot reach ${a}`)}if(o)try{(await Je("claude",["--version"])).exitCode===0&&console.log(P.green("\u2713")+" Claude Code installed")}catch{n=!1,r.push("Claude Code not installed")}if(r.length>0){console.log(),console.log(P.bold.red("Issues found:"));for(let s of r)console.log(P.red("\u2717")+" "+s)}console.log(),n&&r.length===0?(console.log(P.green("\u2713 All checks passed")),console.log()):d("Doctor check failed. Fix the issues above and try again.")}import ue from"chalk";import L from"inquirer";async function Xe(){let o=i.get("lmStudioPort"),e=i.get("ollamaPort"),t=await g.checkStatus(),r=await v.checkStatus();if(!t&&r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`LM Studio not running. Ollama is running on port ${e}. 
Use Ollama instead?`,default:!0}]);n&&(i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama as default"))}if(t&&!r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`Ollama not running. LM Studio is running on port ${o}. Use LM Studio instead?`,default:!0}]);n&&(i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio as default"))}if(!t&&!r){let{backend:n}=await L.prompt([{type:"list",name:"backend",message:"No backends running. Which would you like to start?",choices:[{name:"LM Studio",value:"lmstudio"},{name:"Ollama",value:"ollama"}]}]);n==="lmstudio"?(M("Starting LM Studio..."),await g.startServer(),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("LM Studio started and configured")):(M("To start Ollama, run: ollama serve"),U('After starting Ollama, run "aix fix" again'))}}async function Ye(){let{reset:o}=await L.prompt([{type:"confirm",name:"reset",message:"Reset all configuration? This will clear default provider and model settings.",default:!1}]);o&&(i.reset(),u("Configuration reset"))}async function fe(){console.log(),console.log(ue.bold.cyan("\u{1F527} Fix")),console.log(ue.dim("\u2500".repeat(40)));let o=i.getDefaultBackend(),e=i.getLastUsedModel();if(await Xe(),o==="lmstudio"&&await g.checkStatus()){let n=await g.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}if(o==="ollama"&&await v.checkStatus()){let n=await v.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}console.log(),console.log(ue.bold("Options:")),console.log(" 1. 
Reset config (clear all settings)");let{option:t}=await L.prompt([{type:"list",name:"option",message:"What would you like to do?",choices:[{name:"Done / Exit",value:"done"},{name:"Reset config",value:"reset"}]}]);t==="reset"&&await Ye(),console.log(),u('Fix complete. Run "aix status" to verify.')}import K from"chalk";async function H(o={}){let e=o.provider?.toLowerCase();e||d('Provider name required. Use "aix switch ollama" or "aix switch lmstudio".');let[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="ollama"){r||d("Ollama is not running. Start it with: ollama serve"),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama"),console.log(K.dim(` Endpoint: ${i.getOllamaUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}if(e==="lmstudio"){t||d("LM Studio is not running. Start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio"),console.log(K.dim(` Endpoint: ${i.getLMStudioUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import ke from"chalk";async function ge(o={}){let e=o.provider?.toLowerCase();e&&e!=="claude"&&d(`Unknown provider: ${e}. 
Use 'claude'.`);let t=i.getDefaultBackend(),r=i.getDefaultProvider(),n=i.getLastUsedModel();!t&&!r&&!n&&d("No provider connected."),i.setModel(void 0),i.setDefaultBackend(void 0),i.setDefaultProvider(void 0),u("Disconnected from Claude Code"),n&&console.log(ke.dim(` Last model: ${n}`)),console.log(),console.log(ke.green('Run "aix setup" to connect again.'))}var S=new Qe;S.name("aix-cli").description("Run Claude Code with local AI models from LM Studio or Ollama").version(ae(),"-v, --version").option("-h, --help","Display help").option("--ollama","Shortcut to use Ollama backend").option("--lmstudio","Shortcut to use LM Studio backend");function _(o=0){console.log(),console.log(be.dim(o===0?"\u{1F44B} Goodbye!":"\u274C Cancelled.")),process.exit(o)}process.on("SIGINT",()=>_(0));process.on("SIGTERM",()=>_(0));process.on("uncaughtException",o=>{o.message?.includes("ExitPromptError")||o.message?.includes("User force closed")||o.message?.includes("prompt")?_(0):(console.error(be.red("Error:"),o.message),process.exit(1))});process.on("unhandledRejection",o=>{let e=String(o);(e.includes("ExitPromptError")||e.includes("User force closed")||e.includes("prompt"))&&_(0)});S.command("init",{isDefault:!1}).aliases(["i","load"]).description("Select a backend, load a model, and configure your provider").option("-m, --model <name>","Model name or ID to load").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").action(o=>{let e=S.opts();return e.ollama&&(o.backend="ollama"),e.lmstudio&&(o.backend="lmstudio"),ee(o)});S.command("run",{isDefault:!1}).aliases(["r"]).description("Run Claude Code with a model from LM Studio or Ollama").option("-m, --model <name>","Model name or ID to use").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").option("-v, --verbose","Show verbose 
output").argument("[args...]","Additional arguments for the provider").action(async(o,e)=>{let t=S.opts();t.ollama&&(e.backend="ollama"),t.lmstudio&&(e.backend="lmstudio"),await ne({...e,args:o})});S.command("status",{isDefault:!1}).aliases(["s","stats"]).description("Show LM Studio and Ollama status and available models").action(ie);S.command("doctor",{isDefault:!1}).aliases(["d","check"]).description("Check infrastructure status").action(me);S.command("update",{isDefault:!1}).aliases(["upgrade","u"]).description("Update AIX CLI to the latest version").action(re);S.command("config [action] [key] [value]",{isDefault:!1}).aliases(["c","settings"]).description("View, set, or reset AIX CLI configuration constraints").action(se);S.command("setup",{isDefault:!1}).description("One-command default setup for first-time users").option("--provider <name>","Provider to use (lmstudio or ollama)").option("--force","Overwrite existing configuration").hook("preAction",o=>{(process.argv.includes("-h")||process.argv.includes("--help"))&&o.help()}).action(o=>{let e=S.opts();return e.ollama&&(o.provider="ollama"),e.lmstudio&&(o.provider="lmstudio"),le(o)});function B(){return process.argv.includes("-h")||process.argv.includes("--help")}var V=S.command("providers",{isDefault:!1}).description("List or set default provider");V.command("list",{isDefault:!1}).description("List available providers").action(()=>B()?V.help():de());V.command("set",{isDefault:!1}).description("Set default provider").argument("[name]","Provider name (lmstudio or ollama)").action(o=>B()?V.commands.find(e=>e.name()==="set")?.help():o?G({default:o}):G({default:""}));var Me=S.command("models",{isDefault:!1}).description("List available models");Me.command("list",{isDefault:!1}).description("List models from a provider").option("--provider <name>","Provider to use (ollama or lmstudio)").action(o=>B()?Me.help():ce(o));S.command("fix",{isDefault:!1}).description("Fix infrastructure 
issues").action(()=>B()?S.commands.find(o=>o.name()==="fix")?.help():fe());S.command("switch",{isDefault:!1}).description("Switch to a different provider").argument("[provider]","Provider name (ollama or lmstudio)").action(o=>B()?S.commands.find(e=>e.name()==="switch")?.help():o?H({provider:o}):H({provider:""}));S.command("disconnect",{isDefault:!1}).description("Disconnect from a provider").argument("[provider]","Provider to disconnect (claude)").action(o=>B()?S.commands.find(e=>e.name()==="disconnect")?.help():ge({provider:o}));S.parse();
15
+ `)));try{await Ie("claude",["--model",o,...e],{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:o,ANTHROPIC_BASE_URL:r,ANTHROPIC_AUTH_TOKEN:"ollama",ANTHROPIC_API_KEY:""}})}catch(n){d(`Failed to run Claude Code: ${n instanceof Error?n.message:"Unknown error"}`)}}import h from"chalk";async function ie(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel(),[r,n,a]=await Promise.all([g.getStatus(),v.getStatus(),x.isClaudeCodeInstalled()]);if(console.log(),console.log(h.bold.cyan("\u25CF Status")),console.log(h.dim("\u2500".repeat(50))),console.log(),console.log(h.bold("Active")),console.log(` ${h.dim("\u25B8")} Provider: ${o?h.cyan(o):h.dim("not set")}`),console.log(` ${h.dim("\u25B8")} Tool: ${e?h.cyan(e):h.dim("not set")}`),o){let m=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();console.log(` ${h.dim("\u25B8")} Endpoint: ${h.cyan(m)}`)}t&&console.log(` ${h.dim("\u25B8")} Model: ${h.green(t)}`),console.log(),console.log(h.bold("Tools")),console.log(` Claude Code: ${a?h.green("installed"):h.red("not installed")}`),console.log(),console.log(h.bold("Backends"));let s=r.running?h.green("running"):h.yellow("stopped"),l=n.running?h.green("running"):h.yellow("stopped"),f=o==="lmstudio",c=o==="ollama";console.log(` LM Studio: ${s}${f?h.dim(" (default)"):""}`),console.log(` Ollama: ${l}${c?h.dim(" (default)"):""}`),console.log()}import je from"ora";import T from"chalk";import{execa as we}from"execa";import{readFileSync as qe}from"fs";import{fileURLToPath as We}from"url";function ae(){try{let o=We(new URL("../../package.json",import.meta.url)),e=JSON.parse(qe(o,"utf8"));return String(e.version)}catch{return"unknown"}}async function re(){let o=je({text:"Checking for updates...",color:"cyan"}).start();try{let e=ae();if(e==="unknown"){o.fail("Could not determine current version.");return}let{stdout:t}=await we("npm",["view","@iamharshil/aix-cli","version"]),r=t.trim();if(e===r){o.succeed(`You're already on the latest version: 
${T.green(`v${e}`)}`);return}o.text=`Updating: ${T.yellow(`v${e}`)} \u2192 ${T.green(`v${r}`)}...`,await we("npm",["install","-g","@iamharshil/aix-cli@latest"]),o.succeed(`Successfully updated to ${T.green(`v${r}`)}! \u{1F680}`),M(`Restart your terminal or run ${T.cyan("aix-cli --help")} to see what's new.`)}catch(e){o.fail("Failed to update."),Z(e instanceof Error?e.message:String(e))}}import k from"chalk";import Ge from"inquirer";async function se(o,e,t){if(o==="reset"){let{confirm:n}=await Ge.prompt([{type:"confirm",name:"confirm",message:"Are you sure you want to completely reset all configuration to defaults?",default:!1}]);n?(i.reset(),u("Configuration has been reset to defaults.")):M("Reset cancelled.");return}if(o==="set"&&e&&t){let n=t;t==="true"?n=!0:t==="false"?n=!1:Number.isNaN(Number(t))||(n=Number(t)),i.set(e,n),u(`Set ${k.cyan(e)} to ${k.green(t)}`);return}if(o==="unset"&&e){i.delete(e),u(`Unset configuration key ${k.cyan(e)}`);return}console.log(),console.log(k.bold.cyan("\u2699\uFE0F AIX CLI Configuration")),console.log(k.dim("\u2500".repeat(40))),["defaultBackend","defaultProvider","model","lmStudioUrl","lmStudioPort","lmStudioContextLength","ollamaUrl","ollamaPort","defaultTimeout","autoStartServer"].forEach(n=>{let a=i.get(n);console.log(a!==void 0?` ${k.bold(n)}: ${k.green(a)}`:` ${k.bold(n)}: ${k.dim("not set")}`)}),console.log(),console.log(k.dim("Commands:")),console.log(k.dim(" aix-cli config set <key> <value>")),console.log(k.dim(" aix-cli config unset <key>")),console.log(k.dim(" aix-cli config reset")),console.log()}import Ke from"inquirer";async function le(o){let e=o.force??!1,t=o.provider?.toLowerCase();if(!e&&i.getDefaultBackend()){M("Already configured. Use --force to reconfigure.");return}let[r,n]=await Promise.all([g.checkStatus(),v.checkStatus()]),a=[];r&&a.push({name:"lmstudio",label:"LM Studio"}),n&&a.push({name:"ollama",label:"Ollama"});let s;if(t)t==="lmstudio"&&!r&&d("LM Studio is not running. 
Please start it first."),t==="ollama"&&!n&&d("Ollama is not running. Please start it first."),t!=="lmstudio"&&t!=="ollama"&&d(`Unknown provider: ${t}. Use 'lmstudio' or 'ollama'.`),s=t,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`);else if(a.length===0&&d('No backends detected. Please start LM Studio or run "ollama serve" first.'),a.length===1)s=a[0].name,u(`Detected ${a[0].label} running`),u(`Selected provider: ${a[0].label}`);else{let{backendSelection:c}=await Ke.prompt([{type:"list",name:"backendSelection",message:"Which backend would you like to use?",choices:a.map(m=>({name:m.label,value:m.name}))}]);s=c,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`)}let l=s==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();if(M(`Configured endpoint: ${l}`),i.setDefaultBackend(s),i.setDefaultProvider("claude"),!await x.isClaudeCodeInstalled()){U('Claude Code is not installed. Install it to use "aix run".');return}u("Claude connected successfully"),u("Test request passed"),console.log(),M('Setup complete! Run "aix run" to start coding.')}import C from"chalk";async function He(){let[o,e]=await Promise.all([g.checkStatus(),v.checkStatus()]),t=i.getDefaultBackend();console.log(C.bold.cyan("Providers")),console.log(C.dim("\u2500".repeat(40))),console.log(` LM Studio: ${o?C.green("running"):C.yellow("stopped")} ${t==="lmstudio"?C.dim("(default)"):""}`),console.log(` Ollama: ${e?C.green("running"):C.yellow("stopped")} ${t==="ollama"?C.dim("(default)"):""}`),console.log()}async function Ve(o){let e=o.toLowerCase(),[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="lmstudio"){t||d("LM Studio is not running. Please start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Default provider set to LM Studio");return}if(e==="ollama"){r||d("Ollama is not running. Please start it first."),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Default provider set to Ollama");return}d(`Unknown provider: ${o}. 
Use 'lmstudio' or 'ollama'.`)}async function de(o){await He()}async function G(o){o.default||d("Provider name is required. Use: aix providers set <provider_name>"),await Ve(o.default)}import b from"chalk";import ye from"ora";async function ce(o={}){let e=o.provider?.toLowerCase();if(e||(e=i.getDefaultBackend()),e||d('No provider specified. Use --provider flag or run "aix setup" first.'),e==="ollama"){let t=ye({text:"Fetching Ollama models...",color:"cyan"}).start();await v.checkStatus()||(t.fail("Ollama is not running."),d("Start Ollama with: ollama serve"));let n=await v.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Pull a model: ollama pull <model>"));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size);console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}`)}console.log();return}if(e==="lmstudio"){let t=ye({text:"Fetching LM Studio models...",color:"cyan"}).start();await g.checkStatus()||(t.fail("LM Studio is not running."),d("Start LM Studio and enable the local server."));let n=await g.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Download models in LM Studio."));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size),l=a.quantization?` ${b.dim(a.quantization)}`:"";console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}${l}`)}console.log();return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import P from"chalk";import{execa as Je}from"execa";async function me(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel();console.log(),console.log(P.bold.cyan("\u{1F527} Doctor")),console.log(P.dim("\u2500".repeat(40)));let r=[],n=!0;if((!o||!e)&&(n=!1,r.push('No default provider configured. 
Run "aix setup" first.')),o==="lmstudio"){let s=await g.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" LM Studio running")):(n=!1,r.push("LM Studio is not running")),s){let l=await g.getAvailableModels();l.length===0?(n=!1,r.push("No models available in LM Studio")):t&&(l.some(c=>c.id===t||c.name===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}if(o==="ollama"){let s=await v.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" Ollama running")):(n=!1,r.push("Ollama is not running")),s){let l=await v.getAvailableModels();l.length===0?(n=!1,r.push("No models available in Ollama")):t&&(l.some(c=>c.id===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}let a=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();try{(await fetch(`${a}/api/tags`,{method:"GET",signal:AbortSignal.timeout(3e3)})).ok?console.log(P.green("\u2713")+` Port accessible (${a})`):(n=!1,r.push(`Port not accessible at ${a}`))}catch{n=!1,r.push(`Cannot reach ${a}`)}if(o)try{(await Je("claude",["--version"])).exitCode===0&&console.log(P.green("\u2713")+" Claude Code installed")}catch{n=!1,r.push("Claude Code not installed")}if(r.length>0){console.log(),console.log(P.bold.red("Issues found:"));for(let s of r)console.log(P.red("\u2717")+" "+s)}console.log(),n&&r.length===0?(console.log(P.green("\u2713 All checks passed")),console.log()):d("Doctor check failed. Fix the issues above and try again.")}import ue from"chalk";import L from"inquirer";async function Xe(){let o=i.get("lmStudioPort"),e=i.get("ollamaPort"),t=await g.checkStatus(),r=await v.checkStatus();if(!t&&r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`LM Studio not running. Ollama is running on port ${e}. 
Use Ollama instead?`,default:!0}]);n&&(i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama as default"))}if(t&&!r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`Ollama not running. LM Studio is running on port ${o}. Use LM Studio instead?`,default:!0}]);n&&(i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio as default"))}if(!t&&!r){let{backend:n}=await L.prompt([{type:"list",name:"backend",message:"No backends running. Which would you like to start?",choices:[{name:"LM Studio",value:"lmstudio"},{name:"Ollama",value:"ollama"}]}]);n==="lmstudio"?(M("Starting LM Studio..."),await g.startServer(),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("LM Studio started and configured")):(M("To start Ollama, run: ollama serve"),U('After starting Ollama, run "aix fix" again'))}}async function Ye(){let{reset:o}=await L.prompt([{type:"confirm",name:"reset",message:"Reset all configuration? This will clear default provider and model settings.",default:!1}]);o&&(i.reset(),u("Configuration reset"))}async function fe(){console.log(),console.log(ue.bold.cyan("\u{1F527} Fix")),console.log(ue.dim("\u2500".repeat(40)));let o=i.getDefaultBackend(),e=i.getLastUsedModel();if(await Xe(),o==="lmstudio"&&await g.checkStatus()){let n=await g.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}if(o==="ollama"&&await v.checkStatus()){let n=await v.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}console.log(),console.log(ue.bold("Options:")),console.log(" 1. 
Reset config (clear all settings)");let{option:t}=await L.prompt([{type:"list",name:"option",message:"What would you like to do?",choices:[{name:"Done / Exit",value:"done"},{name:"Reset config",value:"reset"}]}]);t==="reset"&&await Ye(),console.log(),u('Fix complete. Run "aix status" to verify.')}import K from"chalk";async function H(o={}){let e=o.provider?.toLowerCase();e||d('Provider name required. Use "aix switch ollama" or "aix switch lmstudio".');let[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="ollama"){r||d("Ollama is not running. Start it with: ollama serve"),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama"),console.log(K.dim(` Endpoint: ${i.getOllamaUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}if(e==="lmstudio"){t||d("LM Studio is not running. Start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio"),console.log(K.dim(` Endpoint: ${i.getLMStudioUrl()}`)),console.log(),console.log(K.green('Ready! Run "aix run" to start coding.'));return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import ke from"chalk";async function ge(o={}){let e=o.provider?.toLowerCase();e&&e!=="claude"&&d(`Unknown provider: ${e}. 
Use 'claude'.`);let t=i.getDefaultBackend(),r=i.getDefaultProvider(),n=i.getLastUsedModel();!t&&!r&&!n&&d("No provider connected."),i.setModel(void 0),i.setDefaultBackend(void 0),i.setDefaultProvider(void 0),u("Disconnected from Claude Code"),n&&console.log(ke.dim(` Last model: ${n}`)),console.log(),console.log(ke.green('Run "aix setup" to connect again.'))}var S=new Qe;S.name("aix").description("Run Claude Code with local AI models from LM Studio or Ollama").version(ae(),"-v, --version").option("-h, --help","Display help").option("--ollama","Shortcut to use Ollama backend").option("--lmstudio","Shortcut to use LM Studio backend");function _(o=0){console.log(),console.log(be.dim(o===0?"\u{1F44B} Goodbye!":"\u274C Cancelled.")),process.exit(o)}process.on("SIGINT",()=>_(0));process.on("SIGTERM",()=>_(0));process.on("uncaughtException",o=>{o.message?.includes("ExitPromptError")||o.message?.includes("User force closed")||o.message?.includes("prompt")?_(0):(console.error(be.red("Error:"),o.message),process.exit(1))});process.on("unhandledRejection",o=>{let e=String(o);(e.includes("ExitPromptError")||e.includes("User force closed")||e.includes("prompt"))&&_(0)});S.command("init",{isDefault:!1}).aliases(["i","load"]).description("Select a backend, load a model, and configure your provider").option("-m, --model <name>","Model name or ID to load").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").action(o=>{let e=S.opts();return e.ollama&&(o.backend="ollama"),e.lmstudio&&(o.backend="lmstudio"),ee(o)});S.command("run",{isDefault:!1}).aliases(["r"]).description("Run Claude Code with a model from LM Studio or Ollama").option("-m, --model <name>","Model name or ID to use").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").option("-v, --verbose","Show verbose 
output").argument("[args...]","Additional arguments for the provider").action(async(o,e)=>{let t=S.opts();t.ollama&&(e.backend="ollama"),t.lmstudio&&(e.backend="lmstudio"),await ne({...e,args:o})});S.command("status",{isDefault:!1}).aliases(["s","stats"]).description("Show LM Studio and Ollama status and available models").action(ie);S.command("doctor",{isDefault:!1}).aliases(["d","check"]).description("Check infrastructure status").action(me);S.command("update",{isDefault:!1}).aliases(["upgrade","u"]).description("Update AIX CLI to the latest version").action(re);S.command("config [action] [key] [value]",{isDefault:!1}).aliases(["c","settings"]).description("View, set, or reset AIX CLI configuration constraints").action(se);S.command("setup",{isDefault:!1}).description("One-command default setup for first-time users").option("--provider <name>","Provider to use (lmstudio or ollama)").option("--force","Overwrite existing configuration").hook("preAction",o=>{(process.argv.includes("-h")||process.argv.includes("--help"))&&o.help()}).action(o=>{let e=S.opts();return e.ollama&&(o.provider="ollama"),e.lmstudio&&(o.provider="lmstudio"),le(o)});function B(){return process.argv.includes("-h")||process.argv.includes("--help")}var V=S.command("providers",{isDefault:!1}).description("List or set default provider");V.command("list",{isDefault:!1}).description("List available providers").action(()=>B()?V.help():de());V.command("set",{isDefault:!1}).description("Set default provider").argument("[name]","Provider name (lmstudio or ollama)").action(o=>B()?V.commands.find(e=>e.name()==="set")?.help():o?G({default:o}):G({default:""}));var Me=S.command("models",{isDefault:!1}).description("List available models");Me.command("list",{isDefault:!1}).description("List models from a provider").option("--provider <name>","Provider to use (ollama or lmstudio)").action(o=>B()?Me.help():ce(o));S.command("fix",{isDefault:!1}).description("Fix infrastructure 
issues").action(()=>B()?S.commands.find(o=>o.name()==="fix")?.help():fe());S.command("switch",{isDefault:!1}).description("Switch to a different provider").argument("[provider]","Provider name (ollama or lmstudio)").action(o=>B()?S.commands.find(e=>e.name()==="switch")?.help():o?H({provider:o}):H({provider:""}));S.command("disconnect",{isDefault:!1}).description("Disconnect from a provider").argument("[provider]","Provider to disconnect (claude)").action(o=>B()?S.commands.find(e=>e.name()==="disconnect")?.help():ge({provider:o}));S.parse();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@iamharshil/aix-cli",
3
- "version": "4.0.1",
3
+ "version": "4.0.2",
4
4
  "description": "Run Claude Code with local AI models via LM Studio or Ollama — no API keys, no cloud, complete privacy",
5
5
  "keywords": [
6
6
  "cli",
@@ -30,7 +30,7 @@
30
30
  "main": "./dist/bin/aix.js",
31
31
  "types": "./dist/index.d.ts",
32
32
  "bin": {
33
- "aix-cli": "./dist/bin/aix.js"
33
+ "aix": "./dist/bin/aix.js"
34
34
  },
35
35
  "files": [
36
36
  "dist",