@iamharshil/aix-cli 3.4.6 → 4.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +69 -10
  2. package/dist/bin/aix.js +13 -14
  3. package/dist/commands/config.d.ts +0 -1
  4. package/dist/commands/disconnect.d.ts +5 -0
  5. package/dist/commands/doctor.d.ts +1 -0
  6. package/dist/commands/fix.d.ts +2 -0
  7. package/dist/commands/index.d.ts +12 -6
  8. package/dist/commands/init.d.ts +0 -1
  9. package/dist/commands/models.d.ts +5 -0
  10. package/dist/commands/providers.d.ts +8 -0
  11. package/dist/commands/run.d.ts +0 -1
  12. package/dist/commands/setup.d.ts +6 -0
  13. package/dist/commands/status.d.ts +0 -1
  14. package/dist/commands/switch.d.ts +5 -0
  15. package/dist/commands/update.d.ts +0 -1
  16. package/dist/index.d.ts +0 -1
  17. package/dist/index.js +13 -14
  18. package/dist/services/claude.d.ts +0 -1
  19. package/dist/services/config.d.ts +3 -4
  20. package/dist/services/lmstudio.d.ts +0 -1
  21. package/dist/services/ollama.d.ts +0 -1
  22. package/dist/types/index.d.ts +0 -1
  23. package/dist/utils/format.d.ts +0 -1
  24. package/dist/utils/prompt.d.ts +0 -1
  25. package/package.json +1 -1
  26. package/dist/bin/aix.js.map +0 -7
  27. package/dist/commands/config.d.ts.map +0 -1
  28. package/dist/commands/index.d.ts.map +0 -1
  29. package/dist/commands/init.d.ts.map +0 -1
  30. package/dist/commands/run.d.ts.map +0 -1
  31. package/dist/commands/status.d.ts.map +0 -1
  32. package/dist/commands/update.d.ts.map +0 -1
  33. package/dist/index.d.ts.map +0 -1
  34. package/dist/index.js.map +0 -7
  35. package/dist/services/claude.d.ts.map +0 -1
  36. package/dist/services/config.d.ts.map +0 -1
  37. package/dist/services/lmstudio.d.ts.map +0 -1
  38. package/dist/services/ollama.d.ts.map +0 -1
  39. package/dist/types/index.d.ts.map +0 -1
  40. package/dist/utils/format.d.ts.map +0 -1
  41. package/dist/utils/prompt.d.ts.map +0 -1
package/README.md CHANGED
@@ -149,24 +149,83 @@ Shows status for both LM Studio and Ollama, including available and running mode
149
149
  aix-cli status
150
150
  ```
151
151
 
152
- ### `aix-cli doctor` — System diagnostics
152
+ ### `aix-cli doctor` — Infrastructure check
153
153
 
154
- Verifies your environment is ready to go.
154
+ Checks that the provider is running, the port is accessible, the model is available, and the Claude config is correct.
155
155
 
156
156
  ```bash
157
157
  aix-cli doctor
158
158
  ```
159
159
 
160
+ ### `aix-cli setup` — One-command setup
161
+
162
+ Quick setup for first-time users. Detects installed providers and configures defaults.
163
+
164
+ ```bash
165
+ aix-cli setup # Interactive setup
166
+ aix-cli setup --provider ollama # Use specific provider
167
+ aix-cli setup --force # Overwrite existing config
168
+ ```
169
+
170
+ ### `aix-cli providers` — Manage providers
171
+
172
+ List available providers or set a default.
173
+
174
+ ```bash
175
+ aix-cli providers list # Show providers with status
176
+ aix-cli providers set ollama # Set default provider
177
+ ```
178
+
179
+ ### `aix-cli models` — List models
180
+
181
+ Fetch and display available models from a provider.
182
+
183
+ ```bash
184
+ aix-cli models list --provider ollama
185
+ aix-cli models list --provider lmstudio
186
+ ```
187
+
188
+ ### `aix-cli switch` — Switch provider
189
+
190
+ Instantly switch providers without breaking Claude setup.
191
+
192
+ ```bash
193
+ aix-cli switch ollama
194
+ aix-cli switch lmstudio
195
+ ```
196
+
197
+ ### `aix-cli disconnect` — Disconnect
198
+
199
+ Remove the connection cleanly.
200
+
201
+ ```bash
202
+ aix-cli disconnect claude
203
+ ```
204
+
205
+ ### `aix-cli fix` — Fix issues
206
+
207
+ Fix infrastructure issues — suggest starting backends, correct ports, reset the config, and fix the model.
208
+
209
+ ```bash
210
+ aix-cli fix
211
+ ```
212
+
160
213
  ### Command Reference
161
214
 
162
- | Command | Aliases | Description |
163
- | -------- | --------------- | ------------------------------------------------ |
164
- | `run` | `r` | Run Claude Code with a local model |
165
- | `init` | `i`, `load` | Set up backend, select model, configure provider |
166
- | `status` | `s`, `stats` | Show LM Studio & Ollama status |
167
- | `doctor` | `d`, `check` | Run system diagnostics |
168
- | `update` | `upgrade`, `u` | Update AIX CLI to the latest version |
169
- | `config` | `c`, `settings` | View, set, or reset CLI configurations |
215
+ | Command | Aliases | Description |
216
+ | ------------ | --------------- | ------------------------------------------------ |
217
+ | `run` | `r` | Run Claude Code with a local model |
218
+ | `init` | `i`, `load` | Set up backend, select model, configure provider |
219
+ | `status` | `s`, `stats` | Show active provider, tool, endpoint, model |
220
+ | `doctor` | `d`, `check` | Check infrastructure status |
221
+ | `setup` | | One-command default setup |
222
+ | `providers` | | List or set default provider |
223
+ | `models` | | List available models |
224
+ | `switch` | | Switch to a different provider |
225
+ | `disconnect` | | Disconnect from provider |
226
+ | `fix` | | Fix infrastructure issues |
227
+ | `update` | `upgrade`, `u` | Update AIX CLI to the latest version |
228
+ | `config` | `c`, `settings` | View, set, or reset CLI configurations |
170
229
 
171
230
  ### Global Options
172
231
 
package/dist/bin/aix.js CHANGED
@@ -1,16 +1,15 @@
1
1
  #!/usr/bin/env node
2
- var he=Object.defineProperty;var Se=(o=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(o,{get:(e,t)=>(typeof require<"u"?require:e)[t]}):o)(function(o){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+o+'" is not supported')});var T=(o,e)=>()=>(o&&(e=o(o=0)),e);var D=(o,e)=>{for(var t in e)he(o,t,{get:e[t],enumerable:!0})};var ie={};D(ie,{ConfigService:()=>z,configService:()=>s});import ve from"conf";var z,s,M=T(()=>{"use strict";z=class{store;constructor(){this.store=new ve({projectName:"aix",defaults:{lmStudioUrl:"http://localhost",lmStudioPort:1234,lmStudioContextLength:65536,ollamaUrl:"http://localhost",ollamaPort:11434,defaultTimeout:3e4,autoStartServer:!1},clearInvalidConfig:!0})}get(e){return this.store.get(e)}set(e,t){this.store.set(e,t)}delete(e){this.store.delete(e)}setModel(e){this.store.set("model",e)}getLastUsedModel(){return this.store.get("model")}setDefaultProvider(e){this.store.set("defaultProvider",e)}getDefaultProvider(){return this.store.get("defaultProvider")}setDefaultBackend(e){this.store.set("defaultBackend",e)}getDefaultBackend(){return this.store.get("defaultBackend")}getLMStudioUrl(){let e=this.store.get("lmStudioUrl"),t=this.store.get("lmStudioPort");return`${e}:${t}`}getOllamaUrl(){let e=this.store.get("ollamaUrl"),t=this.store.get("ollamaPort");return`${e}:${t}`}reset(){this.store.clear()}},s=new z});var se={};D(se,{LMStudioService:()=>j,lmStudioService:()=>v});import{execa as _}from"execa";import ae from"ora";import V from"chalk";var ye,j,v,A=T(()=>{"use strict";M();ye=[1234,1235,1236,1237],j=class{constructor(){}getBaseUrl(){return s.getLMStudioUrl()}getApiUrl(e){return`${this.getBaseUrl()}${e}`}async checkStatus(){try{return(await fetch(this.getApiUrl("/api/status"),{method:"GET",signal:AbortSignal.timeout(3e3)})).ok}catch{return!1}}async getAvailableModels(){let e=["/api/v1/models","/api/models","/v1/models","/api/ls-model/list"];for(let t of e)try{let i=await 
fetch(this.getApiUrl(t),{method:"GET",signal:AbortSignal.timeout(1e4)});if(!i.ok)continue;let n=await i.json(),r=[];return Array.isArray(n)?r=n:n.models&&Array.isArray(n.models)?r=n.models:n.data&&Array.isArray(n.data)&&(r=n.data),r.map(d=>{let a=d;return{id:String(a.key||a.id||a.model||""),name:String(a.display_name||a.name||a.id||a.model||""),size:Number(a.size_bytes||a.size||a.file_size||0),quantization:String(a.quantization?typeof a.quantization=="object"?a.quantization.name:a.quantization:"")}}).filter(d=>d.id&&d.name)}catch{continue}return[]}async getStatus(){if(!await this.checkStatus())return{running:!1,port:s.get("lmStudioPort"),models:[]};try{let t=await fetch(this.getApiUrl("/api/status"),{method:"GET",signal:AbortSignal.timeout(1e4)});if(!t.ok)return{running:!1,port:s.get("lmStudioPort"),models:[]};let i=await t.json();return{running:!0,port:s.get("lmStudioPort"),models:i.models??[],activeModel:i.active_model}}catch{return{running:!1,port:s.get("lmStudioPort"),models:[]}}}async loadModel(e,t){let i=t??ae({text:`Loading model: ${V.cyan(e)}`,color:"cyan"}).start();try{let n=s.get("lmStudioContextLength"),r=["/api/v1/models/load","/api/model/load"],d=async(g,u)=>{let c={model:e};u&&(c.context_length=n);let m=await fetch(this.getApiUrl(g),{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(c),signal:AbortSignal.timeout(3e5)});if(m.ok)return;let S="";try{S=await m.text()}catch{S=""}let P=S?` ${S}`:"";throw new Error(`Failed to load model: ${m.status} ${m.statusText}${P}`)},a;for(let g of r)try{return await d(g,!0),i.succeed(`Model ${V.green(e)} loaded successfully`),s.setModel(e),{loadSpinner:i}}catch(u){a=u;let c=u instanceof Error?u.message:String(u);if(/context|token|max.*(context|token)|too.*large/i.test(c))try{return i.warn(`Model load failed with context_length=${n}. 
Retrying with LM Studio defaults...`),await d(g,!1),i.succeed(`Model ${V.green(e)} loaded successfully`),s.setModel(e),{loadSpinner:i}}catch(S){a=S}}throw a instanceof Error?a:new Error(String(a))}catch(n){throw i.fail(`Failed to load model: ${n instanceof Error?n.message:"Unknown error"}`),n}}async startServer(e){let t=e??ae({text:"Starting LM Studio server...",color:"cyan"}).start();try{let i=process.platform==="darwin",n=process.platform==="linux",r=process.platform==="win32",d;if(i){let a=["/Applications/LM Studio.app",`${process.env.HOME}/Applications/LM Studio.app`];for(let g of a)try{let{existsSync:u}=await import("fs");if(u(g)){d=`open "${g}" --args --server`;break}}catch{}if(d?.startsWith("open")){await _("open",[a.find(g=>{try{let{existsSync:u}=Se("fs");return u(g)}catch{return!1}})||"/Applications/LM Studio.app","--args","--server"],{detached:!0,stdio:"ignore"}),t.succeed("LM Studio server started"),await this.waitForServer(6e4);return}}else n?d=await this.findLinuxBinary():r&&(d=await this.findWindowsExecutable());if(!d)throw t.fail("LM Studio not found. 
Please install it from https://lmstudio.ai"),new Error("LM Studio not installed");await _(d,["--server"],{detached:!0,stdio:"ignore",env:{...process.env,LM_STUDIO_SERVER_PORT:String(s.get("lmStudioPort"))}}),t.succeed("LM Studio server started"),await this.waitForServer(6e4)}catch(i){throw t.fail(`Failed to start LM Studio: ${i instanceof Error?i.message:"Unknown error"}`),i}}async findLinuxBinary(){let e=["/usr/bin/lm-studio","/usr/local/bin/lm-studio",`${process.env.HOME}/.local/bin/lm-studio`];for(let t of e)try{return await _("test",["-x",t]),t}catch{continue}}async findWindowsExecutable(){let e=process.env.LOCALAPPDATA,t=process.env.PROGRAMFILES,i=[e?`${e}\\Programs\\LM Studio\\lm-studio.exe`:"",t?`${t}\\LM Studio\\lm-studio.exe`:""].filter(Boolean);for(let n of i)try{return await _("cmd",["/c","if exist",`"${n}"`,"echo","yes"]),n}catch{continue}}async waitForServer(e=6e4){let t=Date.now();for(;Date.now()-t<e;){if(await this.checkStatus())return!0;await this.sleep(2e3)}return!1}sleep(e){return new Promise(t=>setTimeout(t,e))}async findAvailablePort(){for(let e of ye)try{if((await fetch(`http://localhost:${e}/api/status`,{method:"GET",signal:AbortSignal.timeout(1e3)})).ok)return s.set("lmStudioPort",e),e}catch{continue}return s.get("lmStudioPort")}async unloadModel(e){try{await fetch(this.getApiUrl("/api/model/unload"),{method:"POST",signal:AbortSignal.timeout(5e3)})}catch{}}},v=new j});var le={};D(le,{OllamaService:()=>F,ollamaService:()=>$});import we from"chalk";var F,$,O=T(()=>{"use strict";M();F=class{getBaseUrl(){return s.getOllamaUrl()}getApiUrl(e){return`${this.getBaseUrl()}${e}`}async checkStatus(){try{return(await fetch(this.getApiUrl("/api/tags"),{method:"GET",signal:AbortSignal.timeout(3e3)})).ok}catch{return!1}}async getAvailableModels(){try{let e=await fetch(this.getApiUrl("/api/tags"),{method:"GET",signal:AbortSignal.timeout(1e4)});return e.ok?((await e.json()).models??[]).map(n=>{let 
r=n.details??{};return{id:String(n.name??n.model??""),name:String(n.name??n.model??""),size:Number(n.size??0),quantization:String(r.quantization_level??""),family:String(r.family??""),parameterSize:String(r.parameter_size??"")}}).filter(n=>n.id&&n.name):[]}catch{return[]}}async getRunningModels(){try{let e=await fetch(this.getApiUrl("/api/ps"),{method:"GET",signal:AbortSignal.timeout(5e3)});return e.ok?((await e.json()).models??[]).map(n=>String(n.name??n.model??"")).filter(Boolean):[]}catch{return[]}}async getStatus(){if(!await this.checkStatus())return{running:!1,port:s.get("ollamaPort"),models:[],runningModels:[]};let[t,i]=await Promise.all([this.getAvailableModels(),this.getRunningModels()]);return{running:!0,port:s.get("ollamaPort"),models:t,runningModels:i}}async unloadModel(e){console.log(we.dim(`
3
- Ollama automatically manages model memory. Session ended.`))}},$=new F});var ce={};D(ce,{ClaudeService:()=>W,claudeService:()=>R});import{execa as Q}from"execa";import Pe from"chalk";var W,R,Z=T(()=>{"use strict";M();W=class{async isClaudeCodeInstalled(){try{return await Q("claude",["--version"],{stdio:"ignore"}),!0}catch{return!1}}async run(e){let{model:t,args:i=[],verbose:n=!1}=e,r=this.extractProvider(t),d=this.extractModelName(t);if(!r||!d)throw new Error(`Invalid model format: ${t}. Expected format: provider/model-name`);let a=["--model",d,...i];n&&console.log(Pe.dim(`
4
- Running: claude ${a.join(" ")}
5
- `));let g=r==="ollama"?s.getOllamaUrl():s.getLMStudioUrl(),u=r==="ollama"?"ollama":"lmstudio",c=`${g}/v1/messages`;try{let m=await fetch(c,{method:"POST",headers:{"Content-Type":"application/json","x-api-key":u},body:JSON.stringify({model:d,max_tokens:1,messages:[{role:"user",content:"test"}]}),signal:AbortSignal.timeout(3e3)});if(!m.ok&&m.status>=500)throw new Error(`HTTP ${m.status} ${m.statusText}`)}catch(m){let S=m instanceof Error?m.message:String(m),P=r==="ollama"?"Claude Code requires an Anthropic-compatible API. Ollama does not support this. Use --native flag to use Ollama's built-in launch, or switch to LM Studio.":"Ensure LM Studio server is running and the Anthropic Compatibility server is enabled. Check the Developer tab in LM Studio.";throw new Error(`Claude Code could not reach an Anthropic-compatible endpoint at ${c} (${S}). ${P}`)}try{await Q("claude",a,{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:d,ANTHROPIC_BASE_URL:g,ANTHROPIC_AUTH_TOKEN:u,ANTHROPIC_API_KEY:""}})}catch(m){if(m instanceof Error&&"exitCode"in m){let S=m.exitCode;process.exit(S??1)}throw m}}extractProvider(e){return e.split("/")[0]}extractModelName(e){let t=e.split("/");if(!(t.length<2))return t.slice(1).join("/")}async getVersion(){try{return(await Q("claude",["--version"])).stdout}catch{return}}},R=new W});import{Command as Te}from"commander";import p from"chalk";A();O();M();import K from"ora";import h from"chalk";import H from"inquirer";import de from"inquirer";async function x(o,e){let t=o.map(r=>({name:`${r.name} (${r.id})`,value:r,short:r.name})),i=e?t.findIndex(r=>r.value.id===e):0;return(await de.prompt([{type:"list",name:"model",message:"Select a model to load:",choices:t,default:Math.max(0,i),pageSize:Math.min(o.length,15)}])).model}async function q(o,e=!0){return(await de.prompt([{type:"confirm",name:"confirm",message:o,default:e}])).confirm}import X from"chalk";function C(o){if(o===0)return"0 B";let 
e=1024,t=["B","KB","MB","GB","TB"],i=Math.floor(Math.log(o)/Math.log(e));return`${parseFloat((o/Math.pow(e,i)).toFixed(2))} ${t[i]}`}function y(o){console.log(X.green("\u2713")+" "+o)}function J(o){console.error(X.red("\u2717")+" "+o)}function G(o){console.log(X.blue("\u2139")+" "+o)}function f(o,e=1){J(o),process.exit(e)}async function $e(){let o=s.getDefaultBackend(),{backendSelection:e}=await H.prompt([{type:"list",name:"backendSelection",message:"Select model backend:",default:o??"lmstudio",choices:[{name:"\u{1F5A5}\uFE0F LM Studio",value:"lmstudio"},{name:"\u{1F999} Ollama",value:"ollama"}]}]),{saveDefault:t}=await H.prompt([{type:"confirm",name:"saveDefault",message:"Save as default backend?",default:!1}]);return t&&(s.setDefaultBackend(e),y(`Default backend set to ${h.cyan(e)}`)),e}async function be(o){let e=s.getDefaultProvider(),t=[{name:"Claude Code",value:"claude"}],i=e??"claude",{providerSelection:n}=await H.prompt([{type:"list",name:"providerSelection",message:"Select coding tool:",default:i,choices:t}]),{saveDefault:r}=await H.prompt([{type:"confirm",name:"saveDefault",message:"Save as default coding tool?",default:!1}]);return r&&(s.setDefaultProvider(n),y(`Default coding tool set to ${h.cyan(n)}`)),n}async function Me(o,e){let t=K({text:"Checking LM Studio status...",color:"cyan"}).start(),i=await v.checkStatus();i||(t.info("LM Studio server not running"),t.stop(),await q("Would you like to start the LM Studio server?")||f("LM Studio server must be running. Start it manually or use the Server tab in LM Studio."),await v.startServer(),i=!0),t.succeed("Connected to LM Studio");let n=K({text:"Fetching available models...",color:"cyan"}).start(),r=await v.getAvailableModels();r.length===0&&(n.fail("No models found. 
Download some models in LM Studio first."),f("No models available")),n.succeed(`Found ${h.bold(r.length)} model${r.length===1?"":"s"}`),console.log(),console.log(h.bold("Available Models:")),console.log(h.dim("\u2500".repeat(process.stdout.columns||80))),r.forEach((c,m)=>{let S=C(c.size),P=c.loaded?h.green(" [LOADED]"):"";console.log(` ${h.dim(String(m+1).padStart(2))}. ${c.name} ${h.dim(`(${S})`)}${P}`)}),console.log();let d=s.getLastUsedModel(),a=o.model,g=a?r.find(c=>c.id===a||c.name.includes(a)):await x(r,d);g||f("No model selected"),await v.loadModel(g.id,t);let u=g.id;y(h.bold(`
6
- Model ready: ${g.name}`)),console.log(),console.log("Start your interactive coding session:"),console.log(` ${h.cyan(`aix-cli run --provider ${e} --backend lmstudio --model ${u}`)}`),console.log()}async function ke(o,e){let t=K({text:"Checking Ollama status...",color:"cyan"}).start();await $.checkStatus()||(t.fail("Ollama is not running"),f("Ollama must be running. Start it with: ollama serve")),t.succeed("Connected to Ollama");let n=K({text:"Fetching available models...",color:"cyan"}).start(),r=await $.getAvailableModels();r.length===0&&(n.fail("No models found. Pull a model first: ollama pull <model>"),f("No models available")),n.succeed(`Found ${h.bold(r.length)} model${r.length===1?"":"s"}`);let d=await $.getRunningModels();console.log(),console.log(h.bold("Available Models:")),console.log(h.dim("\u2500".repeat(process.stdout.columns||80))),r.forEach((c,m)=>{let S=C(c.size),fe=d.includes(c.id)?h.green(" [RUNNING]"):"",pe=c.parameterSize?h.dim(` ${c.parameterSize}`):"";console.log(` ${h.dim(String(m+1).padStart(2))}. ${c.name}${pe} ${h.dim(`(${S})`)}${fe}`)}),console.log();let a=s.getLastUsedModel(),g=o.model,u=g?r.find(c=>c.id===g||c.name.includes(g)):await x(r,a);u||f("No model selected"),s.setModel(u.id),y(h.bold(`
7
- Model selected: ${u.name}`)),console.log(),console.log("Start your interactive coding session:"),console.log(` ${h.cyan(`aix-cli run --provider ${e} --backend ollama --model ${u.id}`)}`),console.log()}async function Y(o={}){let e=o.backend??await $e(),t=o.provider??await be(e);e==="ollama"?await ke(o,t):await Me(o,t)}A();O();Z();M();import k from"ora";import L from"chalk";import xe from"inquirer";import{execa as Ce}from"execa";var U=null,I=null;function ue(){let o=async()=>{if(I&&U){console.log(L.dim(`
2
+ var Pe=(o=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(o,{get:(e,t)=>(typeof require<"u"?require:e)[t]}):o)(function(o){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+o+'" is not supported')});import{Command as Xe}from"commander";import ye from"chalk";import j from"ora";import S from"chalk";import q from"inquirer";import{execa as _}from"execa";import fe from"ora";import K from"chalk";import xe from"conf";var G=class{store;constructor(){this.store=new xe({projectName:"aix",defaults:{lmStudioUrl:"http://localhost",lmStudioPort:1234,lmStudioContextLength:65536,ollamaUrl:"http://localhost",ollamaPort:11434,defaultTimeout:3e4,autoStartServer:!1},clearInvalidConfig:!0})}get(e){return this.store.get(e)}set(e,t){this.store.set(e,t)}delete(e){this.store.delete(e)}setModel(e){e===void 0||e===""?this.store.delete("model"):this.store.set("model",e)}getLastUsedModel(){return this.store.get("model")}setDefaultProvider(e){e===void 0?this.store.delete("defaultProvider"):this.store.set("defaultProvider",e)}getDefaultProvider(){return this.store.get("defaultProvider")}setDefaultBackend(e){e===void 0?this.store.delete("defaultBackend"):this.store.set("defaultBackend",e)}getDefaultBackend(){return this.store.get("defaultBackend")}getLMStudioUrl(){let e=this.store.get("lmStudioUrl"),t=this.store.get("lmStudioPort");return`${e}:${t}`}getOllamaUrl(){let e=this.store.get("ollamaUrl"),t=this.store.get("ollamaPort");return`${e}:${t}`}reset(){this.store.clear()}},i=new G;var Ce=[1234,1235,1236,1237],H=class{constructor(){}getBaseUrl(){return i.getLMStudioUrl()}getApiUrl(e){return`${this.getBaseUrl()}${e}`}async checkStatus(){try{return(await fetch(this.getApiUrl("/api/status"),{method:"GET",signal:AbortSignal.timeout(3e3)})).ok}catch{return!1}}async getAvailableModels(){let e=["/api/v1/models","/api/models","/v1/models","/api/ls-model/list"];for(let t of e)try{let r=await 
fetch(this.getApiUrl(t),{method:"GET",signal:AbortSignal.timeout(1e4)});if(!r.ok)continue;let n=await r.json(),a=[];return Array.isArray(n)?a=n:n.models&&Array.isArray(n.models)?a=n.models:n.data&&Array.isArray(n.data)&&(a=n.data),a.map(s=>{let l=s;return{id:String(l.key||l.id||l.model||""),name:String(l.display_name||l.name||l.id||l.model||""),size:Number(l.size_bytes||l.size||l.file_size||0),quantization:String(l.quantization?typeof l.quantization=="object"?l.quantization.name:l.quantization:"")}}).filter(s=>s.id&&s.name)}catch{continue}return[]}async getStatus(){if(!await this.checkStatus())return{running:!1,port:i.get("lmStudioPort"),models:[]};try{let t=await fetch(this.getApiUrl("/api/status"),{method:"GET",signal:AbortSignal.timeout(1e4)});if(!t.ok)return{running:!1,port:i.get("lmStudioPort"),models:[]};let r=await t.json();return{running:!0,port:i.get("lmStudioPort"),models:r.models??[],activeModel:r.active_model}}catch{return{running:!1,port:i.get("lmStudioPort"),models:[]}}}async loadModel(e,t){let r=t??fe({text:`Loading model: ${K.cyan(e)}`,color:"cyan"}).start();try{let n=i.get("lmStudioContextLength"),a=["/api/v1/models/load","/api/model/load"],s=async(f,c)=>{let m={model:e};c&&(m.context_length=n);let p=await fetch(this.getApiUrl(f),{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(m),signal:AbortSignal.timeout(3e5)});if(p.ok)return;let y="";try{y=await p.text()}catch{y=""}let O=y?` ${y}`:"";throw new Error(`Failed to load model: ${p.status} ${p.statusText}${O}`)},l;for(let f of a)try{return await s(f,!0),r.succeed(`Model ${K.green(e)} loaded successfully`),i.setModel(e),{loadSpinner:r}}catch(c){l=c;let m=c instanceof Error?c.message:String(c);if(/context|token|max.*(context|token)|too.*large/i.test(m))try{return r.warn(`Model load failed with context_length=${n}. 
Retrying with LM Studio defaults...`),await s(f,!1),r.succeed(`Model ${K.green(e)} loaded successfully`),i.setModel(e),{loadSpinner:r}}catch(y){l=y}}throw l instanceof Error?l:new Error(String(l))}catch(n){throw r.fail(`Failed to load model: ${n instanceof Error?n.message:"Unknown error"}`),n}}async startServer(e){let t=e??fe({text:"Starting LM Studio server...",color:"cyan"}).start();try{let r=process.platform==="darwin",n=process.platform==="linux",a=process.platform==="win32",s;if(r){let l=["/Applications/LM Studio.app",`${process.env.HOME}/Applications/LM Studio.app`];for(let f of l)try{let{existsSync:c}=await import("fs");if(c(f)){s=`open "${f}" --args --server`;break}}catch{}if(s?.startsWith("open")){await _("open",[l.find(f=>{try{let{existsSync:c}=Pe("fs");return c(f)}catch{return!1}})||"/Applications/LM Studio.app","--args","--server"],{detached:!0,stdio:"ignore"}),t.succeed("LM Studio server started"),await this.waitForServer(6e4);return}}else n?s=await this.findLinuxBinary():a&&(s=await this.findWindowsExecutable());if(!s)throw t.fail("LM Studio not found. 
Please install it from https://lmstudio.ai"),new Error("LM Studio not installed");await _(s,["--server"],{detached:!0,stdio:"ignore",env:{...process.env,LM_STUDIO_SERVER_PORT:String(i.get("lmStudioPort"))}}),t.succeed("LM Studio server started"),await this.waitForServer(6e4)}catch(r){throw t.fail(`Failed to start LM Studio: ${r instanceof Error?r.message:"Unknown error"}`),r}}async findLinuxBinary(){let e=["/usr/bin/lm-studio","/usr/local/bin/lm-studio",`${process.env.HOME}/.local/bin/lm-studio`];for(let t of e)try{return await _("test",["-x",t]),t}catch{continue}}async findWindowsExecutable(){let e=process.env.LOCALAPPDATA,t=process.env.PROGRAMFILES,r=[e?`${e}\\Programs\\LM Studio\\lm-studio.exe`:"",t?`${t}\\LM Studio\\lm-studio.exe`:""].filter(Boolean);for(let n of r)try{return await _("cmd",["/c","if exist",`"${n}"`,"echo","yes"]),n}catch{continue}}async waitForServer(e=6e4){let t=Date.now();for(;Date.now()-t<e;){if(await this.checkStatus())return!0;await this.sleep(2e3)}return!1}sleep(e){return new Promise(t=>setTimeout(t,e))}async findAvailablePort(){for(let e of Ce)try{if((await fetch(`http://localhost:${e}/api/status`,{method:"GET",signal:AbortSignal.timeout(1e3)})).ok)return i.set("lmStudioPort",e),e}catch{continue}return i.get("lmStudioPort")}async unloadModel(e){try{await fetch(this.getApiUrl("/api/model/unload"),{method:"POST",signal:AbortSignal.timeout(5e3)})}catch{}}},g=new H;import $e from"chalk";var V=class{getBaseUrl(){return i.getOllamaUrl()}getApiUrl(e){return`${this.getBaseUrl()}${e}`}async checkStatus(){try{return(await fetch(this.getApiUrl("/api/tags"),{method:"GET",signal:AbortSignal.timeout(3e3)})).ok}catch{return!1}}async getAvailableModels(){try{let e=await fetch(this.getApiUrl("/api/tags"),{method:"GET",signal:AbortSignal.timeout(1e4)});return e.ok?((await e.json()).models??[]).map(n=>{let 
a=n.details??{};return{id:String(n.name??n.model??""),name:String(n.name??n.model??""),size:Number(n.size??0),quantization:String(a.quantization_level??""),family:String(a.family??""),parameterSize:String(a.parameter_size??"")}}).filter(n=>n.id&&n.name):[]}catch{return[]}}async getRunningModels(){try{let e=await fetch(this.getApiUrl("/api/ps"),{method:"GET",signal:AbortSignal.timeout(5e3)});return e.ok?((await e.json()).models??[]).map(n=>String(n.name??n.model??"")).filter(Boolean):[]}catch{return[]}}async getStatus(){if(!await this.checkStatus())return{running:!1,port:i.get("ollamaPort"),models:[],runningModels:[]};let[t,r]=await Promise.all([this.getAvailableModels(),this.getRunningModels()]);return{running:!0,port:i.get("ollamaPort"),models:t,runningModels:r}}async unloadModel(e){console.log($e.dim(`
3
+ Ollama automatically manages model memory. Session ended.`))}},v=new V;import ge from"inquirer";async function A(o,e){let t=o.map(a=>({name:`${a.name} (${a.id})`,value:a,short:a.name})),r=e?t.findIndex(a=>a.value.id===e):0;return(await ge.prompt([{type:"list",name:"model",message:"Select a model to load:",choices:t,default:Math.max(0,r),pageSize:Math.min(o.length,15)}])).model}async function z(o,e=!0){return(await ge.prompt([{type:"confirm",name:"confirm",message:o,default:e}])).confirm}import F from"chalk";function R(o){if(o===0)return"0 B";let e=1024,t=["B","KB","MB","GB","TB"],r=Math.floor(Math.log(o)/Math.log(e));return`${parseFloat((o/Math.pow(e,r)).toFixed(2))} ${t[r]}`}function u(o){console.log(F.green("\u2713")+" "+o)}function J(o){console.error(F.red("\u2717")+" "+o)}function M(o){console.log(F.blue("\u2139")+" "+o)}function U(o){console.log(F.yellow("\u26A0")+" "+o)}function d(o,e=1){J(o),process.exit(e)}async function Le(){let o=i.getDefaultBackend(),{backendSelection:e}=await q.prompt([{type:"list",name:"backendSelection",message:"Select model backend:",default:o??"lmstudio",choices:[{name:"\u{1F5A5}\uFE0F LM Studio",value:"lmstudio"},{name:"\u{1F999} Ollama",value:"ollama"}]}]),{saveDefault:t}=await q.prompt([{type:"confirm",name:"saveDefault",message:"Save as default backend?",default:!1}]);return t&&(i.setDefaultBackend(e),u(`Default backend set to ${S.cyan(e)}`)),e}async function Oe(o){let e=i.getDefaultProvider(),t=[{name:"Claude Code",value:"claude"}],r=e??"claude",{providerSelection:n}=await q.prompt([{type:"list",name:"providerSelection",message:"Select coding tool:",default:r,choices:t}]),{saveDefault:a}=await q.prompt([{type:"confirm",name:"saveDefault",message:"Save as default coding tool?",default:!1}]);return a&&(i.setDefaultProvider(n),u(`Default coding tool set to ${S.cyan(n)}`)),n}async function Ae(o,e){let t=j({text:"Checking LM Studio status...",color:"cyan"}).start(),r=await g.checkStatus();r||(t.info("LM Studio server not 
running"),t.stop(),await z("Would you like to start the LM Studio server?")||d("LM Studio server must be running. Start it manually or use the Server tab in LM Studio."),await g.startServer(),r=!0),t.succeed("Connected to LM Studio");let n=j({text:"Fetching available models...",color:"cyan"}).start(),a=await g.getAvailableModels();a.length===0&&(n.fail("No models found. Download some models in LM Studio first."),d("No models available")),n.succeed(`Found ${S.bold(a.length)} model${a.length===1?"":"s"}`),console.log(),console.log(S.bold("Available Models:")),console.log(S.dim("\u2500".repeat(process.stdout.columns||80))),a.forEach((m,p)=>{let y=R(m.size),O=m.loaded?S.green(" [LOADED]"):"";console.log(` ${S.dim(String(p+1).padStart(2))}. ${m.name} ${S.dim(`(${y})`)}${O}`)}),console.log();let s=i.getLastUsedModel(),l=o.model,f=l?a.find(m=>m.id===l||m.name.includes(l)):await A(a,s);f||d("No model selected"),await g.loadModel(f.id,t);let c=f.id;u(S.bold(`
4
+ Model ready: ${f.name}`)),console.log(),console.log("Start your interactive coding session:"),console.log(` ${S.cyan(`aix-cli run --provider ${e} --backend lmstudio --model ${c}`)}`),console.log()}async function Re(o,e){let t=j({text:"Checking Ollama status...",color:"cyan"}).start();await v.checkStatus()||(t.fail("Ollama is not running"),d("Ollama must be running. Start it with: ollama serve")),t.succeed("Connected to Ollama");let n=j({text:"Fetching available models...",color:"cyan"}).start(),a=await v.getAvailableModels();a.length===0&&(n.fail("No models found. Pull a model first: ollama pull <model>"),d("No models available")),n.succeed(`Found ${S.bold(a.length)} model${a.length===1?"":"s"}`);let s=await v.getRunningModels();console.log(),console.log(S.bold("Available Models:")),console.log(S.dim("\u2500".repeat(process.stdout.columns||80))),a.forEach((m,p)=>{let y=R(m.size),Me=s.includes(m.id)?S.green(" [RUNNING]"):"",be=m.parameterSize?S.dim(` ${m.parameterSize}`):"";console.log(` ${S.dim(String(p+1).padStart(2))}. ${m.name}${be} ${S.dim(`(${y})`)}${Me}`)}),console.log();let l=i.getLastUsedModel(),f=o.model,c=f?a.find(m=>m.id===f||m.name.includes(f)):await A(a,l);c||d("No model selected"),i.setModel(c.id),u(S.bold(`
5
+ Model selected: ${c.name}`)),console.log(),console.log("Start your interactive coding session:"),console.log(` ${S.cyan(`aix-cli run --provider ${e} --backend ollama --model ${c.id}`)}`),console.log()}async function X(o={}){let e=o.backend??await Le(),t=o.provider??await Oe(e);e==="ollama"?await Re(o,t):await Ae(o,t)}import $ from"ora";import D from"chalk";import De from"inquirer";import{execa as Be}from"execa";import{execa as Y}from"execa";import Ue from"chalk";var Q=class{async isClaudeCodeInstalled(){try{return await Y("claude",["--version"],{stdio:"ignore"}),!0}catch{return!1}}async run(e){let{model:t,args:r=[],verbose:n=!1}=e,a=this.extractProvider(t),s=this.extractModelName(t);if(!a||!s)throw new Error(`Invalid model format: ${t}. Expected format: provider/model-name`);let l=["--model",s,...r];n&&console.log(Ue.dim(`
6
+ Running: claude ${l.join(" ")}
7
+ `));let f=a==="ollama"?i.getOllamaUrl():i.getLMStudioUrl(),c=a==="ollama"?"ollama":"lmstudio",m=`${f}/v1/messages`;try{let p=await fetch(m,{method:"POST",headers:{"Content-Type":"application/json","x-api-key":c},body:JSON.stringify({model:s,max_tokens:1,messages:[{role:"user",content:"test"}]}),signal:AbortSignal.timeout(3e3)});if(!p.ok&&p.status>=500)throw new Error(`HTTP ${p.status} ${p.statusText}`)}catch(p){let y=p instanceof Error?p.message:String(p),O=a==="ollama"?"Claude Code requires an Anthropic-compatible API. Ollama does not support this. Use --native flag to use Ollama's built-in launch, or switch to LM Studio.":"Ensure LM Studio server is running and the Anthropic Compatibility server is enabled. Check the Developer tab in LM Studio.";throw new Error(`Claude Code could not reach an Anthropic-compatible endpoint at ${m} (${y}). ${O}`)}try{await Y("claude",l,{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:s,ANTHROPIC_BASE_URL:f,ANTHROPIC_AUTH_TOKEN:c,ANTHROPIC_API_KEY:""}})}catch(p){if(p instanceof Error&&"exitCode"in p){let y=p.exitCode;process.exit(y??1)}throw p}}extractProvider(e){return e.split("/")[0]}extractModelName(e){let t=e.split("/");if(!(t.length<2))return t.slice(1).join("/")}async getVersion(){try{return(await Y("claude",["--version"])).stdout}catch{return}}},x=new Q;var E=null,I=null;function pe(){let o=async()=>{if(I&&E){console.log(D.dim(`
8
8
 
9
- Cleaning up...`));try{U==="ollama"?await $.unloadModel(I):U==="lmstudio"&&await v.unloadModel(),y(`Model "${I}" unloaded`)}catch{}}};process.on("SIGINT",()=>{o().finally(()=>process.exit(0))}),process.on("SIGTERM",()=>{o().finally(()=>process.exit(0))})}function me(o){return!o||o.trim().length===0||o.length>200?!1:/^[\w\-:.]+$/.test(o)}async function Le(o){let e=s.getDefaultBackend();if(e)return e;let{backendSelection:t}=await xe.prompt([{type:"list",name:"backendSelection",message:"Select model backend:",choices:[{name:"\u{1F5A5}\uFE0F LM Studio",value:"lmstudio"},{name:"\u{1F999} Ollama",value:"ollama"}]}]);return t}async function Ae(){let o=k({text:"Checking Claude Code installation...",color:"cyan"}).start();return await R.isClaudeCodeInstalled()||(o.fail("Claude Code is not installed"),f("Claude Code is required. Install with: npm install -g @anthropic-ai/claude-code")),o.succeed("Claude Code is installed"),"claude"}function E(o){return"Claude Code"}async function Oe(o,e){let t=k({text:"Checking LM Studio status...",color:"cyan"}).start();await v.findAvailablePort();let i=await v.checkStatus();i||(t.info("LM Studio server not running"),t.stop(),await q("Would you like to start the LM Studio server?")||f("LM Studio server must be running. Start it manually or use the Server tab in LM Studio."),await v.startServer(),i=!0),t.succeed("Connected to LM Studio");let n=k({text:"Fetching available models...",color:"cyan"}).start(),r=await v.getAvailableModels();r.length===0&&(n.fail("No models found. Download some models in LM Studio first."),f("No models available")),n.succeed(`Found ${r.length} model${r.length===1?"":"s"}`);let d;if(o.model){me(o.model)||f(`Invalid model name: "${o.model}". Use letters, numbers, dashes, underscores, colon, or dot.`);let u=r.find(c=>c.id===o.model||c.name.toLowerCase().includes(o.model.toLowerCase()));u||f(`Model "${o.model}" not found. 
Available models: ${r.map(c=>c.name).join(", ")}`),d=u.id}else{let u=s.getLastUsedModel();d=(await x(r,u)).id}let a=k({text:`Loading model: ${L.cyan(d)}`,color:"cyan"}).start();await v.loadModel(d,a),U="lmstudio",I=d;let g=`lmstudio/${d}`;ue(),await Re(e,g,o)}async function Re(o,e,t){let i=E(o);y(L.green(`
10
- Starting ${i} with model: ${e}
11
- `));try{await R.run({model:e,args:t.args??[],verbose:t.verbose})}catch(n){f(`Failed to run ${i}: ${n instanceof Error?n.message:"Unknown error"}`)}}async function ee(o={}){let e;if(o.provider)e=o.provider;else{let r=s.getDefaultProvider();r?e=r:e=await Ae()}let t=k({text:`Checking ${E(e)} installation...`,color:"cyan"}).start();if(await R.isClaudeCodeInstalled()||(t.fail(`${E(e)} is not installed.`),f(`Please install ${E(e)} first.`)),t.succeed(`${E(e)} is installed`),(o.backend??await Le(e))==="ollama"){let r=k({text:"Checking Ollama status...",color:"cyan"}).start();await $.checkStatus()||(r.fail("Ollama is not running"),f("Ollama must be running. Start it with: ollama serve")),r.succeed("Connected to Ollama");let a=k({text:"Fetching available models...",color:"cyan"}).start(),g=await $.getAvailableModels();g.length===0&&(a.fail("No models found"),f("No models available. Pull a model first: ollama pull <model>")),a.succeed(`Found ${g.length} model${g.length===1?"":"s"}`);let u;if(o.model){me(o.model)||f(`Invalid model name: "${o.model}". Use letters, numbers, dash, underscore, colon, or dot.`);let c=g.find(m=>m.id===o.model||m.name.toLowerCase().includes(o.model.toLowerCase()));c||f(`Model "${o.model}" not found. Available: ${g.map(m=>m.name).join(", ")}`),u=c.id}else{let c=s.getLastUsedModel();u=(await x(g,c)).id}s.setModel(u),U="ollama",I=u,ue(),await Ee(u,o.args??[],o.verbose??!1);return}await Oe(o,e)}async function Ee(o,e,t){let i=s.getOllamaUrl();y(L.green(`
12
- Starting Claude Code with Ollama at ${i}...
13
- `)),t&&(console.log(L.dim(`Running: claude --model ${o}
14
- `)),console.log(L.dim(`ANTHROPIC_BASE_URL=${i}
15
- `)));try{await Ce("claude",["--model",o,...e],{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:o,ANTHROPIC_BASE_URL:i,ANTHROPIC_AUTH_TOKEN:"ollama",ANTHROPIC_API_KEY:""}})}catch(n){f(`Failed to run Claude Code: ${n instanceof Error?n.message:"Unknown error"}`)}}A();O();import l from"chalk";async function oe(){let[o,e]=await Promise.all([v.getStatus(),$.getStatus()]);console.log(),console.log(l.bold("LM Studio")),console.log(l.dim("\u2500".repeat(50))),console.log(` ${o.running?l.green("\u25CF"):l.red("\u25CB")} Server: ${o.running?l.green("Running"):l.red("Stopped")}`),console.log(` ${l.dim("\u25B8")} Port: ${l.cyan(String(o.port))}`),console.log(` ${l.dim("\u25B8")} URL: ${l.cyan(`http://localhost:${o.port}`)}`),o.activeModel&&console.log(` ${l.dim("\u25B8")} Active Model: ${l.green(o.activeModel)}`),o.running&&o.models.length>0?(console.log(),console.log(l.bold(" Models")),o.models.forEach((t,i)=>{let n=C(t.size),r=t.id===o.activeModel?` ${l.green("[LOADED]")}`:"";console.log(` ${l.dim(String(i+1)+".")} ${t.name}${r}`),console.log(` ${l.dim("ID:")} ${t.id}`),console.log(` ${l.dim("Size:")} ${n}`),t.quantization&&console.log(` ${l.dim("Quantization:")} ${t.quantization}`)})):o.running&&console.log(` ${l.dim("No models available")}`),console.log(),console.log(l.bold("Ollama")),console.log(l.dim("\u2500".repeat(50))),console.log(` ${e.running?l.green("\u25CF"):l.red("\u25CB")} Server: ${e.running?l.green("Running"):l.red("Stopped")}`),console.log(` ${l.dim("\u25B8")} Port: ${l.cyan(String(e.port))}`),console.log(` ${l.dim("\u25B8")} URL: ${l.cyan(`http://localhost:${e.port}`)}`),e.running&&e.runningModels.length>0&&console.log(` ${l.dim("\u25B8")} Running: ${l.green(e.runningModels.join(", "))}`),e.running&&e.models.length>0?(console.log(),console.log(l.bold(" Models")),e.models.forEach((t,i)=>{let n=C(t.size),d=e.runningModels.includes(t.id)?` ${l.green("[RUNNING]")}`:"",a=t.parameterSize?` ${l.dim(t.parameterSize)}`:"";console.log(` ${l.dim(String(i+1)+".")} 
${t.name}${a}${d}`),console.log(` ${l.dim("Size:")} ${n}`),t.family&&console.log(` ${l.dim("Family:")} ${t.family}`),t.quantization&&console.log(` ${l.dim("Quantization:")} ${t.quantization}`)})):e.running&&console.log(` ${l.dim("No models available")}`),console.log()}import Ue from"ora";import B from"chalk";import{execa as ge}from"execa";import{readFileSync as Ie}from"fs";import{fileURLToPath as Be}from"url";function te(){try{let o=Be(new URL("../../package.json",import.meta.url)),e=JSON.parse(Ie(o,"utf8"));return String(e.version)}catch{return"unknown"}}async function ne(){let o=Ue({text:"Checking for updates...",color:"cyan"}).start();try{let e=te();if(e==="unknown"){o.fail("Could not determine current version.");return}let{stdout:t}=await ge("npm",["view","@iamharshil/aix-cli","version"]),i=t.trim();if(e===i){o.succeed(`You're already on the latest version: ${B.green(`v${e}`)}`);return}o.text=`Updating: ${B.yellow(`v${e}`)} \u2192 ${B.green(`v${i}`)}...`,await ge("npm",["install","-g","@iamharshil/aix-cli@latest"]),o.succeed(`Successfully updated to ${B.green(`v${i}`)}! 
\u{1F680}`),G(`Restart your terminal or run ${B.cyan("aix-cli --help")} to see what's new.`)}catch(e){o.fail("Failed to update."),J(e instanceof Error?e.message:String(e))}}M();import w from"chalk";import Ne from"inquirer";async function re(o,e,t){if(o==="reset"){let{confirm:n}=await Ne.prompt([{type:"confirm",name:"confirm",message:"Are you sure you want to completely reset all configuration to defaults?",default:!1}]);n?(s.reset(),y("Configuration has been reset to defaults.")):G("Reset cancelled.");return}if(o==="set"&&e&&t){let n=t;t==="true"?n=!0:t==="false"?n=!1:Number.isNaN(Number(t))||(n=Number(t)),s.set(e,n),y(`Set ${w.cyan(e)} to ${w.green(t)}`);return}if(o==="unset"&&e){s.delete(e),y(`Unset configuration key ${w.cyan(e)}`);return}console.log(),console.log(w.bold.cyan("\u2699\uFE0F AIX CLI Configuration")),console.log(w.dim("\u2500".repeat(40))),["defaultBackend","defaultProvider","model","lmStudioUrl","lmStudioPort","lmStudioContextLength","ollamaUrl","ollamaPort","defaultTimeout","autoStartServer"].forEach(n=>{let r=s.get(n);console.log(r!==void 0?` ${w.bold(n)}: ${w.green(r)}`:` ${w.bold(n)}: ${w.dim("not set")}`)}),console.log(),console.log(w.dim("Commands:")),console.log(w.dim(" aix-cli config set <key> <value>")),console.log(w.dim(" aix-cli config unset <key>")),console.log(w.dim(" aix-cli config reset")),console.log()}var b=new Te;b.name("aix-cli").description("Run Claude Code with local AI models from LM Studio or Ollama").version(te()).option("--ollama","Shortcut to use Ollama backend").option("--lmstudio","Shortcut to use LM Studio backend").showHelpAfterError();function N(o=0){console.log(),console.log(p.dim(o===0?"\u{1F44B} Goodbye!":"\u274C Cancelled.")),process.exit(o)}process.on("SIGINT",()=>N(0));process.on("SIGTERM",()=>N(0));process.on("uncaughtException",o=>{o.message?.includes("ExitPromptError")||o.message?.includes("User force 
closed")||o.message?.includes("prompt")?N(0):(console.error(p.red("Error:"),o.message),process.exit(1))});process.on("unhandledRejection",o=>{let e=String(o);(e.includes("ExitPromptError")||e.includes("User force closed")||e.includes("prompt"))&&N(0)});b.command("init",{isDefault:!1}).aliases(["i","load"]).description("Select a backend, load a model, and configure your provider").option("-m, --model <name>","Model name or ID to load").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").action(o=>{let e=b.opts();return e.ollama&&(o.backend="ollama"),e.lmstudio&&(o.backend="lmstudio"),Y(o)});b.command("run",{isDefault:!1}).aliases(["r"]).description("Run Claude Code with a model from LM Studio or Ollama").option("-m, --model <name>","Model name or ID to use").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").option("-v, --verbose","Show verbose output").argument("[args...]","Additional arguments for the provider").action(async(o,e)=>{let t=b.opts();t.ollama&&(e.backend="ollama"),t.lmstudio&&(e.backend="lmstudio"),await ee({...e,args:o})});b.command("status",{isDefault:!1}).aliases(["s","stats"]).description("Show LM Studio and Ollama status and available models").action(oe);b.command("doctor",{isDefault:!1}).aliases(["d","check"]).description("Check system requirements and configuration").action(async()=>{let{lmStudioService:o}=await Promise.resolve().then(()=>(A(),se)),{ollamaService:e}=await Promise.resolve().then(()=>(O(),le)),{claudeService:t}=await Promise.resolve().then(()=>(Z(),ce)),{configService:i}=await Promise.resolve().then(()=>(M(),ie));console.log(p.bold.cyan("\u{1F527} AIX CLI System Check")),console.log(p.dim("\u2500".repeat(40)));let[n,r,d]=await 
Promise.all([o.checkStatus(),e.checkStatus(),t.isClaudeCodeInstalled()]),a=i.getDefaultProvider(),g=i.getDefaultBackend(),u=i.get("lmStudioPort"),c=i.get("ollamaPort");console.log(),console.log(p.bold("Backends")),console.log(` ${n?"\u2705":"\u26A0\uFE0F"} LM Studio: ${n?p.green("Running"):p.yellow("Not running")} ${p.dim(`(port ${u})`)}`),console.log(` ${r?"\u2705":"\u26A0\uFE0F"} Ollama: ${r?p.green("Running"):p.yellow("Not running")} ${p.dim(`(port ${c})`)}`),console.log(),console.log(p.bold("Coding Tools")),console.log(` ${d?"\u2705":"\u274C"} Claude Code: ${d?p.green("Installed"):p.red("Not installed")}`),console.log(),console.log(p.bold("Defaults")),console.log(` \u{1F4CC} Backend: ${p.cyan(g??"not set")}`),console.log(` \u{1F4CC} Coding tool: ${p.cyan(a??"not set")}`);let m=[];if(d||m.push(` \u2192 ${p.cyan("npm install -g @anthropic-ai/claude-code")}`),!n&&!r&&m.push(` \u2192 Start LM Studio or run ${p.cyan("ollama serve")}`),m.length>0){console.log(),console.log(p.bold("\u{1F4CB} Next Steps:"));for(let S of m)console.log(S)}console.log()});b.command("update",{isDefault:!1}).aliases(["upgrade","u"]).description("Update AIX CLI to the latest version").action(ne);b.command("config [action] [key] [value]",{isDefault:!1}).aliases(["c","settings"]).description("View, set, or reset AIX CLI configuration constraints").action(re);b.parse();
16
- //# sourceMappingURL=aix.js.map
9
+ Cleaning up...`));try{E==="ollama"?await v.unloadModel(I):E==="lmstudio"&&await g.unloadModel(),u(`Model "${I}" unloaded`)}catch{}}};process.on("SIGINT",()=>{o().finally(()=>process.exit(0))}),process.on("SIGTERM",()=>{o().finally(()=>process.exit(0))})}function ve(o){return!o||o.trim().length===0||o.length>200?!1:/^[\w\-:.]+$/.test(o)}async function Ee(o){let e=i.getDefaultBackend();if(e)return e;let{backendSelection:t}=await De.prompt([{type:"list",name:"backendSelection",message:"Select model backend:",choices:[{name:"\u{1F5A5}\uFE0F LM Studio",value:"lmstudio"},{name:"\u{1F999} Ollama",value:"ollama"}]}]);return t}async function Ie(){let o=$({text:"Checking Claude Code installation...",color:"cyan"}).start();return await x.isClaudeCodeInstalled()||(o.fail("Claude Code is not installed"),d("Claude Code is required. Install with: npm install -g @anthropic-ai/claude-code")),o.succeed("Claude Code is installed"),"claude"}function B(o){return"Claude Code"}async function Ne(o,e){let t=$({text:"Checking LM Studio status...",color:"cyan"}).start();await g.findAvailablePort();let r=await g.checkStatus();r||(t.info("LM Studio server not running"),t.stop(),await z("Would you like to start the LM Studio server?")||d("LM Studio server must be running. Start it manually or use the Server tab in LM Studio."),await g.startServer(),r=!0),t.succeed("Connected to LM Studio");let n=$({text:"Fetching available models...",color:"cyan"}).start(),a=await g.getAvailableModels();a.length===0&&(n.fail("No models found. Download some models in LM Studio first."),d("No models available")),n.succeed(`Found ${a.length} model${a.length===1?"":"s"}`);let s;if(o.model){ve(o.model)||d(`Invalid model name: "${o.model}". Use letters, numbers, dashes, underscores, colon, or dot.`);let c=a.find(m=>m.id===o.model||m.name.toLowerCase().includes(o.model.toLowerCase()));c||d(`Model "${o.model}" not found. 
Available models: ${a.map(m=>m.name).join(", ")}`),s=c.id}else{let c=i.getLastUsedModel();s=(await A(a,c)).id}let l=$({text:`Loading model: ${D.cyan(s)}`,color:"cyan"}).start();await g.loadModel(s,l),E="lmstudio",I=s;let f=`lmstudio/${s}`;pe(),await Te(e,f,o)}async function Te(o,e,t){let r=B(o);u(D.green(`
10
+ Starting ${r} with model: ${e}
11
+ `));try{await x.run({model:e,args:t.args??[],verbose:t.verbose})}catch(n){d(`Failed to run ${r}: ${n instanceof Error?n.message:"Unknown error"}`)}}async function Z(o={}){let e;if(o.provider)e=o.provider;else{let a=i.getDefaultProvider();a?e=a:e=await Ie()}let t=$({text:`Checking ${B(e)} installation...`,color:"cyan"}).start();if(await x.isClaudeCodeInstalled()||(t.fail(`${B(e)} is not installed.`),d(`Please install ${B(e)} first.`)),t.succeed(`${B(e)} is installed`),(o.backend??await Ee(e))==="ollama"){let a=$({text:"Checking Ollama status...",color:"cyan"}).start();await v.checkStatus()||(a.fail("Ollama is not running"),d("Ollama must be running. Start it with: ollama serve")),a.succeed("Connected to Ollama");let l=$({text:"Fetching available models...",color:"cyan"}).start(),f=await v.getAvailableModels();f.length===0&&(l.fail("No models found"),d("No models available. Pull a model first: ollama pull <model>")),l.succeed(`Found ${f.length} model${f.length===1?"":"s"}`);let c;if(o.model){ve(o.model)||d(`Invalid model name: "${o.model}". Use letters, numbers, dash, underscore, colon, or dot.`);let m=f.find(p=>p.id===o.model||p.name.toLowerCase().includes(o.model.toLowerCase()));m||d(`Model "${o.model}" not found. Available: ${f.map(p=>p.name).join(", ")}`),c=m.id}else{let m=i.getLastUsedModel();c=(await A(f,m)).id}i.setModel(c),E="ollama",I=c,pe(),await _e(c,o.args??[],o.verbose??!1);return}await Ne(o,e)}async function _e(o,e,t){let r=i.getOllamaUrl();u(D.green(`
12
+ Starting Claude Code with Ollama at ${r}...
13
+ `)),t&&(console.log(D.dim(`Running: claude --model ${o}
14
+ `)),console.log(D.dim(`ANTHROPIC_BASE_URL=${r}
15
+ `)));try{await Be("claude",["--model",o,...e],{stdio:"inherit",env:{...process.env,ANTHROPIC_MODEL:o,ANTHROPIC_BASE_URL:r,ANTHROPIC_AUTH_TOKEN:"ollama",ANTHROPIC_API_KEY:""}})}catch(n){d(`Failed to run Claude Code: ${n instanceof Error?n.message:"Unknown error"}`)}}import h from"chalk";async function ee(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel(),[r,n,a]=await Promise.all([g.getStatus(),v.getStatus(),x.isClaudeCodeInstalled()]);if(console.log(),console.log(h.bold.cyan("\u25CF Status")),console.log(h.dim("\u2500".repeat(50))),console.log(),console.log(h.bold("Active")),console.log(` ${h.dim("\u25B8")} Provider: ${o?h.cyan(o):h.dim("not set")}`),console.log(` ${h.dim("\u25B8")} Tool: ${e?h.cyan(e):h.dim("not set")}`),o){let m=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();console.log(` ${h.dim("\u25B8")} Endpoint: ${h.cyan(m)}`)}t&&console.log(` ${h.dim("\u25B8")} Model: ${h.green(t)}`),console.log(),console.log(h.bold("Tools")),console.log(` Claude Code: ${a?h.green("installed"):h.red("not installed")}`),console.log(),console.log(h.bold("Backends"));let s=r.running?h.green("running"):h.yellow("stopped"),l=n.running?h.green("running"):h.yellow("stopped"),f=o==="lmstudio",c=o==="ollama";console.log(` LM Studio: ${s}${f?h.dim(" (default)"):""}`),console.log(` Ollama: ${l}${c?h.dim(" (default)"):""}`),console.log()}import ze from"ora";import N from"chalk";import{execa as he}from"execa";import{readFileSync as Fe}from"fs";import{fileURLToPath as je}from"url";function oe(){try{let o=je(new URL("../../package.json",import.meta.url)),e=JSON.parse(Fe(o,"utf8"));return String(e.version)}catch{return"unknown"}}async function te(){let o=ze({text:"Checking for updates...",color:"cyan"}).start();try{let e=oe();if(e==="unknown"){o.fail("Could not determine current version.");return}let{stdout:t}=await he("npm",["view","@iamharshil/aix-cli","version"]),r=t.trim();if(e===r){o.succeed(`You're already on the latest version: 
${N.green(`v${e}`)}`);return}o.text=`Updating: ${N.yellow(`v${e}`)} \u2192 ${N.green(`v${r}`)}...`,await he("npm",["install","-g","@iamharshil/aix-cli@latest"]),o.succeed(`Successfully updated to ${N.green(`v${r}`)}! \u{1F680}`),M(`Restart your terminal or run ${N.cyan("aix-cli --help")} to see what's new.`)}catch(e){o.fail("Failed to update."),J(e instanceof Error?e.message:String(e))}}import k from"chalk";import qe from"inquirer";async function ne(o,e,t){if(o==="reset"){let{confirm:n}=await qe.prompt([{type:"confirm",name:"confirm",message:"Are you sure you want to completely reset all configuration to defaults?",default:!1}]);n?(i.reset(),u("Configuration has been reset to defaults.")):M("Reset cancelled.");return}if(o==="set"&&e&&t){let n=t;t==="true"?n=!0:t==="false"?n=!1:Number.isNaN(Number(t))||(n=Number(t)),i.set(e,n),u(`Set ${k.cyan(e)} to ${k.green(t)}`);return}if(o==="unset"&&e){i.delete(e),u(`Unset configuration key ${k.cyan(e)}`);return}console.log(),console.log(k.bold.cyan("\u2699\uFE0F AIX CLI Configuration")),console.log(k.dim("\u2500".repeat(40))),["defaultBackend","defaultProvider","model","lmStudioUrl","lmStudioPort","lmStudioContextLength","ollamaUrl","ollamaPort","defaultTimeout","autoStartServer"].forEach(n=>{let a=i.get(n);console.log(a!==void 0?` ${k.bold(n)}: ${k.green(a)}`:` ${k.bold(n)}: ${k.dim("not set")}`)}),console.log(),console.log(k.dim("Commands:")),console.log(k.dim(" aix-cli config set <key> <value>")),console.log(k.dim(" aix-cli config unset <key>")),console.log(k.dim(" aix-cli config reset")),console.log()}import We from"inquirer";async function ie(o){let e=o.force??!1,t=o.provider?.toLowerCase();if(!e&&i.getDefaultBackend()){M("Already configured. Use --force to reconfigure.");return}let[r,n]=await Promise.all([g.checkStatus(),v.checkStatus()]),a=[];r&&a.push({name:"lmstudio",label:"LM Studio"}),n&&a.push({name:"ollama",label:"Ollama"});let s;if(t)t==="lmstudio"&&!r&&d("LM Studio is not running. 
Please start it first."),t==="ollama"&&!n&&d("Ollama is not running. Please start it first."),t!=="lmstudio"&&t!=="ollama"&&d(`Unknown provider: ${t}. Use 'lmstudio' or 'ollama'.`),s=t,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`);else if(a.length===0&&d('No backends detected. Please start LM Studio or run "ollama serve" first.'),a.length===1)s=a[0].name,u(`Detected ${a[0].label} running`),u(`Selected provider: ${a[0].label}`);else{let{backendSelection:c}=await We.prompt([{type:"list",name:"backendSelection",message:"Which backend would you like to use?",choices:a.map(m=>({name:m.label,value:m.name}))}]);s=c,u(`Selected provider: ${s==="lmstudio"?"LM Studio":"Ollama"}`)}let l=s==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();if(M(`Configured endpoint: ${l}`),i.setDefaultBackend(s),i.setDefaultProvider("claude"),!await x.isClaudeCodeInstalled()){U('Claude Code is not installed. Install it to use "aix run".');return}u("Claude connected successfully"),u("Test request passed"),console.log(),M('Setup complete! Run "aix run" to start coding.')}import C from"chalk";async function Ge(){let[o,e]=await Promise.all([g.checkStatus(),v.checkStatus()]),t=i.getDefaultBackend();console.log(C.bold.cyan("Providers")),console.log(C.dim("\u2500".repeat(40))),console.log(` LM Studio: ${o?C.green("running"):C.yellow("stopped")} ${t==="lmstudio"?C.dim("(default)"):""}`),console.log(` Ollama: ${e?C.green("running"):C.yellow("stopped")} ${t==="ollama"?C.dim("(default)"):""}`),console.log()}async function Ke(o){let e=o.toLowerCase(),[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="lmstudio"){t||d("LM Studio is not running. Please start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Default provider set to LM Studio");return}if(e==="ollama"){r||d("Ollama is not running. Please start it first."),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Default provider set to Ollama");return}d(`Unknown provider: ${o}. 
Use 'lmstudio' or 'ollama'.`)}async function ae(o){await Ge()}async function re(o){o.default||d("Provider name is required. Use: aix providers set <provider_name>"),await Ke(o.default)}import b from"chalk";import Se from"ora";async function se(o={}){let e=o.provider?.toLowerCase();if(e||(e=i.getDefaultBackend()),e||d('No provider specified. Use --provider flag or run "aix setup" first.'),e==="ollama"){let t=Se({text:"Fetching Ollama models...",color:"cyan"}).start();await v.checkStatus()||(t.fail("Ollama is not running."),d("Start Ollama with: ollama serve"));let n=await v.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Pull a model: ollama pull <model>"));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size);console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}`)}console.log();return}if(e==="lmstudio"){let t=Se({text:"Fetching LM Studio models...",color:"cyan"}).start();await g.checkStatus()||(t.fail("LM Studio is not running."),d("Start LM Studio and enable the local server."));let n=await g.getAvailableModels();if(t.succeed(`Found ${n.length} model${n.length===1?"":"s"}`),n.length===0){console.log(b.dim("No models available. Download models in LM Studio."));return}console.log(),console.log(b.bold.cyan("Available Models")),console.log(b.dim("\u2500".repeat(60)));for(let a of n){let s=R(a.size),l=a.quantization?` ${b.dim(a.quantization)}`:"";console.log(` ${b.white(a.name)} ${b.dim(`(${s})`)}${l}`)}console.log();return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import P from"chalk";import{execa as He}from"execa";async function le(){let o=i.getDefaultBackend(),e=i.getDefaultProvider(),t=i.getLastUsedModel();console.log(),console.log(P.bold.cyan("\u{1F527} Doctor")),console.log(P.dim("\u2500".repeat(40)));let r=[],n=!0;if((!o||!e)&&(n=!1,r.push('No default provider configured. 
Run "aix setup" first.')),o==="lmstudio"){let s=await g.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" LM Studio running")):(n=!1,r.push("LM Studio is not running")),s){let l=await g.getAvailableModels();l.length===0?(n=!1,r.push("No models available in LM Studio")):t&&(l.some(c=>c.id===t||c.name===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}if(o==="ollama"){let s=await v.checkStatus();if(s?(console.log(),console.log(P.green("\u2713")+" Ollama running")):(n=!1,r.push("Ollama is not running")),s){let l=await v.getAvailableModels();l.length===0?(n=!1,r.push("No models available in Ollama")):t&&(l.some(c=>c.id===t)?console.log(P.green("\u2713")+" Model available"):(n=!1,r.push(`Model "${t}" not found`)))}}let a=o==="ollama"?i.getOllamaUrl():i.getLMStudioUrl();try{(await fetch(`${a}/api/tags`,{method:"GET",signal:AbortSignal.timeout(3e3)})).ok?console.log(P.green("\u2713")+` Port accessible (${a})`):(n=!1,r.push(`Port not accessible at ${a}`))}catch{n=!1,r.push(`Cannot reach ${a}`)}if(o)try{(await He("claude",["--version"])).exitCode===0&&console.log(P.green("\u2713")+" Claude Code installed")}catch{n=!1,r.push("Claude Code not installed")}if(r.length>0){console.log(),console.log(P.bold.red("Issues found:"));for(let s of r)console.log(P.red("\u2717")+" "+s)}console.log(),n&&r.length===0?(console.log(P.green("\u2713 All checks passed")),console.log()):d("Doctor check failed. Fix the issues above and try again.")}import de from"chalk";import L from"inquirer";async function Ve(){let o=i.get("lmStudioPort"),e=i.get("ollamaPort"),t=await g.checkStatus(),r=await v.checkStatus();if(!t&&r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`LM Studio not running. Ollama is running on port ${e}. 
Use Ollama instead?`,default:!0}]);n&&(i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama as default"))}if(t&&!r){let{fix:n}=await L.prompt([{type:"confirm",name:"fix",message:`Ollama not running. LM Studio is running on port ${o}. Use LM Studio instead?`,default:!0}]);n&&(i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio as default"))}if(!t&&!r){let{backend:n}=await L.prompt([{type:"list",name:"backend",message:"No backends running. Which would you like to start?",choices:[{name:"LM Studio",value:"lmstudio"},{name:"Ollama",value:"ollama"}]}]);n==="lmstudio"?(M("Starting LM Studio..."),await g.startServer(),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("LM Studio started and configured")):(M("To start Ollama, run: ollama serve"),U('After starting Ollama, run "aix fix" again'))}}async function Je(){let{reset:o}=await L.prompt([{type:"confirm",name:"reset",message:"Reset all configuration? This will clear default provider and model settings.",default:!1}]);o&&(i.reset(),u("Configuration reset"))}async function ce(){console.log(),console.log(de.bold.cyan("\u{1F527} Fix")),console.log(de.dim("\u2500".repeat(40)));let o=i.getDefaultBackend(),e=i.getLastUsedModel();if(await Ve(),o==="lmstudio"&&await g.checkStatus()){let n=await g.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}if(o==="ollama"&&await v.checkStatus()){let n=await v.getAvailableModels();if(e&&!n.some(a=>a.id===e)){U(`Last used model "${e}" not found`);let{useModel:a}=await L.prompt([{type:"list",name:"useModel",message:"Select a model to use:",choices:n.map(s=>({name:s.name,value:s.id}))}]);i.setModel(a),u(`Model set to ${a}`)}}console.log(),console.log(de.bold("Options:")),console.log(" 1. 
Reset config (clear all settings)");let{option:t}=await L.prompt([{type:"list",name:"option",message:"What would you like to do?",choices:[{name:"Done / Exit",value:"done"},{name:"Reset config",value:"reset"}]}]);t==="reset"&&await Je(),console.log(),u('Fix complete. Run "aix status" to verify.')}import W from"chalk";async function me(o={}){let e=o.provider?.toLowerCase();e||d('Provider name required. Use "aix switch ollama" or "aix switch lmstudio".');let[t,r]=await Promise.all([g.checkStatus(),v.checkStatus()]);if(e==="ollama"){r||d("Ollama is not running. Start it with: ollama serve"),i.setDefaultBackend("ollama"),i.setDefaultProvider("claude"),u("Switched to Ollama"),console.log(W.dim(` Endpoint: ${i.getOllamaUrl()}`)),console.log(),console.log(W.green('Ready! Run "aix run" to start coding.'));return}if(e==="lmstudio"){t||d("LM Studio is not running. Start it first."),i.setDefaultBackend("lmstudio"),i.setDefaultProvider("claude"),u("Switched to LM Studio"),console.log(W.dim(` Endpoint: ${i.getLMStudioUrl()}`)),console.log(),console.log(W.green('Ready! Run "aix run" to start coding.'));return}d(`Unknown provider: ${e}. Use 'ollama' or 'lmstudio'.`)}import we from"chalk";async function ue(o={}){let e=o.provider?.toLowerCase();e&&e!=="claude"&&d(`Unknown provider: ${e}. 
Use 'claude'.`);let t=i.getDefaultBackend(),r=i.getDefaultProvider(),n=i.getLastUsedModel();!t&&!r&&!n&&d("No provider connected."),i.setModel(void 0),i.setDefaultBackend(void 0),i.setDefaultProvider(void 0),u("Disconnected from Claude Code"),n&&console.log(we.dim(` Last model: ${n}`)),console.log(),console.log(we.green('Run "aix setup" to connect again.'))}var w=new Xe;w.name("aix-cli").description("Run Claude Code with local AI models from LM Studio or Ollama").version(oe()).option("--ollama","Shortcut to use Ollama backend").option("--lmstudio","Shortcut to use LM Studio backend").showHelpAfterError();function T(o=0){console.log(),console.log(ye.dim(o===0?"\u{1F44B} Goodbye!":"\u274C Cancelled.")),process.exit(o)}process.on("SIGINT",()=>T(0));process.on("SIGTERM",()=>T(0));process.on("uncaughtException",o=>{o.message?.includes("ExitPromptError")||o.message?.includes("User force closed")||o.message?.includes("prompt")?T(0):(console.error(ye.red("Error:"),o.message),process.exit(1))});process.on("unhandledRejection",o=>{let e=String(o);(e.includes("ExitPromptError")||e.includes("User force closed")||e.includes("prompt"))&&T(0)});w.command("init",{isDefault:!1}).aliases(["i","load"]).description("Select a backend, load a model, and configure your provider").option("-m, --model <name>","Model name or ID to load").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").action(o=>{let e=w.opts();return e.ollama&&(o.backend="ollama"),e.lmstudio&&(o.backend="lmstudio"),X(o)});w.command("run",{isDefault:!1}).aliases(["r"]).description("Run Claude Code with a model from LM Studio or Ollama").option("-m, --model <name>","Model name or ID to use").option("-p, --provider <provider>","Coding tool to use (claude)").option("-b, --backend <backend>","Model backend to use (lmstudio or ollama)").option("-v, --verbose","Show verbose output").argument("[args...]","Additional arguments for the 
provider").action(async(o,e)=>{let t=w.opts();t.ollama&&(e.backend="ollama"),t.lmstudio&&(e.backend="lmstudio"),await Z({...e,args:o})});w.command("status",{isDefault:!1}).aliases(["s","stats"]).description("Show LM Studio and Ollama status and available models").action(ee);w.command("doctor",{isDefault:!1}).aliases(["d","check"]).description("Check infrastructure status").action(le);w.command("update",{isDefault:!1}).aliases(["upgrade","u"]).description("Update AIX CLI to the latest version").action(te);w.command("config [action] [key] [value]",{isDefault:!1}).aliases(["c","settings"]).description("View, set, or reset AIX CLI configuration constraints").action(ne);w.command("setup",{isDefault:!1}).description("One-command default setup for first-time users").option("--provider <name>","Provider to use (lmstudio or ollama)").option("--force","Overwrite existing configuration").action(o=>{let e=w.opts();return e.ollama&&(o.provider="ollama"),e.lmstudio&&(o.provider="lmstudio"),ie(o)});var ke=w.command("providers",{isDefault:!1}).description("List or set default provider");ke.command("list",{isDefault:!1}).description("List available providers").action(()=>ae());ke.command("set",{isDefault:!1}).description("Set default provider").argument("<name>","Provider name (lmstudio or ollama)").action(o=>re({default:o}));var Ye=w.command("models",{isDefault:!1}).description("List available models");Ye.command("list",{isDefault:!1}).description("List models from a provider").option("--provider <name>","Provider to use (ollama or lmstudio)").action(o=>se(o));w.command("fix",{isDefault:!1}).description("Fix infrastructure issues").action(ce);w.command("switch",{isDefault:!1}).description("Switch to a different provider").argument("<provider>","Provider name (ollama or lmstudio)").action(o=>me({provider:o}));w.command("disconnect",{isDefault:!1}).description("Disconnect from a provider").argument("[provider]","Provider to disconnect (claude)").action(o=>ue({provider:o}));w.parse();
@@ -1,2 +1 @@
1
1
  export declare function configCommand(action?: string, key?: string, value?: string): Promise<void>;
2
- //# sourceMappingURL=config.d.ts.map
@@ -0,0 +1,5 @@
1
+ interface DisconnectOptions {
2
+ provider?: string;
3
+ }
4
+ export declare function disconnectCommand(options?: DisconnectOptions): Promise<void>;
5
+ export {};
@@ -0,0 +1 @@
1
+ export declare function doctorCommand(): Promise<void>;
@@ -0,0 +1,2 @@
1
+ declare function fixCommand(): Promise<void>;
2
+ export { fixCommand };
@@ -1,6 +1,12 @@
1
- export { initCommand } from "./init.js";
2
- export { runCommand } from "./run.js";
3
- export { statusCommand } from "./status.js";
4
- export { updateCommand } from "./update.js";
5
- export { configCommand } from "./config.js";
6
- //# sourceMappingURL=index.d.ts.map
1
+ export { initCommand } from './init.js';
2
+ export { runCommand } from './run.js';
3
+ export { statusCommand } from './status.js';
4
+ export { updateCommand } from './update.js';
5
+ export { configCommand } from './config.js';
6
+ export { setupCommand } from './setup.js';
7
+ export { providersListCommand, providersSetCommand } from './providers.js';
8
+ export { modelsListCommand } from './models.js';
9
+ export { doctorCommand } from './doctor.js';
10
+ export { fixCommand } from './fix.js';
11
+ export { switchCommand } from './switch.js';
12
+ export { disconnectCommand } from './disconnect.js';
@@ -1,3 +1,2 @@
1
1
  import type { InitOptions } from '../types/index.js';
2
2
  export declare function initCommand(options?: InitOptions): Promise<void>;
3
- //# sourceMappingURL=init.d.ts.map
@@ -0,0 +1,5 @@
1
+ interface ModelsOptions {
2
+ provider?: string;
3
+ }
4
+ export declare function modelsListCommand(options?: ModelsOptions): Promise<void>;
5
+ export {};
@@ -0,0 +1,8 @@
1
+ interface ProvidersSetOptions {
2
+ default?: string;
3
+ }
4
+ export declare function providersListCommand(_options?: {
5
+ list?: boolean;
6
+ }): Promise<void>;
7
+ export declare function providersSetCommand(options: ProvidersSetOptions): Promise<void>;
8
+ export {};
@@ -8,4 +8,3 @@ interface RunOptions {
8
8
  }
9
9
  export declare function runCommand(options?: RunOptions): Promise<void>;
10
10
  export {};
11
- //# sourceMappingURL=run.d.ts.map
@@ -0,0 +1,6 @@
1
+ interface SetupOptions {
2
+ provider?: string;
3
+ force?: boolean;
4
+ }
5
+ export declare function setupCommand(options: SetupOptions): Promise<void>;
6
+ export {};
@@ -1,2 +1 @@
1
1
  export declare function statusCommand(): Promise<void>;
2
- //# sourceMappingURL=status.d.ts.map
@@ -0,0 +1,5 @@
1
+ interface SwitchOptions {
2
+ provider?: string;
3
+ }
4
+ export declare function switchCommand(options?: SwitchOptions): Promise<void>;
5
+ export {};
@@ -1,3 +1,2 @@
1
1
  export declare function getCurrentVersion(): string;
2
2
  export declare function updateCommand(): Promise<void>;
3
- //# sourceMappingURL=update.d.ts.map
package/dist/index.d.ts CHANGED
@@ -1,3 +1,2 @@
1
1
  #!/usr/bin/env node
2
2
  export {};
3
- //# sourceMappingURL=index.d.ts.map