codemaxxing 0.3.1 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -12,34 +12,34 @@ Open-source terminal coding agent. Connect **any** LLM — local or remote — a
12
12
 
13
13
  Every coding agent locks you into their API. Codemaxxing doesn't. Run it with LM Studio, Ollama, OpenRouter, OpenAI, or any OpenAI-compatible endpoint. Your machine, your model, your rules.
14
14
 
15
- ## Quick Install (Recommended)
15
+ ## Install
16
16
 
17
- **Linux / macOS:**
17
+ **If you have Node.js:**
18
18
  ```bash
19
- bash -c "$(curl -fsSL https://raw.githubusercontent.com/MarcosV6/codemaxxing/main/install.sh)"
19
+ npm install -g codemaxxing
20
20
  ```
21
21
 
22
- **Windows (PowerShell as Administrator):**
23
- ```powershell
24
- curl -fsSL -o $env:TEMP\install-codemaxxing.bat https://raw.githubusercontent.com/MarcosV6/codemaxxing/main/install.bat; & $env:TEMP\install-codemaxxing.bat
22
+ **If you don't have Node.js:**
23
+
24
+ The one-line installers below will install Node.js first, then codemaxxing.
25
+
26
+ *Linux / macOS:*
27
+ ```bash
28
+ bash -c "$(curl -fsSL https://raw.githubusercontent.com/MarcosV6/codemaxxing/main/install.sh)"
25
29
  ```
26
30
 
27
- **Windows (CMD as Administrator):**
31
+ *Windows (CMD as Administrator):*
28
32
  ```
29
33
  curl -fsSL -o %TEMP%\install-codemaxxing.bat https://raw.githubusercontent.com/MarcosV6/codemaxxing/main/install.bat && %TEMP%\install-codemaxxing.bat
30
34
  ```
31
35
 
32
- > **Note:** Restart your terminal after installation to ensure everything works.
33
-
34
- ## Manual Installation
35
-
36
- **Prerequisites:** [Node.js](https://nodejs.org) 20 or later.
37
-
38
- **NPM:**
39
- ```bash
40
- npm install -g codemaxxing
36
+ *Windows (PowerShell as Administrator):*
37
+ ```powershell
38
+ curl -fsSL -o $env:TEMP\install-codemaxxing.bat https://raw.githubusercontent.com/MarcosV6/codemaxxing/main/install.bat; & $env:TEMP\install-codemaxxing.bat
41
39
  ```
42
40
 
41
+ > **Windows note:** If Node.js was just installed, you may need to close and reopen your terminal, then run `npm install -g codemaxxing` manually. This is a Windows PATH limitation.
42
+
43
43
  ## Updating
44
44
 
45
45
  ```bash
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
1
1
  #!/usr/bin/env node
2
- import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
2
+ import { jsx as _jsx, jsxs as _jsxs, Fragment as _Fragment } from "react/jsx-runtime";
3
3
  import React, { useState, useEffect, useCallback } from "react";
4
4
  import { render, Box, Text, useInput, useApp, useStdout } from "ink";
5
5
  import { EventEmitter } from "events";
@@ -13,6 +13,9 @@ import { getTheme, listThemes, THEMES, DEFAULT_THEME } from "./themes.js";
13
13
  import { PROVIDERS, getCredentials, openRouterOAuth, anthropicSetupToken, importCodexToken, importQwenToken, copilotDeviceFlow } from "./utils/auth.js";
14
14
  import { listInstalledSkills, installSkill, removeSkill, getRegistrySkills, searchRegistry, createSkillScaffold, getActiveSkills, getActiveSkillCount } from "./utils/skills.js";
15
15
  import { listServers, addServer, removeServer, getConnectedServers } from "./utils/mcp.js";
16
+ import { detectHardware, formatBytes } from "./utils/hardware.js";
17
+ import { getRecommendations, getFitIcon } from "./utils/models.js";
18
+ import { isOllamaInstalled, isOllamaRunning, getOllamaInstallCommand, startOllama, pullModel } from "./utils/ollama.js";
16
19
  const VERSION = "0.1.9";
17
20
  // ── Helpers ──
18
21
  function formatTimeAgo(date) {
@@ -137,6 +140,13 @@ function App() {
137
140
  const [skillsPickerIndex, setSkillsPickerIndex] = useState(0);
138
141
  const [sessionDisabledSkills, setSessionDisabledSkills] = useState(new Set());
139
142
  const [approval, setApproval] = useState(null);
143
+ const [wizardScreen, setWizardScreen] = useState(null);
144
+ const [wizardIndex, setWizardIndex] = useState(0);
145
+ const [wizardHardware, setWizardHardware] = useState(null);
146
+ const [wizardModels, setWizardModels] = useState([]);
147
+ const [wizardPullProgress, setWizardPullProgress] = useState(null);
148
+ const [wizardPullError, setWizardPullError] = useState(null);
149
+ const [wizardSelectedModel, setWizardSelectedModel] = useState(null);
140
150
  // Listen for paste events from stdin interceptor
141
151
  useEffect(() => {
142
152
  const handler = ({ content, lines }) => {
@@ -174,10 +184,11 @@ function App() {
174
184
  }
175
185
  else {
176
186
  info.push("✗ No local LLM server found.");
177
- info.push(" /connect — retry after starting LM Studio or Ollama");
178
- info.push(" /login — authenticate with a cloud provider");
179
187
  setConnectionInfo([...info]);
180
188
  setReady(true);
189
+ // Show the setup wizard on first run
190
+ setWizardScreen("connection");
191
+ setWizardIndex(0);
181
192
  return;
182
193
  }
183
194
  }
@@ -1114,6 +1125,223 @@ function App() {
1114
1125
  }
1115
1126
  return;
1116
1127
  }
1128
+ // ── Setup Wizard Navigation ──
1129
+ if (wizardScreen) {
1130
+ if (wizardScreen === "connection") {
1131
+ const items = ["local", "openrouter", "apikey", "existing"];
1132
+ if (key.upArrow) {
1133
+ setWizardIndex((prev) => (prev - 1 + items.length) % items.length);
1134
+ return;
1135
+ }
1136
+ if (key.downArrow) {
1137
+ setWizardIndex((prev) => (prev + 1) % items.length);
1138
+ return;
1139
+ }
1140
+ if (key.escape) {
1141
+ setWizardScreen(null);
1142
+ return;
1143
+ }
1144
+ if (key.return) {
1145
+ const selected = items[wizardIndex];
1146
+ if (selected === "local") {
1147
+ // Scan hardware and show model picker
1148
+ const hw = detectHardware();
1149
+ setWizardHardware(hw);
1150
+ const recs = getRecommendations(hw).filter(m => m.fit !== "skip");
1151
+ setWizardModels(recs);
1152
+ setWizardScreen("models");
1153
+ setWizardIndex(0);
1154
+ }
1155
+ else if (selected === "openrouter") {
1156
+ setWizardScreen(null);
1157
+ addMsg("info", "Starting OpenRouter OAuth — opening browser...");
1158
+ setLoading(true);
1159
+ setSpinnerMsg("Waiting for authorization...");
1160
+ openRouterOAuth((msg) => addMsg("info", msg))
1161
+ .then(() => {
1162
+ addMsg("info", "✅ OpenRouter authenticated! Use /connect to connect.");
1163
+ setLoading(false);
1164
+ })
1165
+ .catch((err) => { addMsg("error", `OAuth failed: ${err.message}`); setLoading(false); });
1166
+ }
1167
+ else if (selected === "apikey") {
1168
+ setWizardScreen(null);
1169
+ setLoginPicker(true);
1170
+ setLoginPickerIndex(0);
1171
+ }
1172
+ else if (selected === "existing") {
1173
+ setWizardScreen(null);
1174
+ addMsg("info", "Start your LLM server, then type /connect to retry.");
1175
+ }
1176
+ return;
1177
+ }
1178
+ return;
1179
+ }
1180
+ if (wizardScreen === "models") {
1181
+ const models = wizardModels;
1182
+ if (key.upArrow) {
1183
+ setWizardIndex((prev) => (prev - 1 + models.length) % models.length);
1184
+ return;
1185
+ }
1186
+ if (key.downArrow) {
1187
+ setWizardIndex((prev) => (prev + 1) % models.length);
1188
+ return;
1189
+ }
1190
+ if (key.escape) {
1191
+ setWizardScreen("connection");
1192
+ setWizardIndex(0);
1193
+ return;
1194
+ }
1195
+ if (key.return) {
1196
+ const selected = models[wizardIndex];
1197
+ if (selected) {
1198
+ setWizardSelectedModel(selected);
1199
+ // Check if Ollama is installed
1200
+ if (!isOllamaInstalled()) {
1201
+ setWizardScreen("install-ollama");
1202
+ }
1203
+ else {
1204
+ // Start pulling the model
1205
+ setWizardScreen("pulling");
1206
+ setWizardPullProgress({ status: "starting", percent: 0 });
1207
+ setWizardPullError(null);
1208
+ (async () => {
1209
+ try {
1210
+ // Ensure ollama is running
1211
+ const running = await isOllamaRunning();
1212
+ if (!running) {
1213
+ setWizardPullProgress({ status: "Starting Ollama server...", percent: 0 });
1214
+ startOllama();
1215
+ // Wait for it to come up
1216
+ for (let i = 0; i < 15; i++) {
1217
+ await new Promise(r => setTimeout(r, 1000));
1218
+ if (await isOllamaRunning())
1219
+ break;
1220
+ }
1221
+ if (!(await isOllamaRunning())) {
1222
+ setWizardPullError("Could not start Ollama server. Run 'ollama serve' manually, then press Enter.");
1223
+ return;
1224
+ }
1225
+ }
1226
+ await pullModel(selected.ollamaId, (p) => {
1227
+ setWizardPullProgress(p);
1228
+ });
1229
+ setWizardPullProgress({ status: "success", percent: 100 });
1230
+ // Wait briefly then connect
1231
+ await new Promise(r => setTimeout(r, 500));
1232
+ setWizardScreen(null);
1233
+ setWizardPullProgress(null);
1234
+ setWizardSelectedModel(null);
1235
+ addMsg("info", `✅ ${selected.name} installed! Connecting...`);
1236
+ await connectToProvider(true);
1237
+ }
1238
+ catch (err) {
1239
+ setWizardPullError(err.message);
1240
+ }
1241
+ })();
1242
+ }
1243
+ }
1244
+ return;
1245
+ }
1246
+ return;
1247
+ }
1248
+ if (wizardScreen === "install-ollama") {
1249
+ if (key.escape) {
1250
+ setWizardScreen("models");
1251
+ setWizardIndex(0);
1252
+ return;
1253
+ }
1254
+ if (key.return) {
1255
+ // User says they installed it — check and proceed
1256
+ if (isOllamaInstalled()) {
1257
+ const selected = wizardSelectedModel;
1258
+ if (selected) {
1259
+ setWizardScreen("pulling");
1260
+ setWizardPullProgress({ status: "starting", percent: 0 });
1261
+ setWizardPullError(null);
1262
+ (async () => {
1263
+ try {
1264
+ const running = await isOllamaRunning();
1265
+ if (!running) {
1266
+ setWizardPullProgress({ status: "Starting Ollama server...", percent: 0 });
1267
+ startOllama();
1268
+ for (let i = 0; i < 15; i++) {
1269
+ await new Promise(r => setTimeout(r, 1000));
1270
+ if (await isOllamaRunning())
1271
+ break;
1272
+ }
1273
+ if (!(await isOllamaRunning())) {
1274
+ setWizardPullError("Could not start Ollama server. Run 'ollama serve' manually, then press Enter.");
1275
+ return;
1276
+ }
1277
+ }
1278
+ await pullModel(selected.ollamaId, (p) => setWizardPullProgress(p));
1279
+ setWizardPullProgress({ status: "success", percent: 100 });
1280
+ await new Promise(r => setTimeout(r, 500));
1281
+ setWizardScreen(null);
1282
+ setWizardPullProgress(null);
1283
+ setWizardSelectedModel(null);
1284
+ addMsg("info", `✅ ${selected.name} installed! Connecting...`);
1285
+ await connectToProvider(true);
1286
+ }
1287
+ catch (err) {
1288
+ setWizardPullError(err.message);
1289
+ }
1290
+ })();
1291
+ }
1292
+ }
1293
+ else {
1294
+ addMsg("info", "Ollama not found yet. Install it and press Enter again.");
1295
+ }
1296
+ return;
1297
+ }
1298
+ return;
1299
+ }
1300
+ if (wizardScreen === "pulling") {
1301
+ // Allow retry on error
1302
+ if (wizardPullError && key.return) {
1303
+ const selected = wizardSelectedModel;
1304
+ if (selected) {
1305
+ setWizardPullError(null);
1306
+ setWizardPullProgress({ status: "retrying", percent: 0 });
1307
+ (async () => {
1308
+ try {
1309
+ const running = await isOllamaRunning();
1310
+ if (!running) {
1311
+ startOllama();
1312
+ for (let i = 0; i < 15; i++) {
1313
+ await new Promise(r => setTimeout(r, 1000));
1314
+ if (await isOllamaRunning())
1315
+ break;
1316
+ }
1317
+ }
1318
+ await pullModel(selected.ollamaId, (p) => setWizardPullProgress(p));
1319
+ setWizardPullProgress({ status: "success", percent: 100 });
1320
+ await new Promise(r => setTimeout(r, 500));
1321
+ setWizardScreen(null);
1322
+ setWizardPullProgress(null);
1323
+ setWizardSelectedModel(null);
1324
+ addMsg("info", `✅ ${selected.name} installed! Connecting...`);
1325
+ await connectToProvider(true);
1326
+ }
1327
+ catch (err) {
1328
+ setWizardPullError(err.message);
1329
+ }
1330
+ })();
1331
+ }
1332
+ return;
1333
+ }
1334
+ if (wizardPullError && key.escape) {
1335
+ setWizardScreen("models");
1336
+ setWizardIndex(0);
1337
+ setWizardPullError(null);
1338
+ setWizardPullProgress(null);
1339
+ return;
1340
+ }
1341
+ return; // Ignore keys while pulling
1342
+ }
1343
+ return;
1344
+ }
1117
1345
  // Theme picker navigation
1118
1346
  if (themePicker) {
1119
1347
  const themeKeys = listThemes();
@@ -1344,7 +1572,12 @@ function App() {
1344
1572
  })(), skillsPicker === "remove" && (() => {
1345
1573
  const installed = listInstalledSkills();
1346
1574
  return (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.error, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.error, children: "Remove a skill:" }), installed.map((s, i) => (_jsxs(Text, { children: [i === skillsPickerIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsxs(Text, { color: i === skillsPickerIndex ? theme.colors.suggestion : theme.colors.muted, children: [s.name, " \u2014 ", s.description] })] }, s.name))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter remove · Esc back" })] }));
1347
- })(), themePicker && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.border, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.secondary, children: "Choose a theme:" }), listThemes().map((key, i) => (_jsxs(Text, { children: [i === themePickerIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === themePickerIndex ? theme.colors.suggestion : theme.colors.primary, bold: true, children: key }), _jsxs(Text, { color: theme.colors.muted, children: [" — ", THEMES[key].description] }), key === theme.name.toLowerCase() ? _jsx(Text, { color: theme.colors.muted, children: " (current)" }) : null] }, key))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter select · Esc cancel" })] })), sessionPicker && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.secondary, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.secondary, children: "Resume a session:" }), sessionPicker.map((s, i) => (_jsxs(Text, { children: [i === sessionPickerIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === sessionPickerIndex ? theme.colors.suggestion : theme.colors.muted, children: s.display })] }, s.id))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter select · Esc cancel" })] })), deleteSessionPicker && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.error, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.error, children: "Delete a session:" }), deleteSessionPicker.map((s, i) => (_jsxs(Text, { children: [i === deleteSessionPickerIndex ? 
_jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === deleteSessionPickerIndex ? theme.colors.suggestion : theme.colors.muted, children: s.display })] }, s.id))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter select · Esc cancel" })] })), deleteSessionConfirm && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.warning, paddingX: 1, marginBottom: 0, children: [_jsxs(Text, { bold: true, color: theme.colors.warning, children: ["Delete session ", deleteSessionConfirm.id, "?"] }), _jsxs(Text, { color: theme.colors.muted, children: [" ", deleteSessionConfirm.display] }), _jsxs(Text, { children: [_jsx(Text, { color: theme.colors.error, bold: true, children: " [y]" }), _jsx(Text, { children: "es " }), _jsx(Text, { color: theme.colors.success, bold: true, children: "[n]" }), _jsx(Text, { children: "o" })] })] })), showSuggestions && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.muted, paddingX: 1, marginBottom: 0, children: [cmdMatches.slice(0, 6).map((c, i) => (_jsxs(Text, { children: [i === cmdIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === cmdIndex ? theme.colors.suggestion : theme.colors.primary, bold: true, children: c.cmd }), _jsxs(Text, { color: theme.colors.muted, children: [" — ", c.desc] })] }, i))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Tab select" })] })), _jsxs(Box, { borderStyle: "single", borderColor: approval ? theme.colors.warning : theme.colors.border, paddingX: 1, children: [_jsx(Text, { color: theme.colors.secondary, bold: true, children: "> " }), approval ? (_jsx(Text, { color: theme.colors.warning, children: "waiting for approval..." })) : ready && !loading ? 
(_jsxs(Box, { children: [pastedChunks.map((p) => (_jsxs(Text, { color: theme.colors.muted, children: ["[Pasted text #", p.id, " +", p.lines, " lines]"] }, p.id))), _jsx(TextInput, { value: input, onChange: (v) => { setInput(v); setCmdIndex(0); }, onSubmit: handleSubmit }, inputKey)] })) : (_jsx(Text, { dimColor: true, children: loading ? "waiting for response..." : "initializing..." }))] }), agent && (_jsx(Box, { paddingX: 2, children: _jsxs(Text, { dimColor: true, children: ["💬 ", agent.getContextLength(), " messages · ~", (() => {
1575
+ })(), themePicker && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.border, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.secondary, children: "Choose a theme:" }), listThemes().map((key, i) => (_jsxs(Text, { children: [i === themePickerIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === themePickerIndex ? theme.colors.suggestion : theme.colors.primary, bold: true, children: key }), _jsxs(Text, { color: theme.colors.muted, children: [" — ", THEMES[key].description] }), key === theme.name.toLowerCase() ? _jsx(Text, { color: theme.colors.muted, children: " (current)" }) : null] }, key))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter select · Esc cancel" })] })), sessionPicker && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.secondary, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.secondary, children: "Resume a session:" }), sessionPicker.map((s, i) => (_jsxs(Text, { children: [i === sessionPickerIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === sessionPickerIndex ? theme.colors.suggestion : theme.colors.muted, children: s.display })] }, s.id))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter select · Esc cancel" })] })), deleteSessionPicker && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.error, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.error, children: "Delete a session:" }), deleteSessionPicker.map((s, i) => (_jsxs(Text, { children: [i === deleteSessionPickerIndex ? 
_jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === deleteSessionPickerIndex ? theme.colors.suggestion : theme.colors.muted, children: s.display })] }, s.id))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Enter select · Esc cancel" })] })), deleteSessionConfirm && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.warning, paddingX: 1, marginBottom: 0, children: [_jsxs(Text, { bold: true, color: theme.colors.warning, children: ["Delete session ", deleteSessionConfirm.id, "?"] }), _jsxs(Text, { color: theme.colors.muted, children: [" ", deleteSessionConfirm.display] }), _jsxs(Text, { children: [_jsx(Text, { color: theme.colors.error, bold: true, children: " [y]" }), _jsx(Text, { children: "es " }), _jsx(Text, { color: theme.colors.success, bold: true, children: "[n]" }), _jsx(Text, { children: "o" })] })] })), wizardScreen === "connection" && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.border, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.secondary, children: "No LLM detected. How do you want to connect?" }), _jsx(Text, { children: "" }), [
1576
+ { key: "local", icon: "\uD83D\uDDA5\uFE0F", label: "Set up a local model", desc: "free, runs on your machine" },
1577
+ { key: "openrouter", icon: "\uD83C\uDF10", label: "OpenRouter", desc: "200+ cloud models, browser login" },
1578
+ { key: "apikey", icon: "\uD83D\uDD11", label: "Enter API key manually", desc: "" },
1579
+ { key: "existing", icon: "\u2699\uFE0F", label: "I already have a server running", desc: "" },
1580
+ ].map((item, i) => (_jsxs(Text, { children: [i === wizardIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: " \u25B8 " }) : _jsx(Text, { children: " " }), _jsxs(Text, { color: i === wizardIndex ? theme.colors.suggestion : theme.colors.primary, bold: true, children: [item.icon, " ", item.label] }), item.desc ? _jsxs(Text, { color: theme.colors.muted, children: [" (", item.desc, ")"] }) : null] }, item.key))), _jsx(Text, { children: "" }), _jsx(Text, { dimColor: true, children: " \u2191\u2193 navigate \u00B7 Enter to select" })] })), wizardScreen === "models" && wizardHardware && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.border, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.secondary, children: "Your hardware:" }), _jsxs(Text, { color: theme.colors.muted, children: [" CPU: ", wizardHardware.cpu.name, " (", wizardHardware.cpu.cores, " cores)"] }), _jsxs(Text, { color: theme.colors.muted, children: [" RAM: ", formatBytes(wizardHardware.ram)] }), wizardHardware.gpu ? (_jsxs(Text, { color: theme.colors.muted, children: [" GPU: ", wizardHardware.gpu.name, wizardHardware.gpu.vram > 0 ? ` (${formatBytes(wizardHardware.gpu.vram)})` : ""] })) : (_jsx(Text, { color: theme.colors.muted, children: " GPU: none detected" })), _jsx(Text, { children: "" }), _jsx(Text, { bold: true, color: theme.colors.secondary, children: "Recommended models:" }), _jsx(Text, { children: "" }), wizardModels.map((m, i) => (_jsxs(Text, { children: [i === wizardIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: " \u25B8 " }) : _jsx(Text, { children: " " }), _jsxs(Text, { children: [getFitIcon(m.fit), " "] }), _jsx(Text, { color: i === wizardIndex ? theme.colors.suggestion : theme.colors.primary, bold: true, children: m.name }), _jsxs(Text, { color: theme.colors.muted, children: [" ~", m.size, " GB \u00B7 ", m.quality === "best" ? "Best" : m.quality === "great" ? 
"Great" : "Good", " quality \u00B7 ", m.speed] })] }, m.ollamaId))), wizardModels.length === 0 && (_jsx(Text, { color: theme.colors.error, children: " No suitable models found for your hardware." })), _jsx(Text, { children: "" }), _jsx(Text, { dimColor: true, children: " \u2191\u2193 navigate \u00B7 Enter to install \u00B7 Esc back" })] })), wizardScreen === "install-ollama" && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.warning, paddingX: 1, marginBottom: 0, children: [_jsx(Text, { bold: true, color: theme.colors.warning, children: "Ollama is required for local models." }), _jsx(Text, { children: "" }), _jsxs(Text, { color: theme.colors.primary, children: [" Install with: ", _jsx(Text, { bold: true, children: getOllamaInstallCommand(wizardHardware?.os ?? "linux") })] }), _jsx(Text, { children: "" }), _jsx(Text, { dimColor: true, children: " Run the command above, then press Enter to continue..." }), _jsx(Text, { dimColor: true, children: " Esc to go back" })] })), wizardScreen === "pulling" && wizardSelectedModel && (_jsx(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.border, paddingX: 1, marginBottom: 0, children: wizardPullError ? (_jsxs(_Fragment, { children: [_jsxs(Text, { color: theme.colors.error, bold: true, children: [" \u274C Error: ", wizardPullError] }), _jsx(Text, { children: "" }), _jsx(Text, { dimColor: true, children: " Press Enter to retry \u00B7 Esc to go back" })] })) : wizardPullProgress ? (_jsxs(_Fragment, { children: [_jsxs(Text, { bold: true, color: theme.colors.secondary, children: [" Downloading ", wizardSelectedModel.name, "..."] }), wizardPullProgress.status === "downloading" || wizardPullProgress.percent > 0 ? 
(_jsx(_Fragment, { children: _jsxs(Text, { children: [" ", _jsxs(Text, { color: theme.colors.primary, children: ["\u2588".repeat(Math.floor(wizardPullProgress.percent / 5)), "\u2591".repeat(20 - Math.floor(wizardPullProgress.percent / 5))] }), " ", _jsxs(Text, { bold: true, children: [wizardPullProgress.percent, "%"] }), wizardPullProgress.completed != null && wizardPullProgress.total != null ? (_jsxs(Text, { color: theme.colors.muted, children: [" \u00B7 ", formatBytes(wizardPullProgress.completed), " / ", formatBytes(wizardPullProgress.total)] })) : null] }) })) : (_jsxs(Text, { color: theme.colors.muted, children: [" ", wizardPullProgress.status, "..."] }))] })) : null })), showSuggestions && (_jsxs(Box, { flexDirection: "column", borderStyle: "single", borderColor: theme.colors.muted, paddingX: 1, marginBottom: 0, children: [cmdMatches.slice(0, 6).map((c, i) => (_jsxs(Text, { children: [i === cmdIndex ? _jsx(Text, { color: theme.colors.suggestion, bold: true, children: "▸ " }) : _jsx(Text, { children: " " }), _jsx(Text, { color: i === cmdIndex ? theme.colors.suggestion : theme.colors.primary, bold: true, children: c.cmd }), _jsxs(Text, { color: theme.colors.muted, children: [" — ", c.desc] })] }, i))), _jsx(Text, { dimColor: true, children: " ↑↓ navigate · Tab select" })] })), _jsxs(Box, { borderStyle: "single", borderColor: approval ? theme.colors.warning : theme.colors.border, paddingX: 1, children: [_jsx(Text, { color: theme.colors.secondary, bold: true, children: "> " }), approval ? (_jsx(Text, { color: theme.colors.warning, children: "waiting for approval..." })) : ready && !loading ? (_jsxs(Box, { children: [pastedChunks.map((p) => (_jsxs(Text, { color: theme.colors.muted, children: ["[Pasted text #", p.id, " +", p.lines, " lines]"] }, p.id))), _jsx(TextInput, { value: input, onChange: (v) => { setInput(v); setCmdIndex(0); }, onSubmit: handleSubmit }, inputKey)] })) : (_jsx(Text, { dimColor: true, children: loading ? "waiting for response..." 
: "initializing..." }))] }), agent && (_jsx(Box, { paddingX: 2, children: _jsxs(Text, { dimColor: true, children: ["💬 ", agent.getContextLength(), " messages · ~", (() => {
1348
1581
  const tokens = agent.estimateTokens();
1349
1582
  return tokens >= 1000 ? `${(tokens / 1000).toFixed(1)}k` : String(tokens);
1350
1583
  })(), " tokens", (() => {
@@ -0,0 +1,17 @@
1
+ export interface HardwareInfo {
2
+ cpu: {
3
+ name: string;
4
+ cores: number;
5
+ speed: number;
6
+ };
7
+ ram: number;
8
+ gpu: {
9
+ name: string;
10
+ vram: number;
11
+ } | null;
12
+ os: "macos" | "linux" | "windows";
13
+ appleSilicon: boolean;
14
+ }
15
+ export declare function detectHardware(): HardwareInfo;
16
+ /** Format bytes to human-readable string */
17
+ export declare function formatBytes(bytes: number): string;
@@ -0,0 +1,120 @@
1
+ import os from "os";
2
+ import { execSync } from "child_process";
3
+ function getOS() {
4
+ switch (process.platform) {
5
+ case "darwin": return "macos";
6
+ case "win32": return "windows";
7
+ default: return "linux";
8
+ }
9
+ }
10
+ function getCPU() {
11
+ const cpus = os.cpus();
12
+ return {
13
+ name: cpus[0]?.model?.trim() ?? "Unknown CPU",
14
+ cores: cpus.length,
15
+ speed: cpus[0]?.speed ?? 0, // MHz
16
+ };
17
+ }
18
+ function getGPU(platform) {
19
+ try {
20
+ if (platform === "macos") {
21
+ const raw = execSync("system_profiler SPDisplaysDataType -json", {
22
+ encoding: "utf-8",
23
+ timeout: 5000,
24
+ stdio: ["pipe", "pipe", "pipe"],
25
+ });
26
+ const data = JSON.parse(raw);
27
+ const displays = data?.SPDisplaysDataType;
28
+ if (Array.isArray(displays) && displays.length > 0) {
29
+ const gpu = displays[0];
30
+ const name = gpu.sppci_model ?? gpu._name ?? "Unknown GPU";
31
+ // On Apple Silicon, VRAM is shared (unified memory) — report total RAM
32
+ const vramStr = gpu["spdisplays_vram"] ?? gpu["spdisplays_vram_shared"] ?? "";
33
+ let vram = 0;
34
+ if (vramStr) {
35
+ const match = vramStr.match(/(\d+)\s*(GB|MB)/i);
36
+ if (match) {
37
+ vram = parseInt(match[1]) * (match[2].toUpperCase() === "GB" ? 1024 * 1024 * 1024 : 1024 * 1024);
38
+ }
39
+ }
40
+ // Apple Silicon unified memory — use total RAM as VRAM
41
+ if (vram === 0 && name.toLowerCase().includes("apple")) {
42
+ vram = os.totalmem();
43
+ }
44
+ return { name, vram };
45
+ }
46
+ }
47
+ if (platform === "linux") {
48
+ // Try NVIDIA first
49
+ try {
50
+ const raw = execSync("nvidia-smi --query-gpu=name,memory.total --format=csv,noheader", {
51
+ encoding: "utf-8",
52
+ timeout: 5000,
53
+ stdio: ["pipe", "pipe", "pipe"],
54
+ });
55
+ const line = raw.trim().split("\n")[0];
56
+ if (line) {
57
+ const parts = line.split(",").map(s => s.trim());
58
+ const name = parts[0] ?? "NVIDIA GPU";
59
+ const memMatch = (parts[1] ?? "").match(/(\d+)/);
60
+ const vram = memMatch ? parseInt(memMatch[1]) * 1024 * 1024 : 0; // MiB to bytes
61
+ return { name, vram };
62
+ }
63
+ }
64
+ catch {
65
+ // No NVIDIA, try lspci
66
+ try {
67
+ const raw = execSync("lspci | grep -i vga", {
68
+ encoding: "utf-8",
69
+ timeout: 5000,
70
+ stdio: ["pipe", "pipe", "pipe"],
71
+ });
72
+ const line = raw.trim().split("\n")[0];
73
+ if (line) {
74
+ const name = line.split(":").slice(2).join(":").trim() || "Unknown GPU";
75
+ return { name, vram: 0 };
76
+ }
77
+ }
78
+ catch { /* no lspci */ }
79
+ }
80
+ }
81
+ if (platform === "windows") {
82
+ try {
83
+ const raw = execSync("wmic path win32_VideoController get Name,AdapterRAM /format:csv", {
84
+ encoding: "utf-8",
85
+ timeout: 5000,
86
+ stdio: ["pipe", "pipe", "pipe"],
87
+ });
88
+ const lines = raw.trim().split("\n").filter(l => l.trim() && !l.startsWith("Node"));
89
+ if (lines.length > 0) {
90
+ const parts = lines[0].split(",");
91
+ const adapterRAM = parseInt(parts[1] ?? "0");
92
+ const name = parts[2]?.trim() ?? "Unknown GPU";
93
+ return { name, vram: isNaN(adapterRAM) ? 0 : adapterRAM };
94
+ }
95
+ }
96
+ catch { /* no wmic */ }
97
+ }
98
+ }
99
+ catch {
100
+ // GPU detection failed
101
+ }
102
+ return null;
103
+ }
104
+ export function detectHardware() {
105
+ const platform = getOS();
106
+ const cpu = getCPU();
107
+ const ram = os.totalmem();
108
+ const gpu = getGPU(platform);
109
+ // Detect Apple Silicon
110
+ const appleSilicon = platform === "macos" && /apple\s+m/i.test(cpu.name);
111
+ return { cpu, ram, gpu, os: platform, appleSilicon };
112
+ }
113
+ /** Format bytes to human-readable string */
114
+ export function formatBytes(bytes) {
115
+ if (bytes >= 1024 * 1024 * 1024)
116
+ return `${Math.round(bytes / (1024 * 1024 * 1024))} GB`;
117
+ if (bytes >= 1024 * 1024)
118
+ return `${Math.round(bytes / (1024 * 1024))} MB`;
119
+ return `${Math.round(bytes / 1024)} KB`;
120
+ }
@@ -0,0 +1,17 @@
1
+ import type { HardwareInfo } from "./hardware.js";
2
+ export interface RecommendedModel {
3
+ name: string;
4
+ ollamaId: string;
5
+ size: number;
6
+ ramRequired: number;
7
+ vramOptimal: number;
8
+ description: string;
9
+ speed: string;
10
+ quality: "good" | "great" | "best";
11
+ }
12
+ export type ModelFit = "perfect" | "good" | "tight" | "skip";
13
+ export interface ScoredModel extends RecommendedModel {
14
+ fit: ModelFit;
15
+ }
16
+ export declare function getRecommendations(hardware: HardwareInfo): ScoredModel[];
17
+ export declare function getFitIcon(fit: ModelFit): string;
@@ -0,0 +1,113 @@
1
// Curated catalog of local coding models, roughly lightest to heaviest.
// Units: ramRequired / vramOptimal are in GB (compared against GB-converted
// hardware figures when scoring); size is presumably the download size in
// GB — confirm where it is displayed.
const MODELS = [
    {
        name: "Qwen 2.5 Coder 3B",
        ollamaId: "qwen2.5-coder:3b",
        size: 2,
        ramRequired: 8,
        vramOptimal: 4,
        description: "Lightweight, fast coding model",
        speed: "~60 tok/s on M1",
        quality: "good",
    },
    {
        name: "Qwen 2.5 Coder 7B",
        ollamaId: "qwen2.5-coder:7b",
        size: 5,
        ramRequired: 16,
        vramOptimal: 8,
        description: "Sweet spot for most machines",
        speed: "~45 tok/s on M1",
        quality: "great",
    },
    {
        name: "Qwen 2.5 Coder 14B",
        ollamaId: "qwen2.5-coder:14b",
        size: 9,
        ramRequired: 32,
        vramOptimal: 16,
        description: "High quality coding",
        speed: "~25 tok/s on M1 Pro",
        quality: "best",
    },
    {
        name: "Qwen 2.5 Coder 32B",
        ollamaId: "qwen2.5-coder:32b",
        size: 20,
        ramRequired: 48,
        vramOptimal: 32,
        description: "Premium quality, needs lots of RAM",
        speed: "~12 tok/s on M1 Max",
        quality: "best",
    },
    {
        name: "DeepSeek Coder V2 16B",
        ollamaId: "deepseek-coder-v2:16b",
        size: 9,
        ramRequired: 32,
        vramOptimal: 16,
        description: "Strong alternative for coding",
        speed: "~30 tok/s on M1 Pro",
        quality: "great",
    },
    {
        name: "CodeLlama 7B",
        ollamaId: "codellama:7b",
        size: 4,
        ramRequired: 16,
        vramOptimal: 8,
        description: "Meta's coding model",
        speed: "~40 tok/s on M1",
        quality: "good",
    },
    {
        name: "StarCoder2 7B",
        ollamaId: "starcoder2:7b",
        size: 4,
        ramRequired: 16,
        vramOptimal: 8,
        description: "Good for code completion",
        speed: "~40 tok/s on M1",
        quality: "good",
    },
];
73
/**
 * Rate how well one catalog model fits the machine.
 * @param model - Entry with ramRequired / vramOptimal in GB.
 * @param ramGB - Total system RAM in GB.
 * @param vramGB - Effective VRAM in GB (equals RAM on Apple Silicon).
 * @returns "perfect" | "good" | "tight" | "skip"
 */
function scoreModel(model, ramGB, vramGB) {
    // Hard floor: not enough RAM to load the model at all.
    if (ramGB < model.ramRequired)
        return "skip";
    const ramHeadroom = ramGB - model.ramRequired;
    const hasGoodVRAM = vramGB >= model.vramOptimal;
    if (hasGoodVRAM && ramHeadroom >= 4)
        return "perfect";
    if (hasGoodVRAM || ramHeadroom >= 8)
        return "good";
    // Past the RAM guard above, headroom is always >= 0, so every remaining
    // case is "tight". (The original trailing `return "skip"` was unreachable
    // dead code and has been removed — behavior is unchanged.)
    return "tight";
}
86
// Rank maps used by the recommendation sort: a higher value sorts earlier
// (results are ordered by fit first, then quality as the tiebreaker).
const qualityOrder = { best: 3, great: 2, good: 1 };
const fitOrder = { perfect: 4, good: 3, tight: 2, skip: 1 };
88
/**
 * Score every catalog model against the detected hardware and return the
 * list sorted best-first: by fit rating, then by quality tier.
 * On Apple Silicon the unified-memory pool counts as VRAM.
 * @param hardware - Snapshot from detectHardware().
 * @returns All models from the catalog, each annotated with a `fit` rating.
 */
export function getRecommendations(hardware) {
    const BYTES_PER_GB = 1024 * 1024 * 1024;
    const ramGB = hardware.ram / BYTES_PER_GB;
    const vramGB = hardware.gpu?.vram ? hardware.gpu.vram / BYTES_PER_GB : 0;
    // Unified memory: treat all RAM as VRAM on Apple Silicon.
    const usableVRAM = hardware.appleSilicon ? ramGB : vramGB;
    const ranked = MODELS.map((model) => ({
        ...model,
        fit: scoreModel(model, ramGB, usableVRAM),
    }));
    ranked.sort((left, right) => {
        const byFit = (fitOrder[right.fit] ?? 0) - (fitOrder[left.fit] ?? 0);
        if (byFit !== 0)
            return byFit;
        return (qualityOrder[right.quality] ?? 0) - (qualityOrder[left.quality] ?? 0);
    });
    return ranked;
}
106
/**
 * Map a fit rating to its display emoji.
 * Returns undefined for any value outside the four known ratings.
 */
export function getFitIcon(fit) {
    const icons = {
        perfect: "\u2B50",     // ⭐
        good: "\u2705",        // ✅
        tight: "\u26A0\uFE0F", // ⚠️
        skip: "\u274C",        // ❌
    };
    return icons[fit];
}
@@ -0,0 +1,22 @@
1
/** Check if the ollama binary exists on PATH. */
export declare function isOllamaInstalled(): boolean;
/** Check if the ollama server is responding. */
export declare function isOllamaRunning(): Promise<boolean>;
/** Get the install command for the user's OS. */
export declare function getOllamaInstallCommand(os: "macos" | "linux" | "windows"): string;
/** Start `ollama serve` in the background. */
export declare function startOllama(): void;
/** One progress update emitted while pulling a model. */
export interface PullProgress {
    /** Phase label reported by Ollama — presumably strings like "pulling manifest"; confirm against implementation. */
    status: string;
    /** Total size of the current download when known — presumably bytes; confirm. */
    total?: number;
    /** Amount downloaded so far, in the same unit as `total`. */
    completed?: number;
    /** Overall progress — presumably a 0–100 percentage; confirm. */
    percent: number;
}
/**
 * Pull a model from Ollama registry.
 * Calls onProgress with download updates.
 * Returns a promise that resolves when complete.
 */
export declare function pullModel(modelId: string, onProgress?: (progress: PullProgress) => void): Promise<void>;
/** List models installed in Ollama. */
export declare function listInstalledModels(): Promise<string[]>;