loclaude 0.0.1-alpha.1 → 0.0.1-alpha.3

This diff compares the publicly available contents of the two package versions as published to their public registry, and is provided for informational purposes only.
@@ -18,7 +18,7 @@ var __toESM = (mod, isNodeMode, target) => {
18
18
  var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
19
19
  var __require = import.meta.require;
20
20
 
21
- // ../../node_modules/cli-width/index.js
21
+ // ../../node_modules/.bun/cli-width@4.1.0/node_modules/cli-width/index.js
22
22
  var require_cli_width = __commonJS((exports, module) => {
23
23
  module.exports = cliWidth;
24
24
  function normalizeOpts(options) {
@@ -58,7 +58,7 @@ var require_cli_width = __commonJS((exports, module) => {
58
58
  }
59
59
  });
60
60
 
61
- // ../../node_modules/mute-stream/lib/index.js
61
+ // ../../node_modules/.bun/mute-stream@3.0.0/node_modules/mute-stream/lib/index.js
62
62
  var require_lib = __commonJS((exports, module) => {
63
63
  var Stream = __require("stream");
64
64
 
@@ -178,7 +178,7 @@ var require_lib = __commonJS((exports, module) => {
178
178
  module.exports = MuteStream;
179
179
  });
180
180
 
181
- // ../../node_modules/bytes/index.js
181
+ // ../../node_modules/.bun/bytes@3.1.2/node_modules/bytes/index.js
182
182
  var require_bytes = __commonJS((exports, module) => {
183
183
  /*!
184
184
  * bytes
@@ -270,7 +270,77 @@ var require_bytes = __commonJS((exports, module) => {
270
270
  }
271
271
  });
272
272
 
273
- // ../../node_modules/cac/dist/index.mjs
273
+ // ../../node_modules/.bun/picocolors@1.1.1/node_modules/picocolors/picocolors.js
274
+ var require_picocolors = __commonJS((exports, module) => {
275
+ var p = process || {};
276
+ var argv = p.argv || [];
277
+ var env = p.env || {};
278
+ var isColorSupported = !(!!env.NO_COLOR || argv.includes("--no-color")) && (!!env.FORCE_COLOR || argv.includes("--color") || p.platform === "win32" || (p.stdout || {}).isTTY && env.TERM !== "dumb" || !!env.CI);
279
+ var formatter = (open, close, replace = open) => (input) => {
280
+ let string = "" + input, index = string.indexOf(close, open.length);
281
+ return ~index ? open + replaceClose(string, close, replace, index) + close : open + string + close;
282
+ };
283
+ var replaceClose = (string, close, replace, index) => {
284
+ let result = "", cursor = 0;
285
+ do {
286
+ result += string.substring(cursor, index) + replace;
287
+ cursor = index + close.length;
288
+ index = string.indexOf(close, cursor);
289
+ } while (~index);
290
+ return result + string.substring(cursor);
291
+ };
292
+ var createColors = (enabled = isColorSupported) => {
293
+ let f = enabled ? formatter : () => String;
294
+ return {
295
+ isColorSupported: enabled,
296
+ reset: f("\x1B[0m", "\x1B[0m"),
297
+ bold: f("\x1B[1m", "\x1B[22m", "\x1B[22m\x1B[1m"),
298
+ dim: f("\x1B[2m", "\x1B[22m", "\x1B[22m\x1B[2m"),
299
+ italic: f("\x1B[3m", "\x1B[23m"),
300
+ underline: f("\x1B[4m", "\x1B[24m"),
301
+ inverse: f("\x1B[7m", "\x1B[27m"),
302
+ hidden: f("\x1B[8m", "\x1B[28m"),
303
+ strikethrough: f("\x1B[9m", "\x1B[29m"),
304
+ black: f("\x1B[30m", "\x1B[39m"),
305
+ red: f("\x1B[31m", "\x1B[39m"),
306
+ green: f("\x1B[32m", "\x1B[39m"),
307
+ yellow: f("\x1B[33m", "\x1B[39m"),
308
+ blue: f("\x1B[34m", "\x1B[39m"),
309
+ magenta: f("\x1B[35m", "\x1B[39m"),
310
+ cyan: f("\x1B[36m", "\x1B[39m"),
311
+ white: f("\x1B[37m", "\x1B[39m"),
312
+ gray: f("\x1B[90m", "\x1B[39m"),
313
+ bgBlack: f("\x1B[40m", "\x1B[49m"),
314
+ bgRed: f("\x1B[41m", "\x1B[49m"),
315
+ bgGreen: f("\x1B[42m", "\x1B[49m"),
316
+ bgYellow: f("\x1B[43m", "\x1B[49m"),
317
+ bgBlue: f("\x1B[44m", "\x1B[49m"),
318
+ bgMagenta: f("\x1B[45m", "\x1B[49m"),
319
+ bgCyan: f("\x1B[46m", "\x1B[49m"),
320
+ bgWhite: f("\x1B[47m", "\x1B[49m"),
321
+ blackBright: f("\x1B[90m", "\x1B[39m"),
322
+ redBright: f("\x1B[91m", "\x1B[39m"),
323
+ greenBright: f("\x1B[92m", "\x1B[39m"),
324
+ yellowBright: f("\x1B[93m", "\x1B[39m"),
325
+ blueBright: f("\x1B[94m", "\x1B[39m"),
326
+ magentaBright: f("\x1B[95m", "\x1B[39m"),
327
+ cyanBright: f("\x1B[96m", "\x1B[39m"),
328
+ whiteBright: f("\x1B[97m", "\x1B[39m"),
329
+ bgBlackBright: f("\x1B[100m", "\x1B[49m"),
330
+ bgRedBright: f("\x1B[101m", "\x1B[49m"),
331
+ bgGreenBright: f("\x1B[102m", "\x1B[49m"),
332
+ bgYellowBright: f("\x1B[103m", "\x1B[49m"),
333
+ bgBlueBright: f("\x1B[104m", "\x1B[49m"),
334
+ bgMagentaBright: f("\x1B[105m", "\x1B[49m"),
335
+ bgCyanBright: f("\x1B[106m", "\x1B[49m"),
336
+ bgWhiteBright: f("\x1B[107m", "\x1B[49m")
337
+ };
338
+ };
339
+ module.exports = createColors();
340
+ module.exports.createColors = createColors;
341
+ });
342
+
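
Note: the hunk above vendors picocolors 1.1.1 into the bundle as a new dependency, and the rewritten comment paths show the project switching to Bun's isolated `.bun` node_modules layout. The vendored formatter wraps input in open/close ANSI codes and re-opens the outer style whenever the inner text already contains the close code. A minimal standalone sketch of that technique (the names below are illustrative, not part of the package):

    // Sketch of the open/close ANSI wrapping used by the vendored picocolors above.
    // If the wrapped text already contains the close code (e.g. a nested color),
    // replaceClose re-emits the open code so the outer style is not cut short.
    const formatter =
      (open: string, close: string, replace: string = open) =>
      (input: unknown): string => {
        const s = String(input);
        const i = s.indexOf(close, open.length);
        return i !== -1
          ? open + replaceClose(s, close, replace, i) + close
          : open + s + close;
      };

    function replaceClose(s: string, close: string, replace: string, index: number): string {
      let result = "";
      let cursor = 0;
      let i = index;
      do {
        result += s.substring(cursor, i) + replace;
        cursor = i + close.length;
        i = s.indexOf(close, cursor);
      } while (i !== -1);
      return result + s.substring(cursor);
    }

    // Example: the trailing text stays red even though yellow() emits red's close code.
    const red = formatter("\x1b[31m", "\x1b[39m");
    const yellow = formatter("\x1b[33m", "\x1b[39m");
    console.log(red(`red ${yellow("yellow")} red again`));
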
343
+ // ../../node_modules/.bun/cac@6.7.14/node_modules/cac/dist/index.mjs
274
344
  import { EventEmitter } from "events";
275
345
  function toArr(any) {
276
346
  return any == null ? [] : Array.isArray(any) ? any : [any];
@@ -1012,13 +1082,13 @@ function getClaudeExtraArgs() {
1012
1082
  var OLLAMA_URL = getOllamaUrl();
1013
1083
  var DEFAULT_MODEL = getDefaultModel();
1014
1084
 
1015
- // ../../node_modules/@inquirer/core/dist/lib/key.js
1085
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/key.js
1016
1086
  var isUpKey = (key, keybindings = []) => key.name === "up" || keybindings.includes("vim") && key.name === "k" || keybindings.includes("emacs") && key.ctrl && key.name === "p";
1017
1087
  var isDownKey = (key, keybindings = []) => key.name === "down" || keybindings.includes("vim") && key.name === "j" || keybindings.includes("emacs") && key.ctrl && key.name === "n";
1018
1088
  var isBackspaceKey = (key) => key.name === "backspace";
1019
1089
  var isNumberKey = (key) => "1234567890".includes(key.name);
1020
1090
  var isEnterKey = (key) => key.name === "enter" || key.name === "return";
1021
- // ../../node_modules/@inquirer/core/dist/lib/errors.js
1091
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/errors.js
1022
1092
  class AbortPromptError extends Error {
1023
1093
  name = "AbortPromptError";
1024
1094
  message = "Prompt was aborted";
@@ -1044,10 +1114,10 @@ class HookError extends Error {
1044
1114
  class ValidationError extends Error {
1045
1115
  name = "ValidationError";
1046
1116
  }
1047
- // ../../node_modules/@inquirer/core/dist/lib/use-state.js
1117
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-state.js
1048
1118
  import { AsyncResource as AsyncResource2 } from "async_hooks";
1049
1119
 
1050
- // ../../node_modules/@inquirer/core/dist/lib/hook-engine.js
1120
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/hook-engine.js
1051
1121
  import { AsyncLocalStorage, AsyncResource } from "async_hooks";
1052
1122
  var hookStorage = new AsyncLocalStorage;
1053
1123
  function createStore(rl) {
@@ -1152,7 +1222,7 @@ var effectScheduler = {
1152
1222
  }
1153
1223
  };
1154
1224
 
1155
- // ../../node_modules/@inquirer/core/dist/lib/use-state.js
1225
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-state.js
1156
1226
  function useState(defaultValue) {
1157
1227
  return withPointer((pointer) => {
1158
1228
  const setState = AsyncResource2.bind(function setState(newValue) {
@@ -1170,7 +1240,7 @@ function useState(defaultValue) {
1170
1240
  });
1171
1241
  }
1172
1242
 
1173
- // ../../node_modules/@inquirer/core/dist/lib/use-effect.js
1243
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-effect.js
1174
1244
  function useEffect(cb, depArray) {
1175
1245
  withPointer((pointer) => {
1176
1246
  const oldDeps = pointer.get();
@@ -1182,10 +1252,10 @@ function useEffect(cb, depArray) {
1182
1252
  });
1183
1253
  }
1184
1254
 
1185
- // ../../node_modules/@inquirer/core/dist/lib/theme.js
1255
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/theme.js
1186
1256
  import { styleText } from "util";
1187
1257
 
1188
- // ../../node_modules/@inquirer/figures/dist/index.js
1258
+ // ../../node_modules/.bun/@inquirer+figures@2.0.3/node_modules/@inquirer/figures/dist/index.js
1189
1259
  import process2 from "process";
1190
1260
  function isUnicodeSupported() {
1191
1261
  if (process2.platform !== "win32") {
@@ -1474,7 +1544,7 @@ var figures = shouldUseMain ? mainSymbols : fallbackSymbols;
1474
1544
  var dist_default2 = figures;
1475
1545
  var replacements = Object.entries(specialMainSymbols);
1476
1546
 
1477
- // ../../node_modules/@inquirer/core/dist/lib/theme.js
1547
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/theme.js
1478
1548
  var defaultTheme = {
1479
1549
  prefix: {
1480
1550
  idle: styleText("blue", "?"),
@@ -1495,7 +1565,7 @@ var defaultTheme = {
1495
1565
  }
1496
1566
  };
1497
1567
 
1498
- // ../../node_modules/@inquirer/core/dist/lib/make-theme.js
1568
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/make-theme.js
1499
1569
  function isPlainObject(value) {
1500
1570
  if (typeof value !== "object" || value === null)
1501
1571
  return false;
@@ -1523,7 +1593,7 @@ function makeTheme(...themes) {
1523
1593
  return deepMerge2(...themesToMerge);
1524
1594
  }
1525
1595
 
1526
- // ../../node_modules/@inquirer/core/dist/lib/use-prefix.js
1596
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-prefix.js
1527
1597
  function usePrefix({ status = "idle", theme }) {
1528
1598
  const [showLoader, setShowLoader] = useState(false);
1529
1599
  const [tick, setTick] = useState(0);
@@ -1553,7 +1623,7 @@ function usePrefix({ status = "idle", theme }) {
1553
1623
  const iconName = status === "loading" ? "idle" : status;
1554
1624
  return typeof prefix === "string" ? prefix : prefix[iconName] ?? prefix["idle"];
1555
1625
  }
1556
- // ../../node_modules/@inquirer/core/dist/lib/use-memo.js
1626
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-memo.js
1557
1627
  function useMemo(fn, dependencies) {
1558
1628
  return withPointer((pointer) => {
1559
1629
  const prev = pointer.get();
@@ -1565,11 +1635,11 @@ function useMemo(fn, dependencies) {
1565
1635
  return prev.value;
1566
1636
  });
1567
1637
  }
1568
- // ../../node_modules/@inquirer/core/dist/lib/use-ref.js
1638
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-ref.js
1569
1639
  function useRef(val) {
1570
1640
  return useState({ current: val })[0];
1571
1641
  }
1572
- // ../../node_modules/@inquirer/core/dist/lib/use-keypress.js
1642
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-keypress.js
1573
1643
  function useKeypress(userHandler) {
1574
1644
  const signal = useRef(userHandler);
1575
1645
  signal.current = userHandler;
@@ -1587,10 +1657,10 @@ function useKeypress(userHandler) {
1587
1657
  };
1588
1658
  }, []);
1589
1659
  }
1590
- // ../../node_modules/@inquirer/core/dist/lib/utils.js
1660
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/utils.js
1591
1661
  var import_cli_width = __toESM(require_cli_width(), 1);
1592
1662
 
1593
- // ../../node_modules/string-width/node_modules/strip-ansi/node_modules/ansi-regex/index.js
1663
+ // ../../node_modules/.bun/ansi-regex@6.2.2/node_modules/ansi-regex/index.js
1594
1664
  function ansiRegex({ onlyFirst = false } = {}) {
1595
1665
  const ST = "(?:\\u0007|\\u001B\\u005C|\\u009C)";
1596
1666
  const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
@@ -1599,7 +1669,7 @@ function ansiRegex({ onlyFirst = false } = {}) {
1599
1669
  return new RegExp(pattern, onlyFirst ? undefined : "g");
1600
1670
  }
1601
1671
 
1602
- // ../../node_modules/string-width/node_modules/strip-ansi/index.js
1672
+ // ../../node_modules/.bun/strip-ansi@7.1.2/node_modules/strip-ansi/index.js
1603
1673
  var regex = ansiRegex();
1604
1674
  function stripAnsi(string) {
1605
1675
  if (typeof string !== "string") {
@@ -1608,7 +1678,7 @@ function stripAnsi(string) {
1608
1678
  return string.replace(regex, "");
1609
1679
  }
1610
1680
 
1611
- // ../../node_modules/get-east-asian-width/lookup.js
1681
+ // ../../node_modules/.bun/get-east-asian-width@1.4.0/node_modules/get-east-asian-width/lookup.js
1612
1682
  function isAmbiguous(x) {
1613
1683
  return x === 161 || x === 164 || x === 167 || x === 168 || x === 170 || x === 173 || x === 174 || x >= 176 && x <= 180 || x >= 182 && x <= 186 || x >= 188 && x <= 191 || x === 198 || x === 208 || x === 215 || x === 216 || x >= 222 && x <= 225 || x === 230 || x >= 232 && x <= 234 || x === 236 || x === 237 || x === 240 || x === 242 || x === 243 || x >= 247 && x <= 250 || x === 252 || x === 254 || x === 257 || x === 273 || x === 275 || x === 283 || x === 294 || x === 295 || x === 299 || x >= 305 && x <= 307 || x === 312 || x >= 319 && x <= 322 || x === 324 || x >= 328 && x <= 331 || x === 333 || x === 338 || x === 339 || x === 358 || x === 359 || x === 363 || x === 462 || x === 464 || x === 466 || x === 468 || x === 470 || x === 472 || x === 474 || x === 476 || x === 593 || x === 609 || x === 708 || x === 711 || x >= 713 && x <= 715 || x === 717 || x === 720 || x >= 728 && x <= 731 || x === 733 || x === 735 || x >= 768 && x <= 879 || x >= 913 && x <= 929 || x >= 931 && x <= 937 || x >= 945 && x <= 961 || x >= 963 && x <= 969 || x === 1025 || x >= 1040 && x <= 1103 || x === 1105 || x === 8208 || x >= 8211 && x <= 8214 || x === 8216 || x === 8217 || x === 8220 || x === 8221 || x >= 8224 && x <= 8226 || x >= 8228 && x <= 8231 || x === 8240 || x === 8242 || x === 8243 || x === 8245 || x === 8251 || x === 8254 || x === 8308 || x === 8319 || x >= 8321 && x <= 8324 || x === 8364 || x === 8451 || x === 8453 || x === 8457 || x === 8467 || x === 8470 || x === 8481 || x === 8482 || x === 8486 || x === 8491 || x === 8531 || x === 8532 || x >= 8539 && x <= 8542 || x >= 8544 && x <= 8555 || x >= 8560 && x <= 8569 || x === 8585 || x >= 8592 && x <= 8601 || x === 8632 || x === 8633 || x === 8658 || x === 8660 || x === 8679 || x === 8704 || x === 8706 || x === 8707 || x === 8711 || x === 8712 || x === 8715 || x === 8719 || x === 8721 || x === 8725 || x === 8730 || x >= 8733 && x <= 8736 || x === 8739 || x === 8741 || x >= 8743 && x <= 8748 || x === 8750 || x >= 8756 && x <= 8759 || x === 8764 || x === 8765 || x === 8776 || x === 8780 || x === 8786 || x === 8800 || x === 8801 || x >= 8804 && x <= 8807 || x === 8810 || x === 8811 || x === 8814 || x === 8815 || x === 8834 || x === 8835 || x === 8838 || x === 8839 || x === 8853 || x === 8857 || x === 8869 || x === 8895 || x === 8978 || x >= 9312 && x <= 9449 || x >= 9451 && x <= 9547 || x >= 9552 && x <= 9587 || x >= 9600 && x <= 9615 || x >= 9618 && x <= 9621 || x === 9632 || x === 9633 || x >= 9635 && x <= 9641 || x === 9650 || x === 9651 || x === 9654 || x === 9655 || x === 9660 || x === 9661 || x === 9664 || x === 9665 || x >= 9670 && x <= 9672 || x === 9675 || x >= 9678 && x <= 9681 || x >= 9698 && x <= 9701 || x === 9711 || x === 9733 || x === 9734 || x === 9737 || x === 9742 || x === 9743 || x === 9756 || x === 9758 || x === 9792 || x === 9794 || x === 9824 || x === 9825 || x >= 9827 && x <= 9829 || x >= 9831 && x <= 9834 || x === 9836 || x === 9837 || x === 9839 || x === 9886 || x === 9887 || x === 9919 || x >= 9926 && x <= 9933 || x >= 9935 && x <= 9939 || x >= 9941 && x <= 9953 || x === 9955 || x === 9960 || x === 9961 || x >= 9963 && x <= 9969 || x === 9972 || x >= 9974 && x <= 9977 || x === 9979 || x === 9980 || x === 9982 || x === 9983 || x === 10045 || x >= 10102 && x <= 10111 || x >= 11094 && x <= 11097 || x >= 12872 && x <= 12879 || x >= 57344 && x <= 63743 || x >= 65024 && x <= 65039 || x === 65533 || x >= 127232 && x <= 127242 || x >= 127248 && x <= 127277 || x >= 127280 && x <= 127337 || x >= 127344 && x <= 127373 || x === 127375 || x === 
127376 || x >= 127387 && x <= 127404 || x >= 917760 && x <= 917999 || x >= 983040 && x <= 1048573 || x >= 1048576 && x <= 1114109;
1614
1684
  }
@@ -1619,7 +1689,7 @@ function isWide(x) {
1619
1689
  return x >= 4352 && x <= 4447 || x === 8986 || x === 8987 || x === 9001 || x === 9002 || x >= 9193 && x <= 9196 || x === 9200 || x === 9203 || x === 9725 || x === 9726 || x === 9748 || x === 9749 || x >= 9776 && x <= 9783 || x >= 9800 && x <= 9811 || x === 9855 || x >= 9866 && x <= 9871 || x === 9875 || x === 9889 || x === 9898 || x === 9899 || x === 9917 || x === 9918 || x === 9924 || x === 9925 || x === 9934 || x === 9940 || x === 9962 || x === 9970 || x === 9971 || x === 9973 || x === 9978 || x === 9981 || x === 9989 || x === 9994 || x === 9995 || x === 10024 || x === 10060 || x === 10062 || x >= 10067 && x <= 10069 || x === 10071 || x >= 10133 && x <= 10135 || x === 10160 || x === 10175 || x === 11035 || x === 11036 || x === 11088 || x === 11093 || x >= 11904 && x <= 11929 || x >= 11931 && x <= 12019 || x >= 12032 && x <= 12245 || x >= 12272 && x <= 12287 || x >= 12289 && x <= 12350 || x >= 12353 && x <= 12438 || x >= 12441 && x <= 12543 || x >= 12549 && x <= 12591 || x >= 12593 && x <= 12686 || x >= 12688 && x <= 12773 || x >= 12783 && x <= 12830 || x >= 12832 && x <= 12871 || x >= 12880 && x <= 42124 || x >= 42128 && x <= 42182 || x >= 43360 && x <= 43388 || x >= 44032 && x <= 55203 || x >= 63744 && x <= 64255 || x >= 65040 && x <= 65049 || x >= 65072 && x <= 65106 || x >= 65108 && x <= 65126 || x >= 65128 && x <= 65131 || x >= 94176 && x <= 94180 || x >= 94192 && x <= 94198 || x >= 94208 && x <= 101589 || x >= 101631 && x <= 101662 || x >= 101760 && x <= 101874 || x >= 110576 && x <= 110579 || x >= 110581 && x <= 110587 || x === 110589 || x === 110590 || x >= 110592 && x <= 110882 || x === 110898 || x >= 110928 && x <= 110930 || x === 110933 || x >= 110948 && x <= 110951 || x >= 110960 && x <= 111355 || x >= 119552 && x <= 119638 || x >= 119648 && x <= 119670 || x === 126980 || x === 127183 || x === 127374 || x >= 127377 && x <= 127386 || x >= 127488 && x <= 127490 || x >= 127504 && x <= 127547 || x >= 127552 && x <= 127560 || x === 127568 || x === 127569 || x >= 127584 && x <= 127589 || x >= 127744 && x <= 127776 || x >= 127789 && x <= 127797 || x >= 127799 && x <= 127868 || x >= 127870 && x <= 127891 || x >= 127904 && x <= 127946 || x >= 127951 && x <= 127955 || x >= 127968 && x <= 127984 || x === 127988 || x >= 127992 && x <= 128062 || x === 128064 || x >= 128066 && x <= 128252 || x >= 128255 && x <= 128317 || x >= 128331 && x <= 128334 || x >= 128336 && x <= 128359 || x === 128378 || x === 128405 || x === 128406 || x === 128420 || x >= 128507 && x <= 128591 || x >= 128640 && x <= 128709 || x === 128716 || x >= 128720 && x <= 128722 || x >= 128725 && x <= 128728 || x >= 128732 && x <= 128735 || x === 128747 || x === 128748 || x >= 128756 && x <= 128764 || x >= 128992 && x <= 129003 || x === 129008 || x >= 129292 && x <= 129338 || x >= 129340 && x <= 129349 || x >= 129351 && x <= 129535 || x >= 129648 && x <= 129660 || x >= 129664 && x <= 129674 || x >= 129678 && x <= 129734 || x === 129736 || x >= 129741 && x <= 129756 || x >= 129759 && x <= 129770 || x >= 129775 && x <= 129784 || x >= 131072 && x <= 196605 || x >= 196608 && x <= 262141;
1620
1690
  }
1621
1691
 
1622
- // ../../node_modules/get-east-asian-width/index.js
1692
+ // ../../node_modules/.bun/get-east-asian-width@1.4.0/node_modules/get-east-asian-width/index.js
1623
1693
  function validate(codePoint) {
1624
1694
  if (!Number.isSafeInteger(codePoint)) {
1625
1695
  throw new TypeError(`Expected a code point, got \`${typeof codePoint}\`.`);
@@ -1633,12 +1703,12 @@ function eastAsianWidth(codePoint, { ambiguousAsWide = false } = {}) {
1633
1703
  return 1;
1634
1704
  }
1635
1705
 
1636
- // ../../node_modules/emoji-regex/index.mjs
1706
+ // ../../node_modules/.bun/emoji-regex@10.6.0/node_modules/emoji-regex/index.mjs
1637
1707
  var emoji_regex_default = () => {
1638
1708
  return /[#*0-9]\uFE0F?\u20E3|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26AA\u26B0\u26B1\u26BD\u26BE\u26C4\u26C8\u26CF\u26D1\u26E9\u26F0-\u26F5\u26F7\u26F8\u26FA\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2757\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B55\u3030\u303D\u3297\u3299]\uFE0F?|[\u261D\u270C\u270D](?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?|[\u270A\u270B](?:\uD83C[\uDFFB-\uDFFF])?|[\u23E9-\u23EC\u23F0\u23F3\u25FD\u2693\u26A1\u26AB\u26C5\u26CE\u26D4\u26EA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2795-\u2797\u27B0\u27BF\u2B50]|\u26D3\uFE0F?(?:\u200D\uD83D\uDCA5)?|\u26F9(?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?(?:\u200D[\u2640\u2642]\uFE0F?)?|\u2764\uFE0F?(?:\u200D(?:\uD83D\uDD25|\uD83E\uDE79))?|\uD83C(?:[\uDC04\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]\uFE0F?|[\uDF85\uDFC2\uDFC7](?:\uD83C[\uDFFB-\uDFFF])?|[\uDFC4\uDFCA](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDFCB\uDFCC](?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF43\uDF45-\uDF4A\uDF4C-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uDDE6\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF]|\uDDE7\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF]|\uDDE8\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF7\uDDFA-\uDDFF]|\uDDE9\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF]|\uDDEA\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA]|\uDDEB\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7]|\uDDEC\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE]|\uDDED\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA]|\uDDEE\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9]|\uDDEF\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5]|\uDDF0\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF]|\uDDF1\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE]|\uDDF2\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF]|\uDDF3\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF]|\uDDF4\uD83C\uDDF2|\uDDF5\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE]|\uDDF6\uD83C\uDDE6|\uDDF7\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC]|\uDDF8\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF]|\uDDF9\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF]|\uDDFA\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF]|\uDDFB\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA]|\uDDFC\uD83C[\uDDEB\uDDF8]|\uDDFD\uD83C\uDDF0|\uDDFE\uD83C[\uDDEA\uDDF9]|\uDDFF\uD83C[\uDDE6\uDDF2\uDDFC]|\uDF44(?:\u200D\uD83D\uDFEB)?|\uDF4B(?:\u200D\uD83D\uDFE9)?|\uDFC3(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDFF3\uFE0F?(?:\u200D(?:\u26A7\uFE0F?|\uD83C\uDF08))?|\uDFF4(?:\u200D\u2620\uFE0F?|\uDB40\uDC67\uDB40\uDC62\uDB40(?:\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDC73\uDB40\uDC63\uDB40\uDC74|\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F)?)|\uD83D(?:[\uDC3F\uDC
FD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3]\uFE0F?|[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC](?:\uD83C[\uDFFB-\uDFFF])?|[\uDC6E-\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4\uDEB5](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD74\uDD90](?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?|[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC25\uDC27-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE41\uDE43\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED8\uDEDC-\uDEDF\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB\uDFF0]|\uDC08(?:\u200D\u2B1B)?|\uDC15(?:\u200D\uD83E\uDDBA)?|\uDC26(?:\u200D(?:\u2B1B|\uD83D\uDD25))?|\uDC3B(?:\u200D\u2744\uFE0F?)?|\uDC41\uFE0F?(?:\u200D\uD83D\uDDE8\uFE0F?)?|\uDC68(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDC68\uDC69]\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\
u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?))?|\uDC69(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?[\uDC68\uDC69]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?|\uDC69\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?))|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFC-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFC-\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFD-\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFD\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFE])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFE]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFE])))?))?|\uDD75(?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDE2E(?:\u200D\uD83D\uDCA8)?|\uDE35(?:\u200D\uD83D\uDCAB)?|\uDE36(?:\u200D\uD83C\uDF2B\uFE0F?)?|\uDE42(?:\u200D[\u2194\u2195]\uFE0F?)?|\uDEB6(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?)|\uD83E(?:[\uDD0C\uDD0F\uDD18-\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5\uDEC3-\uDEC5\uDEF0\uDEF2-\uDEF8](?:\uD83C[\uDFFB-\uDFFF])?|[\uDD26\uDD35\uDD37-\uDD39\uDD3C-\uD
D3E\uDDB8\uDDB9\uDDCD\uDDCF\uDDD4\uDDD6-\uDDDD](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDDDE\uDDDF](?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD0D\uDD0E\uDD10-\uDD17\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCC\uDDD0\uDDE0-\uDDFF\uDE70-\uDE7C\uDE80-\uDE8A\uDE8E-\uDEC2\uDEC6\uDEC8\uDECD-\uDEDC\uDEDF-\uDEEA\uDEEF]|\uDDCE(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDDD1(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1|\uDDD1\u200D\uD83E\uDDD2(?:\u200D\uD83E\uDDD2)?|\uDDD2(?:\u200D\uD83E\uDDD2)?))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE])))?))?|\uDEF1(?:\uD83C(?:\uDFFB(?:\u200D\uD83E\uDEF2\uD83C[\uDFFC-\uDFFF])?|\uDFFC(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFD-\uDFFF])?|\uDFFD(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])?|\uDFFE(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFD\uDFFF])?|\uDFFF(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFE])?))?)/g;
1639
1709
  };
1640
1710
 
1641
- // ../../node_modules/string-width/index.js
1711
+ // ../../node_modules/.bun/string-width@7.2.0/node_modules/string-width/index.js
1642
1712
  var segmenter = new Intl.Segmenter;
1643
1713
  var defaultIgnorableCodePointRegex = /^\p{Default_Ignorable_Code_Point}$/u;
1644
1714
  function stringWidth(string, options = {}) {
@@ -1686,25 +1756,7 @@ function stringWidth(string, options = {}) {
1686
1756
  return width;
1687
1757
  }
1688
1758
 
1689
- // ../../node_modules/wrap-ansi/node_modules/strip-ansi/node_modules/ansi-regex/index.js
1690
- function ansiRegex2({ onlyFirst = false } = {}) {
1691
- const ST = "(?:\\u0007|\\u001B\\u005C|\\u009C)";
1692
- const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
1693
- const csi = "[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]";
1694
- const pattern = `${osc}|${csi}`;
1695
- return new RegExp(pattern, onlyFirst ? undefined : "g");
1696
- }
1697
-
1698
- // ../../node_modules/wrap-ansi/node_modules/strip-ansi/index.js
1699
- var regex2 = ansiRegex2();
1700
- function stripAnsi2(string) {
1701
- if (typeof string !== "string") {
1702
- throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
1703
- }
1704
- return string.replace(regex2, "");
1705
- }
1706
-
1707
- // ../../node_modules/wrap-ansi/node_modules/ansi-styles/index.js
1759
+ // ../../node_modules/.bun/ansi-styles@6.2.3/node_modules/ansi-styles/index.js
1708
1760
  var ANSI_BACKGROUND_OFFSET = 10;
1709
1761
  var wrapAnsi16 = (offset = 0) => (code) => `\x1B[${code + offset}m`;
1710
1762
  var wrapAnsi256 = (offset = 0) => (code) => `\x1B[${38 + offset};5;${code}m`;
@@ -1881,7 +1933,7 @@ function assembleStyles() {
1881
1933
  var ansiStyles = assembleStyles();
1882
1934
  var ansi_styles_default = ansiStyles;
1883
1935
 
1884
- // ../../node_modules/wrap-ansi/index.js
1936
+ // ../../node_modules/.bun/wrap-ansi@9.0.2/node_modules/wrap-ansi/index.js
1885
1937
  var ESCAPES = new Set([
1886
1938
  "\x1B",
1887
1939
  "\x9B"
@@ -1899,7 +1951,7 @@ var wrapWord = (rows, word, columns) => {
1899
1951
  const characters = [...word];
1900
1952
  let isInsideEscape = false;
1901
1953
  let isInsideLinkEscape = false;
1902
- let visible = stringWidth(stripAnsi2(rows.at(-1)));
1954
+ let visible = stringWidth(stripAnsi(rows.at(-1)));
1903
1955
  for (const [index, character] of characters.entries()) {
1904
1956
  const characterLength = stringWidth(character);
1905
1957
  if (visible + characterLength <= columns) {
@@ -2043,7 +2095,7 @@ function wrapAnsi(string, columns, options) {
2043
2095
  `);
2044
2096
  }
2045
2097
 
2046
- // ../../node_modules/@inquirer/core/dist/lib/utils.js
2098
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/utils.js
2047
2099
  function breakLines(content, width) {
2048
2100
  return content.split(`
2049
2101
  `).flatMap((line) => wrapAnsi(line, width, { trim: false, hard: true }).split(`
@@ -2054,7 +2106,7 @@ function readlineWidth() {
2054
2106
  return import_cli_width.default({ defaultWidth: 80, output: readline().output });
2055
2107
  }
2056
2108
 
2057
- // ../../node_modules/@inquirer/core/dist/lib/pagination/use-pagination.js
2109
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/pagination/use-pagination.js
2058
2110
  function usePointerPosition({ active, renderedItems, pageSize, loop }) {
2059
2111
  const state = useRef({
2060
2112
  lastPointer: active,
@@ -2120,12 +2172,12 @@ function usePagination({ items, active, renderItem, pageSize, loop = true }) {
2120
2172
  return pageBuffer.filter((line) => typeof line === "string").join(`
2121
2173
  `);
2122
2174
  }
2123
- // ../../node_modules/@inquirer/core/dist/lib/create-prompt.js
2175
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/create-prompt.js
2124
2176
  var import_mute_stream = __toESM(require_lib(), 1);
2125
2177
  import * as readline2 from "readline";
2126
2178
  import { AsyncResource as AsyncResource3 } from "async_hooks";
2127
2179
 
2128
- // ../../node_modules/signal-exit/dist/mjs/signals.js
2180
+ // ../../node_modules/.bun/signal-exit@4.1.0/node_modules/signal-exit/dist/mjs/signals.js
2129
2181
  var signals = [];
2130
2182
  signals.push("SIGHUP", "SIGINT", "SIGTERM");
2131
2183
  if (process.platform !== "win32") {
@@ -2135,7 +2187,7 @@ if (process.platform === "linux") {
2135
2187
  signals.push("SIGIO", "SIGPOLL", "SIGPWR", "SIGSTKFLT");
2136
2188
  }
2137
2189
 
2138
- // ../../node_modules/signal-exit/dist/mjs/index.js
2190
+ // ../../node_modules/.bun/signal-exit@4.1.0/node_modules/signal-exit/dist/mjs/index.js
2139
2191
  var processOk = (process3) => !!process3 && typeof process3 === "object" && typeof process3.removeListener === "function" && typeof process3.emit === "function" && typeof process3.reallyExit === "function" && typeof process3.listeners === "function" && typeof process3.kill === "function" && typeof process3.pid === "number" && typeof process3.on === "function";
2140
2192
  var kExitEmitter = Symbol.for("signal-exit emitter");
2141
2193
  var global = globalThis;
@@ -2333,10 +2385,10 @@ var {
2333
2385
  unload
2334
2386
  } = signalExitWrap(processOk(process3) ? new SignalExit(process3) : new SignalExitFallback);
2335
2387
 
2336
- // ../../node_modules/@inquirer/core/dist/lib/screen-manager.js
2388
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/screen-manager.js
2337
2389
  import { stripVTControlCharacters } from "util";
2338
2390
 
2339
- // ../../node_modules/@inquirer/ansi/dist/index.js
2391
+ // ../../node_modules/.bun/@inquirer+ansi@2.0.3/node_modules/@inquirer/ansi/dist/index.js
2340
2392
  var ESC = "\x1B[";
2341
2393
  var cursorLeft = ESC + "G";
2342
2394
  var cursorHide = ESC + "?25l";
@@ -2352,7 +2404,7 @@ var cursorTo = (x, y) => {
2352
2404
  var eraseLine = ESC + "2K";
2353
2405
  var eraseLines = (lines) => lines > 0 ? (eraseLine + cursorUp(1)).repeat(lines - 1) + eraseLine + cursorLeft : "";
2354
2406
 
2355
- // ../../node_modules/@inquirer/core/dist/lib/screen-manager.js
2407
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/screen-manager.js
2356
2408
  var height = (content) => content.split(`
2357
2409
  `).length;
2358
2410
  var lastLine = (content) => content.split(`
@@ -2417,7 +2469,7 @@ class ScreenManager {
2417
2469
  }
2418
2470
  }
2419
2471
 
2420
- // ../../node_modules/@inquirer/core/dist/lib/promise-polyfill.js
2472
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/promise-polyfill.js
2421
2473
  class PromisePolyfill extends Promise {
2422
2474
  static withResolver() {
2423
2475
  let resolve;
@@ -2430,7 +2482,7 @@ class PromisePolyfill extends Promise {
2430
2482
  }
2431
2483
  }
2432
2484
 
2433
- // ../../node_modules/@inquirer/core/dist/lib/create-prompt.js
2485
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/create-prompt.js
2434
2486
  function getCallSites() {
2435
2487
  const _prepareStackTrace = Error.prepareStackTrace;
2436
2488
  let result = [];
@@ -2516,7 +2568,7 @@ function createPrompt(view) {
2516
2568
  };
2517
2569
  return prompt;
2518
2570
  }
2519
- // ../../node_modules/@inquirer/core/dist/lib/Separator.js
2571
+ // ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/Separator.js
2520
2572
  import { styleText as styleText2 } from "util";
2521
2573
  class Separator {
2522
2574
  separator = styleText2("dim", Array.from({ length: 15 }).join(dist_default2.line));
@@ -2530,7 +2582,7 @@ class Separator {
2530
2582
  return Boolean(choice && typeof choice === "object" && "type" in choice && choice.type === "separator");
2531
2583
  }
2532
2584
  }
2533
- // ../../node_modules/@inquirer/select/dist/index.js
2585
+ // ../../node_modules/.bun/@inquirer+select@5.0.4+c195ea72dffa657e/node_modules/@inquirer/select/dist/index.js
2534
2586
  import { styleText as styleText3 } from "util";
2535
2587
  var selectTheme = {
2536
2588
  icon: { cursor: dist_default2.pointer },
@@ -2689,15 +2741,67 @@ var dist_default3 = createPrompt((config, done) => {
2689
2741
  // lib/utils.ts
2690
2742
  var import_bytes = __toESM(require_bytes(), 1);
2691
2743
 
2744
+ // lib/output.ts
2745
+ var import_picocolors = __toESM(require_picocolors(), 1);
2746
+ var brand = (text) => import_picocolors.default.cyan(import_picocolors.default.bold(text));
2747
+ var success = (text) => `${import_picocolors.default.green("\u2713")} ${text}`;
2748
+ var warn = (text) => `${import_picocolors.default.yellow("\u26A0")} ${text}`;
2749
+ var error = (text) => `${import_picocolors.default.red("\u2717")} ${text}`;
2750
+ var info = (text) => `${import_picocolors.default.cyan("\u2139")} ${text}`;
2751
+ var dim = (text) => import_picocolors.default.dim(text);
2752
+ var green = (text) => import_picocolors.default.green(text);
2753
+ var yellow = (text) => import_picocolors.default.yellow(text);
2754
+ var red = (text) => import_picocolors.default.red(text);
2755
+ var cyan = (text) => import_picocolors.default.cyan(text);
2756
+ var magenta = (text) => import_picocolors.default.magenta(text);
2757
+ function header(text) {
2758
+ console.log("");
2759
+ console.log(brand(` ${text}`));
2760
+ console.log(import_picocolors.default.dim(" " + "\u2500".repeat(text.length + 2)));
2761
+ }
2762
+ function labelValue(label, value) {
2763
+ console.log(` ${import_picocolors.default.dim(label + ":")} ${value}`);
2764
+ }
2765
+ function statusLine(status, name, message, extra) {
2766
+ const icons = { ok: "\u2713", warning: "\u26A0", error: "\u2717" };
2767
+ const colors = { ok: import_picocolors.default.green, warning: import_picocolors.default.yellow, error: import_picocolors.default.red };
2768
+ let line = `${colors[status](icons[status])} ${name}: ${message}`;
2769
+ if (extra) {
2770
+ line += ` ${import_picocolors.default.dim(`(${extra})`)}`;
2771
+ }
2772
+ return line;
2773
+ }
2774
+ function tableRow(columns, widths) {
2775
+ return columns.map((col, i) => {
2776
+ const width = widths[i] || col.length;
2777
+ return col.padEnd(width);
2778
+ }).join(" ");
2779
+ }
2780
+ function tableHeader(columns, widths) {
2781
+ const headerRow = tableRow(columns.map((c) => import_picocolors.default.bold(c)), widths);
2782
+ const underlineRow = widths.map((w) => "\u2500".repeat(w)).join(" ");
2783
+ console.log(headerRow);
2784
+ console.log(import_picocolors.default.dim(underlineRow));
2785
+ }
2786
+ function url(urlStr) {
2787
+ return import_picocolors.default.underline(import_picocolors.default.cyan(urlStr));
2788
+ }
2789
+ function cmd(command) {
2790
+ return import_picocolors.default.cyan(command);
2791
+ }
2792
+ function file(filePath) {
2793
+ return import_picocolors.default.magenta(filePath);
2794
+ }
2795
+
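
Note: this hunk introduces a new lib/output.ts module that centralizes CLI styling on the vendored picocolors (brand/success/warn/error/info plus header, labelValue, statusLine, and simple table helpers). A short usage sketch, assuming the original TypeScript source exports these helpers; the import path and sample values are assumptions, only the helper signatures come from the diff:

    // Hypothetical usage of the lib/output.ts helpers shown above.
    import { header, labelValue, statusLine, tableHeader, tableRow } from "./lib/output";

    header("System Health Check");                       // cyan bold title + dim rule
    labelValue("Ollama URL", "http://localhost:11434");

    // statusLine returns a string; header/labelValue/tableHeader print directly.
    console.log(statusLine("ok", "Docker", "Installed", "Docker version 27.3.1"));
    console.log(statusLine("warning", "NVIDIA GPU", "nvidia-smi not found"));

    const widths = [24, 10];
    tableHeader(["NAME", "SIZE"], widths);               // bold header row + dim underline
    console.log(tableRow(["qwen2.5-coder:7b", "4.7 GB"], widths));
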
2692
2796
  // lib/spawn.ts
2693
- async function spawn(cmd, opts = {}) {
2694
- const command = cmd[0];
2695
- const args = cmd.slice(1);
2797
+ async function spawn(cmd2, opts = {}) {
2798
+ const command = cmd2[0];
2799
+ const args = cmd2.slice(1);
2696
2800
  if (command === undefined) {
2697
2801
  throw new Error("No command provided");
2698
2802
  }
2699
2803
  if (typeof Bun !== "undefined") {
2700
- const proc = Bun.spawn(cmd, {
2804
+ const proc = Bun.spawn(cmd2, {
2701
2805
  env: opts.env ?? process.env,
2702
2806
  cwd: opts.cwd ?? process.cwd(),
2703
2807
  stdin: opts.stdin ?? "inherit",
@@ -2717,14 +2821,14 @@ async function spawn(cmd, opts = {}) {
2717
2821
  });
2718
2822
  }
2719
2823
  }
2720
- async function spawnCapture(cmd, opts = {}) {
2721
- const command = cmd[0];
2722
- const args = cmd.slice(1);
2824
+ async function spawnCapture(cmd2, opts = {}) {
2825
+ const command = cmd2[0];
2826
+ const args = cmd2.slice(1);
2723
2827
  if (command === undefined) {
2724
2828
  throw new Error("No command provided");
2725
2829
  }
2726
2830
  if (typeof Bun !== "undefined") {
2727
- const proc = Bun.spawn(cmd, {
2831
+ const proc = Bun.spawn(cmd2, {
2728
2832
  env: opts.env ?? process.env,
2729
2833
  cwd: opts.cwd,
2730
2834
  stdin: opts.stdin ?? "ignore",
@@ -2759,17 +2863,17 @@ async function spawnCapture(cmd, opts = {}) {
2759
2863
  });
2760
2864
  }
2761
2865
  }
2762
- async function commandExists(cmd) {
2866
+ async function commandExists(cmd2) {
2763
2867
  try {
2764
- const result = await spawnCapture(process.platform === "win32" ? ["where", cmd] : ["which", cmd]);
2868
+ const result = await spawnCapture(process.platform === "win32" ? ["where", cmd2] : ["which", cmd2]);
2765
2869
  return result.exitCode === 0;
2766
2870
  } catch {
2767
2871
  return false;
2768
2872
  }
2769
2873
  }
2770
- async function getCommandVersion(cmd) {
2874
+ async function getCommandVersion(cmd2) {
2771
2875
  try {
2772
- const result = await spawnCapture([cmd, "--version"]);
2876
+ const result = await spawnCapture([cmd2, "--version"]);
2773
2877
  if (result.exitCode === 0 && result.stdout) {
2774
2878
  return result.stdout.trim().split(`
2775
2879
  `)[0] ?? null;
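
Note: commandExists and getCommandVersion above probe for a binary with `where` on Windows or `which` elsewhere and then ask it for `--version`, going through the package's spawnCapture wrapper (Bun.spawn when available). For reference, a plain Node sketch of the same checks; this is not the package's code, just the same technique without the wrapper:

    // Sketch of the same lookups using node:child_process instead of Bun.spawn.
    import { execFile } from "node:child_process";
    import { promisify } from "node:util";

    const run = promisify(execFile);

    async function commandExists(cmd: string): Promise<boolean> {
      const probe = process.platform === "win32" ? "where" : "which";
      try {
        await run(probe, [cmd]);   // rejects on non-zero exit
        return true;
      } catch {
        return false;
      }
    }

    async function getCommandVersion(cmd: string): Promise<string | null> {
      try {
        const { stdout } = await run(cmd, ["--version"]);
        return stdout.trim().split("\n")[0] ?? null;   // first line only
      } catch {
        return null;
      }
    }
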
@@ -2790,33 +2894,100 @@ async function fetchOllamaModels() {
2790
2894
  const data = await response.json();
2791
2895
  return data.models ?? [];
2792
2896
  }
2897
+ async function fetchRunningModels() {
2898
+ const ollamaUrl = getOllamaUrl();
2899
+ try {
2900
+ const response = await fetch(`${ollamaUrl}/api/ps`, {
2901
+ signal: AbortSignal.timeout(5000)
2902
+ });
2903
+ if (!response.ok) {
2904
+ return [];
2905
+ }
2906
+ const data = await response.json();
2907
+ return data.models ?? [];
2908
+ } catch (error2) {
2909
+ return [];
2910
+ }
2911
+ }
2912
+ async function isModelLoaded(modelName) {
2913
+ const runningModels = await fetchRunningModels();
2914
+ return runningModels.some((m) => m.model === modelName || m.name === modelName || m.model.startsWith(modelName + ":") || modelName.startsWith(m.model));
2915
+ }
2916
+ async function loadModel(modelName, keepAlive = "10m") {
2917
+ const ollamaUrl = getOllamaUrl();
2918
+ const response = await fetch(`${ollamaUrl}/api/generate`, {
2919
+ method: "POST",
2920
+ headers: {
2921
+ "Content-Type": "application/json"
2922
+ },
2923
+ body: JSON.stringify({
2924
+ model: modelName,
2925
+ prompt: "",
2926
+ stream: false,
2927
+ keep_alive: keepAlive
2928
+ })
2929
+ });
2930
+ if (!response.ok) {
2931
+ throw new Error(`Failed to load model: ${response.statusText}`);
2932
+ }
2933
+ await response.json();
2934
+ }
2935
+ async function ensureModelLoaded(modelName) {
2936
+ const isLoaded = await isModelLoaded(modelName);
2937
+ if (isLoaded) {
2938
+ console.log(dim(` Model ${magenta(modelName)} is already loaded`));
2939
+ return;
2940
+ }
2941
+ console.log(info(`Loading model ${magenta(modelName)}...`));
2942
+ console.log(dim(" This may take a moment on first run"));
2943
+ try {
2944
+ await loadModel(modelName, "10m");
2945
+ console.log(success(`Model ${magenta(modelName)} loaded (keep_alive: 10m)`));
2946
+ } catch (error2) {
2947
+ console.log(warn(`Could not pre-load model (will load on first request)`));
2948
+ console.log(dim(` ${error2 instanceof Error ? error2.message : "Unknown error"}`));
2949
+ }
2950
+ }
2793
2951
  async function selectModelInteractively() {
2794
2952
  const ollamaUrl = getOllamaUrl();
2795
2953
  let models;
2796
2954
  try {
2797
2955
  models = await fetchOllamaModels();
2798
- } catch (error) {
2799
- console.error("Error: Could not connect to Ollama at", ollamaUrl);
2800
- console.error("Make sure Ollama is running: loclaude docker-up");
2956
+ } catch (error2) {
2957
+ console.log(warn(`Could not connect to Ollama at ${ollamaUrl}`));
2958
+ console.log(dim(" Make sure Ollama is running: loclaude docker-up"));
2801
2959
  process.exit(1);
2802
2960
  }
2803
2961
  if (models.length === 0) {
2804
- console.error("Error: No models found in Ollama.");
2805
- console.error("Pull a model first: loclaude models-pull <model-name>");
2962
+ console.log(warn("No models found in Ollama."));
2963
+ console.log(dim(" Pull a model first: loclaude models-pull <model-name>"));
2806
2964
  process.exit(1);
2807
2965
  }
2966
+ const runningModels = await fetchRunningModels();
2967
+ const loadedModelNames = new Set(runningModels.map((m) => m.model));
2808
2968
  const selected = await dist_default3({
2809
2969
  message: "Select a model",
2810
- choices: models.map((model) => ({
2811
- name: `${model.name} (${import_bytes.default(model.size)})`,
2812
- value: model.name
2813
- }))
2970
+ choices: models.map((model) => {
2971
+ const isLoaded = loadedModelNames.has(model.name);
2972
+ const loadedIndicator = isLoaded ? " [loaded]" : "";
2973
+ return {
2974
+ name: `${model.name} (${import_bytes.default(model.size)})${loadedIndicator}`,
2975
+ value: model.name
2976
+ };
2977
+ })
2814
2978
  });
2815
2979
  return selected;
2816
2980
  }
2817
2981
  async function launchClaude(model, passthroughArgs) {
2818
2982
  const ollamaUrl = getOllamaUrl();
2819
2983
  const extraArgs = getClaudeExtraArgs();
2984
+ console.log("");
2985
+ console.log(cyan("Launching Claude Code with Ollama"));
2986
+ console.log(dim(` Model: ${magenta(model)}`));
2987
+ console.log(dim(` API: ${ollamaUrl}`));
2988
+ console.log("");
2989
+ await ensureModelLoaded(model);
2990
+ console.log("");
2820
2991
  const env = {
2821
2992
  ...process.env,
2822
2993
  ANTHROPIC_AUTH_TOKEN: "ollama",
@@ -2830,51 +3001,351 @@ async function launchClaude(model, passthroughArgs) {
2830
3001
  // lib/commands/init.ts
2831
3002
  import { existsSync as existsSync2, mkdirSync, writeFileSync, readFileSync as readFileSync2 } from "fs";
2832
3003
  import { join as join2 } from "path";
2833
- var DOCKER_COMPOSE_TEMPLATE = `services:
3004
+
3005
+ // lib/commands/doctor.ts
3006
+ async function checkDocker() {
3007
+ const exists = await commandExists("docker");
3008
+ if (!exists) {
3009
+ return {
3010
+ name: "Docker",
3011
+ status: "error",
3012
+ message: "Not installed",
3013
+ hint: "Install Docker: https://docs.docker.com/get-docker/"
3014
+ };
3015
+ }
3016
+ const version = await getCommandVersion("docker");
3017
+ return {
3018
+ name: "Docker",
3019
+ status: "ok",
3020
+ message: "Installed",
3021
+ version: version ?? undefined
3022
+ };
3023
+ }
3024
+ async function checkDockerCompose() {
3025
+ const result = await spawnCapture(["docker", "compose", "version"]);
3026
+ if (result.exitCode === 0) {
3027
+ const version = result.stdout?.trim().split(`
3028
+ `)[0];
3029
+ return {
3030
+ name: "Docker Compose",
3031
+ status: "ok",
3032
+ message: "Installed (v2)",
3033
+ version: version ?? undefined
3034
+ };
3035
+ }
3036
+ const v1Exists = await commandExists("docker-compose");
3037
+ if (v1Exists) {
3038
+ const version = await getCommandVersion("docker-compose");
3039
+ return {
3040
+ name: "Docker Compose",
3041
+ status: "warning",
3042
+ message: "Using legacy v1",
3043
+ version: version ?? undefined,
3044
+ hint: "Consider upgrading to Docker Compose v2"
3045
+ };
3046
+ }
3047
+ return {
3048
+ name: "Docker Compose",
3049
+ status: "error",
3050
+ message: "Not installed",
3051
+ hint: "Docker Compose is included with Docker Desktop, or install separately"
3052
+ };
3053
+ }
3054
+ async function checkNvidiaSmi() {
3055
+ const exists = await commandExists("nvidia-smi");
3056
+ if (!exists) {
3057
+ return {
3058
+ name: "NVIDIA GPU",
3059
+ status: "warning",
3060
+ message: "nvidia-smi not found",
3061
+ hint: "GPU support requires NVIDIA drivers. CPU-only mode will be used."
3062
+ };
3063
+ }
3064
+ const result = await spawnCapture(["nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]);
3065
+ if (result.exitCode === 0 && result.stdout) {
3066
+ const gpus = result.stdout.trim().split(`
3067
+ `).filter(Boolean);
3068
+ return {
3069
+ name: "NVIDIA GPU",
3070
+ status: "ok",
3071
+ message: `${gpus.length} GPU(s) detected`,
3072
+ version: gpus[0]
3073
+ };
3074
+ }
3075
+ return {
3076
+ name: "NVIDIA GPU",
3077
+ status: "warning",
3078
+ message: "nvidia-smi failed",
3079
+ hint: "GPU may not be available. Check NVIDIA drivers."
3080
+ };
3081
+ }
3082
+ async function checkNvidiaContainerToolkit() {
3083
+ const result = await spawnCapture(["docker", "info", "--format", "{{.Runtimes}}"]);
3084
+ if (result.exitCode === 0 && result.stdout?.includes("nvidia")) {
3085
+ return {
3086
+ name: "NVIDIA Container Toolkit",
3087
+ status: "ok",
3088
+ message: "nvidia runtime available"
3089
+ };
3090
+ }
3091
+ return {
3092
+ name: "NVIDIA Container Toolkit",
3093
+ status: "warning",
3094
+ message: "nvidia runtime not found",
3095
+ hint: "Install: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html"
3096
+ };
3097
+ }
3098
+ async function checkClaude() {
3099
+ const exists = await commandExists("claude");
3100
+ if (!exists) {
3101
+ return {
3102
+ name: "Claude Code",
3103
+ status: "error",
3104
+ message: "Not installed",
3105
+ hint: "Install: npm install -g @anthropic-ai/claude-code"
3106
+ };
3107
+ }
3108
+ const version = await getCommandVersion("claude");
3109
+ return {
3110
+ name: "Claude Code",
3111
+ status: "ok",
3112
+ message: "Installed",
3113
+ version: version ?? undefined
3114
+ };
3115
+ }
3116
+ async function checkOllamaConnection() {
3117
+ const ollamaUrl = getOllamaUrl();
3118
+ try {
3119
+ const response = await fetch(`${ollamaUrl}/api/tags`, {
3120
+ signal: AbortSignal.timeout(5000)
3121
+ });
3122
+ if (response.ok) {
3123
+ const data = await response.json();
3124
+ const modelCount = data.models?.length ?? 0;
3125
+ return {
3126
+ name: "Ollama API",
3127
+ status: "ok",
3128
+ message: `Connected (${modelCount} model${modelCount === 1 ? "" : "s"})`,
3129
+ version: ollamaUrl
3130
+ };
3131
+ }
3132
+ return {
3133
+ name: "Ollama API",
3134
+ status: "warning",
3135
+ message: `HTTP ${response.status}`,
3136
+ hint: "Ollama may not be running. Try: loclaude docker-up"
3137
+ };
3138
+ } catch (error3) {
3139
+ return {
3140
+ name: "Ollama API",
3141
+ status: "warning",
3142
+ message: "Not reachable",
3143
+ hint: `Cannot connect to ${ollamaUrl}. Start Ollama: loclaude docker-up`
3144
+ };
3145
+ }
3146
+ }
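
checkOllamaConnection() above (and fetchModels() further down) only touch a small slice of Ollama's GET /api/tags response: the models array and, per model, its name, size, and modified_at. A hedged TypeScript sketch of just that subset; the real response carries additional fields that this CLI ignores, and the interface names here are chosen for illustration:

// Only the fields this bundle actually reads; field names taken from the code.
interface OllamaModelSummary {
  name: string;        // e.g. "qwen2.5-coder:7b"
  size: number;        // on-disk size in bytes, formatted later via bytes()/formatSize()
  modified_at: string; // timestamp string, rendered with formatRelativeTime()
}

interface OllamaTagsResponse {
  models?: OllamaModelSummary[]; // `data.models?.length ?? 0` guards a missing array
}

// Minimal standalone probe mirroring the check above (5 s timeout, /api/tags).
async function probeOllama(baseUrl = "http://localhost:11434"): Promise<number> {
  const res = await fetch(`${baseUrl}/api/tags`, { signal: AbortSignal.timeout(5000) });
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  const data = (await res.json()) as OllamaTagsResponse;
  return data.models?.length ?? 0; // reported as "Connected (N models)"
}
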
3147
+ function formatCheck(check) {
3148
+ let line = statusLine(check.status, check.name, check.message, check.version);
3149
+ if (check.hint) {
3150
+ line += `
3151
+ ${dim("\u2192")} ${dim(check.hint)}`;
3152
+ }
3153
+ return line;
3154
+ }
3155
+ async function doctor() {
3156
+ header("System Health Check");
3157
+ console.log("");
3158
+ const checks = await Promise.all([
3159
+ checkDocker(),
3160
+ checkDockerCompose(),
3161
+ checkNvidiaSmi(),
3162
+ checkNvidiaContainerToolkit(),
3163
+ checkClaude(),
3164
+ checkOllamaConnection()
3165
+ ]);
3166
+ for (const check of checks) {
3167
+ console.log(formatCheck(check));
3168
+ }
3169
+ const errors2 = checks.filter((c) => c.status === "error");
3170
+ const warnings = checks.filter((c) => c.status === "warning");
3171
+ console.log("");
3172
+ if (errors2.length > 0) {
3173
+ console.log(red(`${errors2.length} error(s) found.`) + " Fix these before proceeding.");
3174
+ process.exit(1);
3175
+ } else if (warnings.length > 0) {
3176
+ console.log(yellow(`${warnings.length} warning(s).`) + " loclaude may work with limited functionality.");
3177
+ } else {
3178
+ console.log(green("All checks passed!") + " Ready to use loclaude.");
3179
+ }
3180
+ }
3181
+ async function hasNvidiaGpu() {
3182
+ const exists = await commandExists("nvidia-smi");
3183
+ if (!exists)
3184
+ return false;
3185
+ const result = await spawnCapture(["nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]);
3186
+ return result.exitCode === 0 && Boolean(result.stdout?.trim());
3187
+ }
3188
+
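
All of the checks in this doctor module resolve to the same plain result shape, and doctor() only branches on the status field when deciding the exit code. A minimal sketch of that contract; the names CheckStatus and CheckResult are chosen here for illustration, since the published bundle strips the original TypeScript type names:

// Hypothetical type names; the bundle no longer carries the source types.
type CheckStatus = "ok" | "warning" | "error";

interface CheckResult {
  name: string;        // "Docker", "NVIDIA GPU", "Ollama API", ...
  status: CheckStatus; // picks the icon/colour in formatCheck()/statusLine()
  message: string;     // short summary, e.g. "Installed" or "Not reachable"
  version?: string;    // tool version, GPU name, or the Ollama URL when relevant
  hint?: string;       // remediation line printed dimmed underneath
}

// doctor() exits 1 only when at least one check is an "error";
// "warning" results are reported but leave the exit code at 0.
function exitCodeFor(checks: CheckResult[]): number {
  return checks.some((c) => c.status === "error") ? 1 : 0;
}
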
3189
+ // lib/commands/init.ts
3190
+ var DOCKER_COMPOSE_TEMPLATE_GPU = `# =============================================================================
3191
+ # LOCLAUDE DOCKER COMPOSE - GPU MODE
3192
+ # =============================================================================
3193
+ # This configuration runs Ollama with NVIDIA GPU acceleration for fast inference.
3194
+ # Generated by: loclaude init
3195
+ #
3196
+ # Prerequisites:
3197
+ # - NVIDIA GPU with CUDA support
3198
+ # - NVIDIA drivers installed on host
3199
+ # - NVIDIA Container Toolkit: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit
3200
+ #
3201
+ # Quick test for GPU support:
3202
+ # docker run --rm --gpus all nvidia/cuda:12.0-base nvidia-smi
3203
+ #
3204
+ # =============================================================================
3205
+
3206
+ services:
3207
+ # ===========================================================================
3208
+ # OLLAMA - Local LLM Inference Server
3209
+ # ===========================================================================
3210
+ # Ollama provides the AI backend that Claude Code connects to.
3211
+ # It runs large language models locally on your hardware.
3212
+ #
3213
+ # API Documentation: https://github.com/ollama/ollama/blob/main/docs/api.md
3214
+ # Model Library: https://ollama.com/library
3215
+ # ===========================================================================
2834
3216
  ollama:
3217
+ # Official Ollama image - 'latest' ensures newest features and model support
2835
3218
  image: ollama/ollama:latest
3219
+
3220
+ # Fixed container name for easy CLI access:
3221
+ # docker exec ollama ollama list
3222
+ # docker logs ollama
2836
3223
  container_name: ollama
3224
+
3225
+ # NVIDIA Container Runtime - Required for GPU access
3226
+ # This makes CUDA libraries available inside the container
2837
3227
  runtime: nvidia
3228
+
2838
3229
  environment:
3230
+ # ---------------------------------------------------------------------------
3231
+ # GPU Configuration
3232
+ # ---------------------------------------------------------------------------
3233
+ # NVIDIA_VISIBLE_DEVICES: Which GPUs to expose to the container
3234
+ # - 'all': Use all available GPUs (recommended for most setups)
3235
+ # - '0': Use only GPU 0
3236
+ # - '0,1': Use GPUs 0 and 1
2839
3237
  - NVIDIA_VISIBLE_DEVICES=all
3238
+
3239
+ # NVIDIA_DRIVER_CAPABILITIES: What GPU features to enable
3240
+ # - 'compute': CUDA compute (required for inference)
3241
+ # - 'utility': nvidia-smi and other tools
2840
3242
  - NVIDIA_DRIVER_CAPABILITIES=compute,utility
3243
+
3244
+ # ---------------------------------------------------------------------------
3245
+ # Ollama Configuration (Optional)
3246
+ # ---------------------------------------------------------------------------
3247
+ # Uncomment these to customize Ollama behavior:
3248
+
3249
+ # Maximum number of models loaded in memory simultaneously
3250
+ # Lower this if you're running out of VRAM
3251
+ # - OLLAMA_MAX_LOADED_MODELS=1
3252
+
3253
+ # Maximum parallel inference requests per model
3254
+ # Higher values use more VRAM but handle more concurrent requests
3255
+ # - OLLAMA_NUM_PARALLEL=1
3256
+
3257
+ # Enable debug logging for troubleshooting
3258
+ # - OLLAMA_DEBUG=1
3259
+
3260
+ # Custom model storage location (inside container)
3261
+ # - OLLAMA_MODELS=/root/.ollama
3262
+
2841
3263
  volumes:
3264
+ # ---------------------------------------------------------------------------
3265
+ # Model Storage
3266
+ # ---------------------------------------------------------------------------
3267
+ # Maps ./models on your host to /root/.ollama in the container
3268
+ # This persists downloaded models across container restarts
3269
+ #
3270
+ # Disk space requirements (approximate):
3271
+ # - 7B model: ~4GB
3272
+ # - 13B model: ~8GB
3273
+ # - 30B model: ~16GB
3274
+ # - 70B model: ~40GB
2842
3275
  - ./models:/root/.ollama
3276
+
2843
3277
  ports:
3278
+ # Ollama API port - access at http://localhost:11434
3279
+ # Used by Claude Code and other Ollama clients
2844
3280
  - "11434:11434"
3281
+
3282
+ # Restart policy - keeps Ollama running unless manually stopped
2845
3283
  restart: unless-stopped
3284
+
2846
3285
  healthcheck:
3286
+ # Verify Ollama is responsive by listing models
2847
3287
  test: ["CMD", "ollama", "list"]
2848
- interval: 300s
2849
- timeout: 2s
2850
- retries: 3
2851
- start_period: 40s
3288
+ interval: 300s # Check every 5 minutes
3289
+ timeout: 2s # Fail if no response in 2 seconds
3290
+ retries: 3 # Mark unhealthy after 3 consecutive failures
3291
+ start_period: 40s # Grace period for initial model loading
3292
+
2852
3293
  deploy:
2853
3294
  resources:
2854
3295
  reservations:
2855
3296
  devices:
3297
+ # Request GPU access from Docker
2856
3298
  - driver: nvidia
2857
- count: all
2858
- capabilities: [gpu]
3299
+ count: all # Use all available GPUs
3300
+ capabilities: [gpu] # Request GPU compute capability
2859
3301
 
3302
+ # ===========================================================================
3303
+ # OPEN WEBUI - Chat Interface (Optional)
3304
+ # ===========================================================================
3305
+ # Open WebUI provides a ChatGPT-like interface for your local models.
3306
+ # Access at http://localhost:3000 after starting containers.
3307
+ #
3308
+ # Features:
3309
+ # - Multi-model chat interface
3310
+ # - Conversation history
3311
+ # - Model management UI
3312
+ # - RAG/document upload support
3313
+ #
3314
+ # Documentation: https://docs.openwebui.com/
3315
+ # ===========================================================================
2860
3316
  open-webui:
3317
+ # CUDA-enabled image for GPU-accelerated features (embeddings, etc.)
3318
+ # Change to :main if you don't need GPU features in the UI
2861
3319
  image: ghcr.io/open-webui/open-webui:cuda
3320
+
2862
3321
  container_name: open-webui
3322
+
2863
3323
  ports:
3324
+ # Web UI port - access at http://localhost:3000
2864
3325
  - "3000:8080"
3326
+
2865
3327
  environment:
3328
+ # Tell Open WebUI where to find Ollama
3329
+ # Uses Docker internal networking (service name as hostname)
2866
3330
  - OLLAMA_BASE_URL=http://ollama:11434
3331
+
3332
+ # Wait for Ollama to be ready before starting
2867
3333
  depends_on:
2868
3334
  - ollama
3335
+
2869
3336
  restart: unless-stopped
3337
+
2870
3338
  healthcheck:
2871
3339
  test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
2872
3340
  interval: 30s
2873
3341
  timeout: 10s
2874
3342
  retries: 3
2875
3343
  start_period: 60s
3344
+
2876
3345
  volumes:
3346
+ # Persistent storage for conversations, settings, and user data
2877
3347
  - open-webui:/app/backend/data
3348
+
2878
3349
  deploy:
2879
3350
  resources:
2880
3351
  reservations:
@@ -2883,32 +3354,174 @@ var DOCKER_COMPOSE_TEMPLATE = `services:
2883
3354
  count: all
2884
3355
  capabilities: [gpu]
2885
3356
 
3357
+ # =============================================================================
3358
+ # VOLUMES
3359
+ # =============================================================================
3360
+ # Named volumes for persistent data that survives container recreation
2886
3361
  volumes:
2887
3362
  open-webui:
3363
+ # Open WebUI data: conversations, user settings, uploads
3364
+ # Located at /var/lib/docker/volumes/open-webui/_data on host
2888
3365
  `;
2889
- var CONFIG_TEMPLATE = `{
3366
+ var DOCKER_COMPOSE_TEMPLATE_CPU = `# =============================================================================
3367
+ # LOCLAUDE DOCKER COMPOSE - CPU MODE
3368
+ # =============================================================================
3369
+ # This configuration runs Ollama in CPU-only mode.
3370
+ # Inference will be slower than GPU mode but works on any system.
3371
+ # Generated by: loclaude init --no-gpu
3372
+ #
3373
+ # Performance notes:
3374
+ # - 7B models: ~10-20 tokens/sec on modern CPUs
3375
+ # - Larger models will be significantly slower
3376
+ # - Consider using quantized models (Q4_K_M, Q5_K_M) for better performance
3377
+ #
3378
+ # Recommended CPU-optimized models:
3379
+ # - llama3.2:3b (fast, good for simple tasks)
3380
+ # - qwen2.5-coder:7b (coding tasks)
3381
+ # - gemma2:9b (general purpose)
3382
+ #
3383
+ # =============================================================================
3384
+
3385
+ services:
3386
+ # ===========================================================================
3387
+ # OLLAMA - Local LLM Inference Server (CPU Mode)
3388
+ # ===========================================================================
3389
+ # Ollama provides the AI backend that Claude Code connects to.
3390
+ # Running in CPU mode - no GPU acceleration.
3391
+ #
3392
+ # API Documentation: https://github.com/ollama/ollama/blob/main/docs/api.md
3393
+ # Model Library: https://ollama.com/library
3394
+ # ===========================================================================
3395
+ ollama:
3396
+ # Official Ollama image - works for both CPU and GPU
3397
+ image: ollama/ollama:latest
3398
+
3399
+ # Fixed container name for easy CLI access
3400
+ container_name: ollama
3401
+
3402
+ # NOTE: No 'runtime: nvidia' - running in CPU mode
3403
+
3404
+ environment:
3405
+ # ---------------------------------------------------------------------------
3406
+ # Ollama Configuration (Optional)
3407
+ # ---------------------------------------------------------------------------
3408
+ # Uncomment these to customize Ollama behavior:
3409
+
3410
+ # Maximum number of models loaded in memory simultaneously
3411
+ # CPU mode uses system RAM instead of VRAM
3412
+ # - OLLAMA_MAX_LOADED_MODELS=1
3413
+
3414
+ # Number of CPU threads to use (default: auto-detect)
3415
+ # - OLLAMA_NUM_THREADS=8
3416
+
3417
+ # Enable debug logging for troubleshooting
3418
+ # - OLLAMA_DEBUG=1
3419
+
3420
+ volumes:
3421
+ # ---------------------------------------------------------------------------
3422
+ # Model Storage
3423
+ # ---------------------------------------------------------------------------
3424
+ # Maps ./models on your host to /root/.ollama in the container
3425
+ # This persists downloaded models across container restarts
3426
+ - ./models:/root/.ollama
3427
+
3428
+ ports:
3429
+ # Ollama API port - access at http://localhost:11434
3430
+ - "11434:11434"
3431
+
3432
+ restart: unless-stopped
3433
+
3434
+ healthcheck:
3435
+ test: ["CMD", "ollama", "list"]
3436
+ interval: 300s
3437
+ timeout: 2s
3438
+ retries: 3
3439
+ start_period: 40s
3440
+
3441
+ # CPU resource limits (optional - uncomment to constrain)
3442
+ # deploy:
3443
+ # resources:
3444
+ # limits:
3445
+ # cpus: '4' # Limit to 4 CPU cores
3446
+ # memory: 16G # Limit to 16GB RAM
3447
+ # reservations:
3448
+ # cpus: '2' # Reserve at least 2 cores
3449
+ # memory: 8G # Reserve at least 8GB RAM
3450
+
3451
+ # ===========================================================================
3452
+ # OPEN WEBUI - Chat Interface (Optional)
3453
+ # ===========================================================================
3454
+ # Open WebUI provides a ChatGPT-like interface for your local models.
3455
+ # Access at http://localhost:3000 after starting containers.
3456
+ #
3457
+ # Documentation: https://docs.openwebui.com/
3458
+ # ===========================================================================
3459
+ open-webui:
3460
+ # Standard image (no CUDA) - smaller download, CPU-only features
3461
+ image: ghcr.io/open-webui/open-webui:main
3462
+
3463
+ container_name: open-webui
3464
+
3465
+ ports:
3466
+ - "3000:8080"
3467
+
3468
+ environment:
3469
+ - OLLAMA_BASE_URL=http://ollama:11434
3470
+
3471
+ depends_on:
3472
+ - ollama
3473
+
3474
+ restart: unless-stopped
3475
+
3476
+ healthcheck:
3477
+ test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
3478
+ interval: 30s
3479
+ timeout: 10s
3480
+ retries: 3
3481
+ start_period: 60s
3482
+
3483
+ volumes:
3484
+ - open-webui:/app/backend/data
3485
+
3486
+ # =============================================================================
3487
+ # VOLUMES
3488
+ # =============================================================================
3489
+ volumes:
3490
+ open-webui:
3491
+ `;
3492
+ function getConfigTemplate(gpu) {
3493
+ return `{
2890
3494
  "ollama": {
2891
3495
  "url": "http://localhost:11434",
2892
- "defaultModel": "qwen3-coder:30b"
3496
+ "defaultModel": "${gpu ? "qwen3-coder:30b" : "qwen2.5-coder:7b"}"
2893
3497
  },
2894
3498
  "docker": {
2895
3499
  "composeFile": "./docker-compose.yml",
2896
- "gpu": true
3500
+ "gpu": ${gpu}
2897
3501
  }
2898
3502
  }
2899
3503
  `;
3504
+ }
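
getConfigTemplate(gpu) only varies two values between the modes: the default model and the docker.gpu flag. A quick usage sketch (assumed to run in the same module, since the helper is internal); the parsed objects in the comments are spelled out from the template string above:

// GPU mode: larger default model, gpu flag on.
const gpuConfig = JSON.parse(getConfigTemplate(true));
// { ollama: { url: "http://localhost:11434", defaultModel: "qwen3-coder:30b" },
//   docker: { composeFile: "./docker-compose.yml", gpu: true } }

// CPU mode: smaller default model, gpu flag off.
const cpuConfig = JSON.parse(getConfigTemplate(false));
// { ollama: { url: "http://localhost:11434", defaultModel: "qwen2.5-coder:7b" },
//   docker: { composeFile: "./docker-compose.yml", gpu: false } }
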
2900
3505
  var GITIGNORE_TEMPLATE = `# Ollama models (large binary files)
3506
+ # These are downloaded by Ollama and can be re-pulled anytime
2901
3507
  models/
2902
3508
  `;
2903
- var MISE_TOML_TEMPLATE = `# Mise task runner configuration
2904
- # Run \`mise tasks\` to see all available tasks
2905
- # https://mise.jdx.dev/
3509
+ var MISE_TOML_TEMPLATE = `# =============================================================================
3510
+ # MISE TASK RUNNER CONFIGURATION
3511
+ # =============================================================================
3512
+ # Mise is a task runner that provides convenient shortcuts for common operations.
3513
+ # Run 'mise tasks' to see all available tasks.
3514
+ #
3515
+ # Documentation: https://mise.jdx.dev/
3516
+ # Install: curl https://mise.jdx.dev/install.sh | sh
3517
+ # =============================================================================
2906
3518
 
2907
3519
  [tasks]
2908
3520
 
2909
3521
  # =============================================================================
2910
3522
  # Docker Management
2911
3523
  # =============================================================================
3524
+ # Commands for managing the Ollama and Open WebUI containers
2912
3525
 
2913
3526
  [tasks.up]
2914
3527
  description = "Start Ollama and Open WebUI containers"
@@ -2933,6 +3546,7 @@ run = "loclaude docker-logs --follow"
2933
3546
  # =============================================================================
2934
3547
  # Model Management
2935
3548
  # =============================================================================
3549
+ # Commands for managing Ollama models (download, remove, list)
2936
3550
 
2937
3551
  [tasks.models]
2938
3552
  description = "List installed models"
@@ -2942,9 +3556,14 @@ run = "loclaude models"
2942
3556
  description = "Pull a model (usage: mise run pull <model-name>)"
2943
3557
  run = "loclaude models-pull {{arg(name='model')}}"
2944
3558
 
3559
+ [tasks."pull:recommended"]
3560
+ description = "Pull the recommended coding model"
3561
+ run = "loclaude models-pull qwen3-coder:30b"
3562
+
2945
3563
  # =============================================================================
2946
3564
  # Claude Code
2947
3565
  # =============================================================================
3566
+ # Commands for running Claude Code with local Ollama
2948
3567
 
2949
3568
  [tasks.claude]
2950
3569
  description = "Run Claude Code with local Ollama"
@@ -2957,14 +3576,19 @@ run = "loclaude run -m {{arg(name='model')}}"
2957
3576
  # =============================================================================
2958
3577
  # Diagnostics
2959
3578
  # =============================================================================
3579
+ # Commands for checking system health and troubleshooting
2960
3580
 
2961
3581
  [tasks.doctor]
2962
3582
  description = "Check system requirements"
2963
3583
  run = "loclaude doctor"
2964
3584
 
2965
3585
  [tasks.gpu]
2966
- description = "Check GPU status"
3586
+ description = "Check GPU status (requires NVIDIA GPU)"
2967
3587
  run = "docker exec ollama nvidia-smi"
3588
+
3589
+ [tasks.config]
3590
+ description = "Show current configuration"
3591
+ run = "loclaude config"
2968
3592
  `;
2969
3593
  var README_TEMPLATE = `# Project Name
2970
3594
 
@@ -2973,18 +3597,24 @@ var README_TEMPLATE = `# Project Name
2973
3597
  ## Prerequisites
2974
3598
 
2975
3599
  - [Docker](https://docs.docker.com/get-docker/) with Docker Compose v2
2976
- - [NVIDIA GPU](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) with drivers and container toolkit
2977
3600
  - [mise](https://mise.jdx.dev/) task runner (recommended)
2978
3601
  - [loclaude](https://www.npmjs.com/package/loclaude) CLI (\`npm install -g loclaude\`)
2979
3602
 
3603
+ ### For GPU Mode (Recommended)
3604
+
3605
+ - [NVIDIA GPU](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) with CUDA support
3606
+ - NVIDIA drivers installed on host
3607
+ - [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html)
3608
+
2980
3609
  ## Quick Start
2981
3610
 
2982
3611
  \`\`\`bash
2983
3612
  # Start the LLM backend (Ollama + Open WebUI)
2984
3613
  mise run up
2985
3614
 
2986
- # Pull a model
2987
- mise run pull qwen3-coder:30b
3615
+ # Pull a model (adjust based on your hardware)
3616
+ mise run pull qwen3-coder:30b # GPU: 30B model (~16GB VRAM)
3617
+ mise run pull qwen2.5-coder:7b # CPU: 7B model (faster)
2988
3618
 
2989
3619
  # Run Claude Code with local LLM
2990
3620
  mise run claude
@@ -3019,7 +3649,7 @@ Run \`mise tasks\` to see all available commands.
3019
3649
  \`\`\`
3020
3650
  .
3021
3651
  \u251C\u2500\u2500 .claude/
3022
- \u2502 \u2514\u2500\u2500 CLAUDE.md # Claude Code instructions
3652
+ \u2502 \u2514\u2500\u2500 CLAUDE.md # Claude Code project instructions
3023
3653
  \u251C\u2500\u2500 .loclaude/
3024
3654
  \u2502 \u2514\u2500\u2500 config.json # Loclaude configuration
3025
3655
  \u251C\u2500\u2500 models/ # Ollama model storage (gitignored)
@@ -3051,6 +3681,25 @@ Run \`mise tasks\` to see all available commands.
3051
3681
  |----------|-------------|---------|
3052
3682
  | \`OLLAMA_URL\` | Ollama API endpoint | \`http://localhost:11434\` |
3053
3683
  | \`OLLAMA_MODEL\` | Default model name | \`qwen3-coder:30b\` |
3684
+ | \`LOCLAUDE_GPU\` | Enable GPU mode | \`true\` |
3685
+
3686
+ ## Recommended Models
3687
+
3688
+ ### For GPU (NVIDIA with 16GB+ VRAM)
3689
+
3690
+ | Model | Size | Use Case |
3691
+ |-------|------|----------|
3692
+ | \`qwen3-coder:30b\` | ~16GB | Best coding performance |
3693
+ | \`gpt-oss:20b\` | ~12GB | General purpose |
3694
+ | \`glm-4.7:cloud\` | Cloud | No local storage needed |
3695
+
3696
+ ### For CPU or Limited VRAM
3697
+
3698
+ | Model | Size | Use Case |
3699
+ |-------|------|----------|
3700
+ | \`qwen2.5-coder:7b\` | ~4GB | Coding on CPU |
3701
+ | \`llama3.2:3b\` | ~2GB | Fast, simple tasks |
3702
+ | \`gemma2:9b\` | ~5GB | General purpose |
3054
3703
 
3055
3704
  ## Troubleshooting
3056
3705
 
@@ -3072,6 +3721,12 @@ mise run logs
3072
3721
  mise run down && mise run up
3073
3722
  \`\`\`
3074
3723
 
3724
+ ### GPU Not Detected
3725
+
3726
+ 1. Verify NVIDIA drivers: \`nvidia-smi\`
3727
+ 2. Check Docker GPU access: \`docker run --rm --gpus all nvidia/cuda:12.0-base nvidia-smi\`
3728
+ 3. Install NVIDIA Container Toolkit if missing
3729
+
3075
3730
  ## License
3076
3731
 
3077
3732
  MIT
@@ -3138,304 +3793,153 @@ async function init(options = {}) {
3138
3793
  const claudeDir = join2(cwd, ".claude");
3139
3794
  const claudeMdPath = join2(claudeDir, "CLAUDE.md");
3140
3795
  const readmePath = join2(cwd, "README.md");
3141
- console.log(`Initializing loclaude project...
3142
- `);
3796
+ header("Initializing loclaude project");
3797
+ console.log("");
3798
+ let gpuMode;
3799
+ if (options.gpu === false) {
3800
+ gpuMode = false;
3801
+ console.log(info("CPU-only mode (--no-gpu)"));
3802
+ } else if (options.gpu === true) {
3803
+ gpuMode = true;
3804
+ console.log(info("GPU mode enabled (--gpu)"));
3805
+ } else {
3806
+ console.log(dim(" Detecting GPU..."));
3807
+ gpuMode = await hasNvidiaGpu();
3808
+ if (gpuMode) {
3809
+ console.log(success("NVIDIA GPU detected - using GPU mode"));
3810
+ } else {
3811
+ console.log(warn("No NVIDIA GPU detected - using CPU mode"));
3812
+ console.log(dim(" Use --gpu to force GPU mode if you have an NVIDIA GPU"));
3813
+ }
3814
+ }
3815
+ console.log("");
3143
3816
  if (existsSync2(readmePath) && !options.force) {
3144
- console.log("\u26A0\uFE0F README.md already exists");
3817
+ console.log(warn(`${file("README.md")} already exists`));
3145
3818
  } else {
3146
3819
  writeFileSync(readmePath, README_TEMPLATE);
3147
- console.log("\u2713 Created README.md");
3820
+ console.log(success(`Created ${file("README.md")}`));
3148
3821
  }
3149
3822
  if (existsSync2(composePath) && !options.force) {
3150
- console.log("\u26A0\uFE0F docker-compose.yml already exists");
3151
- console.log(` Use --force to overwrite
3152
- `);
3823
+ console.log(warn(`${file("docker-compose.yml")} already exists`));
3824
+ console.log(dim(" Use --force to overwrite"));
3153
3825
  } else {
3154
- let composeContent = DOCKER_COMPOSE_TEMPLATE;
3826
+ let composeContent = gpuMode ? DOCKER_COMPOSE_TEMPLATE_GPU : DOCKER_COMPOSE_TEMPLATE_CPU;
3155
3827
  if (options.noWebui) {
3156
- composeContent = composeContent.replace(/\n open-webui:[\s\S]*?capabilities: \[gpu\]\n/m, `
3157
- `).replace(/\nvolumes:\n open-webui:\n/, `
3828
+ composeContent = composeContent.replace(/\n # =+\n # OPEN WEBUI[\s\S]*?capabilities: \[gpu\]\n/m, `
3829
+ `).replace(/\n # =+\n # OPEN WEBUI[\s\S]*?open-webui:\/app\/backend\/data\n/m, `
3830
+ `).replace(/\nvolumes:\n open-webui:\n.*$/m, `
3158
3831
  `);
3159
3832
  }
3160
3833
  writeFileSync(composePath, composeContent);
3161
- console.log("\u2713 Created docker-compose.yml");
3834
+ const modeLabel = gpuMode ? cyan("GPU") : cyan("CPU");
3835
+ console.log(success(`Created ${file("docker-compose.yml")} (${modeLabel} mode)`));
3162
3836
  }
3163
3837
  if (existsSync2(miseTomlPath) && !options.force) {
3164
- console.log("\u26A0\uFE0F mise.toml already exists");
3838
+ console.log(warn(`${file("mise.toml")} already exists`));
3165
3839
  } else {
3166
3840
  writeFileSync(miseTomlPath, MISE_TOML_TEMPLATE);
3167
- console.log("\u2713 Created mise.toml");
3841
+ console.log(success(`Created ${file("mise.toml")}`));
3168
3842
  }
3169
3843
  if (!existsSync2(claudeDir)) {
3170
3844
  mkdirSync(claudeDir, { recursive: true });
3171
3845
  }
3172
3846
  if (existsSync2(claudeMdPath) && !options.force) {
3173
- console.log("\u26A0\uFE0F .claude/CLAUDE.md already exists");
3847
+ console.log(warn(`${file(".claude/CLAUDE.md")} already exists`));
3174
3848
  } else {
3175
3849
  writeFileSync(claudeMdPath, CLAUDE_MD_TEMPLATE);
3176
- console.log("\u2713 Created .claude/CLAUDE.md");
3850
+ console.log(success(`Created ${file(".claude/CLAUDE.md")}`));
3177
3851
  }
3178
3852
  if (!existsSync2(configDir)) {
3179
3853
  mkdirSync(configDir, { recursive: true });
3180
- console.log("\u2713 Created .loclaude/ directory");
3854
+ console.log(success(`Created ${file(".loclaude/")} directory`));
3181
3855
  }
3182
3856
  if (existsSync2(configPath) && !options.force) {
3183
- console.log("\u26A0\uFE0F .loclaude/config.json already exists");
3857
+ console.log(warn(`${file(".loclaude/config.json")} already exists`));
3184
3858
  } else {
3185
- writeFileSync(configPath, CONFIG_TEMPLATE);
3186
- console.log("\u2713 Created .loclaude/config.json");
3859
+ writeFileSync(configPath, getConfigTemplate(gpuMode));
3860
+ console.log(success(`Created ${file(".loclaude/config.json")}`));
3187
3861
  }
3188
3862
  if (!existsSync2(modelsDir)) {
3189
3863
  mkdirSync(modelsDir, { recursive: true });
3190
- console.log("\u2713 Created models/ directory");
3864
+ console.log(success(`Created ${file("models/")} directory`));
3191
3865
  }
3192
3866
  if (existsSync2(gitignorePath)) {
3193
3867
  const existing = readFileSync2(gitignorePath, "utf-8");
3194
3868
  if (!existing.includes("models/")) {
3195
3869
  writeFileSync(gitignorePath, existing + `
3196
3870
  ` + GITIGNORE_TEMPLATE);
3197
- console.log("\u2713 Updated .gitignore");
3871
+ console.log(success(`Updated ${file(".gitignore")}`));
3198
3872
  }
3199
3873
  } else {
3200
3874
  writeFileSync(gitignorePath, GITIGNORE_TEMPLATE);
3201
- console.log("\u2713 Created .gitignore");
3202
- }
3203
- console.log(`
3204
- \uD83C\uDF89 Project initialized!
3205
- `);
3206
- console.log("Next steps:");
3207
- console.log(" 1. Start containers: mise run up");
3208
- console.log(" 2. Pull a model: mise run pull qwen3-coder:30b");
3209
- console.log(" 3. Run Claude: mise run claude");
3210
- console.log(`
3211
- Service URLs:`);
3212
- console.log(" Ollama API: http://localhost:11434");
3213
- if (!options.noWebui) {
3214
- console.log(" Open WebUI: http://localhost:3000");
3215
- }
3216
- }
3217
- // lib/commands/doctor.ts
3218
- async function checkDocker() {
3219
- const exists = await commandExists("docker");
3220
- if (!exists) {
3221
- return {
3222
- name: "Docker",
3223
- status: "error",
3224
- message: "Not installed",
3225
- hint: "Install Docker: https://docs.docker.com/get-docker/"
3226
- };
3227
- }
3228
- const version = await getCommandVersion("docker");
3229
- return {
3230
- name: "Docker",
3231
- status: "ok",
3232
- message: "Installed",
3233
- version: version ?? undefined
3234
- };
3235
- }
3236
- async function checkDockerCompose() {
3237
- const result = await spawnCapture(["docker", "compose", "version"]);
3238
- if (result.exitCode === 0) {
3239
- const version = result.stdout?.trim().split(`
3240
- `)[0];
3241
- return {
3242
- name: "Docker Compose",
3243
- status: "ok",
3244
- message: "Installed (v2)",
3245
- version: version ?? undefined
3246
- };
3247
- }
3248
- const v1Exists = await commandExists("docker-compose");
3249
- if (v1Exists) {
3250
- const version = await getCommandVersion("docker-compose");
3251
- return {
3252
- name: "Docker Compose",
3253
- status: "warning",
3254
- message: "Using legacy v1",
3255
- version: version ?? undefined,
3256
- hint: "Consider upgrading to Docker Compose v2"
3257
- };
3875
+ console.log(success(`Created ${file(".gitignore")}`));
3258
3876
  }
3259
- return {
3260
- name: "Docker Compose",
3261
- status: "error",
3262
- message: "Not installed",
3263
- hint: "Docker Compose is included with Docker Desktop, or install separately"
3264
- };
3265
- }
3266
- async function checkNvidiaSmi() {
3267
- const exists = await commandExists("nvidia-smi");
3268
- if (!exists) {
3269
- return {
3270
- name: "NVIDIA GPU",
3271
- status: "warning",
3272
- message: "nvidia-smi not found",
3273
- hint: "GPU support requires NVIDIA drivers. CPU-only mode will be used."
3274
- };
3275
- }
3276
- const result = await spawnCapture(["nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]);
3277
- if (result.exitCode === 0 && result.stdout) {
3278
- const gpus = result.stdout.trim().split(`
3279
- `).filter(Boolean);
3280
- return {
3281
- name: "NVIDIA GPU",
3282
- status: "ok",
3283
- message: `${gpus.length} GPU(s) detected`,
3284
- version: gpus[0]
3285
- };
3286
- }
3287
- return {
3288
- name: "NVIDIA GPU",
3289
- status: "warning",
3290
- message: "nvidia-smi failed",
3291
- hint: "GPU may not be available. Check NVIDIA drivers."
3292
- };
3293
- }
3294
- async function checkNvidiaContainerToolkit() {
3295
- const result = await spawnCapture(["docker", "info", "--format", "{{.Runtimes}}"]);
3296
- if (result.exitCode === 0 && result.stdout?.includes("nvidia")) {
3297
- return {
3298
- name: "NVIDIA Container Toolkit",
3299
- status: "ok",
3300
- message: "nvidia runtime available"
3301
- };
3302
- }
3303
- return {
3304
- name: "NVIDIA Container Toolkit",
3305
- status: "warning",
3306
- message: "nvidia runtime not found",
3307
- hint: "Install: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html"
3308
- };
3309
- }
3310
- async function checkClaude() {
3311
- const exists = await commandExists("claude");
3312
- if (!exists) {
3313
- return {
3314
- name: "Claude Code",
3315
- status: "error",
3316
- message: "Not installed",
3317
- hint: "Install: npm install -g @anthropic-ai/claude-code"
3318
- };
3319
- }
3320
- const version = await getCommandVersion("claude");
3321
- return {
3322
- name: "Claude Code",
3323
- status: "ok",
3324
- message: "Installed",
3325
- version: version ?? undefined
3326
- };
3327
- }
3328
- async function checkOllamaConnection() {
3329
- const ollamaUrl = getOllamaUrl();
3330
- try {
3331
- const response = await fetch(`${ollamaUrl}/api/tags`, {
3332
- signal: AbortSignal.timeout(5000)
3333
- });
3334
- if (response.ok) {
3335
- const data = await response.json();
3336
- const modelCount = data.models?.length ?? 0;
3337
- return {
3338
- name: "Ollama API",
3339
- status: "ok",
3340
- message: `Connected (${modelCount} model${modelCount === 1 ? "" : "s"})`,
3341
- version: ollamaUrl
3342
- };
3343
- }
3344
- return {
3345
- name: "Ollama API",
3346
- status: "warning",
3347
- message: `HTTP ${response.status}`,
3348
- hint: "Ollama may not be running. Try: loclaude docker-up"
3349
- };
3350
- } catch (error) {
3351
- return {
3352
- name: "Ollama API",
3353
- status: "warning",
3354
- message: "Not reachable",
3355
- hint: `Cannot connect to ${ollamaUrl}. Start Ollama: loclaude docker-up`
3356
- };
3357
- }
3358
- }
3359
- function formatCheck(check) {
3360
- const icons = {
3361
- ok: "\u2713",
3362
- warning: "\u26A0",
3363
- error: "\u2717"
3364
- };
3365
- const colors = {
3366
- ok: "\x1B[32m",
3367
- warning: "\x1B[33m",
3368
- error: "\x1B[31m"
3369
- };
3370
- const reset = "\x1B[0m";
3371
- const icon = icons[check.status];
3372
- const color = colors[check.status];
3373
- let line = `${color}${icon}${reset} ${check.name}: ${check.message}`;
3374
- if (check.version) {
3375
- line += ` (${check.version})`;
3376
- }
3377
- if (check.hint) {
3378
- line += `
3379
- ${check.hint}`;
3380
- }
3381
- return line;
3382
- }
3383
- async function doctor() {
3384
- console.log(`Checking system requirements...
3385
- `);
3386
- const checks = await Promise.all([
3387
- checkDocker(),
3388
- checkDockerCompose(),
3389
- checkNvidiaSmi(),
3390
- checkNvidiaContainerToolkit(),
3391
- checkClaude(),
3392
- checkOllamaConnection()
3393
- ]);
3394
- for (const check of checks) {
3395
- console.log(formatCheck(check));
3396
- }
3397
- const errors2 = checks.filter((c) => c.status === "error");
3398
- const warnings = checks.filter((c) => c.status === "warning");
3877
+ const recommendedModel = gpuMode ? "qwen3-coder:30b" : "qwen2.5-coder:7b";
3399
3878
  console.log("");
3400
- if (errors2.length > 0) {
3401
- console.log(`\x1B[31m${errors2.length} error(s) found.\x1B[0m Fix these before proceeding.`);
3402
- process.exit(1);
3403
- } else if (warnings.length > 0) {
3404
- console.log(`\x1B[33m${warnings.length} warning(s).\x1B[0m loclaude may work with limited functionality.`);
3405
- } else {
3406
- console.log("\x1B[32mAll checks passed!\x1B[0m Ready to use loclaude.");
3879
+ console.log(green("Project initialized!"));
3880
+ console.log("");
3881
+ console.log(cyan("Next steps:"));
3882
+ console.log(` 1. Start containers: ${cmd("mise run up")}`);
3883
+ console.log(` 2. Pull a model: ${cmd(`mise run pull ${recommendedModel}`)}`);
3884
+ console.log(` 3. Run Claude: ${cmd("mise run claude")}`);
3885
+ console.log("");
3886
+ console.log(cyan("Service URLs:"));
3887
+ console.log(` Ollama API: ${url("http://localhost:11434")}`);
3888
+ if (!options.noWebui) {
3889
+ console.log(` Open WebUI: ${url("http://localhost:3000")}`);
3407
3890
  }
3408
3891
  }
3409
3892
  // lib/commands/config.ts
3410
- import { inspect } from "util";
3411
3893
  async function configShow() {
3412
3894
  const config = loadConfig();
3413
3895
  const activePath = getActiveConfigPath();
3414
- console.log(`Current configuration:
3415
- `);
3416
- console.log(inspect(config, false, 3, true));
3417
- console.log(`
3418
- ---`);
3896
+ header("Current Configuration");
3897
+ console.log("");
3898
+ console.log(cyan("Ollama:"));
3899
+ labelValue(" URL", config.ollama.url);
3900
+ labelValue(" Default Model", magenta(config.ollama.defaultModel));
3901
+ console.log("");
3902
+ console.log(cyan("Docker:"));
3903
+ labelValue(" Compose File", config.docker.composeFile);
3904
+ labelValue(" GPU Mode", config.docker.gpu ? green("enabled") : dim("disabled"));
3905
+ console.log("");
3906
+ console.log(cyan("Claude:"));
3907
+ if (config.claude.extraArgs.length > 0) {
3908
+ labelValue(" Extra Args", config.claude.extraArgs.join(" "));
3909
+ } else {
3910
+ labelValue(" Extra Args", dim("none"));
3911
+ }
3912
+ console.log("");
3913
+ console.log(dim("\u2500".repeat(40)));
3419
3914
  if (activePath) {
3420
- console.log(`Loaded from: ${activePath}`);
3915
+ console.log(dim(`Loaded from: ${file(activePath)}`));
3421
3916
  } else {
3422
- console.log("Using default configuration (no config file found)");
3917
+ console.log(dim("Using default configuration (no config file found)"));
3423
3918
  }
3424
3919
  }
3425
3920
  async function configPaths() {
3426
3921
  const paths = getConfigSearchPaths();
3427
3922
  const activePath = getActiveConfigPath();
3428
- console.log(`Config file search paths (in priority order):
3429
- `);
3430
- for (const path of paths) {
3431
- const isActive = path === activePath;
3432
- const marker = isActive ? " \u2190 active" : "";
3433
- console.log(` ${path}${marker}`);
3923
+ header("Config Search Paths");
3924
+ console.log("");
3925
+ console.log(dim("Files are checked in priority order (first found wins):"));
3926
+ console.log("");
3927
+ for (let i = 0;i < paths.length; i++) {
3928
+ const configPath = paths[i];
3929
+ if (!configPath)
3930
+ continue;
3931
+ const isActive = configPath === activePath;
3932
+ const num = `${i + 1}.`;
3933
+ if (isActive) {
3934
+ console.log(` ${num} ${file(configPath)} ${green("\u2190 active")}`);
3935
+ } else {
3936
+ console.log(` ${num} ${dim(configPath)}`);
3937
+ }
3434
3938
  }
3939
+ console.log("");
3435
3940
  if (!activePath) {
3436
- console.log(`
3437
- No config file found. Using defaults.`);
3438
- console.log("Run 'loclaude init' to create a project config.");
3941
+ console.log(info("No config file found. Using defaults."));
3942
+ console.log(dim(` Run ${cmd("loclaude init")} to create a project config.`));
3439
3943
  }
3440
3944
  }
3441
3945
  // lib/commands/docker.ts
@@ -3474,42 +3978,44 @@ function getComposeCommand() {
3474
3978
  async function runCompose(args, options = {}) {
3475
3979
  const composeFile = options.file ?? findComposeFile();
3476
3980
  if (!composeFile) {
3477
- console.error("Error: No docker-compose.yml found");
3478
- console.error("Run 'loclaude init' to create one, or specify --file");
3981
+ console.log(error("No docker-compose.yml found"));
3982
+ console.log(dim(` Run ${cmd("loclaude init")} to create one, or specify --file`));
3479
3983
  return 1;
3480
3984
  }
3481
- const cmd = [...getComposeCommand(), "-f", composeFile, ...args];
3482
- return spawn(cmd);
3985
+ const cmd_args = [...getComposeCommand(), "-f", composeFile, ...args];
3986
+ return spawn(cmd_args);
3483
3987
  }
3484
3988
  async function dockerUp(options = {}) {
3485
3989
  const args = ["up"];
3486
3990
  if (options.detach !== false) {
3487
3991
  args.push("-d");
3488
3992
  }
3489
- console.log(`Starting containers...
3490
- `);
3993
+ console.log(info("Starting containers..."));
3994
+ console.log("");
3491
3995
  const exitCode = await runCompose(args, options);
3492
3996
  if (exitCode === 0) {
3493
- console.log(`
3494
- \u2713 Containers started`);
3495
- console.log(`
3496
- Service URLs:`);
3497
- console.log(" Ollama API: http://localhost:11434");
3498
- console.log(" Open WebUI: http://localhost:3000");
3997
+ console.log("");
3998
+ console.log(success("Containers started"));
3999
+ console.log("");
4000
+ console.log(cyan("Service URLs:"));
4001
+ console.log(` Ollama API: ${url("http://localhost:11434")}`);
4002
+ console.log(` Open WebUI: ${url("http://localhost:3000")}`);
3499
4003
  }
3500
4004
  process.exit(exitCode);
3501
4005
  }
3502
4006
  async function dockerDown(options = {}) {
3503
- console.log(`Stopping containers...
3504
- `);
4007
+ console.log(info("Stopping containers..."));
4008
+ console.log("");
3505
4009
  const exitCode = await runCompose(["down"], options);
3506
4010
  if (exitCode === 0) {
3507
- console.log(`
3508
- \u2713 Containers stopped`);
4011
+ console.log("");
4012
+ console.log(success("Containers stopped"));
3509
4013
  }
3510
4014
  process.exit(exitCode);
3511
4015
  }
3512
4016
  async function dockerStatus(options = {}) {
4017
+ console.log(info("Container status:"));
4018
+ console.log("");
3513
4019
  const exitCode = await runCompose(["ps"], options);
3514
4020
  process.exit(exitCode);
3515
4021
  }
@@ -3520,17 +4026,21 @@ async function dockerLogs(options = {}) {
3520
4026
  }
3521
4027
  if (options.service) {
3522
4028
  args.push(options.service);
4029
+ console.log(info(`Logs for ${cyan(options.service)}:`));
4030
+ } else {
4031
+ console.log(info("Container logs:"));
3523
4032
  }
4033
+ console.log("");
3524
4034
  const exitCode = await runCompose(args, options);
3525
4035
  process.exit(exitCode);
3526
4036
  }
3527
4037
  async function dockerRestart(options = {}) {
3528
- console.log(`Restarting containers...
3529
- `);
4038
+ console.log(info("Restarting containers..."));
4039
+ console.log("");
3530
4040
  const exitCode = await runCompose(["restart"], options);
3531
4041
  if (exitCode === 0) {
3532
- console.log(`
3533
- \u2713 Containers restarted`);
4042
+ console.log("");
4043
+ console.log(success("Containers restarted"));
3534
4044
  }
3535
4045
  process.exit(exitCode);
3536
4046
  }
@@ -3547,11 +4057,11 @@ async function fetchModels() {
3547
4057
  }
3548
4058
  const data = await response.json();
3549
4059
  return data.models ?? [];
3550
- } catch (error) {
3551
- if (error instanceof Error && error.name === "TimeoutError") {
4060
+ } catch (error3) {
4061
+ if (error3 instanceof Error && error3.name === "TimeoutError") {
3552
4062
  throw new Error(`Connection to Ollama timed out (${ollamaUrl})`);
3553
4063
  }
3554
- throw error;
4064
+ throw error3;
3555
4065
  }
3556
4066
  }
3557
4067
  async function isOllamaInDocker() {
@@ -3566,83 +4076,99 @@ async function runOllamaCommand(args) {
3566
4076
  return spawn(["ollama", ...args]);
3567
4077
  }
3568
4078
  }
4079
+ function formatSize(sizeBytes) {
4080
+ const sizeStr = import_bytes2.default(sizeBytes) ?? "?";
4081
+ const sizeNum = sizeBytes / (1024 * 1024 * 1024);
4082
+ if (sizeNum > 20) {
4083
+ return yellow(sizeStr);
4084
+ } else if (sizeNum > 10) {
4085
+ return cyan(sizeStr);
4086
+ }
4087
+ return dim(sizeStr);
4088
+ }
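
formatSize() colour-codes the size column in the model table: the raw byte count is converted to GiB, sizes above 20 render yellow, above 10 cyan, and everything else dimmed. A couple of worked values; the byte counts are illustrative (real sizes come from the models[].size field of /api/tags):

const GiB = 1024 ** 3; // formatSize divides by 1024 * 1024 * 1024, i.e. GiB

formatSize(4 * GiB);   // "4GB"  -> dim()    small, roughly a 7B model
formatSize(16 * GiB);  // "16GB" -> cyan()   between the 10 and 20 GiB cut-offs
formatSize(40 * GiB);  // "40GB" -> yellow() large, 70B-class model
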
3569
4089
  async function modelsList() {
3570
4090
  try {
3571
4091
  const models = await fetchModels();
3572
4092
  if (models.length === 0) {
3573
- console.log("No models installed.");
3574
- console.log(`
3575
- Pull a model with: loclaude models-pull <model-name>`);
3576
- console.log("Example: loclaude models-pull llama3.2");
4093
+ header("Installed Models");
4094
+ console.log("");
4095
+ console.log(info("No models installed."));
4096
+ console.log("");
4097
+ console.log(`Pull a model with: ${cmd("loclaude models-pull <model-name>")}`);
4098
+ console.log(`Example: ${cmd("loclaude models-pull llama3.2")}`);
3577
4099
  return;
3578
4100
  }
3579
- console.log(`Installed models:
3580
- `);
4101
+ header("Installed Models");
4102
+ console.log("");
3581
4103
  const nameWidth = Math.max(...models.map((m) => m.name.length), "NAME".length);
3582
4104
  const sizeWidth = 10;
3583
- console.log(`${"NAME".padEnd(nameWidth)} ${"SIZE".padStart(sizeWidth)} MODIFIED`);
3584
- console.log("-".repeat(nameWidth + sizeWidth + 30));
4105
+ const modifiedWidth = 20;
4106
+ tableHeader(["NAME", "SIZE", "MODIFIED"], [nameWidth, sizeWidth, modifiedWidth]);
3585
4107
  for (const model of models) {
3586
- const name = model.name.padEnd(nameWidth);
3587
- const size = (import_bytes2.default(model.size) ?? "?").padStart(sizeWidth);
3588
- const modified = formatRelativeTime(model.modified_at);
4108
+ const name = magenta(model.name.padEnd(nameWidth));
4109
+ const size = formatSize(model.size).padStart(sizeWidth);
4110
+ const modified = dim(formatRelativeTime(model.modified_at));
3589
4111
  console.log(`${name} ${size} ${modified}`);
3590
4112
  }
3591
- console.log(`
3592
- ${models.length} model(s) installed`);
3593
- } catch (error) {
4113
+ console.log("");
4114
+ console.log(dim(`${models.length} model(s) installed`));
4115
+ } catch (err) {
3594
4116
  const ollamaUrl = getOllamaUrl();
3595
- console.error("Error: Could not connect to Ollama at", ollamaUrl);
3596
- console.error("Make sure Ollama is running: loclaude docker-up");
4117
+ console.log(error(`Could not connect to Ollama at ${ollamaUrl}`));
4118
+ console.log(dim(` Make sure Ollama is running: ${cmd("loclaude docker-up")}`));
3597
4119
  process.exit(1);
3598
4120
  }
3599
4121
  }
3600
4122
  async function modelsPull(modelName) {
3601
4123
  if (!modelName) {
3602
- console.error("Error: Model name required");
3603
- console.error("Usage: loclaude models pull <model-name>");
3604
- console.error("Example: loclaude models pull llama3.2");
4124
+ console.log(error("Model name required"));
4125
+ console.log(dim(`Usage: ${cmd("loclaude models-pull <model-name>")}`));
4126
+ console.log(dim(`Example: ${cmd("loclaude models-pull llama3.2")}`));
3605
4127
  process.exit(1);
3606
4128
  }
3607
- console.log(`Pulling model: ${modelName}
3608
- `);
4129
+ console.log(info(`Pulling model: ${magenta(modelName)}`));
4130
+ console.log("");
3609
4131
  const exitCode = await runOllamaCommand(["pull", modelName]);
3610
4132
  if (exitCode === 0) {
3611
- console.log(`
3612
- \u2713 Model '${modelName}' pulled successfully`);
4133
+ console.log("");
4134
+ console.log(success(`Model '${magenta(modelName)}' pulled successfully`));
3613
4135
  }
3614
4136
  process.exit(exitCode);
3615
4137
  }
3616
4138
  async function modelsRm(modelName) {
3617
4139
  if (!modelName) {
3618
- console.error("Error: Model name required");
3619
- console.error("Usage: loclaude models rm <model-name>");
4140
+ console.log(error("Model name required"));
4141
+ console.log(dim(`Usage: ${cmd("loclaude models-rm <model-name>")}`));
3620
4142
  process.exit(1);
3621
4143
  }
3622
- console.log(`Removing model: ${modelName}
3623
- `);
4144
+ console.log(info(`Removing model: ${magenta(modelName)}`));
4145
+ console.log("");
3624
4146
  const exitCode = await runOllamaCommand(["rm", modelName]);
3625
4147
  if (exitCode === 0) {
3626
- console.log(`
3627
- \u2713 Model '${modelName}' removed`);
4148
+ console.log("");
4149
+ console.log(success(`Model '${magenta(modelName)}' removed`));
3628
4150
  }
3629
4151
  process.exit(exitCode);
3630
4152
  }
3631
4153
  async function modelsShow(modelName) {
3632
4154
  if (!modelName) {
3633
- console.error("Error: Model name required");
3634
- console.error("Usage: loclaude models show <model-name>");
4155
+ console.log(error("Model name required"));
4156
+ console.log(dim(`Usage: ${cmd("loclaude models-show <model-name>")}`));
3635
4157
  process.exit(1);
3636
4158
  }
4159
+ console.log(info(`Model details: ${magenta(modelName)}`));
4160
+ console.log("");
3637
4161
  const exitCode = await runOllamaCommand(["show", modelName]);
3638
4162
  process.exit(exitCode);
3639
4163
  }
3640
4164
  async function modelsRun(modelName) {
3641
4165
  if (!modelName) {
3642
- console.error("Error: Model name required");
3643
- console.error("Usage: loclaude models run <model-name>");
4166
+ console.log(error("Model name required"));
4167
+ console.log(dim(`Usage: ${cmd("loclaude models-run <model-name>")}`));
3644
4168
  process.exit(1);
3645
4169
  }
4170
+ console.log(info(`Running model: ${magenta(modelName)}`));
4171
+ console.log("");
3646
4172
  const exitCode = await runOllamaCommand(["run", modelName]);
3647
4173
  process.exit(exitCode);
3648
4174
  }
@@ -3682,7 +4208,7 @@ cli.command("run [...args]", "Run Claude Code with local Ollama", {
3682
4208
  }
3683
4209
  await launchClaude(model, args);
3684
4210
  });
3685
- cli.command("init", "Initialize a new loclaude project").option("--force", "Overwrite existing files").option("--no-webui", "Skip Open WebUI in docker-compose").action(async (options) => {
4211
+ cli.command("init", "Initialize a new loclaude project").option("--force", "Overwrite existing files").option("--no-webui", "Skip Open WebUI in docker-compose").option("--gpu", "Force GPU mode (NVIDIA)").option("--no-gpu", "Force CPU-only mode").action(async (options) => {
3686
4212
  await init(options);
3687
4213
  });
3688
4214
  cli.command("doctor", "Check system requirements and health").action(async () => {
@@ -3738,5 +4264,5 @@ export {
3738
4264
  cli
3739
4265
  };
3740
4266
 
3741
- //# debugId=39BFEE3AADE6F10964756E2164756E21
4267
+ //# debugId=F2B1940FCE29928B64756E2164756E21
3742
4268
  //# sourceMappingURL=index.bun.js.map