loclaude 0.0.1-alpha.1 → 0.0.1-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -3
- package/README.md +93 -7
- package/docker/docker-compose.yml +124 -37
- package/libs/cli/CHANGELOG.md +59 -0
- package/libs/cli/LICENSE +31 -0
- package/libs/cli/README.md +5 -0
- package/libs/cli/dist/cac.d.ts.map +1 -1
- package/libs/cli/dist/commands/config.d.ts.map +1 -1
- package/libs/cli/dist/commands/docker.d.ts.map +1 -1
- package/libs/cli/dist/commands/doctor.d.ts +4 -0
- package/libs/cli/dist/commands/doctor.d.ts.map +1 -1
- package/libs/cli/dist/commands/init.d.ts +2 -0
- package/libs/cli/dist/commands/init.d.ts.map +1 -1
- package/libs/cli/dist/commands/models.d.ts.map +1 -1
- package/libs/cli/dist/index.bun.js +923 -397
- package/libs/cli/dist/index.bun.js.map +12 -12
- package/libs/cli/dist/index.js +923 -397
- package/libs/cli/dist/index.js.map +12 -12
- package/libs/cli/dist/output.d.ts +107 -0
- package/libs/cli/dist/output.d.ts.map +1 -0
- package/libs/cli/dist/types.d.ts +40 -0
- package/libs/cli/dist/types.d.ts.map +1 -1
- package/libs/cli/dist/utils.d.ts +19 -1
- package/libs/cli/dist/utils.d.ts.map +1 -1
- package/libs/cli/package.json +42 -5
- package/package.json +19 -5
package/libs/cli/dist/index.js (CHANGED)
@@ -18,7 +18,7 @@ var __toESM = (mod, isNodeMode, target) => {
|
|
|
18
18
|
var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);
|
|
19
19
|
var __require = /* @__PURE__ */ createRequire(import.meta.url);
|
|
20
20
|
|
|
21
|
-
-// ../../node_modules/cli-width/index.js
+// ../../node_modules/.bun/cli-width@4.1.0/node_modules/cli-width/index.js
|
|
22
22
|
var require_cli_width = __commonJS((exports, module) => {
|
|
23
23
|
module.exports = cliWidth;
|
|
24
24
|
function normalizeOpts(options) {
|
|
@@ -58,14 +58,14 @@ var require_cli_width = __commonJS((exports, module) => {
|
|
|
58
58
|
}
|
|
59
59
|
});
|
|
60
60
|
|
|
61
|
-
-// ../../node_modules/emoji-regex/index.js
+// ../../node_modules/.bun/emoji-regex@10.6.0/node_modules/emoji-regex/index.js
|
|
62
62
|
var require_emoji_regex = __commonJS((exports, module) => {
|
|
63
63
|
module.exports = () => {
|
|
64
64
|
return /[#*0-9]\uFE0F?\u20E3|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26AA\u26B0\u26B1\u26BD\u26BE\u26C4\u26C8\u26CF\u26D1\u26E9\u26F0-\u26F5\u26F7\u26F8\u26FA\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2757\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B55\u3030\u303D\u3297\u3299]\uFE0F?|[\u261D\u270C\u270D](?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?|[\u270A\u270B](?:\uD83C[\uDFFB-\uDFFF])?|[\u23E9-\u23EC\u23F0\u23F3\u25FD\u2693\u26A1\u26AB\u26C5\u26CE\u26D4\u26EA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2795-\u2797\u27B0\u27BF\u2B50]|\u26D3\uFE0F?(?:\u200D\uD83D\uDCA5)?|\u26F9(?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?(?:\u200D[\u2640\u2642]\uFE0F?)?|\u2764\uFE0F?(?:\u200D(?:\uD83D\uDD25|\uD83E\uDE79))?|\uD83C(?:[\uDC04\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]\uFE0F?|[\uDF85\uDFC2\uDFC7](?:\uD83C[\uDFFB-\uDFFF])?|[\uDFC4\uDFCA](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDFCB\uDFCC](?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF43\uDF45-\uDF4A\uDF4C-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uDDE6\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF]|\uDDE7\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF]|\uDDE8\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF7\uDDFA-\uDDFF]|\uDDE9\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF]|\uDDEA\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA]|\uDDEB\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7]|\uDDEC\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE]|\uDDED\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA]|\uDDEE\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9]|\uDDEF\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5]|\uDDF0\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF]|\uDDF1\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE]|\uDDF2\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF]|\uDDF3\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF]|\uDDF4\uD83C\uDDF2|\uDDF5\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE]|\uDDF6\uD83C\uDDE6|\uDDF7\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC]|\uDDF8\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF]|\uDDF9\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF]|\uDDFA\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF]|\uDDFB\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA]|\uDDFC\uD83C[\uDDEB\uDDF8]|\uDDFD\uD83C\uDDF0|\uDDFE\uD83C[\uDDEA\uDDF9]|\uDDFF\uD83C[\uDDE6\uDDF2\uDDFC]|\uDF44(?:\u200D\uD83D\uDFEB)?|\uDF4B(?:\u200D\uD83D\uDFE9)?|\uDFC3(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDFF3\uFE0F?(?:\u200D(?:\u26A7\uFE0F?|\uD83C\uDF08))?|\uDFF4(?:\u200D\u2620\uFE0F?|\uDB40\uDC67\uDB40\uDC62\uDB40(?:\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDC73\uDB40\uDC63\uDB40\uDC74|\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F)?)|\uD83D(?:[\uDC3F\uDCFD
\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3]\uFE0F?|[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC](?:\uD83C[\uDFFB-\uDFFF])?|[\uDC6E-\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4\uDEB5](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD74\uDD90](?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?|[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC25\uDC27-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE41\uDE43\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED8\uDEDC-\uDEDF\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB\uDFF0]|\uDC08(?:\u200D\u2B1B)?|\uDC15(?:\u200D\uD83E\uDDBA)?|\uDC26(?:\u200D(?:\u2B1B|\uD83D\uDD25))?|\uDC3B(?:\u200D\u2744\uFE0F?)?|\uDC41\uFE0F?(?:\u200D\uD83D\uDDE8\uFE0F?)?|\uDC68(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDC68\uDC69]\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u2
00D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE])|\uD83E(?:[\uDD1D\uDEEF]\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE]|[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3])))?))?|\uDC69(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?[\uDC68\uDC69]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?|\uDC69\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?))|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFC-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFC-\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFD-\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFD\uDFFF]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFE])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFE]|\uDEEF\u200D\uD83D\uDC69\uD83C[\uDFFB-\uDFFE])))?))?|\uDD75(?:\uD83C[\uDFFB-\uDFFF]|\uFE0F)?(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDE2E(?:\u200D\uD83D\uDCA8)?|\uDE35(?:\u200D\uD83D\uDCAB)?|\uDE36(?:\u200D\uD83C\uDF2B\uFE0F?)?|\uDE42(?:\u200D[\u2194\u2195]\uFE0F?)?|\uDEB6(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?)|\uD83E(?:[\uDD0C\uDD0F\uDD18-\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5\uDEC3-\uDEC5\uDEF0\uDEF2-\uDEF8](?:\uD83C[\uDFFB-\uDFFF])?|[\uDD26\uDD35\uDD37-\uDD39\uDD3C-\uDD3
E\uDDB8\uDDB9\uDDCD\uDDCF\uDDD4\uDDD6-\uDDDD](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDDDE\uDDDF](?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD0D\uDD0E\uDD10-\uDD17\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCC\uDDD0\uDDE0-\uDDFF\uDE70-\uDE7C\uDE80-\uDE8A\uDE8E-\uDEC2\uDEC6\uDEC8\uDECD-\uDEDC\uDEDF-\uDEEA\uDEEF]|\uDDCE(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDDD1(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1|\uDDD1\u200D\uD83E\uDDD2(?:\u200D\uD83E\uDDD2)?|\uDDD2(?:\u200D\uD83E\uDDD2)?))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC30\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE])|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3\uDE70]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF]|\uDEEF\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE])))?))?|\uDEF1(?:\uD83C(?:\uDFFB(?:\u200D\uD83E\uDEF2\uD83C[\uDFFC-\uDFFF])?|\uDFFC(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFD-\uDFFF])?|\uDFFD(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])?|\uDFFE(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFD\uDFFF])?|\uDFFF(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFE])?))?)/g;
|
|
65
65
|
};
|
|
66
66
|
});
|
|
67
67
|
|
|
68
|
-
-// ../../node_modules/mute-stream/lib/index.js
+// ../../node_modules/.bun/mute-stream@3.0.0/node_modules/mute-stream/lib/index.js
|
|
69
69
|
var require_lib = __commonJS((exports, module) => {
|
|
70
70
|
var Stream = __require("stream");
|
|
71
71
|
|
|
@@ -185,7 +185,7 @@ var require_lib = __commonJS((exports, module) => {
|
|
|
185
185
|
module.exports = MuteStream;
|
|
186
186
|
});
|
|
187
187
|
|
|
188
|
-
-// ../../node_modules/bytes/index.js
+// ../../node_modules/.bun/bytes@3.1.2/node_modules/bytes/index.js
|
|
189
189
|
var require_bytes = __commonJS((exports, module) => {
|
|
190
190
|
/*!
|
|
191
191
|
* bytes
|
|
@@ -277,7 +277,77 @@ var require_bytes = __commonJS((exports, module) => {
|
|
|
277
277
|
}
|
|
278
278
|
});
|
|
279
279
|
|
|
280
|
-
-// ../../node_modules/
+// ../../node_modules/.bun/picocolors@1.1.1/node_modules/picocolors/picocolors.js
|
|
281
|
+
var require_picocolors = __commonJS((exports, module) => {
|
|
282
|
+
var p = process || {};
|
|
283
|
+
var argv = p.argv || [];
|
|
284
|
+
var env = p.env || {};
|
|
285
|
+
var isColorSupported = !(!!env.NO_COLOR || argv.includes("--no-color")) && (!!env.FORCE_COLOR || argv.includes("--color") || p.platform === "win32" || (p.stdout || {}).isTTY && env.TERM !== "dumb" || !!env.CI);
|
|
286
|
+
var formatter = (open, close, replace = open) => (input) => {
|
|
287
|
+
let string = "" + input, index = string.indexOf(close, open.length);
|
|
288
|
+
return ~index ? open + replaceClose(string, close, replace, index) + close : open + string + close;
|
|
289
|
+
};
|
|
290
|
+
var replaceClose = (string, close, replace, index) => {
|
|
291
|
+
let result = "", cursor = 0;
|
|
292
|
+
do {
|
|
293
|
+
result += string.substring(cursor, index) + replace;
|
|
294
|
+
cursor = index + close.length;
|
|
295
|
+
index = string.indexOf(close, cursor);
|
|
296
|
+
} while (~index);
|
|
297
|
+
return result + string.substring(cursor);
|
|
298
|
+
};
|
|
299
|
+
var createColors = (enabled = isColorSupported) => {
|
|
300
|
+
let f = enabled ? formatter : () => String;
|
|
301
|
+
return {
|
|
302
|
+
isColorSupported: enabled,
|
|
303
|
+
reset: f("\x1B[0m", "\x1B[0m"),
|
|
304
|
+
bold: f("\x1B[1m", "\x1B[22m", "\x1B[22m\x1B[1m"),
|
|
305
|
+
dim: f("\x1B[2m", "\x1B[22m", "\x1B[22m\x1B[2m"),
|
|
306
|
+
italic: f("\x1B[3m", "\x1B[23m"),
|
|
307
|
+
underline: f("\x1B[4m", "\x1B[24m"),
|
|
308
|
+
inverse: f("\x1B[7m", "\x1B[27m"),
|
|
309
|
+
hidden: f("\x1B[8m", "\x1B[28m"),
|
|
310
|
+
strikethrough: f("\x1B[9m", "\x1B[29m"),
|
|
311
|
+
black: f("\x1B[30m", "\x1B[39m"),
|
|
312
|
+
red: f("\x1B[31m", "\x1B[39m"),
|
|
313
|
+
green: f("\x1B[32m", "\x1B[39m"),
|
|
314
|
+
yellow: f("\x1B[33m", "\x1B[39m"),
|
|
315
|
+
blue: f("\x1B[34m", "\x1B[39m"),
|
|
316
|
+
magenta: f("\x1B[35m", "\x1B[39m"),
|
|
317
|
+
cyan: f("\x1B[36m", "\x1B[39m"),
|
|
318
|
+
white: f("\x1B[37m", "\x1B[39m"),
|
|
319
|
+
gray: f("\x1B[90m", "\x1B[39m"),
|
|
320
|
+
bgBlack: f("\x1B[40m", "\x1B[49m"),
|
|
321
|
+
bgRed: f("\x1B[41m", "\x1B[49m"),
|
|
322
|
+
bgGreen: f("\x1B[42m", "\x1B[49m"),
|
|
323
|
+
bgYellow: f("\x1B[43m", "\x1B[49m"),
|
|
324
|
+
bgBlue: f("\x1B[44m", "\x1B[49m"),
|
|
325
|
+
bgMagenta: f("\x1B[45m", "\x1B[49m"),
|
|
326
|
+
bgCyan: f("\x1B[46m", "\x1B[49m"),
|
|
327
|
+
bgWhite: f("\x1B[47m", "\x1B[49m"),
|
|
328
|
+
blackBright: f("\x1B[90m", "\x1B[39m"),
|
|
329
|
+
redBright: f("\x1B[91m", "\x1B[39m"),
|
|
330
|
+
greenBright: f("\x1B[92m", "\x1B[39m"),
|
|
331
|
+
yellowBright: f("\x1B[93m", "\x1B[39m"),
|
|
332
|
+
blueBright: f("\x1B[94m", "\x1B[39m"),
|
|
333
|
+
magentaBright: f("\x1B[95m", "\x1B[39m"),
|
|
334
|
+
cyanBright: f("\x1B[96m", "\x1B[39m"),
|
|
335
|
+
whiteBright: f("\x1B[97m", "\x1B[39m"),
|
|
336
|
+
bgBlackBright: f("\x1B[100m", "\x1B[49m"),
|
|
337
|
+
bgRedBright: f("\x1B[101m", "\x1B[49m"),
|
|
338
|
+
bgGreenBright: f("\x1B[102m", "\x1B[49m"),
|
|
339
|
+
bgYellowBright: f("\x1B[103m", "\x1B[49m"),
|
|
340
|
+
bgBlueBright: f("\x1B[104m", "\x1B[49m"),
|
|
341
|
+
bgMagentaBright: f("\x1B[105m", "\x1B[49m"),
|
|
342
|
+
bgCyanBright: f("\x1B[106m", "\x1B[49m"),
|
|
343
|
+
bgWhiteBright: f("\x1B[107m", "\x1B[49m")
|
|
344
|
+
};
|
|
345
|
+
};
|
|
346
|
+
module.exports = createColors();
|
|
347
|
+
module.exports.createColors = createColors;
|
|
348
|
+
});
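Editor's note (not part of the published bundle): the picocolors shim added above wraps each style in an ANSI open/close pair, and replaceClose re-opens the style after any nested close so styles survive nesting. A trimmed-down sketch of the same formatter in standalone TypeScript, assuming only the escape codes visible in the bundle:

// Sketch of the picocolors-style formatter; mirrors the bundled logic above.
const replaceClose = (s: string, close: string, replace: string, index: number): string => {
  let result = "";
  let cursor = 0;
  do {
    // Copy up to the nested close, then re-open the outer style.
    result += s.substring(cursor, index) + replace;
    cursor = index + close.length;
    index = s.indexOf(close, cursor);
  } while (index !== -1);
  return result + s.substring(cursor);
};

const formatter = (open: string, close: string, replace: string = open) =>
  (input: unknown): string => {
    const str = "" + input;
    const index = str.indexOf(close, open.length);
    return index !== -1
      ? open + replaceClose(str, close, replace, index) + close
      : open + str + close;
  };

const red = formatter("\x1B[31m", "\x1B[39m");
const bold = formatter("\x1B[1m", "\x1B[22m", "\x1B[22m\x1B[1m");
console.log(bold(`start ${red("red text")} still bold`)); // bold survives the nested color reset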
|
|
349
|
+
|
|
350
|
+
// ../../node_modules/.bun/cac@6.7.14/node_modules/cac/dist/index.mjs
|
|
281
351
|
import { EventEmitter } from "events";
|
|
282
352
|
function toArr(any) {
|
|
283
353
|
return any == null ? [] : Array.isArray(any) ? any : [any];
|
|
@@ -1019,13 +1089,13 @@ function getClaudeExtraArgs() {
|
|
|
1019
1089
|
var OLLAMA_URL = getOllamaUrl();
|
|
1020
1090
|
var DEFAULT_MODEL = getDefaultModel();
|
|
1021
1091
|
|
|
1022
|
-
-// ../../node_modules/@inquirer/core/dist/lib/key.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/key.js
|
|
1023
1093
|
var isUpKey = (key, keybindings = []) => key.name === "up" || keybindings.includes("vim") && key.name === "k" || keybindings.includes("emacs") && key.ctrl && key.name === "p";
|
|
1024
1094
|
var isDownKey = (key, keybindings = []) => key.name === "down" || keybindings.includes("vim") && key.name === "j" || keybindings.includes("emacs") && key.ctrl && key.name === "n";
|
|
1025
1095
|
var isBackspaceKey = (key) => key.name === "backspace";
|
|
1026
1096
|
var isNumberKey = (key) => "1234567890".includes(key.name);
|
|
1027
1097
|
var isEnterKey = (key) => key.name === "enter" || key.name === "return";
|
|
1028
|
-
-// ../../node_modules/@inquirer/core/dist/lib/errors.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/errors.js
|
|
1029
1099
|
class AbortPromptError extends Error {
|
|
1030
1100
|
name = "AbortPromptError";
|
|
1031
1101
|
message = "Prompt was aborted";
|
|
@@ -1051,10 +1121,10 @@ class HookError extends Error {
|
|
|
1051
1121
|
class ValidationError extends Error {
|
|
1052
1122
|
name = "ValidationError";
|
|
1053
1123
|
}
|
|
1054
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-state.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-state.js
|
|
1055
1125
|
import { AsyncResource as AsyncResource2 } from "node:async_hooks";
|
|
1056
1126
|
|
|
1057
|
-
-// ../../node_modules/@inquirer/core/dist/lib/hook-engine.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/hook-engine.js
|
|
1058
1128
|
import { AsyncLocalStorage, AsyncResource } from "node:async_hooks";
|
|
1059
1129
|
var hookStorage = new AsyncLocalStorage;
|
|
1060
1130
|
function createStore(rl) {
|
|
@@ -1159,7 +1229,7 @@ var effectScheduler = {
|
|
|
1159
1229
|
}
|
|
1160
1230
|
};
|
|
1161
1231
|
|
|
1162
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-state.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-state.js
|
|
1163
1233
|
function useState(defaultValue) {
|
|
1164
1234
|
return withPointer((pointer) => {
|
|
1165
1235
|
const setState = AsyncResource2.bind(function setState(newValue) {
|
|
@@ -1177,7 +1247,7 @@ function useState(defaultValue) {
|
|
|
1177
1247
|
});
|
|
1178
1248
|
}
|
|
1179
1249
|
|
|
1180
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-effect.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-effect.js
|
|
1181
1251
|
function useEffect(cb, depArray) {
|
|
1182
1252
|
withPointer((pointer) => {
|
|
1183
1253
|
const oldDeps = pointer.get();
|
|
@@ -1189,10 +1259,10 @@ function useEffect(cb, depArray) {
|
|
|
1189
1259
|
});
|
|
1190
1260
|
}
|
|
1191
1261
|
|
|
1192
|
-
-// ../../node_modules/@inquirer/core/dist/lib/theme.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/theme.js
|
|
1193
1263
|
import { styleText } from "node:util";
|
|
1194
1264
|
|
|
1195
|
-
-// ../../node_modules/@inquirer/figures/dist/index.js
+// ../../node_modules/.bun/@inquirer+figures@2.0.3/node_modules/@inquirer/figures/dist/index.js
|
|
1196
1266
|
import process2 from "node:process";
|
|
1197
1267
|
function isUnicodeSupported() {
|
|
1198
1268
|
if (process2.platform !== "win32") {
|
|
@@ -1481,7 +1551,7 @@ var figures = shouldUseMain ? mainSymbols : fallbackSymbols;
|
|
|
1481
1551
|
var dist_default2 = figures;
|
|
1482
1552
|
var replacements = Object.entries(specialMainSymbols);
|
|
1483
1553
|
|
|
1484
|
-
-// ../../node_modules/@inquirer/core/dist/lib/theme.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/theme.js
|
|
1485
1555
|
var defaultTheme = {
|
|
1486
1556
|
prefix: {
|
|
1487
1557
|
idle: styleText("blue", "?"),
|
|
@@ -1502,7 +1572,7 @@ var defaultTheme = {
|
|
|
1502
1572
|
}
|
|
1503
1573
|
};
|
|
1504
1574
|
|
|
1505
|
-
-// ../../node_modules/@inquirer/core/dist/lib/make-theme.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/make-theme.js
|
|
1506
1576
|
function isPlainObject(value) {
|
|
1507
1577
|
if (typeof value !== "object" || value === null)
|
|
1508
1578
|
return false;
|
|
@@ -1530,7 +1600,7 @@ function makeTheme(...themes) {
|
|
|
1530
1600
|
return deepMerge2(...themesToMerge);
|
|
1531
1601
|
}
|
|
1532
1602
|
|
|
1533
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-prefix.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-prefix.js
|
|
1534
1604
|
function usePrefix({ status = "idle", theme }) {
|
|
1535
1605
|
const [showLoader, setShowLoader] = useState(false);
|
|
1536
1606
|
const [tick, setTick] = useState(0);
|
|
@@ -1560,7 +1630,7 @@ function usePrefix({ status = "idle", theme }) {
|
|
|
1560
1630
|
const iconName = status === "loading" ? "idle" : status;
|
|
1561
1631
|
return typeof prefix === "string" ? prefix : prefix[iconName] ?? prefix["idle"];
|
|
1562
1632
|
}
|
|
1563
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-memo.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-memo.js
|
|
1564
1634
|
function useMemo(fn, dependencies) {
|
|
1565
1635
|
return withPointer((pointer) => {
|
|
1566
1636
|
const prev = pointer.get();
|
|
@@ -1572,11 +1642,11 @@ function useMemo(fn, dependencies) {
|
|
|
1572
1642
|
return prev.value;
|
|
1573
1643
|
});
|
|
1574
1644
|
}
|
|
1575
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-ref.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-ref.js
|
|
1576
1646
|
function useRef(val) {
|
|
1577
1647
|
return useState({ current: val })[0];
|
|
1578
1648
|
}
|
|
1579
|
-
-// ../../node_modules/@inquirer/core/dist/lib/use-keypress.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/use-keypress.js
|
|
1580
1650
|
function useKeypress(userHandler) {
|
|
1581
1651
|
const signal = useRef(userHandler);
|
|
1582
1652
|
signal.current = userHandler;
|
|
@@ -1594,10 +1664,10 @@ function useKeypress(userHandler) {
|
|
|
1594
1664
|
};
|
|
1595
1665
|
}, []);
|
|
1596
1666
|
}
|
|
1597
|
-
-// ../../node_modules/@inquirer/core/dist/lib/utils.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/utils.js
|
|
1598
1668
|
var import_cli_width = __toESM(require_cli_width(), 1);
|
|
1599
1669
|
|
|
1600
|
-
-// ../../node_modules/
+// ../../node_modules/.bun/ansi-regex@6.2.2/node_modules/ansi-regex/index.js
|
|
1601
1671
|
function ansiRegex({ onlyFirst = false } = {}) {
|
|
1602
1672
|
const ST = "(?:\\u0007|\\u001B\\u005C|\\u009C)";
|
|
1603
1673
|
const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
|
|
@@ -1606,7 +1676,7 @@ function ansiRegex({ onlyFirst = false } = {}) {
|
|
|
1606
1676
|
return new RegExp(pattern, onlyFirst ? undefined : "g");
|
|
1607
1677
|
}
|
|
1608
1678
|
|
|
1609
|
-
-// ../../node_modules/
+// ../../node_modules/.bun/strip-ansi@7.1.2/node_modules/strip-ansi/index.js
|
|
1610
1680
|
var regex = ansiRegex();
|
|
1611
1681
|
function stripAnsi(string) {
|
|
1612
1682
|
if (typeof string !== "string") {
|
|
@@ -1615,7 +1685,7 @@ function stripAnsi(string) {
|
|
|
1615
1685
|
return string.replace(regex, "");
|
|
1616
1686
|
}
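Editor's note (not part of the published bundle): the bundle now keeps a single shared ansiRegex/stripAnsi (the duplicated copies vendored under wrap-ansi are removed further down), and uses it wherever visible width has to be measured. A hedged standalone sketch of that use, with the CSI pattern copied from the code above:

// Sketch: strip escape sequences before counting visible characters (names are illustrative, not exported by loclaude).
const csiPattern =
  "[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]";
const ansi = new RegExp(csiPattern, "g");

function stripAnsiCodes(s: string): string {
  return s.replace(ansi, "");
}

const styled = "\x1B[32m\u2713\x1B[39m done";
console.log(styled.length);                 // includes the escape bytes
console.log(stripAnsiCodes(styled).length); // visible characters only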
|
|
1617
1687
|
|
|
1618
|
-
-// ../../node_modules/get-east-asian-width/lookup.js
+// ../../node_modules/.bun/get-east-asian-width@1.4.0/node_modules/get-east-asian-width/lookup.js
|
|
1619
1689
|
function isAmbiguous(x) {
|
|
1620
1690
|
return x === 161 || x === 164 || x === 167 || x === 168 || x === 170 || x === 173 || x === 174 || x >= 176 && x <= 180 || x >= 182 && x <= 186 || x >= 188 && x <= 191 || x === 198 || x === 208 || x === 215 || x === 216 || x >= 222 && x <= 225 || x === 230 || x >= 232 && x <= 234 || x === 236 || x === 237 || x === 240 || x === 242 || x === 243 || x >= 247 && x <= 250 || x === 252 || x === 254 || x === 257 || x === 273 || x === 275 || x === 283 || x === 294 || x === 295 || x === 299 || x >= 305 && x <= 307 || x === 312 || x >= 319 && x <= 322 || x === 324 || x >= 328 && x <= 331 || x === 333 || x === 338 || x === 339 || x === 358 || x === 359 || x === 363 || x === 462 || x === 464 || x === 466 || x === 468 || x === 470 || x === 472 || x === 474 || x === 476 || x === 593 || x === 609 || x === 708 || x === 711 || x >= 713 && x <= 715 || x === 717 || x === 720 || x >= 728 && x <= 731 || x === 733 || x === 735 || x >= 768 && x <= 879 || x >= 913 && x <= 929 || x >= 931 && x <= 937 || x >= 945 && x <= 961 || x >= 963 && x <= 969 || x === 1025 || x >= 1040 && x <= 1103 || x === 1105 || x === 8208 || x >= 8211 && x <= 8214 || x === 8216 || x === 8217 || x === 8220 || x === 8221 || x >= 8224 && x <= 8226 || x >= 8228 && x <= 8231 || x === 8240 || x === 8242 || x === 8243 || x === 8245 || x === 8251 || x === 8254 || x === 8308 || x === 8319 || x >= 8321 && x <= 8324 || x === 8364 || x === 8451 || x === 8453 || x === 8457 || x === 8467 || x === 8470 || x === 8481 || x === 8482 || x === 8486 || x === 8491 || x === 8531 || x === 8532 || x >= 8539 && x <= 8542 || x >= 8544 && x <= 8555 || x >= 8560 && x <= 8569 || x === 8585 || x >= 8592 && x <= 8601 || x === 8632 || x === 8633 || x === 8658 || x === 8660 || x === 8679 || x === 8704 || x === 8706 || x === 8707 || x === 8711 || x === 8712 || x === 8715 || x === 8719 || x === 8721 || x === 8725 || x === 8730 || x >= 8733 && x <= 8736 || x === 8739 || x === 8741 || x >= 8743 && x <= 8748 || x === 8750 || x >= 8756 && x <= 8759 || x === 8764 || x === 8765 || x === 8776 || x === 8780 || x === 8786 || x === 8800 || x === 8801 || x >= 8804 && x <= 8807 || x === 8810 || x === 8811 || x === 8814 || x === 8815 || x === 8834 || x === 8835 || x === 8838 || x === 8839 || x === 8853 || x === 8857 || x === 8869 || x === 8895 || x === 8978 || x >= 9312 && x <= 9449 || x >= 9451 && x <= 9547 || x >= 9552 && x <= 9587 || x >= 9600 && x <= 9615 || x >= 9618 && x <= 9621 || x === 9632 || x === 9633 || x >= 9635 && x <= 9641 || x === 9650 || x === 9651 || x === 9654 || x === 9655 || x === 9660 || x === 9661 || x === 9664 || x === 9665 || x >= 9670 && x <= 9672 || x === 9675 || x >= 9678 && x <= 9681 || x >= 9698 && x <= 9701 || x === 9711 || x === 9733 || x === 9734 || x === 9737 || x === 9742 || x === 9743 || x === 9756 || x === 9758 || x === 9792 || x === 9794 || x === 9824 || x === 9825 || x >= 9827 && x <= 9829 || x >= 9831 && x <= 9834 || x === 9836 || x === 9837 || x === 9839 || x === 9886 || x === 9887 || x === 9919 || x >= 9926 && x <= 9933 || x >= 9935 && x <= 9939 || x >= 9941 && x <= 9953 || x === 9955 || x === 9960 || x === 9961 || x >= 9963 && x <= 9969 || x === 9972 || x >= 9974 && x <= 9977 || x === 9979 || x === 9980 || x === 9982 || x === 9983 || x === 10045 || x >= 10102 && x <= 10111 || x >= 11094 && x <= 11097 || x >= 12872 && x <= 12879 || x >= 57344 && x <= 63743 || x >= 65024 && x <= 65039 || x === 65533 || x >= 127232 && x <= 127242 || x >= 127248 && x <= 127277 || x >= 127280 && x <= 127337 || x >= 127344 && x <= 127373 || x === 127375 || x === 
127376 || x >= 127387 && x <= 127404 || x >= 917760 && x <= 917999 || x >= 983040 && x <= 1048573 || x >= 1048576 && x <= 1114109;
|
|
1621
1691
|
}
|
|
@@ -1626,7 +1696,7 @@ function isWide(x) {
|
|
|
1626
1696
|
return x >= 4352 && x <= 4447 || x === 8986 || x === 8987 || x === 9001 || x === 9002 || x >= 9193 && x <= 9196 || x === 9200 || x === 9203 || x === 9725 || x === 9726 || x === 9748 || x === 9749 || x >= 9776 && x <= 9783 || x >= 9800 && x <= 9811 || x === 9855 || x >= 9866 && x <= 9871 || x === 9875 || x === 9889 || x === 9898 || x === 9899 || x === 9917 || x === 9918 || x === 9924 || x === 9925 || x === 9934 || x === 9940 || x === 9962 || x === 9970 || x === 9971 || x === 9973 || x === 9978 || x === 9981 || x === 9989 || x === 9994 || x === 9995 || x === 10024 || x === 10060 || x === 10062 || x >= 10067 && x <= 10069 || x === 10071 || x >= 10133 && x <= 10135 || x === 10160 || x === 10175 || x === 11035 || x === 11036 || x === 11088 || x === 11093 || x >= 11904 && x <= 11929 || x >= 11931 && x <= 12019 || x >= 12032 && x <= 12245 || x >= 12272 && x <= 12287 || x >= 12289 && x <= 12350 || x >= 12353 && x <= 12438 || x >= 12441 && x <= 12543 || x >= 12549 && x <= 12591 || x >= 12593 && x <= 12686 || x >= 12688 && x <= 12773 || x >= 12783 && x <= 12830 || x >= 12832 && x <= 12871 || x >= 12880 && x <= 42124 || x >= 42128 && x <= 42182 || x >= 43360 && x <= 43388 || x >= 44032 && x <= 55203 || x >= 63744 && x <= 64255 || x >= 65040 && x <= 65049 || x >= 65072 && x <= 65106 || x >= 65108 && x <= 65126 || x >= 65128 && x <= 65131 || x >= 94176 && x <= 94180 || x >= 94192 && x <= 94198 || x >= 94208 && x <= 101589 || x >= 101631 && x <= 101662 || x >= 101760 && x <= 101874 || x >= 110576 && x <= 110579 || x >= 110581 && x <= 110587 || x === 110589 || x === 110590 || x >= 110592 && x <= 110882 || x === 110898 || x >= 110928 && x <= 110930 || x === 110933 || x >= 110948 && x <= 110951 || x >= 110960 && x <= 111355 || x >= 119552 && x <= 119638 || x >= 119648 && x <= 119670 || x === 126980 || x === 127183 || x === 127374 || x >= 127377 && x <= 127386 || x >= 127488 && x <= 127490 || x >= 127504 && x <= 127547 || x >= 127552 && x <= 127560 || x === 127568 || x === 127569 || x >= 127584 && x <= 127589 || x >= 127744 && x <= 127776 || x >= 127789 && x <= 127797 || x >= 127799 && x <= 127868 || x >= 127870 && x <= 127891 || x >= 127904 && x <= 127946 || x >= 127951 && x <= 127955 || x >= 127968 && x <= 127984 || x === 127988 || x >= 127992 && x <= 128062 || x === 128064 || x >= 128066 && x <= 128252 || x >= 128255 && x <= 128317 || x >= 128331 && x <= 128334 || x >= 128336 && x <= 128359 || x === 128378 || x === 128405 || x === 128406 || x === 128420 || x >= 128507 && x <= 128591 || x >= 128640 && x <= 128709 || x === 128716 || x >= 128720 && x <= 128722 || x >= 128725 && x <= 128728 || x >= 128732 && x <= 128735 || x === 128747 || x === 128748 || x >= 128756 && x <= 128764 || x >= 128992 && x <= 129003 || x === 129008 || x >= 129292 && x <= 129338 || x >= 129340 && x <= 129349 || x >= 129351 && x <= 129535 || x >= 129648 && x <= 129660 || x >= 129664 && x <= 129674 || x >= 129678 && x <= 129734 || x === 129736 || x >= 129741 && x <= 129756 || x >= 129759 && x <= 129770 || x >= 129775 && x <= 129784 || x >= 131072 && x <= 196605 || x >= 196608 && x <= 262141;
|
|
1627
1697
|
}
|
|
1628
1698
|
|
|
1629
|
-
-// ../../node_modules/get-east-asian-width/index.js
+// ../../node_modules/.bun/get-east-asian-width@1.4.0/node_modules/get-east-asian-width/index.js
|
|
1630
1700
|
function validate(codePoint) {
|
|
1631
1701
|
if (!Number.isSafeInteger(codePoint)) {
|
|
1632
1702
|
throw new TypeError(`Expected a code point, got \`${typeof codePoint}\`.`);
|
|
@@ -1640,7 +1710,7 @@ function eastAsianWidth(codePoint, { ambiguousAsWide = false } = {}) {
|
|
|
1640
1710
|
return 1;
|
|
1641
1711
|
}
|
|
1642
1712
|
|
|
1643
|
-
-// ../../node_modules/string-width/index.js
+// ../../node_modules/.bun/string-width@7.2.0/node_modules/string-width/index.js
|
|
1644
1714
|
var import_emoji_regex = __toESM(require_emoji_regex(), 1);
|
|
1645
1715
|
var segmenter = new Intl.Segmenter;
|
|
1646
1716
|
var defaultIgnorableCodePointRegex = /^\p{Default_Ignorable_Code_Point}$/u;
|
|
@@ -1689,25 +1759,7 @@ function stringWidth(string, options = {}) {
|
|
|
1689
1759
|
return width;
|
|
1690
1760
|
}
|
|
1691
1761
|
|
|
1692
|
-
// ../../node_modules/
|
|
1693
|
-
function ansiRegex2({ onlyFirst = false } = {}) {
|
|
1694
|
-
const ST = "(?:\\u0007|\\u001B\\u005C|\\u009C)";
|
|
1695
|
-
const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
|
|
1696
|
-
const csi = "[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]";
|
|
1697
|
-
const pattern = `${osc}|${csi}`;
|
|
1698
|
-
return new RegExp(pattern, onlyFirst ? undefined : "g");
|
|
1699
|
-
}
|
|
1700
|
-
|
|
1701
|
-
// ../../node_modules/wrap-ansi/node_modules/strip-ansi/index.js
|
|
1702
|
-
var regex2 = ansiRegex2();
|
|
1703
|
-
function stripAnsi2(string) {
|
|
1704
|
-
if (typeof string !== "string") {
|
|
1705
|
-
throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
|
|
1706
|
-
}
|
|
1707
|
-
return string.replace(regex2, "");
|
|
1708
|
-
}
|
|
1709
|
-
|
|
1710
|
-
-// ../../node_modules/wrap-ansi/node_modules/ansi-styles/index.js
+// ../../node_modules/.bun/ansi-styles@6.2.3/node_modules/ansi-styles/index.js
|
|
1711
1763
|
var ANSI_BACKGROUND_OFFSET = 10;
|
|
1712
1764
|
var wrapAnsi16 = (offset = 0) => (code) => `\x1B[${code + offset}m`;
|
|
1713
1765
|
var wrapAnsi256 = (offset = 0) => (code) => `\x1B[${38 + offset};5;${code}m`;
|
|
@@ -1884,7 +1936,7 @@ function assembleStyles() {
|
|
|
1884
1936
|
var ansiStyles = assembleStyles();
|
|
1885
1937
|
var ansi_styles_default = ansiStyles;
|
|
1886
1938
|
|
|
1887
|
-
-// ../../node_modules/wrap-ansi/index.js
+// ../../node_modules/.bun/wrap-ansi@9.0.2/node_modules/wrap-ansi/index.js
|
|
1888
1940
|
var ESCAPES = new Set([
|
|
1889
1941
|
"\x1B",
|
|
1890
1942
|
""
|
|
@@ -1902,7 +1954,7 @@ var wrapWord = (rows, word, columns) => {
|
|
|
1902
1954
|
const characters = [...word];
|
|
1903
1955
|
let isInsideEscape = false;
|
|
1904
1956
|
let isInsideLinkEscape = false;
|
|
1905
|
-
-let visible = stringWidth(
+let visible = stringWidth(stripAnsi(rows.at(-1)));
|
|
1906
1958
|
for (const [index, character] of characters.entries()) {
|
|
1907
1959
|
const characterLength = stringWidth(character);
|
|
1908
1960
|
if (visible + characterLength <= columns) {
|
|
@@ -2046,7 +2098,7 @@ function wrapAnsi(string, columns, options) {
|
|
|
2046
2098
|
`);
|
|
2047
2099
|
}
|
|
2048
2100
|
|
|
2049
|
-
-// ../../node_modules/@inquirer/core/dist/lib/utils.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/utils.js
|
|
2050
2102
|
function breakLines(content, width) {
|
|
2051
2103
|
return content.split(`
|
|
2052
2104
|
`).flatMap((line) => wrapAnsi(line, width, { trim: false, hard: true }).split(`
|
|
@@ -2057,7 +2109,7 @@ function readlineWidth() {
|
|
|
2057
2109
|
return import_cli_width.default({ defaultWidth: 80, output: readline().output });
|
|
2058
2110
|
}
|
|
2059
2111
|
|
|
2060
|
-
-// ../../node_modules/@inquirer/core/dist/lib/pagination/use-pagination.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/pagination/use-pagination.js
|
|
2061
2113
|
function usePointerPosition({ active, renderedItems, pageSize, loop }) {
|
|
2062
2114
|
const state = useRef({
|
|
2063
2115
|
lastPointer: active,
|
|
@@ -2123,12 +2175,12 @@ function usePagination({ items, active, renderItem, pageSize, loop = true }) {
|
|
|
2123
2175
|
return pageBuffer.filter((line) => typeof line === "string").join(`
|
|
2124
2176
|
`);
|
|
2125
2177
|
}
|
|
2126
|
-
-// ../../node_modules/@inquirer/core/dist/lib/create-prompt.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/create-prompt.js
|
|
2127
2179
|
var import_mute_stream = __toESM(require_lib(), 1);
|
|
2128
2180
|
import * as readline2 from "node:readline";
|
|
2129
2181
|
import { AsyncResource as AsyncResource3 } from "node:async_hooks";
|
|
2130
2182
|
|
|
2131
|
-
-// ../../node_modules/signal-exit/dist/mjs/signals.js
+// ../../node_modules/.bun/signal-exit@4.1.0/node_modules/signal-exit/dist/mjs/signals.js
|
|
2132
2184
|
var signals = [];
|
|
2133
2185
|
signals.push("SIGHUP", "SIGINT", "SIGTERM");
|
|
2134
2186
|
if (process.platform !== "win32") {
|
|
@@ -2138,7 +2190,7 @@ if (process.platform === "linux") {
|
|
|
2138
2190
|
signals.push("SIGIO", "SIGPOLL", "SIGPWR", "SIGSTKFLT");
|
|
2139
2191
|
}
|
|
2140
2192
|
|
|
2141
|
-
-// ../../node_modules/signal-exit/dist/mjs/index.js
+// ../../node_modules/.bun/signal-exit@4.1.0/node_modules/signal-exit/dist/mjs/index.js
|
|
2142
2194
|
var processOk = (process3) => !!process3 && typeof process3 === "object" && typeof process3.removeListener === "function" && typeof process3.emit === "function" && typeof process3.reallyExit === "function" && typeof process3.listeners === "function" && typeof process3.kill === "function" && typeof process3.pid === "number" && typeof process3.on === "function";
|
|
2143
2195
|
var kExitEmitter = Symbol.for("signal-exit emitter");
|
|
2144
2196
|
var global = globalThis;
|
|
@@ -2336,10 +2388,10 @@ var {
|
|
|
2336
2388
|
unload
|
|
2337
2389
|
} = signalExitWrap(processOk(process3) ? new SignalExit(process3) : new SignalExitFallback);
|
|
2338
2390
|
|
|
2339
|
-
-// ../../node_modules/@inquirer/core/dist/lib/screen-manager.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/screen-manager.js
|
|
2340
2392
|
import { stripVTControlCharacters } from "node:util";
|
|
2341
2393
|
|
|
2342
|
-
-// ../../node_modules/@inquirer/ansi/dist/index.js
+// ../../node_modules/.bun/@inquirer+ansi@2.0.3/node_modules/@inquirer/ansi/dist/index.js
|
|
2343
2395
|
var ESC = "\x1B[";
|
|
2344
2396
|
var cursorLeft = ESC + "G";
|
|
2345
2397
|
var cursorHide = ESC + "?25l";
|
|
@@ -2355,7 +2407,7 @@ var cursorTo = (x, y) => {
|
|
|
2355
2407
|
var eraseLine = ESC + "2K";
|
|
2356
2408
|
var eraseLines = (lines) => lines > 0 ? (eraseLine + cursorUp(1)).repeat(lines - 1) + eraseLine + cursorLeft : "";
|
|
2357
2409
|
|
|
2358
|
-
-// ../../node_modules/@inquirer/core/dist/lib/screen-manager.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/screen-manager.js
|
|
2359
2411
|
var height = (content) => content.split(`
|
|
2360
2412
|
`).length;
|
|
2361
2413
|
var lastLine = (content) => content.split(`
|
|
@@ -2420,7 +2472,7 @@ class ScreenManager {
|
|
|
2420
2472
|
}
|
|
2421
2473
|
}
|
|
2422
2474
|
|
|
2423
|
-
-// ../../node_modules/@inquirer/core/dist/lib/promise-polyfill.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/promise-polyfill.js
|
|
2424
2476
|
class PromisePolyfill extends Promise {
|
|
2425
2477
|
static withResolver() {
|
|
2426
2478
|
let resolve;
|
|
@@ -2433,7 +2485,7 @@ class PromisePolyfill extends Promise {
|
|
|
2433
2485
|
}
|
|
2434
2486
|
}
|
|
2435
2487
|
|
|
2436
|
-
-// ../../node_modules/@inquirer/core/dist/lib/create-prompt.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/create-prompt.js
|
|
2437
2489
|
function getCallSites() {
|
|
2438
2490
|
const _prepareStackTrace = Error.prepareStackTrace;
|
|
2439
2491
|
let result = [];
|
|
@@ -2519,7 +2571,7 @@ function createPrompt(view) {
|
|
|
2519
2571
|
};
|
|
2520
2572
|
return prompt;
|
|
2521
2573
|
}
|
|
2522
|
-
-// ../../node_modules/@inquirer/core/dist/lib/Separator.js
+// ../../node_modules/.bun/@inquirer+core@11.1.1+c195ea72dffa657e/node_modules/@inquirer/core/dist/lib/Separator.js
|
|
2523
2575
|
import { styleText as styleText2 } from "node:util";
|
|
2524
2576
|
class Separator {
|
|
2525
2577
|
separator = styleText2("dim", Array.from({ length: 15 }).join(dist_default2.line));
|
|
@@ -2533,7 +2585,7 @@ class Separator {
|
|
|
2533
2585
|
return Boolean(choice && typeof choice === "object" && "type" in choice && choice.type === "separator");
|
|
2534
2586
|
}
|
|
2535
2587
|
}
|
|
2536
|
-
-// ../../node_modules/@inquirer/select/dist/index.js
+// ../../node_modules/.bun/@inquirer+select@5.0.4+c195ea72dffa657e/node_modules/@inquirer/select/dist/index.js
|
|
2537
2589
|
import { styleText as styleText3 } from "node:util";
|
|
2538
2590
|
var selectTheme = {
|
|
2539
2591
|
icon: { cursor: dist_default2.pointer },
|
|
@@ -2692,15 +2744,67 @@ var dist_default3 = createPrompt((config, done) => {
|
|
|
2692
2744
|
// lib/utils.ts
|
|
2693
2745
|
var import_bytes = __toESM(require_bytes(), 1);
|
|
2694
2746
|
|
|
2747
|
+
// lib/output.ts
|
|
2748
|
+
var import_picocolors = __toESM(require_picocolors(), 1);
|
|
2749
|
+
var brand = (text) => import_picocolors.default.cyan(import_picocolors.default.bold(text));
|
|
2750
|
+
var success = (text) => `${import_picocolors.default.green("✓")} ${text}`;
|
|
2751
|
+
var warn = (text) => `${import_picocolors.default.yellow("⚠")} ${text}`;
|
|
2752
|
+
var error = (text) => `${import_picocolors.default.red("✗")} ${text}`;
|
|
2753
|
+
var info = (text) => `${import_picocolors.default.cyan("ℹ")} ${text}`;
|
|
2754
|
+
var dim = (text) => import_picocolors.default.dim(text);
|
|
2755
|
+
var green = (text) => import_picocolors.default.green(text);
|
|
2756
|
+
var yellow = (text) => import_picocolors.default.yellow(text);
|
|
2757
|
+
var red = (text) => import_picocolors.default.red(text);
|
|
2758
|
+
var cyan = (text) => import_picocolors.default.cyan(text);
|
|
2759
|
+
var magenta = (text) => import_picocolors.default.magenta(text);
|
|
2760
|
+
function header(text) {
|
|
2761
|
+
console.log("");
|
|
2762
|
+
console.log(brand(` ${text}`));
|
|
2763
|
+
console.log(import_picocolors.default.dim(" " + "─".repeat(text.length + 2)));
|
|
2764
|
+
}
|
|
2765
|
+
function labelValue(label, value) {
|
|
2766
|
+
console.log(` ${import_picocolors.default.dim(label + ":")} ${value}`);
|
|
2767
|
+
}
|
|
2768
|
+
function statusLine(status, name, message, extra) {
|
|
2769
|
+
const icons = { ok: "✓", warning: "⚠", error: "✗" };
|
|
2770
|
+
const colors = { ok: import_picocolors.default.green, warning: import_picocolors.default.yellow, error: import_picocolors.default.red };
|
|
2771
|
+
let line = `${colors[status](icons[status])} ${name}: ${message}`;
|
|
2772
|
+
if (extra) {
|
|
2773
|
+
line += ` ${import_picocolors.default.dim(`(${extra})`)}`;
|
|
2774
|
+
}
|
|
2775
|
+
return line;
|
|
2776
|
+
}
|
|
2777
|
+
function tableRow(columns, widths) {
|
|
2778
|
+
return columns.map((col, i) => {
|
|
2779
|
+
const width = widths[i] || col.length;
|
|
2780
|
+
return col.padEnd(width);
|
|
2781
|
+
}).join(" ");
|
|
2782
|
+
}
|
|
2783
|
+
function tableHeader(columns, widths) {
|
|
2784
|
+
const headerRow = tableRow(columns.map((c) => import_picocolors.default.bold(c)), widths);
|
|
2785
|
+
const underlineRow = widths.map((w) => "─".repeat(w)).join(" ");
|
|
2786
|
+
console.log(headerRow);
|
|
2787
|
+
console.log(import_picocolors.default.dim(underlineRow));
|
|
2788
|
+
}
|
|
2789
|
+
function url(urlStr) {
|
|
2790
|
+
return import_picocolors.default.underline(import_picocolors.default.cyan(urlStr));
|
|
2791
|
+
}
|
|
2792
|
+
function cmd(command) {
|
|
2793
|
+
return import_picocolors.default.cyan(command);
|
|
2794
|
+
}
|
|
2795
|
+
function file(filePath) {
|
|
2796
|
+
return import_picocolors.default.magenta(filePath);
|
|
2797
|
+
}
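Editor's note (not part of the published bundle): the new lib/output.ts bundled above centralizes CLI formatting: brand/success/warn/error/info prefixes, labelValue, statusLine, simple padded tables, and url/cmd/file highlighters, all built on picocolors. A hedged re-creation of two of those helpers for illustration, importing the published picocolors package rather than the bundled copy:

import pc from "picocolors";

// Illustrative re-implementation of success() and statusLine() from lib/output.ts above.
const success = (text: string) => `${pc.green("\u2713")} ${text}`;

function statusLine(status: "ok" | "warning" | "error", name: string, message: string, extra?: string): string {
  const icons = { ok: "\u2713", warning: "\u26A0", error: "\u2717" } as const;
  const colors = { ok: pc.green, warning: pc.yellow, error: pc.red } as const;
  let line = `${colors[status](icons[status])} ${name}: ${message}`;
  if (extra) line += ` ${pc.dim(`(${extra})`)}`;
  return line;
}

console.log(success("Ollama reachable"));
console.log(statusLine("warning", "NVIDIA GPU", "nvidia-smi not found", "CPU-only"));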
|
|
2798
|
+
|
|
2695
2799
|
// lib/spawn.ts
|
|
2696
|
-
-async function spawn(
-const command =
-const args =
+async function spawn(cmd2, opts = {}) {
+const command = cmd2[0];
+const args = cmd2.slice(1);
|
|
2699
2803
|
if (command === undefined) {
|
|
2700
2804
|
throw new Error("No command provided");
|
|
2701
2805
|
}
|
|
2702
2806
|
if (typeof Bun !== "undefined") {
|
|
2703
|
-
-const proc = Bun.spawn(
+const proc = Bun.spawn(cmd2, {
|
|
2704
2808
|
env: opts.env ?? process.env,
|
|
2705
2809
|
cwd: opts.cwd ?? process.cwd(),
|
|
2706
2810
|
stdin: opts.stdin ?? "inherit",
|
|
@@ -2720,14 +2824,14 @@ async function spawn(cmd, opts = {}) {
|
|
|
2720
2824
|
});
|
|
2721
2825
|
}
|
|
2722
2826
|
}
|
|
2723
|
-
-async function spawnCapture(
-const command =
-const args =
+async function spawnCapture(cmd2, opts = {}) {
+const command = cmd2[0];
+const args = cmd2.slice(1);
|
|
2726
2830
|
if (command === undefined) {
|
|
2727
2831
|
throw new Error("No command provided");
|
|
2728
2832
|
}
|
|
2729
2833
|
if (typeof Bun !== "undefined") {
|
|
2730
|
-
-const proc = Bun.spawn(
+const proc = Bun.spawn(cmd2, {
|
|
2731
2835
|
env: opts.env ?? process.env,
|
|
2732
2836
|
cwd: opts.cwd,
|
|
2733
2837
|
stdin: opts.stdin ?? "ignore",
|
|
@@ -2762,17 +2866,17 @@ async function spawnCapture(cmd, opts = {}) {
|
|
|
2762
2866
|
});
|
|
2763
2867
|
}
|
|
2764
2868
|
}
|
|
2765
|
-
-async function commandExists(
+async function commandExists(cmd2) {
|
|
2766
2870
|
try {
|
|
2767
|
-
-const result = await spawnCapture(process.platform === "win32" ? ["where",
+const result = await spawnCapture(process.platform === "win32" ? ["where", cmd2] : ["which", cmd2]);
|
|
2768
2872
|
return result.exitCode === 0;
|
|
2769
2873
|
} catch {
|
|
2770
2874
|
return false;
|
|
2771
2875
|
}
|
|
2772
2876
|
}
|
|
2773
|
-
-async function getCommandVersion(
+async function getCommandVersion(cmd2) {
|
|
2774
2878
|
try {
|
|
2775
|
-
-const result = await spawnCapture([
+const result = await spawnCapture([cmd2, "--version"]);
|
|
2776
2880
|
if (result.exitCode === 0 && result.stdout) {
|
|
2777
2881
|
return result.stdout.trim().split(`
|
|
2778
2882
|
`)[0] ?? null;
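Editor's note (not part of the published bundle): the reworked spawn/spawnCapture helpers above take the full argv array, prefer Bun.spawn when running under Bun, and back commandExists with `where` on Windows and `which` elsewhere. A hedged sketch of the same shape using only Node's child_process (the Bun branch shown in the bundle is omitted here):

import { spawn as nodeSpawn } from "node:child_process";

// Capture-style runner similar in spirit to spawnCapture above (Node-only sketch).
function runCapture(cmd: string[], cwd?: string): Promise<{ exitCode: number; stdout: string; stderr: string }> {
  const [command, ...args] = cmd;
  if (command === undefined) throw new Error("No command provided");
  return new Promise((resolve, reject) => {
    const child = nodeSpawn(command, args, { cwd, stdio: ["ignore", "pipe", "pipe"] });
    let stdout = "";
    let stderr = "";
    child.stdout?.on("data", (d) => (stdout += d));
    child.stderr?.on("data", (d) => (stderr += d));
    child.on("error", reject);
    child.on("close", (code) => resolve({ exitCode: code ?? 1, stdout, stderr }));
  });
}

// Existence probe mirroring the where/which logic in the diff above.
async function commandExists(cmd: string): Promise<boolean> {
  const probe = process.platform === "win32" ? ["where", cmd] : ["which", cmd];
  try {
    return (await runCapture(probe)).exitCode === 0;
  } catch {
    return false;
  }
}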
|
|
@@ -2793,33 +2897,100 @@ async function fetchOllamaModels() {
|
|
|
2793
2897
|
const data = await response.json();
|
|
2794
2898
|
return data.models ?? [];
|
|
2795
2899
|
}
|
|
2900
|
+
async function fetchRunningModels() {
|
|
2901
|
+
const ollamaUrl = getOllamaUrl();
|
|
2902
|
+
try {
|
|
2903
|
+
const response = await fetch(`${ollamaUrl}/api/ps`, {
|
|
2904
|
+
signal: AbortSignal.timeout(5000)
|
|
2905
|
+
});
|
|
2906
|
+
if (!response.ok) {
|
|
2907
|
+
return [];
|
|
2908
|
+
}
|
|
2909
|
+
const data = await response.json();
|
|
2910
|
+
return data.models ?? [];
|
|
2911
|
+
} catch (error2) {
|
|
2912
|
+
return [];
|
|
2913
|
+
}
|
|
2914
|
+
}
|
|
2915
|
+
async function isModelLoaded(modelName) {
|
|
2916
|
+
const runningModels = await fetchRunningModels();
|
|
2917
|
+
return runningModels.some((m) => m.model === modelName || m.name === modelName || m.model.startsWith(modelName + ":") || modelName.startsWith(m.model));
|
|
2918
|
+
}
|
|
2919
|
+
async function loadModel(modelName, keepAlive = "10m") {
|
|
2920
|
+
const ollamaUrl = getOllamaUrl();
|
|
2921
|
+
const response = await fetch(`${ollamaUrl}/api/generate`, {
|
|
2922
|
+
method: "POST",
|
|
2923
|
+
headers: {
|
|
2924
|
+
"Content-Type": "application/json"
|
|
2925
|
+
},
|
|
2926
|
+
body: JSON.stringify({
|
|
2927
|
+
model: modelName,
|
|
2928
|
+
prompt: "",
|
|
2929
|
+
stream: false,
|
|
2930
|
+
keep_alive: keepAlive
|
|
2931
|
+
})
|
|
2932
|
+
});
|
|
2933
|
+
if (!response.ok) {
|
|
2934
|
+
throw new Error(`Failed to load model: ${response.statusText}`);
|
|
2935
|
+
}
|
|
2936
|
+
await response.json();
|
|
2937
|
+
}
|
|
2938
|
+
async function ensureModelLoaded(modelName) {
|
|
2939
|
+
const isLoaded = await isModelLoaded(modelName);
|
|
2940
|
+
if (isLoaded) {
|
|
2941
|
+
console.log(dim(` Model ${magenta(modelName)} is already loaded`));
|
|
2942
|
+
return;
|
|
2943
|
+
}
|
|
2944
|
+
console.log(info(`Loading model ${magenta(modelName)}...`));
|
|
2945
|
+
console.log(dim(" This may take a moment on first run"));
|
|
2946
|
+
try {
|
|
2947
|
+
await loadModel(modelName, "10m");
|
|
2948
|
+
console.log(success(`Model ${magenta(modelName)} loaded (keep_alive: 10m)`));
|
|
2949
|
+
} catch (error2) {
|
|
2950
|
+
console.log(warn(`Could not pre-load model (will load on first request)`));
|
|
2951
|
+
console.log(dim(` ${error2 instanceof Error ? error2.message : "Unknown error"}`));
|
|
2952
|
+
}
|
|
2953
|
+
}
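Editor's note (not part of the published bundle): the new fetchRunningModels/isModelLoaded/loadModel/ensureModelLoaded functions above check Ollama's /api/ps for already-resident models and then issue an empty /api/generate request with keep_alive to pull the weights into memory before Claude Code connects. A hedged standalone sketch of that flow; the default URL is an assumption (Ollama's usual port), the endpoints and fields are the ones used in the bundle:

// Sketch of the pre-loading flow implemented by ensureModelLoaded above.
const OLLAMA_URL = process.env.OLLAMA_URL ?? "http://localhost:11434"; // assumption: default Ollama endpoint

async function isLoaded(model: string): Promise<boolean> {
  const res = await fetch(`${OLLAMA_URL}/api/ps`, { signal: AbortSignal.timeout(5000) });
  if (!res.ok) return false;
  const data = (await res.json()) as { models?: Array<{ model: string }> };
  return (data.models ?? []).some((m) => m.model === model || m.model.startsWith(model + ":"));
}

async function preload(model: string, keepAlive = "10m"): Promise<void> {
  if (await isLoaded(model)) return;
  // An empty prompt with keep_alive loads the model without generating anything.
  const res = await fetch(`${OLLAMA_URL}/api/generate`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ model, prompt: "", stream: false, keep_alive: keepAlive }),
  });
  if (!res.ok) throw new Error(`Failed to load model: ${res.statusText}`);
  await res.json();
}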
|
|
2796
2954
|
async function selectModelInteractively() {
|
|
2797
2955
|
const ollamaUrl = getOllamaUrl();
|
|
2798
2956
|
let models;
|
|
2799
2957
|
try {
|
|
2800
2958
|
models = await fetchOllamaModels();
|
|
2801
|
-
-} catch (
-console.
-console.
+} catch (error2) {
+console.log(warn(`Could not connect to Ollama at ${ollamaUrl}`));
+console.log(dim(" Make sure Ollama is running: loclaude docker-up"));
|
|
2804
2962
|
process.exit(1);
|
|
2805
2963
|
}
|
|
2806
2964
|
if (models.length === 0) {
|
|
2807
|
-
-console.
-console.
+console.log(warn("No models found in Ollama."));
+console.log(dim(" Pull a model first: loclaude models-pull <model-name>"));
|
|
2809
2967
|
process.exit(1);
|
|
2810
2968
|
}
|
|
2969
|
+
const runningModels = await fetchRunningModels();
|
|
2970
|
+
const loadedModelNames = new Set(runningModels.map((m) => m.model));
|
|
2811
2971
|
const selected = await dist_default3({
|
|
2812
2972
|
message: "Select a model",
|
|
2813
|
-
-choices: models.map((model) =>
-
-
-
+choices: models.map((model) => {
+const isLoaded = loadedModelNames.has(model.name);
+const loadedIndicator = isLoaded ? " [loaded]" : "";
+return {
+name: `${model.name} (${import_bytes.default(model.size)})${loadedIndicator}`,
+value: model.name
+};
+})
|
|
2817
2981
|
});
|
|
2818
2982
|
return selected;
|
|
2819
2983
|
}
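Editor's note (not part of the published bundle): the selector above now cross-references the installed models (fetchOllamaModels) with the running ones (fetchRunningModels) so already-loaded models get a "[loaded]" suffix, and sizes are formatted with bytes. A hedged sketch of just the choice-building step; the model name in the usage line is a made-up example:

import bytes from "bytes";

interface OllamaModel { name: string; size: number; }

// Builds select-prompt choices the way selectModelInteractively above does.
function buildChoices(models: OllamaModel[], loaded: Set<string>) {
  return models.map((model) => {
    const indicator = loaded.has(model.name) ? " [loaded]" : "";
    return { name: `${model.name} (${bytes(model.size)})${indicator}`, value: model.name };
  });
}

// Hypothetical example model, for illustration only.
console.log(buildChoices([{ name: "qwen2.5-coder:7b", size: 4_680_000_000 }], new Set()));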
|
|
2820
2984
|
async function launchClaude(model, passthroughArgs) {
|
|
2821
2985
|
const ollamaUrl = getOllamaUrl();
|
|
2822
2986
|
const extraArgs = getClaudeExtraArgs();
|
|
2987
|
+
console.log("");
|
|
2988
|
+
console.log(cyan("Launching Claude Code with Ollama"));
|
|
2989
|
+
console.log(dim(` Model: ${magenta(model)}`));
|
|
2990
|
+
console.log(dim(` API: ${ollamaUrl}`));
|
|
2991
|
+
console.log("");
|
|
2992
|
+
await ensureModelLoaded(model);
|
|
2993
|
+
console.log("");
|
|
2823
2994
|
const env = {
|
|
2824
2995
|
...process.env,
|
|
2825
2996
|
ANTHROPIC_AUTH_TOKEN: "ollama",
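Editor's note (not part of the published bundle): launchClaude now announces the model and API endpoint, pre-loads the model, and then spawns Claude Code with an Ollama-pointing environment. Only ANTHROPIC_AUTH_TOKEN: "ollama" is visible in this hunk; the other variables in the sketch below are assumptions about how the handoff is likely completed, not something shown in the diff:

// Hedged sketch of the env handoff; ANTHROPIC_BASE_URL and ANTHROPIC_MODEL are assumptions,
// only ANTHROPIC_AUTH_TOKEN is visible in the hunk above.
function buildClaudeEnv(model: string, ollamaUrl: string): NodeJS.ProcessEnv {
  return {
    ...process.env,
    ANTHROPIC_AUTH_TOKEN: "ollama",  // dummy token; a local Ollama server does not check it
    ANTHROPIC_BASE_URL: ollamaUrl,   // assumption: point the Anthropic-compatible client at Ollama
    ANTHROPIC_MODEL: model,          // assumption: default model for the session
  };
}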
|
|
@@ -2833,51 +3004,351 @@ async function launchClaude(model, passthroughArgs) {
|
|
|
2833
3004
|
// lib/commands/init.ts
|
|
2834
3005
|
import { existsSync as existsSync2, mkdirSync, writeFileSync, readFileSync as readFileSync2 } from "fs";
|
|
2835
3006
|
import { join as join2 } from "path";
|
|
2836
|
-
|
|
3007
|
+
|
|
3008
|
+
// lib/commands/doctor.ts
|
|
3009
|
+
async function checkDocker() {
|
|
3010
|
+
const exists = await commandExists("docker");
|
|
3011
|
+
if (!exists) {
|
|
3012
|
+
return {
|
|
3013
|
+
name: "Docker",
|
|
3014
|
+
status: "error",
|
|
3015
|
+
message: "Not installed",
|
|
3016
|
+
hint: "Install Docker: https://docs.docker.com/get-docker/"
|
|
3017
|
+
};
|
|
3018
|
+
}
|
|
3019
|
+
const version = await getCommandVersion("docker");
|
|
3020
|
+
return {
|
|
3021
|
+
name: "Docker",
|
|
3022
|
+
status: "ok",
|
|
3023
|
+
message: "Installed",
|
|
3024
|
+
version: version ?? undefined
|
|
3025
|
+
};
|
|
3026
|
+
}
|
|
3027
|
+
async function checkDockerCompose() {
|
|
3028
|
+
const result = await spawnCapture(["docker", "compose", "version"]);
|
|
3029
|
+
if (result.exitCode === 0) {
|
|
3030
|
+
const version = result.stdout?.trim().split(`
|
|
3031
|
+
`)[0];
|
|
3032
|
+
return {
|
|
3033
|
+
name: "Docker Compose",
|
|
3034
|
+
status: "ok",
|
|
3035
|
+
message: "Installed (v2)",
|
|
3036
|
+
version: version ?? undefined
|
|
3037
|
+
};
|
|
3038
|
+
}
|
|
3039
|
+
const v1Exists = await commandExists("docker-compose");
|
|
3040
|
+
if (v1Exists) {
|
|
3041
|
+
const version = await getCommandVersion("docker-compose");
|
|
3042
|
+
return {
|
|
3043
|
+
name: "Docker Compose",
|
|
3044
|
+
status: "warning",
|
|
3045
|
+
message: "Using legacy v1",
|
|
3046
|
+
version: version ?? undefined,
|
|
3047
|
+
hint: "Consider upgrading to Docker Compose v2"
|
|
3048
|
+
};
|
|
3049
|
+
}
|
|
3050
|
+
return {
|
|
3051
|
+
name: "Docker Compose",
|
|
3052
|
+
status: "error",
|
|
3053
|
+
message: "Not installed",
|
|
3054
|
+
hint: "Docker Compose is included with Docker Desktop, or install separately"
|
|
3055
|
+
};
|
|
3056
|
+
}
|
|
3057
|
+
async function checkNvidiaSmi() {
|
|
3058
|
+
const exists = await commandExists("nvidia-smi");
|
|
3059
|
+
if (!exists) {
|
|
3060
|
+
return {
|
|
3061
|
+
name: "NVIDIA GPU",
|
|
3062
|
+
status: "warning",
|
|
3063
|
+
message: "nvidia-smi not found",
|
|
3064
|
+
hint: "GPU support requires NVIDIA drivers. CPU-only mode will be used."
|
|
3065
|
+
};
|
|
3066
|
+
}
|
|
3067
|
+
const result = await spawnCapture(["nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]);
|
|
3068
|
+
if (result.exitCode === 0 && result.stdout) {
|
|
3069
|
+
const gpus = result.stdout.trim().split(`
|
|
3070
|
+
`).filter(Boolean);
|
|
3071
|
+
return {
|
|
3072
|
+
name: "NVIDIA GPU",
|
|
3073
|
+
status: "ok",
|
|
3074
|
+
message: `${gpus.length} GPU(s) detected`,
|
|
3075
|
+
version: gpus[0]
|
|
3076
|
+
};
|
|
3077
|
+
}
|
|
3078
|
+
return {
|
|
3079
|
+
name: "NVIDIA GPU",
|
|
3080
|
+
status: "warning",
|
|
3081
|
+
message: "nvidia-smi failed",
|
|
3082
|
+
hint: "GPU may not be available. Check NVIDIA drivers."
|
|
3083
|
+
};
|
|
3084
|
+
}
|
|
3085
|
+
async function checkNvidiaContainerToolkit() {
|
|
3086
|
+
const result = await spawnCapture(["docker", "info", "--format", "{{.Runtimes}}"]);
|
|
3087
|
+
if (result.exitCode === 0 && result.stdout?.includes("nvidia")) {
|
|
3088
|
+
return {
|
|
3089
|
+
name: "NVIDIA Container Toolkit",
|
|
3090
|
+
status: "ok",
|
|
3091
|
+
message: "nvidia runtime available"
|
|
3092
|
+
};
|
|
3093
|
+
}
|
|
3094
|
+
return {
|
|
3095
|
+
name: "NVIDIA Container Toolkit",
|
|
3096
|
+
status: "warning",
|
|
3097
|
+
message: "nvidia runtime not found",
|
|
3098
|
+
hint: "Install: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html"
|
|
3099
|
+
};
|
|
3100
|
+
}
|
|
3101
|
+
async function checkClaude() {
|
|
3102
|
+
const exists = await commandExists("claude");
|
|
3103
|
+
if (!exists) {
|
|
3104
|
+
return {
|
|
3105
|
+
name: "Claude Code",
|
|
3106
|
+
status: "error",
|
|
3107
|
+
message: "Not installed",
|
|
3108
|
+
hint: "Install: npm install -g @anthropic-ai/claude-code"
|
|
3109
|
+
};
|
|
3110
|
+
}
|
|
3111
|
+
const version = await getCommandVersion("claude");
|
|
3112
|
+
return {
|
|
3113
|
+
name: "Claude Code",
|
|
3114
|
+
status: "ok",
|
|
3115
|
+
message: "Installed",
|
|
3116
|
+
version: version ?? undefined
|
|
3117
|
+
};
|
|
3118
|
+
}
|
|
3119
|
+
async function checkOllamaConnection() {
|
|
3120
|
+
const ollamaUrl = getOllamaUrl();
|
|
3121
|
+
try {
|
|
3122
|
+
const response = await fetch(`${ollamaUrl}/api/tags`, {
|
|
3123
|
+
signal: AbortSignal.timeout(5000)
|
|
3124
|
+
});
|
|
3125
|
+
if (response.ok) {
|
|
3126
|
+
const data = await response.json();
|
|
3127
|
+
const modelCount = data.models?.length ?? 0;
|
|
3128
|
+
return {
|
|
3129
|
+
name: "Ollama API",
|
|
3130
|
+
status: "ok",
|
|
3131
|
+
message: `Connected (${modelCount} model${modelCount === 1 ? "" : "s"})`,
|
|
3132
|
+
version: ollamaUrl
|
|
3133
|
+
};
|
|
3134
|
+
}
|
|
3135
|
+
return {
|
|
3136
|
+
name: "Ollama API",
|
|
3137
|
+
status: "warning",
|
|
3138
|
+
message: `HTTP ${response.status}`,
|
|
3139
|
+
hint: "Ollama may not be running. Try: loclaude docker-up"
|
|
3140
|
+
};
|
|
3141
|
+
} catch (error3) {
|
|
3142
|
+
return {
|
|
3143
|
+
name: "Ollama API",
|
|
3144
|
+
status: "warning",
|
|
3145
|
+
message: "Not reachable",
|
|
3146
|
+
hint: `Cannot connect to ${ollamaUrl}. Start Ollama: loclaude docker-up`
|
|
3147
|
+
};
|
|
3148
|
+
}
|
|
3149
|
+
}
|
|
3150
|
+
function formatCheck(check) {
|
|
3151
|
+
let line = statusLine(check.status, check.name, check.message, check.version);
|
|
3152
|
+
if (check.hint) {
|
|
3153
|
+
line += `
|
|
3154
|
+
${dim("→")} ${dim(check.hint)}`;
|
|
3155
|
+
}
|
|
3156
|
+
return line;
|
|
3157
|
+
}
|
|
3158
|
+
async function doctor() {
|
|
3159
|
+
header("System Health Check");
|
|
3160
|
+
console.log("");
|
|
3161
|
+
const checks = await Promise.all([
|
|
3162
|
+
checkDocker(),
|
|
3163
|
+
checkDockerCompose(),
|
|
3164
|
+
checkNvidiaSmi(),
|
|
3165
|
+
checkNvidiaContainerToolkit(),
|
|
3166
|
+
checkClaude(),
|
|
3167
|
+
checkOllamaConnection()
|
|
3168
|
+
]);
|
|
3169
|
+
for (const check of checks) {
|
|
3170
|
+
console.log(formatCheck(check));
|
|
3171
|
+
}
|
|
3172
|
+
const errors2 = checks.filter((c) => c.status === "error");
|
|
3173
|
+
const warnings = checks.filter((c) => c.status === "warning");
|
|
3174
|
+
console.log("");
|
|
3175
|
+
if (errors2.length > 0) {
|
|
3176
|
+
console.log(red(`${errors2.length} error(s) found.`) + " Fix these before proceeding.");
|
|
3177
|
+
process.exit(1);
|
|
3178
|
+
} else if (warnings.length > 0) {
|
|
3179
|
+
console.log(yellow(`${warnings.length} warning(s).`) + " loclaude may work with limited functionality.");
|
|
3180
|
+
} else {
|
|
3181
|
+
console.log(green("All checks passed!") + " Ready to use loclaude.");
|
|
3182
|
+
}
|
|
3183
|
+
}
|
|
3184
|
+
async function hasNvidiaGpu() {
|
|
3185
|
+
const exists = await commandExists("nvidia-smi");
|
|
3186
|
+
if (!exists)
|
|
3187
|
+
return false;
|
|
3188
|
+
const result = await spawnCapture(["nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]);
|
|
3189
|
+
return result.exitCode === 0 && Boolean(result.stdout?.trim());
|
|
3190
|
+
}
|
|
3191
|
+
|
|
3192
|
+
// lib/commands/init.ts
|
|
3193
|
+
var DOCKER_COMPOSE_TEMPLATE_GPU = `# =============================================================================
|
|
3194
|
+
# LOCLAUDE DOCKER COMPOSE - GPU MODE
|
|
3195
|
+
# =============================================================================
|
|
3196
|
+
# This configuration runs Ollama with NVIDIA GPU acceleration for fast inference.
|
|
3197
|
+
# Generated by: loclaude init
|
|
3198
|
+
#
|
|
3199
|
+
# Prerequisites:
|
|
3200
|
+
# - NVIDIA GPU with CUDA support
|
|
3201
|
+
# - NVIDIA drivers installed on host
|
|
3202
|
+
# - NVIDIA Container Toolkit: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit
|
|
3203
|
+
#
|
|
3204
|
+
# Quick test for GPU support:
|
|
3205
|
+
# docker run --rm --gpus all nvidia/cuda:12.0-base nvidia-smi
|
|
3206
|
+
#
|
|
3207
|
+
# =============================================================================
|
|
3208
|
+
|
|
3209
|
+
services:
|
|
3210
|
+
# ===========================================================================
|
|
3211
|
+
# OLLAMA - Local LLM Inference Server
|
|
3212
|
+
# ===========================================================================
|
|
3213
|
+
# Ollama provides the AI backend that Claude Code connects to.
|
|
3214
|
+
# It runs large language models locally on your hardware.
|
|
3215
|
+
#
|
|
3216
|
+
# API Documentation: https://github.com/ollama/ollama/blob/main/docs/api.md
|
|
3217
|
+
# Model Library: https://ollama.com/library
|
|
3218
|
+
# ===========================================================================
|
|
2837
3219
|
ollama:
|
|
3220
|
+
# Official Ollama image - 'latest' ensures newest features and model support
|
|
2838
3221
|
image: ollama/ollama:latest
|
|
3222
|
+
|
|
3223
|
+
# Fixed container name for easy CLI access:
|
|
3224
|
+
# docker exec ollama ollama list
|
|
3225
|
+
# docker logs ollama
|
|
2839
3226
|
container_name: ollama
|
|
3227
|
+
|
|
3228
|
+
# NVIDIA Container Runtime - Required for GPU access
|
|
3229
|
+
# This makes CUDA libraries available inside the container
|
|
2840
3230
|
runtime: nvidia
|
|
3231
|
+
|
|
2841
3232
|
environment:
|
|
3233
|
+
# ---------------------------------------------------------------------------
|
|
3234
|
+
# GPU Configuration
|
|
3235
|
+
# ---------------------------------------------------------------------------
|
|
3236
|
+
# NVIDIA_VISIBLE_DEVICES: Which GPUs to expose to the container
|
|
3237
|
+
# - 'all': Use all available GPUs (recommended for most setups)
|
|
3238
|
+
# - '0': Use only GPU 0
|
|
3239
|
+
# - '0,1': Use GPUs 0 and 1
|
|
2842
3240
|
- NVIDIA_VISIBLE_DEVICES=all
|
|
3241
|
+
|
|
3242
|
+
# NVIDIA_DRIVER_CAPABILITIES: What GPU features to enable
|
|
3243
|
+
# - 'compute': CUDA compute (required for inference)
|
|
3244
|
+
# - 'utility': nvidia-smi and other tools
|
|
2843
3245
|
- NVIDIA_DRIVER_CAPABILITIES=compute,utility
|
|
3246
|
+
|
|
3247
|
+
# ---------------------------------------------------------------------------
|
|
3248
|
+
# Ollama Configuration (Optional)
|
|
3249
|
+
# ---------------------------------------------------------------------------
|
|
3250
|
+
# Uncomment these to customize Ollama behavior:
|
|
3251
|
+
|
|
3252
|
+
# Maximum number of models loaded in memory simultaneously
|
|
3253
|
+
# Lower this if you're running out of VRAM
|
|
3254
|
+
# - OLLAMA_MAX_LOADED_MODELS=1
|
|
3255
|
+
|
|
3256
|
+
# Maximum parallel inference requests per model
|
|
3257
|
+
# Higher values use more VRAM but handle more concurrent requests
|
|
3258
|
+
# - OLLAMA_NUM_PARALLEL=1
|
|
3259
|
+
|
|
3260
|
+
# Enable debug logging for troubleshooting
|
|
3261
|
+
# - OLLAMA_DEBUG=1
|
|
3262
|
+
|
|
3263
|
+
# Custom model storage location (inside container)
|
|
3264
|
+
# - OLLAMA_MODELS=/root/.ollama
|
|
3265
|
+
|
|
2844
3266
|
volumes:
|
|
3267
|
+
# ---------------------------------------------------------------------------
|
|
3268
|
+
# Model Storage
|
|
3269
|
+
# ---------------------------------------------------------------------------
|
|
3270
|
+
# Maps ./models on your host to /root/.ollama in the container
|
|
3271
|
+
# This persists downloaded models across container restarts
|
|
3272
|
+
#
|
|
3273
|
+
# Disk space requirements (approximate):
|
|
3274
|
+
# - 7B model: ~4GB
|
|
3275
|
+
# - 13B model: ~8GB
|
|
3276
|
+
# - 30B model: ~16GB
|
|
3277
|
+
# - 70B model: ~40GB
|
|
2845
3278
|
- ./models:/root/.ollama
|
|
3279
|
+
|
|
2846
3280
|
ports:
|
|
3281
|
+
# Ollama API port - access at http://localhost:11434
|
|
3282
|
+
# Used by Claude Code and other Ollama clients
|
|
2847
3283
|
- "11434:11434"
|
|
3284
|
+
|
|
3285
|
+
# Restart policy - keeps Ollama running unless manually stopped
|
|
2848
3286
|
restart: unless-stopped
|
|
3287
|
+
|
|
2849
3288
|
healthcheck:
|
|
3289
|
+
# Verify Ollama is responsive by listing models
|
|
2850
3290
|
test: ["CMD", "ollama", "list"]
|
|
2851
|
-
interval: 300s
|
|
2852
|
-
timeout: 2s
|
|
2853
|
-
retries: 3
|
|
2854
|
-
start_period: 40s
|
|
3291
|
+
interval: 300s # Check every 5 minutes
|
|
3292
|
+
timeout: 2s # Fail if no response in 2 seconds
|
|
3293
|
+
retries: 3 # Mark unhealthy after 3 consecutive failures
|
|
3294
|
+
start_period: 40s # Grace period for initial model loading
|
|
3295
|
+
|
|
2855
3296
|
deploy:
|
|
2856
3297
|
resources:
|
|
2857
3298
|
reservations:
|
|
2858
3299
|
devices:
|
|
3300
|
+
# Request GPU access from Docker
|
|
2859
3301
|
- driver: nvidia
|
|
2860
|
-
count: all
|
|
2861
|
-
capabilities: [gpu]
|
|
3302
|
+
count: all # Use all available GPUs
|
|
3303
|
+
capabilities: [gpu] # Request GPU compute capability
|
|
2862
3304
|
|
|
3305
|
+
# ===========================================================================
|
|
3306
|
+
# OPEN WEBUI - Chat Interface (Optional)
|
|
3307
|
+
# ===========================================================================
|
|
3308
|
+
# Open WebUI provides a ChatGPT-like interface for your local models.
|
|
3309
|
+
# Access at http://localhost:3000 after starting containers.
|
|
3310
|
+
#
|
|
3311
|
+
# Features:
|
|
3312
|
+
# - Multi-model chat interface
|
|
3313
|
+
# - Conversation history
|
|
3314
|
+
# - Model management UI
|
|
3315
|
+
# - RAG/document upload support
|
|
3316
|
+
#
|
|
3317
|
+
# Documentation: https://docs.openwebui.com/
|
|
3318
|
+
# ===========================================================================
|
|
2863
3319
|
open-webui:
|
|
3320
|
+
# CUDA-enabled image for GPU-accelerated features (embeddings, etc.)
|
|
3321
|
+
# Change to :main if you don't need GPU features in the UI
|
|
2864
3322
|
image: ghcr.io/open-webui/open-webui:cuda
|
|
3323
|
+
|
|
2865
3324
|
container_name: open-webui
|
|
3325
|
+
|
|
2866
3326
|
ports:
|
|
3327
|
+
# Web UI port - access at http://localhost:3000
|
|
2867
3328
|
- "3000:8080"
|
|
3329
|
+
|
|
2868
3330
|
environment:
|
|
3331
|
+
# Tell Open WebUI where to find Ollama
|
|
3332
|
+
# Uses Docker internal networking (service name as hostname)
|
|
2869
3333
|
- OLLAMA_BASE_URL=http://ollama:11434
|
|
3334
|
+
|
|
3335
|
+
# Wait for Ollama to be ready before starting
|
|
2870
3336
|
depends_on:
|
|
2871
3337
|
- ollama
|
|
3338
|
+
|
|
2872
3339
|
restart: unless-stopped
|
|
3340
|
+
|
|
2873
3341
|
healthcheck:
|
|
2874
3342
|
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
|
2875
3343
|
interval: 30s
|
|
2876
3344
|
timeout: 10s
|
|
2877
3345
|
retries: 3
|
|
2878
3346
|
start_period: 60s
|
|
3347
|
+
|
|
2879
3348
|
volumes:
|
|
3349
|
+
# Persistent storage for conversations, settings, and user data
|
|
2880
3350
|
- open-webui:/app/backend/data
|
|
3351
|
+
|
|
2881
3352
|
deploy:
|
|
2882
3353
|
resources:
|
|
2883
3354
|
reservations:
|
|
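The doctor checks added in the hunk above all return plain objects with the same fields. Since the bundled output carries no type annotations, the interface below is inferred from the literals those checks construct and from how `formatCheck()`/`statusLine()` consume them; it is a reading aid, not code from the package.

```ts
// Inferred shape of the objects returned by checkDocker(), checkDockerCompose(),
// checkNvidiaSmi(), checkNvidiaContainerToolkit(), checkClaude() and
// checkOllamaConnection() in the hunk above.
type CheckStatus = "ok" | "warning" | "error";

interface DoctorCheck {
  name: string;      // e.g. "Docker", "NVIDIA GPU", "Ollama API"
  status: CheckStatus;
  message: string;   // short result shown by statusLine()
  version?: string;  // tool version, detected GPU name, or the Ollama URL
  hint?: string;     // remediation tip printed on a dimmed follow-up line
}
```
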
@@ -2886,32 +3357,174 @@ var DOCKER_COMPOSE_TEMPLATE = `services:
|
|
|
2886
3357
|
count: all
|
|
2887
3358
|
capabilities: [gpu]
|
|
2888
3359
|
|
|
3360
|
+
# =============================================================================
|
|
3361
|
+
# VOLUMES
|
|
3362
|
+
# =============================================================================
|
|
3363
|
+
# Named volumes for persistent data that survives container recreation
|
|
2889
3364
|
volumes:
|
|
2890
3365
|
open-webui:
|
|
3366
|
+
# Open WebUI data: conversations, user settings, uploads
|
|
3367
|
+
# Located at /var/lib/docker/volumes/open-webui/_data on host
|
|
2891
3368
|
`;
|
|
2892
|
-
var
|
|
3369
|
+
var DOCKER_COMPOSE_TEMPLATE_CPU = `# =============================================================================
|
|
3370
|
+
# LOCLAUDE DOCKER COMPOSE - CPU MODE
|
|
3371
|
+
# =============================================================================
|
|
3372
|
+
# This configuration runs Ollama in CPU-only mode.
|
|
3373
|
+
# Inference will be slower than GPU mode but works on any system.
|
|
3374
|
+
# Generated by: loclaude init --no-gpu
|
|
3375
|
+
#
|
|
3376
|
+
# Performance notes:
|
|
3377
|
+
# - 7B models: ~10-20 tokens/sec on modern CPUs
|
|
3378
|
+
# - Larger models will be significantly slower
|
|
3379
|
+
# - Consider using quantized models (Q4_K_M, Q5_K_M) for better performance
|
|
3380
|
+
#
|
|
3381
|
+
# Recommended CPU-optimized models:
|
|
3382
|
+
# - llama3.2:3b (fast, good for simple tasks)
|
|
3383
|
+
# - qwen2.5-coder:7b (coding tasks)
|
|
3384
|
+
# - gemma2:9b (general purpose)
|
|
3385
|
+
#
|
|
3386
|
+
# =============================================================================
|
|
3387
|
+
|
|
3388
|
+
services:
|
|
3389
|
+
# ===========================================================================
|
|
3390
|
+
# OLLAMA - Local LLM Inference Server (CPU Mode)
|
|
3391
|
+
# ===========================================================================
|
|
3392
|
+
# Ollama provides the AI backend that Claude Code connects to.
|
|
3393
|
+
# Running in CPU mode - no GPU acceleration.
|
|
3394
|
+
#
|
|
3395
|
+
# API Documentation: https://github.com/ollama/ollama/blob/main/docs/api.md
|
|
3396
|
+
# Model Library: https://ollama.com/library
|
|
3397
|
+
# ===========================================================================
|
|
3398
|
+
ollama:
|
|
3399
|
+
# Official Ollama image - works for both CPU and GPU
|
|
3400
|
+
image: ollama/ollama:latest
|
|
3401
|
+
|
|
3402
|
+
# Fixed container name for easy CLI access
|
|
3403
|
+
container_name: ollama
|
|
3404
|
+
|
|
3405
|
+
# NOTE: No 'runtime: nvidia' - running in CPU mode
|
|
3406
|
+
|
|
3407
|
+
environment:
|
|
3408
|
+
# ---------------------------------------------------------------------------
|
|
3409
|
+
# Ollama Configuration (Optional)
|
|
3410
|
+
# ---------------------------------------------------------------------------
|
|
3411
|
+
# Uncomment these to customize Ollama behavior:
|
|
3412
|
+
|
|
3413
|
+
# Maximum number of models loaded in memory simultaneously
|
|
3414
|
+
# CPU mode uses system RAM instead of VRAM
|
|
3415
|
+
# - OLLAMA_MAX_LOADED_MODELS=1
|
|
3416
|
+
|
|
3417
|
+
# Number of CPU threads to use (default: auto-detect)
|
|
3418
|
+
# - OLLAMA_NUM_THREADS=8
|
|
3419
|
+
|
|
3420
|
+
# Enable debug logging for troubleshooting
|
|
3421
|
+
# - OLLAMA_DEBUG=1
|
|
3422
|
+
|
|
3423
|
+
volumes:
|
|
3424
|
+
# ---------------------------------------------------------------------------
|
|
3425
|
+
# Model Storage
|
|
3426
|
+
# ---------------------------------------------------------------------------
|
|
3427
|
+
# Maps ./models on your host to /root/.ollama in the container
|
|
3428
|
+
# This persists downloaded models across container restarts
|
|
3429
|
+
- ./models:/root/.ollama
|
|
3430
|
+
|
|
3431
|
+
ports:
|
|
3432
|
+
# Ollama API port - access at http://localhost:11434
|
|
3433
|
+
- "11434:11434"
|
|
3434
|
+
|
|
3435
|
+
restart: unless-stopped
|
|
3436
|
+
|
|
3437
|
+
healthcheck:
|
|
3438
|
+
test: ["CMD", "ollama", "list"]
|
|
3439
|
+
interval: 300s
|
|
3440
|
+
timeout: 2s
|
|
3441
|
+
retries: 3
|
|
3442
|
+
start_period: 40s
|
|
3443
|
+
|
|
3444
|
+
# CPU resource limits (optional - uncomment to constrain)
|
|
3445
|
+
# deploy:
|
|
3446
|
+
# resources:
|
|
3447
|
+
# limits:
|
|
3448
|
+
# cpus: '4' # Limit to 4 CPU cores
|
|
3449
|
+
# memory: 16G # Limit to 16GB RAM
|
|
3450
|
+
# reservations:
|
|
3451
|
+
# cpus: '2' # Reserve at least 2 cores
|
|
3452
|
+
# memory: 8G # Reserve at least 8GB RAM
|
|
3453
|
+
|
|
3454
|
+
# ===========================================================================
|
|
3455
|
+
# OPEN WEBUI - Chat Interface (Optional)
|
|
3456
|
+
# ===========================================================================
|
|
3457
|
+
# Open WebUI provides a ChatGPT-like interface for your local models.
|
|
3458
|
+
# Access at http://localhost:3000 after starting containers.
|
|
3459
|
+
#
|
|
3460
|
+
# Documentation: https://docs.openwebui.com/
|
|
3461
|
+
# ===========================================================================
|
|
3462
|
+
open-webui:
|
|
3463
|
+
# Standard image (no CUDA) - smaller download, CPU-only features
|
|
3464
|
+
image: ghcr.io/open-webui/open-webui:main
|
|
3465
|
+
|
|
3466
|
+
container_name: open-webui
|
|
3467
|
+
|
|
3468
|
+
ports:
|
|
3469
|
+
- "3000:8080"
|
|
3470
|
+
|
|
3471
|
+
environment:
|
|
3472
|
+
- OLLAMA_BASE_URL=http://ollama:11434
|
|
3473
|
+
|
|
3474
|
+
depends_on:
|
|
3475
|
+
- ollama
|
|
3476
|
+
|
|
3477
|
+
restart: unless-stopped
|
|
3478
|
+
|
|
3479
|
+
healthcheck:
|
|
3480
|
+
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
|
|
3481
|
+
interval: 30s
|
|
3482
|
+
timeout: 10s
|
|
3483
|
+
retries: 3
|
|
3484
|
+
start_period: 60s
|
|
3485
|
+
|
|
3486
|
+
volumes:
|
|
3487
|
+
- open-webui:/app/backend/data
|
|
3488
|
+
|
|
3489
|
+
# =============================================================================
|
|
3490
|
+
# VOLUMES
|
|
3491
|
+
# =============================================================================
|
|
3492
|
+
volumes:
|
|
3493
|
+
open-webui:
|
|
3494
|
+
`;
|
|
3495
|
+
function getConfigTemplate(gpu) {
|
|
3496
|
+
return `{
|
|
2893
3497
|
"ollama": {
|
|
2894
3498
|
"url": "http://localhost:11434",
|
|
2895
|
-
"defaultModel": "qwen3-coder:30b"
|
|
3499
|
+
"defaultModel": "${gpu ? "qwen3-coder:30b" : "qwen2.5-coder:7b"}"
|
|
2896
3500
|
},
|
|
2897
3501
|
"docker": {
|
|
2898
3502
|
"composeFile": "./docker-compose.yml",
|
|
2899
|
-
"gpu":
|
|
3503
|
+
"gpu": ${gpu}
|
|
2900
3504
|
}
|
|
2901
3505
|
}
|
|
2902
3506
|
`;
|
|
3507
|
+
}
|
|
2903
3508
|
var GITIGNORE_TEMPLATE = `# Ollama models (large binary files)
|
|
3509
|
+
# These are downloaded by Ollama and can be re-pulled anytime
|
|
2904
3510
|
models/
|
|
2905
3511
|
`;
|
|
2906
|
-
var MISE_TOML_TEMPLATE = `#
|
|
2907
|
-
#
|
|
2908
|
-
#
|
|
3512
|
+
var MISE_TOML_TEMPLATE = `# =============================================================================
|
|
3513
|
+
# MISE TASK RUNNER CONFIGURATION
|
|
3514
|
+
# =============================================================================
|
|
3515
|
+
# Mise is a task runner that provides convenient shortcuts for common operations.
|
|
3516
|
+
# Run 'mise tasks' to see all available tasks.
|
|
3517
|
+
#
|
|
3518
|
+
# Documentation: https://mise.jdx.dev/
|
|
3519
|
+
# Install: curl https://mise.jdx.dev/install.sh | sh
|
|
3520
|
+
# =============================================================================
|
|
2909
3521
|
|
|
2910
3522
|
[tasks]
|
|
2911
3523
|
|
|
2912
3524
|
# =============================================================================
|
|
2913
3525
|
# Docker Management
|
|
2914
3526
|
# =============================================================================
|
|
3527
|
+
# Commands for managing the Ollama and Open WebUI containers
|
|
2915
3528
|
|
|
2916
3529
|
[tasks.up]
|
|
2917
3530
|
description = "Start Ollama and Open WebUI containers"
|
|
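`getConfigTemplate(gpu)`, added at the end of the previous hunk, interpolates the GPU flag into the generated `.loclaude/config.json`. A minimal illustration of the resulting object for a CPU-only init, with values taken verbatim from the template string:

```ts
// What .loclaude/config.json contains after `loclaude init --no-gpu`,
// per the getConfigTemplate(gpu) template above (gpu === false branch).
const cpuConfig = {
  ollama: {
    url: "http://localhost:11434",
    defaultModel: "qwen2.5-coder:7b", // GPU mode selects qwen3-coder:30b instead
  },
  docker: {
    composeFile: "./docker-compose.yml",
    gpu: false,
  },
};
console.log(JSON.stringify(cpuConfig, null, 2));
```
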
@@ -2936,6 +3549,7 @@ run = "loclaude docker-logs --follow"
|
|
|
2936
3549
|
# =============================================================================
|
|
2937
3550
|
# Model Management
|
|
2938
3551
|
# =============================================================================
|
|
3552
|
+
# Commands for managing Ollama models (download, remove, list)
|
|
2939
3553
|
|
|
2940
3554
|
[tasks.models]
|
|
2941
3555
|
description = "List installed models"
|
|
@@ -2945,9 +3559,14 @@ run = "loclaude models"
|
|
|
2945
3559
|
description = "Pull a model (usage: mise run pull <model-name>)"
|
|
2946
3560
|
run = "loclaude models-pull {{arg(name='model')}}"
|
|
2947
3561
|
|
|
3562
|
+
[tasks."pull:recommended"]
|
|
3563
|
+
description = "Pull the recommended coding model"
|
|
3564
|
+
run = "loclaude models-pull qwen3-coder:30b"
|
|
3565
|
+
|
|
2948
3566
|
# =============================================================================
|
|
2949
3567
|
# Claude Code
|
|
2950
3568
|
# =============================================================================
|
|
3569
|
+
# Commands for running Claude Code with local Ollama
|
|
2951
3570
|
|
|
2952
3571
|
[tasks.claude]
|
|
2953
3572
|
description = "Run Claude Code with local Ollama"
|
|
@@ -2960,14 +3579,19 @@ run = "loclaude run -m {{arg(name='model')}}"
|
|
|
2960
3579
|
# =============================================================================
|
|
2961
3580
|
# Diagnostics
|
|
2962
3581
|
# =============================================================================
|
|
3582
|
+
# Commands for checking system health and troubleshooting
|
|
2963
3583
|
|
|
2964
3584
|
[tasks.doctor]
|
|
2965
3585
|
description = "Check system requirements"
|
|
2966
3586
|
run = "loclaude doctor"
|
|
2967
3587
|
|
|
2968
3588
|
[tasks.gpu]
|
|
2969
|
-
description = "Check GPU status"
|
|
3589
|
+
description = "Check GPU status (requires NVIDIA GPU)"
|
|
2970
3590
|
run = "docker exec ollama nvidia-smi"
|
|
3591
|
+
|
|
3592
|
+
[tasks.config]
|
|
3593
|
+
description = "Show current configuration"
|
|
3594
|
+
run = "loclaude config"
|
|
2971
3595
|
`;
|
|
2972
3596
|
var README_TEMPLATE = `# Project Name
|
|
2973
3597
|
|
|
@@ -2976,18 +3600,24 @@ var README_TEMPLATE = `# Project Name
|
|
|
2976
3600
|
## Prerequisites
|
|
2977
3601
|
|
|
2978
3602
|
- [Docker](https://docs.docker.com/get-docker/) with Docker Compose v2
|
|
2979
|
-
- [NVIDIA GPU](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) with drivers and container toolkit
|
|
2980
3603
|
- [mise](https://mise.jdx.dev/) task runner (recommended)
|
|
2981
3604
|
- [loclaude](https://www.npmjs.com/package/loclaude) CLI (\`npm install -g loclaude\`)
|
|
2982
3605
|
|
|
3606
|
+
### For GPU Mode (Recommended)
|
|
3607
|
+
|
|
3608
|
+
- [NVIDIA GPU](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) with CUDA support
|
|
3609
|
+
- NVIDIA drivers installed on host
|
|
3610
|
+
- [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html)
|
|
3611
|
+
|
|
2983
3612
|
## Quick Start
|
|
2984
3613
|
|
|
2985
3614
|
\`\`\`bash
|
|
2986
3615
|
# Start the LLM backend (Ollama + Open WebUI)
|
|
2987
3616
|
mise run up
|
|
2988
3617
|
|
|
2989
|
-
# Pull a model
|
|
2990
|
-
mise run pull qwen3-coder:30b
|
|
3618
|
+
# Pull a model (adjust based on your hardware)
|
|
3619
|
+
mise run pull qwen3-coder:30b # GPU: 30B model (~16GB VRAM)
|
|
3620
|
+
mise run pull qwen2.5-coder:7b # CPU: 7B model (faster)
|
|
2991
3621
|
|
|
2992
3622
|
# Run Claude Code with local LLM
|
|
2993
3623
|
mise run claude
|
|
@@ -3022,7 +3652,7 @@ Run \`mise tasks\` to see all available commands.
|
|
|
3022
3652
|
\`\`\`
|
|
3023
3653
|
.
|
|
3024
3654
|
├── .claude/
|
|
3025
|
-
│ └── CLAUDE.md # Claude Code instructions
|
|
3655
|
+
│ └── CLAUDE.md # Claude Code project instructions
|
|
3026
3656
|
├── .loclaude/
|
|
3027
3657
|
│ └── config.json # Loclaude configuration
|
|
3028
3658
|
├── models/ # Ollama model storage (gitignored)
|
|
@@ -3054,6 +3684,25 @@ Run \`mise tasks\` to see all available commands.
|
|
|
3054
3684
|
|----------|-------------|---------|
|
|
3055
3685
|
| \`OLLAMA_URL\` | Ollama API endpoint | \`http://localhost:11434\` |
|
|
3056
3686
|
| \`OLLAMA_MODEL\` | Default model name | \`qwen3-coder:30b\` |
|
|
3687
|
+
| \`LOCLAUDE_GPU\` | Enable GPU mode | \`true\` |
|
|
3688
|
+
|
|
3689
|
+
## Recommended Models
|
|
3690
|
+
|
|
3691
|
+
### For GPU (NVIDIA with 16GB+ VRAM)
|
|
3692
|
+
|
|
3693
|
+
| Model | Size | Use Case |
|
|
3694
|
+
|-------|------|----------|
|
|
3695
|
+
| \`qwen3-coder:30b\` | ~16GB | Best coding performance |
|
|
3696
|
+
| \`gpt-oss:20b\` | ~12GB | General purpose |
|
|
3697
|
+
| \`glm-4.7:cloud\` | Cloud | No local storage needed |
|
|
3698
|
+
|
|
3699
|
+
### For CPU or Limited VRAM
|
|
3700
|
+
|
|
3701
|
+
| Model | Size | Use Case |
|
|
3702
|
+
|-------|------|----------|
|
|
3703
|
+
| \`qwen2.5-coder:7b\` | ~4GB | Coding on CPU |
|
|
3704
|
+
| \`llama3.2:3b\` | ~2GB | Fast, simple tasks |
|
|
3705
|
+
| \`gemma2:9b\` | ~5GB | General purpose |
|
|
3057
3706
|
|
|
3058
3707
|
## Troubleshooting
|
|
3059
3708
|
|
|
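The README template hunk above documents `OLLAMA_URL`, `OLLAMA_MODEL` and `LOCLAUDE_GPU` together with their defaults. How the CLI actually reads them is outside this hunk; the sketch below merely restates the documented fallbacks in code form, with hypothetical variable names rather than the bundled `loadConfig()`/`getOllamaUrl()`.

```ts
// Hedged sketch: resolve the documented environment variables using the
// defaults listed in the table above.
const ollamaUrl = process.env.OLLAMA_URL ?? "http://localhost:11434";
const defaultModel = process.env.OLLAMA_MODEL ?? "qwen3-coder:30b";
const gpuEnabled = (process.env.LOCLAUDE_GPU ?? "true").toLowerCase() !== "false";

console.log({ ollamaUrl, defaultModel, gpuEnabled });
```
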
@@ -3075,6 +3724,12 @@ mise run logs
|
|
|
3075
3724
|
mise run down && mise run up
|
|
3076
3725
|
\`\`\`
|
|
3077
3726
|
|
|
3727
|
+
### GPU Not Detected
|
|
3728
|
+
|
|
3729
|
+
1. Verify NVIDIA drivers: \`nvidia-smi\`
|
|
3730
|
+
2. Check Docker GPU access: \`docker run --rm --gpus all nvidia/cuda:12.0-base nvidia-smi\`
|
|
3731
|
+
3. Install NVIDIA Container Toolkit if missing
|
|
3732
|
+
|
|
3078
3733
|
## License
|
|
3079
3734
|
|
|
3080
3735
|
MIT
|
|
@@ -3141,304 +3796,153 @@ async function init(options = {}) {
|
|
|
3141
3796
|
const claudeDir = join2(cwd, ".claude");
|
|
3142
3797
|
const claudeMdPath = join2(claudeDir, "CLAUDE.md");
|
|
3143
3798
|
const readmePath = join2(cwd, "README.md");
|
|
3144
|
-
|
|
3145
|
-
|
|
3799
|
+
header("Initializing loclaude project");
|
|
3800
|
+
console.log("");
|
|
3801
|
+
let gpuMode;
|
|
3802
|
+
if (options.gpu === false) {
|
|
3803
|
+
gpuMode = false;
|
|
3804
|
+
console.log(info("CPU-only mode (--no-gpu)"));
|
|
3805
|
+
} else if (options.gpu === true) {
|
|
3806
|
+
gpuMode = true;
|
|
3807
|
+
console.log(info("GPU mode enabled (--gpu)"));
|
|
3808
|
+
} else {
|
|
3809
|
+
console.log(dim(" Detecting GPU..."));
|
|
3810
|
+
gpuMode = await hasNvidiaGpu();
|
|
3811
|
+
if (gpuMode) {
|
|
3812
|
+
console.log(success("NVIDIA GPU detected - using GPU mode"));
|
|
3813
|
+
} else {
|
|
3814
|
+
console.log(warn("No NVIDIA GPU detected - using CPU mode"));
|
|
3815
|
+
console.log(dim(" Use --gpu to force GPU mode if you have an NVIDIA GPU"));
|
|
3816
|
+
}
|
|
3817
|
+
}
|
|
3818
|
+
console.log("");
|
|
3146
3819
|
if (existsSync2(readmePath) && !options.force) {
|
|
3147
|
-
console.log("
|
|
3820
|
+
console.log(warn(`${file("README.md")} already exists`));
|
|
3148
3821
|
} else {
|
|
3149
3822
|
writeFileSync(readmePath, README_TEMPLATE);
|
|
3150
|
-
console.log(
|
|
3823
|
+
console.log(success(`Created ${file("README.md")}`));
|
|
3151
3824
|
}
|
|
3152
3825
|
if (existsSync2(composePath) && !options.force) {
|
|
3153
|
-
console.log("
|
|
3154
|
-
console.log(
|
|
3155
|
-
`);
|
|
3826
|
+
console.log(warn(`${file("docker-compose.yml")} already exists`));
|
|
3827
|
+
console.log(dim(" Use --force to overwrite"));
|
|
3156
3828
|
} else {
|
|
3157
|
-
let composeContent =
|
|
3829
|
+
let composeContent = gpuMode ? DOCKER_COMPOSE_TEMPLATE_GPU : DOCKER_COMPOSE_TEMPLATE_CPU;
|
|
3158
3830
|
if (options.noWebui) {
|
|
3159
|
-
composeContent = composeContent.replace(/\n
|
|
3160
|
-
`).replace(/\
|
|
3831
|
+
composeContent = composeContent.replace(/\n # =+\n # OPEN WEBUI[\s\S]*?capabilities: \[gpu\]\n/m, `
|
|
3832
|
+
`).replace(/\n # =+\n # OPEN WEBUI[\s\S]*?open-webui:\/app\/backend\/data\n/m, `
|
|
3833
|
+
`).replace(/\nvolumes:\n open-webui:\n.*$/m, `
|
|
3161
3834
|
`);
|
|
3162
3835
|
}
|
|
3163
3836
|
writeFileSync(composePath, composeContent);
|
|
3164
|
-
|
|
3837
|
+
const modeLabel = gpuMode ? cyan("GPU") : cyan("CPU");
|
|
3838
|
+
console.log(success(`Created ${file("docker-compose.yml")} (${modeLabel} mode)`));
|
|
3165
3839
|
}
|
|
3166
3840
|
if (existsSync2(miseTomlPath) && !options.force) {
|
|
3167
|
-
console.log("
|
|
3841
|
+
console.log(warn(`${file("mise.toml")} already exists`));
|
|
3168
3842
|
} else {
|
|
3169
3843
|
writeFileSync(miseTomlPath, MISE_TOML_TEMPLATE);
|
|
3170
|
-
console.log(
|
|
3844
|
+
console.log(success(`Created ${file("mise.toml")}`));
|
|
3171
3845
|
}
|
|
3172
3846
|
if (!existsSync2(claudeDir)) {
|
|
3173
3847
|
mkdirSync(claudeDir, { recursive: true });
|
|
3174
3848
|
}
|
|
3175
3849
|
if (existsSync2(claudeMdPath) && !options.force) {
|
|
3176
|
-
console.log("
|
|
3850
|
+
console.log(warn(`${file(".claude/CLAUDE.md")} already exists`));
|
|
3177
3851
|
} else {
|
|
3178
3852
|
writeFileSync(claudeMdPath, CLAUDE_MD_TEMPLATE);
|
|
3179
|
-
console.log(
|
|
3853
|
+
console.log(success(`Created ${file(".claude/CLAUDE.md")}`));
|
|
3180
3854
|
}
|
|
3181
3855
|
if (!existsSync2(configDir)) {
|
|
3182
3856
|
mkdirSync(configDir, { recursive: true });
|
|
3183
|
-
console.log(
|
|
3857
|
+
console.log(success(`Created ${file(".loclaude/")} directory`));
|
|
3184
3858
|
}
|
|
3185
3859
|
if (existsSync2(configPath) && !options.force) {
|
|
3186
|
-
console.log("
|
|
3860
|
+
console.log(warn(`${file(".loclaude/config.json")} already exists`));
|
|
3187
3861
|
} else {
|
|
3188
|
-
writeFileSync(configPath,
|
|
3189
|
-
console.log(
|
|
3862
|
+
writeFileSync(configPath, getConfigTemplate(gpuMode));
|
|
3863
|
+
console.log(success(`Created ${file(".loclaude/config.json")}`));
|
|
3190
3864
|
}
|
|
3191
3865
|
if (!existsSync2(modelsDir)) {
|
|
3192
3866
|
mkdirSync(modelsDir, { recursive: true });
|
|
3193
|
-
console.log(
|
|
3867
|
+
console.log(success(`Created ${file("models/")} directory`));
|
|
3194
3868
|
}
|
|
3195
3869
|
if (existsSync2(gitignorePath)) {
|
|
3196
3870
|
const existing = readFileSync2(gitignorePath, "utf-8");
|
|
3197
3871
|
if (!existing.includes("models/")) {
|
|
3198
3872
|
writeFileSync(gitignorePath, existing + `
|
|
3199
3873
|
` + GITIGNORE_TEMPLATE);
|
|
3200
|
-
console.log(
|
|
3874
|
+
console.log(success(`Updated ${file(".gitignore")}`));
|
|
3201
3875
|
}
|
|
3202
3876
|
} else {
|
|
3203
3877
|
writeFileSync(gitignorePath, GITIGNORE_TEMPLATE);
|
|
3204
|
-
console.log(
|
|
3205
|
-
}
|
|
3206
|
-
console.log(`
|
|
3207
|
-
\uD83C\uDF89 Project initialized!
|
|
3208
|
-
`);
|
|
3209
|
-
console.log("Next steps:");
|
|
3210
|
-
console.log(" 1. Start containers: mise run up");
|
|
3211
|
-
console.log(" 2. Pull a model: mise run pull qwen3-coder:30b");
|
|
3212
|
-
console.log(" 3. Run Claude: mise run claude");
|
|
3213
|
-
console.log(`
|
|
3214
|
-
Service URLs:`);
|
|
3215
|
-
console.log(" Ollama API: http://localhost:11434");
|
|
3216
|
-
if (!options.noWebui) {
|
|
3217
|
-
console.log(" Open WebUI: http://localhost:3000");
|
|
3218
|
-
}
|
|
3219
|
-
}
|
|
3220
|
-
// lib/commands/doctor.ts
|
|
3221
|
-
async function checkDocker() {
|
|
3222
|
-
const exists = await commandExists("docker");
|
|
3223
|
-
if (!exists) {
|
|
3224
|
-
return {
|
|
3225
|
-
name: "Docker",
|
|
3226
|
-
status: "error",
|
|
3227
|
-
message: "Not installed",
|
|
3228
|
-
hint: "Install Docker: https://docs.docker.com/get-docker/"
|
|
3229
|
-
};
|
|
3230
|
-
}
|
|
3231
|
-
const version = await getCommandVersion("docker");
|
|
3232
|
-
return {
|
|
3233
|
-
name: "Docker",
|
|
3234
|
-
status: "ok",
|
|
3235
|
-
message: "Installed",
|
|
3236
|
-
version: version ?? undefined
|
|
3237
|
-
};
|
|
3238
|
-
}
|
|
3239
|
-
async function checkDockerCompose() {
|
|
3240
|
-
const result = await spawnCapture(["docker", "compose", "version"]);
|
|
3241
|
-
if (result.exitCode === 0) {
|
|
3242
|
-
const version = result.stdout?.trim().split(`
|
|
3243
|
-
`)[0];
|
|
3244
|
-
return {
|
|
3245
|
-
name: "Docker Compose",
|
|
3246
|
-
status: "ok",
|
|
3247
|
-
message: "Installed (v2)",
|
|
3248
|
-
version: version ?? undefined
|
|
3249
|
-
};
|
|
3250
|
-
}
|
|
3251
|
-
const v1Exists = await commandExists("docker-compose");
|
|
3252
|
-
if (v1Exists) {
|
|
3253
|
-
const version = await getCommandVersion("docker-compose");
|
|
3254
|
-
return {
|
|
3255
|
-
name: "Docker Compose",
|
|
3256
|
-
status: "warning",
|
|
3257
|
-
message: "Using legacy v1",
|
|
3258
|
-
version: version ?? undefined,
|
|
3259
|
-
hint: "Consider upgrading to Docker Compose v2"
|
|
3260
|
-
};
|
|
3878
|
+
console.log(success(`Created ${file(".gitignore")}`));
|
|
3261
3879
|
}
|
|
3262
|
-
|
|
3263
|
-
name: "Docker Compose",
|
|
3264
|
-
status: "error",
|
|
3265
|
-
message: "Not installed",
|
|
3266
|
-
hint: "Docker Compose is included with Docker Desktop, or install separately"
|
|
3267
|
-
};
|
|
3268
|
-
}
|
|
3269
|
-
async function checkNvidiaSmi() {
|
|
3270
|
-
const exists = await commandExists("nvidia-smi");
|
|
3271
|
-
if (!exists) {
|
|
3272
|
-
return {
|
|
3273
|
-
name: "NVIDIA GPU",
|
|
3274
|
-
status: "warning",
|
|
3275
|
-
message: "nvidia-smi not found",
|
|
3276
|
-
hint: "GPU support requires NVIDIA drivers. CPU-only mode will be used."
|
|
3277
|
-
};
|
|
3278
|
-
}
|
|
3279
|
-
const result = await spawnCapture(["nvidia-smi", "--query-gpu=name", "--format=csv,noheader"]);
|
|
3280
|
-
if (result.exitCode === 0 && result.stdout) {
|
|
3281
|
-
const gpus = result.stdout.trim().split(`
|
|
3282
|
-
`).filter(Boolean);
|
|
3283
|
-
return {
|
|
3284
|
-
name: "NVIDIA GPU",
|
|
3285
|
-
status: "ok",
|
|
3286
|
-
message: `${gpus.length} GPU(s) detected`,
|
|
3287
|
-
version: gpus[0]
|
|
3288
|
-
};
|
|
3289
|
-
}
|
|
3290
|
-
return {
|
|
3291
|
-
name: "NVIDIA GPU",
|
|
3292
|
-
status: "warning",
|
|
3293
|
-
message: "nvidia-smi failed",
|
|
3294
|
-
hint: "GPU may not be available. Check NVIDIA drivers."
|
|
3295
|
-
};
|
|
3296
|
-
}
|
|
3297
|
-
async function checkNvidiaContainerToolkit() {
|
|
3298
|
-
const result = await spawnCapture(["docker", "info", "--format", "{{.Runtimes}}"]);
|
|
3299
|
-
if (result.exitCode === 0 && result.stdout?.includes("nvidia")) {
|
|
3300
|
-
return {
|
|
3301
|
-
name: "NVIDIA Container Toolkit",
|
|
3302
|
-
status: "ok",
|
|
3303
|
-
message: "nvidia runtime available"
|
|
3304
|
-
};
|
|
3305
|
-
}
|
|
3306
|
-
return {
|
|
3307
|
-
name: "NVIDIA Container Toolkit",
|
|
3308
|
-
status: "warning",
|
|
3309
|
-
message: "nvidia runtime not found",
|
|
3310
|
-
hint: "Install: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html"
|
|
3311
|
-
};
|
|
3312
|
-
}
|
|
3313
|
-
async function checkClaude() {
|
|
3314
|
-
const exists = await commandExists("claude");
|
|
3315
|
-
if (!exists) {
|
|
3316
|
-
return {
|
|
3317
|
-
name: "Claude Code",
|
|
3318
|
-
status: "error",
|
|
3319
|
-
message: "Not installed",
|
|
3320
|
-
hint: "Install: npm install -g @anthropic-ai/claude-code"
|
|
3321
|
-
};
|
|
3322
|
-
}
|
|
3323
|
-
const version = await getCommandVersion("claude");
|
|
3324
|
-
return {
|
|
3325
|
-
name: "Claude Code",
|
|
3326
|
-
status: "ok",
|
|
3327
|
-
message: "Installed",
|
|
3328
|
-
version: version ?? undefined
|
|
3329
|
-
};
|
|
3330
|
-
}
|
|
3331
|
-
async function checkOllamaConnection() {
|
|
3332
|
-
const ollamaUrl = getOllamaUrl();
|
|
3333
|
-
try {
|
|
3334
|
-
const response = await fetch(`${ollamaUrl}/api/tags`, {
|
|
3335
|
-
signal: AbortSignal.timeout(5000)
|
|
3336
|
-
});
|
|
3337
|
-
if (response.ok) {
|
|
3338
|
-
const data = await response.json();
|
|
3339
|
-
const modelCount = data.models?.length ?? 0;
|
|
3340
|
-
return {
|
|
3341
|
-
name: "Ollama API",
|
|
3342
|
-
status: "ok",
|
|
3343
|
-
message: `Connected (${modelCount} model${modelCount === 1 ? "" : "s"})`,
|
|
3344
|
-
version: ollamaUrl
|
|
3345
|
-
};
|
|
3346
|
-
}
|
|
3347
|
-
return {
|
|
3348
|
-
name: "Ollama API",
|
|
3349
|
-
status: "warning",
|
|
3350
|
-
message: `HTTP ${response.status}`,
|
|
3351
|
-
hint: "Ollama may not be running. Try: loclaude docker-up"
|
|
3352
|
-
};
|
|
3353
|
-
} catch (error) {
|
|
3354
|
-
return {
|
|
3355
|
-
name: "Ollama API",
|
|
3356
|
-
status: "warning",
|
|
3357
|
-
message: "Not reachable",
|
|
3358
|
-
hint: `Cannot connect to ${ollamaUrl}. Start Ollama: loclaude docker-up`
|
|
3359
|
-
};
|
|
3360
|
-
}
|
|
3361
|
-
}
|
|
3362
|
-
function formatCheck(check) {
|
|
3363
|
-
const icons = {
|
|
3364
|
-
ok: "✓",
|
|
3365
|
-
warning: "⚠",
|
|
3366
|
-
error: "✗"
|
|
3367
|
-
};
|
|
3368
|
-
const colors = {
|
|
3369
|
-
ok: "\x1B[32m",
|
|
3370
|
-
warning: "\x1B[33m",
|
|
3371
|
-
error: "\x1B[31m"
|
|
3372
|
-
};
|
|
3373
|
-
const reset = "\x1B[0m";
|
|
3374
|
-
const icon = icons[check.status];
|
|
3375
|
-
const color = colors[check.status];
|
|
3376
|
-
let line = `${color}${icon}${reset} ${check.name}: ${check.message}`;
|
|
3377
|
-
if (check.version) {
|
|
3378
|
-
line += ` (${check.version})`;
|
|
3379
|
-
}
|
|
3380
|
-
if (check.hint) {
|
|
3381
|
-
line += `
|
|
3382
|
-
${check.hint}`;
|
|
3383
|
-
}
|
|
3384
|
-
return line;
|
|
3385
|
-
}
|
|
3386
|
-
async function doctor() {
|
|
3387
|
-
console.log(`Checking system requirements...
|
|
3388
|
-
`);
|
|
3389
|
-
const checks = await Promise.all([
|
|
3390
|
-
checkDocker(),
|
|
3391
|
-
checkDockerCompose(),
|
|
3392
|
-
checkNvidiaSmi(),
|
|
3393
|
-
checkNvidiaContainerToolkit(),
|
|
3394
|
-
checkClaude(),
|
|
3395
|
-
checkOllamaConnection()
|
|
3396
|
-
]);
|
|
3397
|
-
for (const check of checks) {
|
|
3398
|
-
console.log(formatCheck(check));
|
|
3399
|
-
}
|
|
3400
|
-
const errors2 = checks.filter((c) => c.status === "error");
|
|
3401
|
-
const warnings = checks.filter((c) => c.status === "warning");
|
|
3880
|
+
const recommendedModel = gpuMode ? "qwen3-coder:30b" : "qwen2.5-coder:7b";
|
|
3402
3881
|
console.log("");
|
|
3403
|
-
|
|
3404
|
-
|
|
3405
|
-
|
|
3406
|
-
|
|
3407
|
-
|
|
3408
|
-
|
|
3409
|
-
|
|
3882
|
+
console.log(green("Project initialized!"));
|
|
3883
|
+
console.log("");
|
|
3884
|
+
console.log(cyan("Next steps:"));
|
|
3885
|
+
console.log(` 1. Start containers: ${cmd("mise run up")}`);
|
|
3886
|
+
console.log(` 2. Pull a model: ${cmd(`mise run pull ${recommendedModel}`)}`);
|
|
3887
|
+
console.log(` 3. Run Claude: ${cmd("mise run claude")}`);
|
|
3888
|
+
console.log("");
|
|
3889
|
+
console.log(cyan("Service URLs:"));
|
|
3890
|
+
console.log(` Ollama API: ${url("http://localhost:11434")}`);
|
|
3891
|
+
if (!options.noWebui) {
|
|
3892
|
+
console.log(` Open WebUI: ${url("http://localhost:3000")}`);
|
|
3410
3893
|
}
|
|
3411
3894
|
}
|
|
3412
3895
|
// lib/commands/config.ts
|
|
3413
|
-
import { inspect } from "util";
|
|
3414
3896
|
async function configShow() {
|
|
3415
3897
|
const config = loadConfig();
|
|
3416
3898
|
const activePath = getActiveConfigPath();
|
|
3417
|
-
|
|
3418
|
-
|
|
3419
|
-
console.log(
|
|
3420
|
-
|
|
3421
|
-
|
|
3899
|
+
header("Current Configuration");
|
|
3900
|
+
console.log("");
|
|
3901
|
+
console.log(cyan("Ollama:"));
|
|
3902
|
+
labelValue(" URL", config.ollama.url);
|
|
3903
|
+
labelValue(" Default Model", magenta(config.ollama.defaultModel));
|
|
3904
|
+
console.log("");
|
|
3905
|
+
console.log(cyan("Docker:"));
|
|
3906
|
+
labelValue(" Compose File", config.docker.composeFile);
|
|
3907
|
+
labelValue(" GPU Mode", config.docker.gpu ? green("enabled") : dim("disabled"));
|
|
3908
|
+
console.log("");
|
|
3909
|
+
console.log(cyan("Claude:"));
|
|
3910
|
+
if (config.claude.extraArgs.length > 0) {
|
|
3911
|
+
labelValue(" Extra Args", config.claude.extraArgs.join(" "));
|
|
3912
|
+
} else {
|
|
3913
|
+
labelValue(" Extra Args", dim("none"));
|
|
3914
|
+
}
|
|
3915
|
+
console.log("");
|
|
3916
|
+
console.log(dim("─".repeat(40)));
|
|
3422
3917
|
if (activePath) {
|
|
3423
|
-
console.log(`Loaded from: ${activePath}`);
|
|
3918
|
+
console.log(dim(`Loaded from: ${file(activePath)}`));
|
|
3424
3919
|
} else {
|
|
3425
|
-
console.log("Using default configuration (no config file found)");
|
|
3920
|
+
console.log(dim("Using default configuration (no config file found)"));
|
|
3426
3921
|
}
|
|
3427
3922
|
}
|
|
3428
3923
|
async function configPaths() {
|
|
3429
3924
|
const paths = getConfigSearchPaths();
|
|
3430
3925
|
const activePath = getActiveConfigPath();
|
|
3431
|
-
|
|
3432
|
-
|
|
3433
|
-
|
|
3434
|
-
|
|
3435
|
-
|
|
3436
|
-
|
|
3926
|
+
header("Config Search Paths");
|
|
3927
|
+
console.log("");
|
|
3928
|
+
console.log(dim("Files are checked in priority order (first found wins):"));
|
|
3929
|
+
console.log("");
|
|
3930
|
+
for (let i = 0;i < paths.length; i++) {
|
|
3931
|
+
const configPath = paths[i];
|
|
3932
|
+
if (!configPath)
|
|
3933
|
+
continue;
|
|
3934
|
+
const isActive = configPath === activePath;
|
|
3935
|
+
const num = `${i + 1}.`;
|
|
3936
|
+
if (isActive) {
|
|
3937
|
+
console.log(` ${num} ${file(configPath)} ${green("← active")}`);
|
|
3938
|
+
} else {
|
|
3939
|
+
console.log(` ${num} ${dim(configPath)}`);
|
|
3940
|
+
}
|
|
3437
3941
|
}
|
|
3942
|
+
console.log("");
|
|
3438
3943
|
if (!activePath) {
|
|
3439
|
-
console.log(
|
|
3440
|
-
|
|
3441
|
-
console.log("Run 'loclaude init' to create a project config.");
|
|
3944
|
+
console.log(info("No config file found. Using defaults."));
|
|
3945
|
+
console.log(dim(` Run ${cmd("loclaude init")} to create a project config.`));
|
|
3442
3946
|
}
|
|
3443
3947
|
}
|
|
3444
3948
|
// lib/commands/docker.ts
|
|
@@ -3477,42 +3981,44 @@ function getComposeCommand() {
|
|
|
3477
3981
|
async function runCompose(args, options = {}) {
|
|
3478
3982
|
const composeFile = options.file ?? findComposeFile();
|
|
3479
3983
|
if (!composeFile) {
|
|
3480
|
-
console.error("
|
|
3481
|
-
console.
|
|
3984
|
+
console.log(error("No docker-compose.yml found"));
|
|
3985
|
+
console.log(dim(` Run ${cmd("loclaude init")} to create one, or specify --file`));
|
|
3482
3986
|
return 1;
|
|
3483
3987
|
}
|
|
3484
|
-
const
|
|
3485
|
-
return spawn(
|
|
3988
|
+
const cmd_args = [...getComposeCommand(), "-f", composeFile, ...args];
|
|
3989
|
+
return spawn(cmd_args);
|
|
3486
3990
|
}
|
|
3487
3991
|
async function dockerUp(options = {}) {
|
|
3488
3992
|
const args = ["up"];
|
|
3489
3993
|
if (options.detach !== false) {
|
|
3490
3994
|
args.push("-d");
|
|
3491
3995
|
}
|
|
3492
|
-
console.log(
|
|
3493
|
-
|
|
3996
|
+
console.log(info("Starting containers..."));
|
|
3997
|
+
console.log("");
|
|
3494
3998
|
const exitCode = await runCompose(args, options);
|
|
3495
3999
|
if (exitCode === 0) {
|
|
3496
|
-
console.log(
|
|
3497
|
-
|
|
3498
|
-
console.log(
|
|
3499
|
-
Service URLs
|
|
3500
|
-
console.log(
|
|
3501
|
-
console.log(
|
|
4000
|
+
console.log("");
|
|
4001
|
+
console.log(success("Containers started"));
|
|
4002
|
+
console.log("");
|
|
4003
|
+
console.log(cyan("Service URLs:"));
|
|
4004
|
+
console.log(` Ollama API: ${url("http://localhost:11434")}`);
|
|
4005
|
+
console.log(` Open WebUI: ${url("http://localhost:3000")}`);
|
|
3502
4006
|
}
|
|
3503
4007
|
process.exit(exitCode);
|
|
3504
4008
|
}
|
|
3505
4009
|
async function dockerDown(options = {}) {
|
|
3506
|
-
console.log(
|
|
3507
|
-
|
|
4010
|
+
console.log(info("Stopping containers..."));
|
|
4011
|
+
console.log("");
|
|
3508
4012
|
const exitCode = await runCompose(["down"], options);
|
|
3509
4013
|
if (exitCode === 0) {
|
|
3510
|
-
console.log(
|
|
3511
|
-
|
|
4014
|
+
console.log("");
|
|
4015
|
+
console.log(success("Containers stopped"));
|
|
3512
4016
|
}
|
|
3513
4017
|
process.exit(exitCode);
|
|
3514
4018
|
}
|
|
3515
4019
|
async function dockerStatus(options = {}) {
|
|
4020
|
+
console.log(info("Container status:"));
|
|
4021
|
+
console.log("");
|
|
3516
4022
|
const exitCode = await runCompose(["ps"], options);
|
|
3517
4023
|
process.exit(exitCode);
|
|
3518
4024
|
}
|
|
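`runCompose()` in the hunk above prefixes every invocation with `getComposeCommand()`, whose body is not shown in this diff. Based on the v2/v1 probing in `checkDockerCompose()` earlier in the file, a plausible sketch of that selection and of the resulting argv:

```ts
// Sketch only: choose the Compose binary the same way checkDockerCompose() probes it
// (Docker Compose v2 plugin first, legacy docker-compose as a fallback).
function composeCommandSketch(hasComposeV2: boolean): string[] {
  return hasComposeV2 ? ["docker", "compose"] : ["docker-compose"];
}

// runCompose() then appends "-f <composeFile>" plus the subcommand, e.g. for dockerUp:
const argv = [...composeCommandSketch(true), "-f", "./docker-compose.yml", "up", "-d"];
console.log(argv.join(" ")); // docker compose -f ./docker-compose.yml up -d
```
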
@@ -3523,17 +4029,21 @@ async function dockerLogs(options = {}) {
|
|
|
3523
4029
|
}
|
|
3524
4030
|
if (options.service) {
|
|
3525
4031
|
args.push(options.service);
|
|
4032
|
+
console.log(info(`Logs for ${cyan(options.service)}:`));
|
|
4033
|
+
} else {
|
|
4034
|
+
console.log(info("Container logs:"));
|
|
3526
4035
|
}
|
|
4036
|
+
console.log("");
|
|
3527
4037
|
const exitCode = await runCompose(args, options);
|
|
3528
4038
|
process.exit(exitCode);
|
|
3529
4039
|
}
|
|
3530
4040
|
async function dockerRestart(options = {}) {
|
|
3531
|
-
console.log(
|
|
3532
|
-
|
|
4041
|
+
console.log(info("Restarting containers..."));
|
|
4042
|
+
console.log("");
|
|
3533
4043
|
const exitCode = await runCompose(["restart"], options);
|
|
3534
4044
|
if (exitCode === 0) {
|
|
3535
|
-
console.log(
|
|
3536
|
-
|
|
4045
|
+
console.log("");
|
|
4046
|
+
console.log(success("Containers restarted"));
|
|
3537
4047
|
}
|
|
3538
4048
|
process.exit(exitCode);
|
|
3539
4049
|
}
|
|
@@ -3550,11 +4060,11 @@ async function fetchModels() {
|
|
|
3550
4060
|
}
|
|
3551
4061
|
const data = await response.json();
|
|
3552
4062
|
return data.models ?? [];
|
|
3553
|
-
} catch (
|
|
3554
|
-
if (
|
|
4063
|
+
} catch (error3) {
|
|
4064
|
+
if (error3 instanceof Error && error3.name === "TimeoutError") {
|
|
3555
4065
|
throw new Error(`Connection to Ollama timed out (${ollamaUrl})`);
|
|
3556
4066
|
}
|
|
3557
|
-
throw
|
|
4067
|
+
throw error3;
|
|
3558
4068
|
}
|
|
3559
4069
|
}
|
|
3560
4070
|
async function isOllamaInDocker() {
|
|
@@ -3569,83 +4079,99 @@ async function runOllamaCommand(args) {
|
|
|
3569
4079
|
return spawn(["ollama", ...args]);
|
|
3570
4080
|
}
|
|
3571
4081
|
}
|
|
4082
|
+
function formatSize(sizeBytes) {
|
|
4083
|
+
const sizeStr = import_bytes2.default(sizeBytes) ?? "?";
|
|
4084
|
+
const sizeNum = sizeBytes / (1024 * 1024 * 1024);
|
|
4085
|
+
if (sizeNum > 20) {
|
|
4086
|
+
return yellow(sizeStr);
|
|
4087
|
+
} else if (sizeNum > 10) {
|
|
4088
|
+
return cyan(sizeStr);
|
|
4089
|
+
}
|
|
4090
|
+
return dim(sizeStr);
|
|
4091
|
+
}
|
|
3572
4092
|
async function modelsList() {
|
|
3573
4093
|
try {
|
|
3574
4094
|
const models = await fetchModels();
|
|
3575
4095
|
if (models.length === 0) {
|
|
3576
|
-
|
|
3577
|
-
console.log(
|
|
3578
|
-
|
|
3579
|
-
console.log("
|
|
4096
|
+
header("Installed Models");
|
|
4097
|
+
console.log("");
|
|
4098
|
+
console.log(info("No models installed."));
|
|
4099
|
+
console.log("");
|
|
4100
|
+
console.log(`Pull a model with: ${cmd("loclaude models-pull <model-name>")}`);
|
|
4101
|
+
console.log(`Example: ${cmd("loclaude models-pull llama3.2")}`);
|
|
3580
4102
|
return;
|
|
3581
4103
|
}
|
|
3582
|
-
|
|
3583
|
-
|
|
4104
|
+
header("Installed Models");
|
|
4105
|
+
console.log("");
|
|
3584
4106
|
const nameWidth = Math.max(...models.map((m) => m.name.length), "NAME".length);
|
|
3585
4107
|
const sizeWidth = 10;
|
|
3586
|
-
|
|
3587
|
-
|
|
4108
|
+
const modifiedWidth = 20;
|
|
4109
|
+
tableHeader(["NAME", "SIZE", "MODIFIED"], [nameWidth, sizeWidth, modifiedWidth]);
|
|
3588
4110
|
for (const model of models) {
|
|
3589
|
-
const name = model.name.padEnd(nameWidth);
|
|
3590
|
-
const size = (
|
|
3591
|
-
const modified = formatRelativeTime(model.modified_at);
|
|
4111
|
+
const name = magenta(model.name.padEnd(nameWidth));
|
|
4112
|
+
const size = formatSize(model.size).padStart(sizeWidth);
|
|
4113
|
+
const modified = dim(formatRelativeTime(model.modified_at));
|
|
3592
4114
|
console.log(`${name} ${size} ${modified}`);
|
|
3593
4115
|
}
|
|
3594
|
-
console.log(
|
|
3595
|
-
|
|
3596
|
-
} catch (
|
|
4116
|
+
console.log("");
|
|
4117
|
+
console.log(dim(`${models.length} model(s) installed`));
|
|
4118
|
+
} catch (err) {
|
|
3597
4119
|
const ollamaUrl = getOllamaUrl();
|
|
3598
|
-
console.error(
|
|
3599
|
-
console.
|
|
4120
|
+
console.log(error(`Could not connect to Ollama at ${ollamaUrl}`));
|
|
4121
|
+
console.log(dim(` Make sure Ollama is running: ${cmd("loclaude docker-up")}`));
|
|
3600
4122
|
process.exit(1);
|
|
3601
4123
|
}
|
|
3602
4124
|
}
|
|
3603
4125
|
async function modelsPull(modelName) {
|
|
3604
4126
|
if (!modelName) {
|
|
3605
|
-
console.error("
|
|
3606
|
-
console.
|
|
3607
|
-
console.
|
|
4127
|
+
console.log(error("Model name required"));
|
|
4128
|
+
console.log(dim(`Usage: ${cmd("loclaude models-pull <model-name>")}`));
|
|
4129
|
+
console.log(dim(`Example: ${cmd("loclaude models-pull llama3.2")}`));
|
|
3608
4130
|
process.exit(1);
|
|
3609
4131
|
}
|
|
3610
|
-
console.log(`Pulling model: ${modelName}
|
|
3611
|
-
|
|
4132
|
+
console.log(info(`Pulling model: ${magenta(modelName)}`));
|
|
4133
|
+
console.log("");
|
|
3612
4134
|
const exitCode = await runOllamaCommand(["pull", modelName]);
|
|
3613
4135
|
if (exitCode === 0) {
|
|
3614
|
-
console.log(
|
|
3615
|
-
|
|
4136
|
+
console.log("");
|
|
4137
|
+
console.log(success(`Model '${magenta(modelName)}' pulled successfully`));
|
|
3616
4138
|
}
|
|
3617
4139
|
process.exit(exitCode);
|
|
3618
4140
|
}
|
|
3619
4141
|
async function modelsRm(modelName) {
|
|
3620
4142
|
if (!modelName) {
|
|
3621
|
-
console.error("
|
|
3622
|
-
console.
|
|
4143
|
+
console.log(error("Model name required"));
|
|
4144
|
+
console.log(dim(`Usage: ${cmd("loclaude models-rm <model-name>")}`));
|
|
3623
4145
|
process.exit(1);
|
|
3624
4146
|
}
|
|
3625
|
-
console.log(`Removing model: ${modelName}
|
|
3626
|
-
|
|
4147
|
+
console.log(info(`Removing model: ${magenta(modelName)}`));
|
|
4148
|
+
console.log("");
|
|
3627
4149
|
const exitCode = await runOllamaCommand(["rm", modelName]);
|
|
3628
4150
|
if (exitCode === 0) {
|
|
3629
|
-
console.log(
|
|
3630
|
-
|
|
4151
|
+
console.log("");
|
|
4152
|
+
console.log(success(`Model '${magenta(modelName)}' removed`));
|
|
3631
4153
|
}
|
|
3632
4154
|
process.exit(exitCode);
|
|
3633
4155
|
}
|
|
3634
4156
|
async function modelsShow(modelName) {
|
|
3635
4157
|
if (!modelName) {
|
|
3636
|
-
console.error("
|
|
3637
|
-
console.
|
|
4158
|
+
console.log(error("Model name required"));
|
|
4159
|
+
console.log(dim(`Usage: ${cmd("loclaude models-show <model-name>")}`));
|
|
3638
4160
|
process.exit(1);
|
|
3639
4161
|
}
|
|
4162
|
+
console.log(info(`Model details: ${magenta(modelName)}`));
|
|
4163
|
+
console.log("");
|
|
3640
4164
|
const exitCode = await runOllamaCommand(["show", modelName]);
|
|
3641
4165
|
process.exit(exitCode);
|
|
3642
4166
|
}
|
|
3643
4167
|
async function modelsRun(modelName) {
|
|
3644
4168
|
if (!modelName) {
|
|
3645
|
-
console.error("
|
|
3646
|
-
console.
|
|
4169
|
+
console.log(error("Model name required"));
|
|
4170
|
+
console.log(dim(`Usage: ${cmd("loclaude models-run <model-name>")}`));
|
|
3647
4171
|
process.exit(1);
|
|
3648
4172
|
}
|
|
4173
|
+
console.log(info(`Running model: ${magenta(modelName)}`));
|
|
4174
|
+
console.log("");
|
|
3649
4175
|
const exitCode = await runOllamaCommand(["run", modelName]);
|
|
3650
4176
|
process.exit(exitCode);
|
|
3651
4177
|
}
|
|
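`modelsList()` above renders `name`, `size` and `modified_at` for each entry returned by `fetchModels()`. For reference, the response shape it relies on (Ollama's `GET /api/tags` payload) can be typed roughly as follows; the interface and function names are illustrative only.

```ts
// Approximate typing of the GET /api/tags payload consumed by fetchModels()/modelsList().
interface OllamaModelTag {
  name: string;        // e.g. "qwen2.5-coder:7b", shown in magenta in the table
  size: number;        // bytes; rendered through the bytes package and formatSize()
  modified_at: string; // timestamp; rendered through formatRelativeTime()
}

interface OllamaTagsResponse {
  models?: OllamaModelTag[];
}

async function listInstalledModels(ollamaUrl: string): Promise<OllamaModelTag[]> {
  const response = await fetch(`${ollamaUrl}/api/tags`, { signal: AbortSignal.timeout(5000) });
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
  const data = (await response.json()) as OllamaTagsResponse;
  return data.models ?? [];
}
```
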
@@ -3685,7 +4211,7 @@ cli.command("run [...args]", "Run Claude Code with local Ollama", {
|
|
|
3685
4211
|
}
|
|
3686
4212
|
await launchClaude(model, args);
|
|
3687
4213
|
});
|
|
3688
|
-
cli.command("init", "Initialize a new loclaude project").option("--force", "Overwrite existing files").option("--no-webui", "Skip Open WebUI in docker-compose").action(async (options) => {
|
|
4214
|
+
cli.command("init", "Initialize a new loclaude project").option("--force", "Overwrite existing files").option("--no-webui", "Skip Open WebUI in docker-compose").option("--gpu", "Force GPU mode (NVIDIA)").option("--no-gpu", "Force CPU-only mode").action(async (options) => {
|
|
3689
4215
|
await init(options);
|
|
3690
4216
|
});
|
|
3691
4217
|
cli.command("doctor", "Check system requirements and health").action(async () => {
|
|
@@ -3741,5 +4267,5 @@ export {
|
|
|
3741
4267
|
cli
|
|
3742
4268
|
};
|
|
3743
4269
|
|
|
3744
|
-
//# debugId=
|
|
4270
|
+
//# debugId=D27E5C5FE65A8CD164756E2164756E21
|
|
3745
4271
|
//# sourceMappingURL=index.js.map
|